gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26   @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
60
61 /* The following constants represent a bit based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
65 COMPCODE_FALSE = 0,
66 COMPCODE_LT = 1,
67 COMPCODE_EQ = 2,
68 COMPCODE_LE = 3,
69 COMPCODE_GT = 4,
70 COMPCODE_LTGT = 5,
71 COMPCODE_GE = 6,
72 COMPCODE_ORD = 7,
73 COMPCODE_UNORD = 8,
74 COMPCODE_UNLT = 9,
75 COMPCODE_UNEQ = 10,
76 COMPCODE_UNLE = 11,
77 COMPCODE_UNGT = 12,
78 COMPCODE_NE = 13,
79 COMPCODE_UNGE = 14,
80 COMPCODE_TRUE = 15
81 };
82
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
109 tree *, tree *);
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 tree);
118 static tree fold_range_test (tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree constant_boolean_node (int, tree);
126 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
127 tree, int);
128 static bool fold_real_zero_addition_p (tree, tree, int);
129 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (tree, tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_relational_hi_lo (enum tree_code *, const tree,
138 tree *, tree *);
139
140 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
141 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
142 and SUM1. Then this yields nonzero if overflow occurred during the
143 addition.
144
145 Overflow occurs if A and B have the same sign, but A and SUM differ in
146 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
147 sign. */
148 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
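/* Worked example (illustrative, assuming 32-bit HOST_WIDE_INTs):
   a = 0x7fffffff, b = 1 gives sum = 0x80000000.  Then
   ~(a ^ b) = 0x80000001 and a ^ sum = 0xffffffff; their AND is
   0x80000001, which is negative, so the macro reports overflow.
   With b = -1 instead, sum = 0x7ffffffe, a ^ b has the sign bit
   set, ~(a ^ b) clears it, and no overflow is reported.  */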
149 \f
150 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
151 We do that by representing the two-word integer in 4 words, with only
152 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
153 number. The value of the word is LOWPART + HIGHPART * BASE. */
154
155 #define LOWPART(x) \
156 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
157 #define HIGHPART(x) \
158 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
159 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
160
161 /* Unpack a two-word integer into 4 words.
162 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
163 WORDS points to the array of HOST_WIDE_INTs. */
164
165 static void
166 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
167 {
168 words[0] = LOWPART (low);
169 words[1] = HIGHPART (low);
170 words[2] = LOWPART (hi);
171 words[3] = HIGHPART (hi);
172 }
173
174 /* Pack an array of 4 words into a two-word integer.
175 WORDS points to the array of words.
176 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
177
178 static void
179 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
180 HOST_WIDE_INT *hi)
181 {
182 *low = words[0] + words[1] * BASE;
183 *hi = words[2] + words[3] * BASE;
184 }
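/* Round-trip example (illustrative, with 32-bit HOST_WIDE_INTs, so
   BASE = 0x10000): encode (words, 0x12345678, 0xabcd) produces
   words = { 0x5678, 0x1234, 0xabcd, 0x0000 }, and decode recombines
   each pair as LOWPART + HIGHPART * BASE to recover the original
   LOW and HI pieces.  */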
185 \f
186 /* Make the integer constant T valid for its type by setting to 0 or 1 all
187 the bits in the constant that don't belong in the type.
188
189 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
190 nonzero, a signed overflow has already occurred in calculating T, so
191 propagate it. */
192
193 int
194 force_fit_type (tree t, int overflow)
195 {
196 unsigned HOST_WIDE_INT low;
197 HOST_WIDE_INT high;
198 unsigned int prec;
199
200 if (TREE_CODE (t) == REAL_CST)
201 {
202 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
203 Consider doing it via real_convert now. */
204 return overflow;
205 }
206
207 else if (TREE_CODE (t) != INTEGER_CST)
208 return overflow;
209
210 low = TREE_INT_CST_LOW (t);
211 high = TREE_INT_CST_HIGH (t);
212
213 if (POINTER_TYPE_P (TREE_TYPE (t))
214 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
215 prec = POINTER_SIZE;
216 else
217 prec = TYPE_PRECISION (TREE_TYPE (t));
218
219 /* First clear all bits that are beyond the type's precision. */
220
221 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
222 ;
223 else if (prec > HOST_BITS_PER_WIDE_INT)
224 TREE_INT_CST_HIGH (t)
225 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 else
227 {
228 TREE_INT_CST_HIGH (t) = 0;
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
231 }
232
233 /* Unsigned types do not suffer sign extension or overflow unless they
234 are a sizetype. */
235 if (TYPE_UNSIGNED (TREE_TYPE (t))
236 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
237 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
238 return overflow;
239
240 /* If the value's sign bit is set, extend the sign. */
241 if (prec != 2 * HOST_BITS_PER_WIDE_INT
242 && (prec > HOST_BITS_PER_WIDE_INT
243 ? 0 != (TREE_INT_CST_HIGH (t)
244 & ((HOST_WIDE_INT) 1
245 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
246 : 0 != (TREE_INT_CST_LOW (t)
247 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
248 {
249 /* Value is negative:
250 set to 1 all the bits that are outside this type's precision. */
251 if (prec > HOST_BITS_PER_WIDE_INT)
252 TREE_INT_CST_HIGH (t)
253 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
254 else
255 {
256 TREE_INT_CST_HIGH (t) = -1;
257 if (prec < HOST_BITS_PER_WIDE_INT)
258 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
259 }
260 }
261
262 /* Return nonzero if signed overflow occurred. */
263 return
264 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
265 != 0);
266 }
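/* A minimal usage sketch (illustrative only, hence not compiled):
   truncating a constant that is too wide for a narrow unsigned
   type.  */
#if 0
{
  tree t = build_int_2 (0x1ff, 0);		/* Nine bits of value.  */
  TREE_TYPE (t) = unsigned_char_type_node;	/* Eight bits of precision.  */
  force_fit_type (t, 0);
  /* TREE_INT_CST_LOW (t) is now 0xff, and the return value is 0:
     unsigned non-sizetype constants do not report signed overflow.  */
}
#endif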
267 \f
268 /* Add two doubleword integers with doubleword result.
269 Each argument is given as two `HOST_WIDE_INT' pieces.
270 One argument is L1 and H1; the other, L2 and H2.
271 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
272
273 int
274 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
275 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
276 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
277 {
278 unsigned HOST_WIDE_INT l;
279 HOST_WIDE_INT h;
280
281 l = l1 + l2;
282 h = h1 + h2 + (l < l1);
283
284 *lv = l;
285 *hv = h;
286 return OVERFLOW_SUM_SIGN (h1, h2, h);
287 }
288
289 /* Negate a doubleword integer with doubleword result.
290 Return nonzero if the operation overflows, assuming it's signed.
291 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
293
294 int
295 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
297 {
298 if (l1 == 0)
299 {
300 *lv = 0;
301 *hv = - h1;
302 return (*hv & h1) < 0;
303 }
304 else
305 {
306 *lv = -l1;
307 *hv = ~h1;
308 return 0;
309 }
310 }
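/* The two branches above implement the doubleword two's complement
   identity -x == ~x + 1.  When the low word is zero, the +1 carries
   into the high word, so *HV = -H1 (and negating the most negative
   value overflows); otherwise the carry is absorbed by the low word
   and the high word is simply inverted.  For example, with 32-bit
   words, negating (l1, h1) = (0x00000001, 0x00000000) yields
   (0xffffffff, 0xffffffff), i.e. -1.  */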
311 \f
312 /* Multiply two doubleword integers with doubleword result.
313 Return nonzero if the operation overflows, assuming it's signed.
314 Each argument is given as two `HOST_WIDE_INT' pieces.
315 One argument is L1 and H1; the other, L2 and H2.
316 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
317
318 int
319 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
320 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
322 {
323 HOST_WIDE_INT arg1[4];
324 HOST_WIDE_INT arg2[4];
325 HOST_WIDE_INT prod[4 * 2];
326 unsigned HOST_WIDE_INT carry;
327 int i, j, k;
328 unsigned HOST_WIDE_INT toplow, neglow;
329 HOST_WIDE_INT tophigh, neghigh;
330
331 encode (arg1, l1, h1);
332 encode (arg2, l2, h2);
333
334 memset (prod, 0, sizeof prod);
335
336 for (i = 0; i < 4; i++)
337 {
338 carry = 0;
339 for (j = 0; j < 4; j++)
340 {
341 k = i + j;
342 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
343 carry += arg1[i] * arg2[j];
344 	  /* Since prod[p] <= 0xFFFF, this sum <= 0xFFFFFFFF.  */
345 carry += prod[k];
346 prod[k] = LOWPART (carry);
347 carry = HIGHPART (carry);
348 }
349 prod[i + 4] = carry;
350 }
351
352 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
353
354 /* Check for overflow by calculating the top half of the answer in full;
355 it should agree with the low half's sign bit. */
356 decode (prod + 4, &toplow, &tophigh);
357 if (h1 < 0)
358 {
359 neg_double (l2, h2, &neglow, &neghigh);
360 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
361 }
362 if (h2 < 0)
363 {
364 neg_double (l1, h1, &neglow, &neghigh);
365 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
366 }
367 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
368 }
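/* The overflow test above exploits the fact that the full 4-word
   product fits in a doubleword exactly when its top half is a pure
   sign extension of the low half: all zero bits for a nonnegative
   result, all one bits for a negative one.  The neg_double/add_double
   fixups convert the unsigned top half into the signed one: treating
   a negative H1 as unsigned overstates the product by 2^N * (L2, H2),
   so that amount is subtracted, and likewise for a negative H2.  */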
369 \f
370 /* Shift the doubleword integer in L1, H1 left by COUNT places
371 keeping only PREC bits of result.
372 Shift right if COUNT is negative.
373 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
374 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
375
376 void
377 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
378 HOST_WIDE_INT count, unsigned int prec,
379 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
380 {
381 unsigned HOST_WIDE_INT signmask;
382
383 if (count < 0)
384 {
385 rshift_double (l1, h1, -count, prec, lv, hv, arith);
386 return;
387 }
388
389 if (SHIFT_COUNT_TRUNCATED)
390 count %= prec;
391
392 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
393 {
394 /* Shifting by the host word size is undefined according to the
395 ANSI standard, so we must handle this as a special case. */
396 *hv = 0;
397 *lv = 0;
398 }
399 else if (count >= HOST_BITS_PER_WIDE_INT)
400 {
401 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
402 *lv = 0;
403 }
404 else
405 {
406 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
407 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
408 *lv = l1 << count;
409 }
410
411 /* Sign extend all bits that are beyond the precision. */
412
413 signmask = -((prec > HOST_BITS_PER_WIDE_INT
414 ? ((unsigned HOST_WIDE_INT) *hv
415 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
416 : (*lv >> (prec - 1))) & 1);
417
418 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
419 ;
420 else if (prec >= HOST_BITS_PER_WIDE_INT)
421 {
422 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
423 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
424 }
425 else
426 {
427 *hv = signmask;
428 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
429 *lv |= signmask << prec;
430 }
431 }
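/* Example (illustrative, 32-bit HOST_WIDE_INTs, PREC = 64):
   lshift_double (0x80000000, 0, 4, 64, &lv, &hv, 1) takes the
   COUNT < HOST_BITS_PER_WIDE_INT branch and yields lv = 0, hv = 0x8:
   the bit shifted out of the low word reaches the high word via
   "l1 >> (bits - count - 1) >> 1", which is split into two shifts so
   that COUNT = 0 never demands an (undefined) shift by the full word
   width.  */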
432
433 /* Shift the doubleword integer in L1, H1 right by COUNT places
434 keeping only PREC bits of result. COUNT must be positive.
435 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
436 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
437
438 void
439 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
440 HOST_WIDE_INT count, unsigned int prec,
441 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
442 int arith)
443 {
444 unsigned HOST_WIDE_INT signmask;
445
446 signmask = (arith
447 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
448 : 0);
449
450 if (SHIFT_COUNT_TRUNCATED)
451 count %= prec;
452
453 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 {
455 /* Shifting by the host word size is undefined according to the
456 ANSI standard, so we must handle this as a special case. */
457 *hv = 0;
458 *lv = 0;
459 }
460 else if (count >= HOST_BITS_PER_WIDE_INT)
461 {
462 *hv = 0;
463 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
464 }
465 else
466 {
467 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
468 *lv = ((l1 >> count)
469 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
470 }
471
472 /* Zero / sign extend all bits that are beyond the precision. */
473
474 if (count >= (HOST_WIDE_INT)prec)
475 {
476 *hv = signmask;
477 *lv = signmask;
478 }
479 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
480 ;
481 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
482 {
483 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
484 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
485 }
486 else
487 {
488 *hv = signmask;
489 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
490 *lv |= signmask << (prec - count);
491 }
492 }
493 \f
494 /* Rotate the doubleword integer in L1, H1 left by COUNT places
495 keeping only PREC bits of result.
496 Rotate right if COUNT is negative.
497 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
498
499 void
500 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
501 HOST_WIDE_INT count, unsigned int prec,
502 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
503 {
504 unsigned HOST_WIDE_INT s1l, s2l;
505 HOST_WIDE_INT s1h, s2h;
506
507 count %= prec;
508 if (count < 0)
509 count += prec;
510
511 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
512 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
513 *lv = s1l | s2l;
514 *hv = s1h | s2h;
515 }
516
517 /* Rotate the doubleword integer in L1, H1 right by COUNT places
518 keeping only PREC bits of result. COUNT must be positive.
519 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
520
521 void
522 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
523 HOST_WIDE_INT count, unsigned int prec,
524 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
525 {
526 unsigned HOST_WIDE_INT s1l, s2l;
527 HOST_WIDE_INT s1h, s2h;
528
529 count %= prec;
530 if (count < 0)
531 count += prec;
532
533 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
534 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
535 *lv = s1l | s2l;
536 *hv = s1h | s2h;
537 }
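/* Both rotate routines are built from the shift primitives using the
   identity rotl (x, n) == (x << n) | (x >> (prec - n)) (mirrored for
   rotating right), with COUNT first reduced modulo PREC so that both
   logical shift counts stay in range.  */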
538 \f
539 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
540 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
541 CODE is a tree code for a kind of division, one of
542 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
543 or EXACT_DIV_EXPR
544 It controls how the quotient is rounded to an integer.
545 Return nonzero if the operation overflows.
546 UNS nonzero says do unsigned division. */
547
548 int
549 div_and_round_double (enum tree_code code, int uns,
550 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
551 HOST_WIDE_INT hnum_orig,
552 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
553 HOST_WIDE_INT hden_orig,
554 unsigned HOST_WIDE_INT *lquo,
555 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
556 HOST_WIDE_INT *hrem)
557 {
558 int quo_neg = 0;
559 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
560 HOST_WIDE_INT den[4], quo[4];
561 int i, j;
562 unsigned HOST_WIDE_INT work;
563 unsigned HOST_WIDE_INT carry = 0;
564 unsigned HOST_WIDE_INT lnum = lnum_orig;
565 HOST_WIDE_INT hnum = hnum_orig;
566 unsigned HOST_WIDE_INT lden = lden_orig;
567 HOST_WIDE_INT hden = hden_orig;
568 int overflow = 0;
569
570 if (hden == 0 && lden == 0)
571 overflow = 1, lden = 1;
572
573 /* Calculate quotient sign and convert operands to unsigned. */
574 if (!uns)
575 {
576 if (hnum < 0)
577 {
578 quo_neg = ~ quo_neg;
579 /* (minimum integer) / (-1) is the only overflow case. */
580 if (neg_double (lnum, hnum, &lnum, &hnum)
581 && ((HOST_WIDE_INT) lden & hden) == -1)
582 overflow = 1;
583 }
584 if (hden < 0)
585 {
586 quo_neg = ~ quo_neg;
587 neg_double (lden, hden, &lden, &hden);
588 }
589 }
590
591 if (hnum == 0 && hden == 0)
592 { /* single precision */
593 *hquo = *hrem = 0;
594 /* This unsigned division rounds toward zero. */
595 *lquo = lnum / lden;
596 goto finish_up;
597 }
598
599 if (hnum == 0)
600 { /* trivial case: dividend < divisor */
601 /* hden != 0 already checked. */
602 *hquo = *lquo = 0;
603 *hrem = hnum;
604 *lrem = lnum;
605 goto finish_up;
606 }
607
608 memset (quo, 0, sizeof quo);
609
610   memset (num, 0, sizeof num);	/* to zero the extra scaling element */
611 memset (den, 0, sizeof den);
612
613 encode (num, lnum, hnum);
614 encode (den, lden, hden);
615
616 /* Special code for when the divisor < BASE. */
617 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
618 {
619 /* hnum != 0 already checked. */
620 for (i = 4 - 1; i >= 0; i--)
621 {
622 work = num[i] + carry * BASE;
623 quo[i] = work / lden;
624 carry = work % lden;
625 }
626 }
627 else
628 {
629 /* Full double precision division,
630 with thanks to Don Knuth's "Seminumerical Algorithms". */
631 int num_hi_sig, den_hi_sig;
632 unsigned HOST_WIDE_INT quo_est, scale;
633
634 /* Find the highest nonzero divisor digit. */
635 for (i = 4 - 1;; i--)
636 if (den[i] != 0)
637 {
638 den_hi_sig = i;
639 break;
640 }
641
642       /* Ensure that the first digit of the divisor is at least BASE/2.
643 This is required by the quotient digit estimation algorithm. */
644
645 scale = BASE / (den[den_hi_sig] + 1);
646 if (scale > 1)
647 { /* scale divisor and dividend */
648 carry = 0;
649 for (i = 0; i <= 4 - 1; i++)
650 {
651 work = (num[i] * scale) + carry;
652 num[i] = LOWPART (work);
653 carry = HIGHPART (work);
654 }
655
656 num[4] = carry;
657 carry = 0;
658 for (i = 0; i <= 4 - 1; i++)
659 {
660 work = (den[i] * scale) + carry;
661 den[i] = LOWPART (work);
662 carry = HIGHPART (work);
663 if (den[i] != 0) den_hi_sig = i;
664 }
665 }
666
667 num_hi_sig = 4;
668
669 /* Main loop */
670 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
671 {
672 /* Guess the next quotient digit, quo_est, by dividing the first
673 two remaining dividend digits by the high order quotient digit.
674 quo_est is never low and is at most 2 high. */
675 unsigned HOST_WIDE_INT tmp;
676
677 num_hi_sig = i + den_hi_sig + 1;
678 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
679 if (num[num_hi_sig] != den[den_hi_sig])
680 quo_est = work / den[den_hi_sig];
681 else
682 quo_est = BASE - 1;
683
684 /* Refine quo_est so it's usually correct, and at most one high. */
685 tmp = work - quo_est * den[den_hi_sig];
686 if (tmp < BASE
687 && (den[den_hi_sig - 1] * quo_est
688 > (tmp * BASE + num[num_hi_sig - 2])))
689 quo_est--;
690
691 /* Try QUO_EST as the quotient digit, by multiplying the
692 divisor by QUO_EST and subtracting from the remaining dividend.
693 Keep in mind that QUO_EST is the I - 1st digit. */
694
695 carry = 0;
696 for (j = 0; j <= den_hi_sig; j++)
697 {
698 work = quo_est * den[j] + carry;
699 carry = HIGHPART (work);
700 work = num[i + j] - LOWPART (work);
701 num[i + j] = LOWPART (work);
702 carry += HIGHPART (work) != 0;
703 }
704
705 /* If quo_est was high by one, then num[i] went negative and
706 we need to correct things. */
707 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
708 {
709 quo_est--;
710 carry = 0; /* add divisor back in */
711 for (j = 0; j <= den_hi_sig; j++)
712 {
713 work = num[i + j] + den[j] + carry;
714 carry = HIGHPART (work);
715 num[i + j] = LOWPART (work);
716 }
717
718 num [num_hi_sig] += carry;
719 }
720
721 /* Store the quotient digit. */
722 quo[i] = quo_est;
723 }
724 }
725
726 decode (quo, lquo, hquo);
727
728 finish_up:
729 /* If result is negative, make it so. */
730 if (quo_neg)
731 neg_double (*lquo, *hquo, lquo, hquo);
732
733 /* Compute trial remainder: rem = num - (quo * den) */
734 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
735 neg_double (*lrem, *hrem, lrem, hrem);
736 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
737
738 switch (code)
739 {
740 case TRUNC_DIV_EXPR:
741 case TRUNC_MOD_EXPR: /* round toward zero */
742 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
743 return overflow;
744
745 case FLOOR_DIV_EXPR:
746 case FLOOR_MOD_EXPR: /* round toward negative infinity */
747 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
748 {
749 /* quo = quo - 1; */
750 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
751 lquo, hquo);
752 }
753 else
754 return overflow;
755 break;
756
757 case CEIL_DIV_EXPR:
758 case CEIL_MOD_EXPR: /* round toward positive infinity */
759 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
760 {
761 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
762 lquo, hquo);
763 }
764 else
765 return overflow;
766 break;
767
768 case ROUND_DIV_EXPR:
769 case ROUND_MOD_EXPR: /* round to closest integer */
770 {
771 unsigned HOST_WIDE_INT labs_rem = *lrem;
772 HOST_WIDE_INT habs_rem = *hrem;
773 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
774 HOST_WIDE_INT habs_den = hden, htwice;
775
776 /* Get absolute values. */
777 if (*hrem < 0)
778 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
779 if (hden < 0)
780 neg_double (lden, hden, &labs_den, &habs_den);
781
782 /* If (2 * abs (lrem) >= abs (lden)) */
783 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
784 labs_rem, habs_rem, &ltwice, &htwice);
785
786 if (((unsigned HOST_WIDE_INT) habs_den
787 < (unsigned HOST_WIDE_INT) htwice)
788 || (((unsigned HOST_WIDE_INT) habs_den
789 == (unsigned HOST_WIDE_INT) htwice)
790 && (labs_den < ltwice)))
791 {
792 if (*hquo < 0)
793 /* quo = quo - 1; */
794 add_double (*lquo, *hquo,
795 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
796 else
797 /* quo = quo + 1; */
798 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
799 lquo, hquo);
800 }
801 else
802 return overflow;
803 }
804 break;
805
806 default:
807 abort ();
808 }
809
810 /* Compute true remainder: rem = num - (quo * den) */
811 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
812 neg_double (*lrem, *hrem, lrem, hrem);
813 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
814 return overflow;
815 }
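/* Worked example of the rounding modes (illustrative): dividing
   num = -7 by den = 2 gives the trial quotient -3 and remainder -1.
     TRUNC_DIV_EXPR:  quo = -3, rem = -1	(round toward zero)
     FLOOR_DIV_EXPR:  quo = -4, rem =  1	(round toward -infinity)
     CEIL_DIV_EXPR:   quo = -3, rem = -1	(round toward +infinity)
     ROUND_DIV_EXPR:  quo = -4, rem =  1	(2*|rem| >= |den|, so the
						 tie rounds away from zero)
   In each case the final remainder is recomputed so that
   num == quo * den + rem holds exactly.  */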
816 \f
817 /* Return true if built-in mathematical function specified by CODE
818    preserves the sign of its argument, i.e. -f(x) == f(-x).  */
819
820 static bool
821 negate_mathfn_p (enum built_in_function code)
822 {
823 switch (code)
824 {
825 case BUILT_IN_ASIN:
826 case BUILT_IN_ASINF:
827 case BUILT_IN_ASINL:
828 case BUILT_IN_ATAN:
829 case BUILT_IN_ATANF:
830 case BUILT_IN_ATANL:
831 case BUILT_IN_SIN:
832 case BUILT_IN_SINF:
833 case BUILT_IN_SINL:
834 case BUILT_IN_TAN:
835 case BUILT_IN_TANF:
836 case BUILT_IN_TANL:
837 return true;
838
839 default:
840 break;
841 }
842 return false;
843 }
844
845 /* Determine whether an expression T can be cheaply negated using
846 the function negate_expr. */
847
848 static bool
849 negate_expr_p (tree t)
850 {
851 unsigned HOST_WIDE_INT val;
852 unsigned int prec;
853 tree type;
854
855 if (t == 0)
856 return false;
857
858 type = TREE_TYPE (t);
859
860 STRIP_SIGN_NOPS (t);
861 switch (TREE_CODE (t))
862 {
863 case INTEGER_CST:
864 if (TYPE_UNSIGNED (type) || ! flag_trapv)
865 return true;
866
867 /* Check that -CST will not overflow type. */
868 prec = TYPE_PRECISION (type);
869 if (prec > HOST_BITS_PER_WIDE_INT)
870 {
871 if (TREE_INT_CST_LOW (t) != 0)
872 return true;
873 prec -= HOST_BITS_PER_WIDE_INT;
874 val = TREE_INT_CST_HIGH (t);
875 }
876 else
877 val = TREE_INT_CST_LOW (t);
878 if (prec < HOST_BITS_PER_WIDE_INT)
879 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
880 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
881
882 case REAL_CST:
883 case NEGATE_EXPR:
884 return true;
885
886 case COMPLEX_CST:
887 return negate_expr_p (TREE_REALPART (t))
888 && negate_expr_p (TREE_IMAGPART (t));
889
890 case PLUS_EXPR:
891 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
892 return false;
893 /* -(A + B) -> (-B) - A. */
894 if (negate_expr_p (TREE_OPERAND (t, 1))
895 && reorder_operands_p (TREE_OPERAND (t, 0),
896 TREE_OPERAND (t, 1)))
897 return true;
898 /* -(A + B) -> (-A) - B. */
899 return negate_expr_p (TREE_OPERAND (t, 0));
900
901 case MINUS_EXPR:
902 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
903 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
904 && reorder_operands_p (TREE_OPERAND (t, 0),
905 TREE_OPERAND (t, 1));
906
907 case MULT_EXPR:
908 if (TYPE_UNSIGNED (TREE_TYPE (t)))
909 break;
910
911 /* Fall through. */
912
913 case RDIV_EXPR:
914 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
915 return negate_expr_p (TREE_OPERAND (t, 1))
916 || negate_expr_p (TREE_OPERAND (t, 0));
917 break;
918
919 case NOP_EXPR:
920 /* Negate -((double)float) as (double)(-float). */
921 if (TREE_CODE (type) == REAL_TYPE)
922 {
923 tree tem = strip_float_extensions (t);
924 if (tem != t)
925 return negate_expr_p (tem);
926 }
927 break;
928
929 case CALL_EXPR:
930 /* Negate -f(x) as f(-x). */
931 if (negate_mathfn_p (builtin_mathfn_code (t)))
932 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
933 break;
934
935 case RSHIFT_EXPR:
936 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
937 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
938 {
939 tree op1 = TREE_OPERAND (t, 1);
940 if (TREE_INT_CST_HIGH (op1) == 0
941 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
942 == TREE_INT_CST_LOW (op1))
943 return true;
944 }
945 break;
946
947 default:
948 break;
949 }
950 return false;
951 }
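/* For example (illustrative), with -ftrapv and a 32-bit signed type,
   the INTEGER_CST case above rejects INT_MIN: its low-order bits equal
   1 << (prec - 1), so negating it would overflow and negate_expr_p
   returns false.  Every other 32-bit constant negates safely.  */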
952
953 /* Given T, an expression, return the negation of T. Allow for T to be
954 null, in which case return null. */
955
956 static tree
957 negate_expr (tree t)
958 {
959 tree type;
960 tree tem;
961
962 if (t == 0)
963 return 0;
964
965 type = TREE_TYPE (t);
966 STRIP_SIGN_NOPS (t);
967
968 switch (TREE_CODE (t))
969 {
970 case INTEGER_CST:
971 tem = fold_negate_const (t, type);
972 if (! TREE_OVERFLOW (tem)
973 || TYPE_UNSIGNED (type)
974 || ! flag_trapv)
975 return tem;
976 break;
977
978 case REAL_CST:
979 tem = fold_negate_const (t, type);
980 /* Two's complement FP formats, such as c4x, may overflow. */
981 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
982 return fold_convert (type, tem);
983 break;
984
985 case COMPLEX_CST:
986 {
987 tree rpart = negate_expr (TREE_REALPART (t));
988 tree ipart = negate_expr (TREE_IMAGPART (t));
989
990 if ((TREE_CODE (rpart) == REAL_CST
991 && TREE_CODE (ipart) == REAL_CST)
992 || (TREE_CODE (rpart) == INTEGER_CST
993 && TREE_CODE (ipart) == INTEGER_CST))
994 return build_complex (type, rpart, ipart);
995 }
996 break;
997
998 case NEGATE_EXPR:
999 return fold_convert (type, TREE_OPERAND (t, 0));
1000
1001 case PLUS_EXPR:
1002 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1003 {
1004 /* -(A + B) -> (-B) - A. */
1005 if (negate_expr_p (TREE_OPERAND (t, 1))
1006 && reorder_operands_p (TREE_OPERAND (t, 0),
1007 TREE_OPERAND (t, 1)))
1008 {
1009 tem = negate_expr (TREE_OPERAND (t, 1));
1010 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1011 tem, TREE_OPERAND (t, 0)));
1012 return fold_convert (type, tem);
1013 }
1014
1015 /* -(A + B) -> (-A) - B. */
1016 if (negate_expr_p (TREE_OPERAND (t, 0)))
1017 {
1018 tem = negate_expr (TREE_OPERAND (t, 0));
1019 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1020 tem, TREE_OPERAND (t, 1)));
1021 return fold_convert (type, tem);
1022 }
1023 }
1024 break;
1025
1026 case MINUS_EXPR:
1027 /* - (A - B) -> B - A */
1028 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1029 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1030 return fold_convert (type,
1031 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1032 TREE_OPERAND (t, 1),
1033 TREE_OPERAND (t, 0))));
1034 break;
1035
1036 case MULT_EXPR:
1037 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1038 break;
1039
1040 /* Fall through. */
1041
1042 case RDIV_EXPR:
1043 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1044 {
1045 tem = TREE_OPERAND (t, 1);
1046 if (negate_expr_p (tem))
1047 return fold_convert (type,
1048 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1049 TREE_OPERAND (t, 0),
1050 negate_expr (tem))));
1051 tem = TREE_OPERAND (t, 0);
1052 if (negate_expr_p (tem))
1053 return fold_convert (type,
1054 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1055 negate_expr (tem),
1056 TREE_OPERAND (t, 1))));
1057 }
1058 break;
1059
1060 case NOP_EXPR:
1061 /* Convert -((double)float) into (double)(-float). */
1062 if (TREE_CODE (type) == REAL_TYPE)
1063 {
1064 tem = strip_float_extensions (t);
1065 if (tem != t && negate_expr_p (tem))
1066 return fold_convert (type, negate_expr (tem));
1067 }
1068 break;
1069
1070 case CALL_EXPR:
1071 /* Negate -f(x) as f(-x). */
1072 if (negate_mathfn_p (builtin_mathfn_code (t))
1073 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1074 {
1075 tree fndecl, arg, arglist;
1076
1077 fndecl = get_callee_fndecl (t);
1078 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1079 arglist = build_tree_list (NULL_TREE, arg);
1080 return build_function_call_expr (fndecl, arglist);
1081 }
1082 break;
1083
1084 case RSHIFT_EXPR:
1085 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1086 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1087 {
1088 tree op1 = TREE_OPERAND (t, 1);
1089 if (TREE_INT_CST_HIGH (op1) == 0
1090 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1091 == TREE_INT_CST_LOW (op1))
1092 {
1093 tree ntype = TYPE_UNSIGNED (type)
1094 ? lang_hooks.types.signed_type (type)
1095 : lang_hooks.types.unsigned_type (type);
1096 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1097 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1098 return fold_convert (type, temp);
1099 }
1100 }
1101 break;
1102
1103 default:
1104 break;
1105 }
1106
1107 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1108 return fold_convert (type, tem);
1109 }
1110 \f
1111 /* Split a tree IN into constant, literal and variable parts that could be
1112 combined with CODE to make IN. "constant" means an expression with
1113 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1114 commutative arithmetic operation. Store the constant part into *CONP,
1115 the literal in *LITP and return the variable part. If a part isn't
1116 present, set it to null. If the tree does not decompose in this way,
1117 return the entire tree as the variable part and the other parts as null.
1118
1119 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1120 case, we negate an operand that was subtracted. Except if it is a
1121 literal for which we use *MINUS_LITP instead.
1122
1123 If NEGATE_P is true, we are negating all of IN, again except a literal
1124 for which we use *MINUS_LITP instead.
1125
1126 If IN is itself a literal or constant, return it as appropriate.
1127
1128 Note that we do not guarantee that any of the three values will be the
1129 same type as IN, but they will have the same signedness and mode. */
1130
1131 static tree
1132 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1133 tree *minus_litp, int negate_p)
1134 {
1135 tree var = 0;
1136
1137 *conp = 0;
1138 *litp = 0;
1139 *minus_litp = 0;
1140
1141 /* Strip any conversions that don't change the machine mode or signedness. */
1142 STRIP_SIGN_NOPS (in);
1143
1144 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1145 *litp = in;
1146 else if (TREE_CODE (in) == code
1147 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1148 /* We can associate addition and subtraction together (even
1149 though the C standard doesn't say so) for integers because
1150 the value is not affected. For reals, the value might be
1151 affected, so we can't. */
1152 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1153 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1154 {
1155 tree op0 = TREE_OPERAND (in, 0);
1156 tree op1 = TREE_OPERAND (in, 1);
1157 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1158 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1159
1160 /* First see if either of the operands is a literal, then a constant. */
1161 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1162 *litp = op0, op0 = 0;
1163 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1164 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1165
1166 if (op0 != 0 && TREE_CONSTANT (op0))
1167 *conp = op0, op0 = 0;
1168 else if (op1 != 0 && TREE_CONSTANT (op1))
1169 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1170
1171 /* If we haven't dealt with either operand, this is not a case we can
1172 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1173 if (op0 != 0 && op1 != 0)
1174 var = in;
1175 else if (op0 != 0)
1176 var = op0;
1177 else
1178 var = op1, neg_var_p = neg1_p;
1179
1180 /* Now do any needed negations. */
1181 if (neg_litp_p)
1182 *minus_litp = *litp, *litp = 0;
1183 if (neg_conp_p)
1184 *conp = negate_expr (*conp);
1185 if (neg_var_p)
1186 var = negate_expr (var);
1187 }
1188 else if (TREE_CONSTANT (in))
1189 *conp = in;
1190 else
1191 var = in;
1192
1193 if (negate_p)
1194 {
1195 if (*litp)
1196 *minus_litp = *litp, *litp = 0;
1197 else if (*minus_litp)
1198 *litp = *minus_litp, *minus_litp = 0;
1199 *conp = negate_expr (*conp);
1200 var = negate_expr (var);
1201 }
1202
1203 return var;
1204 }
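/* Example (illustrative): with CODE = PLUS_EXPR and NEGATE_P = 0,
     IN = x + 4  gives *LITP = 4, *CONP = 0 and variable part x;
     IN = x - 4  gives *MINUS_LITP = 4, *CONP = 0 and variable part x,
   the subtracted literal being recorded through *MINUS_LITP rather
   than rebuilt as a negated constant.  */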
1205
1206 /* Re-associate trees split by the above function. T1 and T2 are either
1207 expressions to associate or null. Return the new expression, if any. If
1208 we build an operation, do it in TYPE and with CODE. */
1209
1210 static tree
1211 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1212 {
1213 if (t1 == 0)
1214 return t2;
1215 else if (t2 == 0)
1216 return t1;
1217
1218 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1219 try to fold this since we will have infinite recursion. But do
1220 deal with any NEGATE_EXPRs. */
1221 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1222 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1223 {
1224 if (code == PLUS_EXPR)
1225 {
1226 if (TREE_CODE (t1) == NEGATE_EXPR)
1227 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1228 fold_convert (type, TREE_OPERAND (t1, 0)));
1229 else if (TREE_CODE (t2) == NEGATE_EXPR)
1230 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1231 fold_convert (type, TREE_OPERAND (t2, 0)));
1232 }
1233 return build2 (code, type, fold_convert (type, t1),
1234 fold_convert (type, t2));
1235 }
1236
1237 return fold (build2 (code, type, fold_convert (type, t1),
1238 fold_convert (type, t2)));
1239 }
1240 \f
1241 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1242 to produce a new constant.
1243
1244 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1245
1246 tree
1247 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1248 {
1249 unsigned HOST_WIDE_INT int1l, int2l;
1250 HOST_WIDE_INT int1h, int2h;
1251 unsigned HOST_WIDE_INT low;
1252 HOST_WIDE_INT hi;
1253 unsigned HOST_WIDE_INT garbagel;
1254 HOST_WIDE_INT garbageh;
1255 tree t;
1256 tree type = TREE_TYPE (arg1);
1257 int uns = TYPE_UNSIGNED (type);
1258 int is_sizetype
1259 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1260 int overflow = 0;
1261 int no_overflow = 0;
1262
1263 int1l = TREE_INT_CST_LOW (arg1);
1264 int1h = TREE_INT_CST_HIGH (arg1);
1265 int2l = TREE_INT_CST_LOW (arg2);
1266 int2h = TREE_INT_CST_HIGH (arg2);
1267
1268 switch (code)
1269 {
1270 case BIT_IOR_EXPR:
1271 low = int1l | int2l, hi = int1h | int2h;
1272 break;
1273
1274 case BIT_XOR_EXPR:
1275 low = int1l ^ int2l, hi = int1h ^ int2h;
1276 break;
1277
1278 case BIT_AND_EXPR:
1279 low = int1l & int2l, hi = int1h & int2h;
1280 break;
1281
1282 case RSHIFT_EXPR:
1283       int2l = -int2l;	/* ... fall through ... */
1284 case LSHIFT_EXPR:
1285 /* It's unclear from the C standard whether shifts can overflow.
1286 The following code ignores overflow; perhaps a C standard
1287 interpretation ruling is needed. */
1288 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1289 &low, &hi, !uns);
1290 no_overflow = 1;
1291 break;
1292
1293 case RROTATE_EXPR:
1294       int2l = -int2l;	/* ... fall through ... */
1295 case LROTATE_EXPR:
1296 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1297 &low, &hi);
1298 break;
1299
1300 case PLUS_EXPR:
1301 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1302 break;
1303
1304 case MINUS_EXPR:
1305 neg_double (int2l, int2h, &low, &hi);
1306 add_double (int1l, int1h, low, hi, &low, &hi);
1307 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1308 break;
1309
1310 case MULT_EXPR:
1311 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1312 break;
1313
1314 case TRUNC_DIV_EXPR:
1315 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1316 case EXACT_DIV_EXPR:
1317 /* This is a shortcut for a common special case. */
1318 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1319 && ! TREE_CONSTANT_OVERFLOW (arg1)
1320 && ! TREE_CONSTANT_OVERFLOW (arg2)
1321 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1322 {
1323 if (code == CEIL_DIV_EXPR)
1324 int1l += int2l - 1;
1325
1326 low = int1l / int2l, hi = 0;
1327 break;
1328 }
1329
1330 /* ... fall through ... */
1331
1332 case ROUND_DIV_EXPR:
1333 if (int2h == 0 && int2l == 1)
1334 {
1335 low = int1l, hi = int1h;
1336 break;
1337 }
1338 if (int1l == int2l && int1h == int2h
1339 && ! (int1l == 0 && int1h == 0))
1340 {
1341 low = 1, hi = 0;
1342 break;
1343 }
1344 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1345 &low, &hi, &garbagel, &garbageh);
1346 break;
1347
1348 case TRUNC_MOD_EXPR:
1349 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1350 /* This is a shortcut for a common special case. */
1351 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1352 && ! TREE_CONSTANT_OVERFLOW (arg1)
1353 && ! TREE_CONSTANT_OVERFLOW (arg2)
1354 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1355 {
1356 if (code == CEIL_MOD_EXPR)
1357 int1l += int2l - 1;
1358 low = int1l % int2l, hi = 0;
1359 break;
1360 }
1361
1362 /* ... fall through ... */
1363
1364 case ROUND_MOD_EXPR:
1365 overflow = div_and_round_double (code, uns,
1366 int1l, int1h, int2l, int2h,
1367 &garbagel, &garbageh, &low, &hi);
1368 break;
1369
1370 case MIN_EXPR:
1371 case MAX_EXPR:
1372 if (uns)
1373 low = (((unsigned HOST_WIDE_INT) int1h
1374 < (unsigned HOST_WIDE_INT) int2h)
1375 || (((unsigned HOST_WIDE_INT) int1h
1376 == (unsigned HOST_WIDE_INT) int2h)
1377 && int1l < int2l));
1378 else
1379 low = (int1h < int2h
1380 || (int1h == int2h && int1l < int2l));
1381
1382 if (low == (code == MIN_EXPR))
1383 low = int1l, hi = int1h;
1384 else
1385 low = int2l, hi = int2h;
1386 break;
1387
1388 default:
1389 abort ();
1390 }
1391
1392 /* If this is for a sizetype, can be represented as one (signed)
1393 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1394 constants. */
1395 if (is_sizetype
1396 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1397 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1398 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1399 return size_int_type_wide (low, type);
1400 else
1401 {
1402 t = build_int_2 (low, hi);
1403 TREE_TYPE (t) = TREE_TYPE (arg1);
1404 }
1405
1406 TREE_OVERFLOW (t)
1407 = ((notrunc
1408 ? (!uns || is_sizetype) && overflow
1409 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1410 && ! no_overflow))
1411 | TREE_OVERFLOW (arg1)
1412 | TREE_OVERFLOW (arg2));
1413
1414 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1415 So check if force_fit_type truncated the value. */
1416 if (is_sizetype
1417 && ! TREE_OVERFLOW (t)
1418 && (TREE_INT_CST_HIGH (t) != hi
1419 || TREE_INT_CST_LOW (t) != low))
1420 TREE_OVERFLOW (t) = 1;
1421
1422 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1423 | TREE_CONSTANT_OVERFLOW (arg1)
1424 | TREE_CONSTANT_OVERFLOW (arg2));
1425 return t;
1426 }
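/* A minimal usage sketch (illustrative only, hence not compiled):  */
#if 0
{
  tree a = build_int_2 (7, 0);
  tree b = build_int_2 (2, 0);
  tree q;

  TREE_TYPE (a) = TREE_TYPE (b) = integer_type_node;
  q = int_const_binop (TRUNC_DIV_EXPR, a, b, 0);
  /* q is an INTEGER_CST with TREE_INT_CST_LOW (q) == 3 and clear
     overflow flags.  */
}
#endif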
1427
1428 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1429 constant. We assume ARG1 and ARG2 have the same data type, or at least
1430 are the same kind of constant and the same machine mode.
1431
1432 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1433
1434 static tree
1435 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1436 {
1437 STRIP_NOPS (arg1);
1438 STRIP_NOPS (arg2);
1439
1440 if (TREE_CODE (arg1) == INTEGER_CST)
1441 return int_const_binop (code, arg1, arg2, notrunc);
1442
1443 if (TREE_CODE (arg1) == REAL_CST)
1444 {
1445 enum machine_mode mode;
1446 REAL_VALUE_TYPE d1;
1447 REAL_VALUE_TYPE d2;
1448 REAL_VALUE_TYPE value;
1449 tree t, type;
1450
1451 d1 = TREE_REAL_CST (arg1);
1452 d2 = TREE_REAL_CST (arg2);
1453
1454 type = TREE_TYPE (arg1);
1455 mode = TYPE_MODE (type);
1456
1457 /* Don't perform operation if we honor signaling NaNs and
1458 either operand is a NaN. */
1459 if (HONOR_SNANS (mode)
1460 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1461 return NULL_TREE;
1462
1463 /* Don't perform operation if it would raise a division
1464 by zero exception. */
1465 if (code == RDIV_EXPR
1466 && REAL_VALUES_EQUAL (d2, dconst0)
1467 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1468 return NULL_TREE;
1469
1470 /* If either operand is a NaN, just return it. Otherwise, set up
1471 for floating-point trap; we return an overflow. */
1472 if (REAL_VALUE_ISNAN (d1))
1473 return arg1;
1474 else if (REAL_VALUE_ISNAN (d2))
1475 return arg2;
1476
1477 REAL_ARITHMETIC (value, code, d1, d2);
1478
1479 t = build_real (type, real_value_truncate (mode, value));
1480
1481 TREE_OVERFLOW (t)
1482 = (force_fit_type (t, 0)
1483 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1484 TREE_CONSTANT_OVERFLOW (t)
1485 = TREE_OVERFLOW (t)
1486 | TREE_CONSTANT_OVERFLOW (arg1)
1487 | TREE_CONSTANT_OVERFLOW (arg2);
1488 return t;
1489 }
1490 if (TREE_CODE (arg1) == COMPLEX_CST)
1491 {
1492 tree type = TREE_TYPE (arg1);
1493 tree r1 = TREE_REALPART (arg1);
1494 tree i1 = TREE_IMAGPART (arg1);
1495 tree r2 = TREE_REALPART (arg2);
1496 tree i2 = TREE_IMAGPART (arg2);
1497 tree t;
1498
1499 switch (code)
1500 {
1501 case PLUS_EXPR:
1502 t = build_complex (type,
1503 const_binop (PLUS_EXPR, r1, r2, notrunc),
1504 const_binop (PLUS_EXPR, i1, i2, notrunc));
1505 break;
1506
1507 case MINUS_EXPR:
1508 t = build_complex (type,
1509 const_binop (MINUS_EXPR, r1, r2, notrunc),
1510 const_binop (MINUS_EXPR, i1, i2, notrunc));
1511 break;
1512
1513 case MULT_EXPR:
1514 t = build_complex (type,
1515 const_binop (MINUS_EXPR,
1516 const_binop (MULT_EXPR,
1517 r1, r2, notrunc),
1518 const_binop (MULT_EXPR,
1519 i1, i2, notrunc),
1520 notrunc),
1521 const_binop (PLUS_EXPR,
1522 const_binop (MULT_EXPR,
1523 r1, i2, notrunc),
1524 const_binop (MULT_EXPR,
1525 i1, r2, notrunc),
1526 notrunc));
1527 break;
1528
1529 case RDIV_EXPR:
1530 {
1531 tree magsquared
1532 = const_binop (PLUS_EXPR,
1533 const_binop (MULT_EXPR, r2, r2, notrunc),
1534 const_binop (MULT_EXPR, i2, i2, notrunc),
1535 notrunc);
1536
1537 t = build_complex (type,
1538 const_binop
1539 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1540 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1541 const_binop (PLUS_EXPR,
1542 const_binop (MULT_EXPR, r1, r2,
1543 notrunc),
1544 const_binop (MULT_EXPR, i1, i2,
1545 notrunc),
1546 notrunc),
1547 magsquared, notrunc),
1548 const_binop
1549 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1550 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1551 const_binop (MINUS_EXPR,
1552 const_binop (MULT_EXPR, i1, r2,
1553 notrunc),
1554 const_binop (MULT_EXPR, r1, i2,
1555 notrunc),
1556 notrunc),
1557 magsquared, notrunc));
1558 }
1559 break;
1560
1561 default:
1562 abort ();
1563 }
1564 return t;
1565 }
1566 return 0;
1567 }
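/* The COMPLEX_CST cases above implement the textbook identities
     (a + bi) * (c + di) = (ac - bd) + (ad + bc)i
     (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d)
   where the divisor's squared magnitude is MAGSQUARED, and each
   component division uses TRUNC_DIV_EXPR for integral complex types
   in place of RDIV_EXPR.  */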
1568
1569 /* These are the hash table functions for the hash table of INTEGER_CST
1570 nodes of a sizetype. */
1571
1572 /* Return the hash code for X, an INTEGER_CST.  */
1573
1574 static hashval_t
1575 size_htab_hash (const void *x)
1576 {
1577 tree t = (tree) x;
1578
1579 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1580 ^ htab_hash_pointer (TREE_TYPE (t))
1581 ^ (TREE_OVERFLOW (t) << 20));
1582 }
1583
1584 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1585    is the same as that given by *Y, also an INTEGER_CST tree node.  */
1586
1587 static int
1588 size_htab_eq (const void *x, const void *y)
1589 {
1590 tree xt = (tree) x;
1591 tree yt = (tree) y;
1592
1593 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1594 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1595 && TREE_TYPE (xt) == TREE_TYPE (yt)
1596 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1597 }
1598 \f
1599 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1600 bits are given by NUMBER and of the sizetype represented by KIND. */
1601
1602 tree
1603 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1604 {
1605 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1606 }
1607
1608 /* Likewise, but the desired type is specified explicitly. */
1609
1610 static GTY (()) tree new_const;
1611 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1612 htab_t size_htab;
1613
1614 tree
1615 size_int_type_wide (HOST_WIDE_INT number, tree type)
1616 {
1617 void **slot;
1618
1619 if (size_htab == 0)
1620 {
1621 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1622 new_const = make_node (INTEGER_CST);
1623 }
1624
1625 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1626 hash table, we return the value from the hash table. Otherwise, we
1627 place that in the hash table and make a new node for the next time. */
1628 TREE_INT_CST_LOW (new_const) = number;
1629 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1630 TREE_TYPE (new_const) = type;
1631 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1632 = force_fit_type (new_const, 0);
1633
1634 slot = htab_find_slot (size_htab, new_const, INSERT);
1635 if (*slot == 0)
1636 {
1637 tree t = new_const;
1638
1639 *slot = new_const;
1640 new_const = make_node (INTEGER_CST);
1641 return t;
1642 }
1643 else
1644 return (tree) *slot;
1645 }
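/* A sketch of the hash-consing behavior (illustrative only, hence
   not compiled):  */
#if 0
{
  tree a = size_int_type_wide (8, sizetype);
  tree b = size_int_type_wide (8, sizetype);
  /* Here a == b: the second call finds the node in SIZE_HTAB and
     returns it, so equal size constants share one INTEGER_CST.  */
}
#endif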
1646
1647 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1648    is a tree code.  The type of the result is taken from the operands.
1649    Both must be the same integer type, and it must be a sizetype.
1650 If the operands are constant, so is the result. */
1651
1652 tree
1653 size_binop (enum tree_code code, tree arg0, tree arg1)
1654 {
1655 tree type = TREE_TYPE (arg0);
1656
1657 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1658 || type != TREE_TYPE (arg1))
1659 abort ();
1660
1661 /* Handle the special case of two integer constants faster. */
1662 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1663 {
1664 /* And some specific cases even faster than that. */
1665 if (code == PLUS_EXPR && integer_zerop (arg0))
1666 return arg1;
1667 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1668 && integer_zerop (arg1))
1669 return arg0;
1670 else if (code == MULT_EXPR && integer_onep (arg0))
1671 return arg1;
1672
1673 /* Handle general case of two integer constants. */
1674 return int_const_binop (code, arg0, arg1, 0);
1675 }
1676
1677 if (arg0 == error_mark_node || arg1 == error_mark_node)
1678 return error_mark_node;
1679
1680 return fold (build2 (code, type, arg0, arg1));
1681 }
1682
1683 /* Given two values, either both of sizetype or both of bitsizetype,
1684 compute the difference between the two values. Return the value
1685    in a signed type corresponding to the type of the operands.  */
1686
1687 tree
1688 size_diffop (tree arg0, tree arg1)
1689 {
1690 tree type = TREE_TYPE (arg0);
1691 tree ctype;
1692
1693 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1694 || type != TREE_TYPE (arg1))
1695 abort ();
1696
1697 /* If the type is already signed, just do the simple thing. */
1698 if (!TYPE_UNSIGNED (type))
1699 return size_binop (MINUS_EXPR, arg0, arg1);
1700
1701 ctype = (type == bitsizetype || type == ubitsizetype
1702 ? sbitsizetype : ssizetype);
1703
1704 /* If either operand is not a constant, do the conversions to the signed
1705 type and subtract. The hardware will do the right thing with any
1706 overflow in the subtraction. */
1707 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1708 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1709 fold_convert (ctype, arg1));
1710
1711 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1712 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1713 overflow) and negate (which can't either). Special-case a result
1714 of zero while we're here. */
1715 if (tree_int_cst_equal (arg0, arg1))
1716 return fold_convert (ctype, integer_zero_node);
1717 else if (tree_int_cst_lt (arg1, arg0))
1718 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1719 else
1720 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1721 fold_convert (ctype, size_binop (MINUS_EXPR,
1722 arg1, arg0)));
1723 }
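/* Example (illustrative): sizetype is unsigned, so
   size_diffop (size_int (3), size_int (5)) must not fold 3 - 5 in
   sizetype.  Since 3 < 5, it instead computes 5 - 3 = 2, converts
   that to ssizetype, and negates, yielding -2 with no intermediate
   overflow.  */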
1724 \f
1725
1726 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1727 type TYPE. If no simplification can be done return NULL_TREE. */
1728
1729 static tree
1730 fold_convert_const (enum tree_code code, tree type, tree arg1)
1731 {
1732 int overflow = 0;
1733 tree t;
1734
1735 if (TREE_TYPE (arg1) == type)
1736 return arg1;
1737
1738 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1739 {
1740 if (TREE_CODE (arg1) == INTEGER_CST)
1741 {
1742 /* If we would build a constant wider than GCC supports,
1743 leave the conversion unfolded. */
1744 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1745 return NULL_TREE;
1746
1747 /* If we are trying to make a sizetype for a small integer, use
1748 size_int to pick up cached types to reduce duplicate nodes. */
1749 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1750 && !TREE_CONSTANT_OVERFLOW (arg1)
1751 && compare_tree_int (arg1, 10000) < 0)
1752 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1753
1754 /* Given an integer constant, make new constant with new type,
1755 appropriately sign-extended or truncated. */
1756 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1757 TREE_INT_CST_HIGH (arg1));
1758 TREE_TYPE (t) = type;
1759 /* Indicate an overflow if (1) ARG1 already overflowed,
1760 or (2) force_fit_type indicates an overflow.
1761 Tell force_fit_type that an overflow has already occurred
1762 if ARG1 is a too-large unsigned value and T is signed.
1763 But don't indicate an overflow if converting a pointer. */
1764 TREE_OVERFLOW (t)
1765 = ((force_fit_type (t,
1766 (TREE_INT_CST_HIGH (arg1) < 0
1767 && (TYPE_UNSIGNED (type)
1768 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1769 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1770 || TREE_OVERFLOW (arg1));
1771 TREE_CONSTANT_OVERFLOW (t)
1772 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1773 return t;
1774 }
1775 else if (TREE_CODE (arg1) == REAL_CST)
1776 {
1777 /* The following code implements the floating point to integer
1778 conversion rules required by the Java Language Specification,
1779 that IEEE NaNs are mapped to zero and values that overflow
1780 the target precision saturate, i.e. values greater than
1781 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1782 are mapped to INT_MIN. These semantics are allowed by the
1783 C and C++ standards that simply state that the behavior of
1784 FP-to-integer conversion is unspecified upon overflow. */
1785
1786 HOST_WIDE_INT high, low;
1787
1788 REAL_VALUE_TYPE r;
1789 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1790
1791 switch (code)
1792 {
1793 case FIX_TRUNC_EXPR:
1794 real_trunc (&r, VOIDmode, &x);
1795 break;
1796
1797 case FIX_CEIL_EXPR:
1798 real_ceil (&r, VOIDmode, &x);
1799 break;
1800
1801 case FIX_FLOOR_EXPR:
1802 real_floor (&r, VOIDmode, &x);
1803 break;
1804
1805 case FIX_ROUND_EXPR:
1806 real_round (&r, VOIDmode, &x);
1807 break;
1808
1809 default:
1810 abort ();
1811 }
1812
1813 /* If R is NaN, return zero and show we have an overflow. */
1814 if (REAL_VALUE_ISNAN (r))
1815 {
1816 overflow = 1;
1817 high = 0;
1818 low = 0;
1819 }
1820
1821 /* See if R is less than the lower bound or greater than the
1822 upper bound. */
1823
1824 if (! overflow)
1825 {
1826 tree lt = TYPE_MIN_VALUE (type);
1827 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1828 if (REAL_VALUES_LESS (r, l))
1829 {
1830 overflow = 1;
1831 high = TREE_INT_CST_HIGH (lt);
1832 low = TREE_INT_CST_LOW (lt);
1833 }
1834 }
1835
1836 if (! overflow)
1837 {
1838 tree ut = TYPE_MAX_VALUE (type);
1839 if (ut)
1840 {
1841 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1842 if (REAL_VALUES_LESS (u, r))
1843 {
1844 overflow = 1;
1845 high = TREE_INT_CST_HIGH (ut);
1846 low = TREE_INT_CST_LOW (ut);
1847 }
1848 }
1849 }
1850
1851 if (! overflow)
1852 REAL_VALUE_TO_INT (&low, &high, r);
1853
1854 t = build_int_2 (low, high);
1855 TREE_TYPE (t) = type;
1856 TREE_OVERFLOW (t)
1857 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1858 TREE_CONSTANT_OVERFLOW (t)
1859 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1860 return t;
1861 }
1862 }
1863 else if (TREE_CODE (type) == REAL_TYPE)
1864 {
1865 if (TREE_CODE (arg1) == INTEGER_CST)
1866 return build_real_from_int_cst (type, arg1);
1867 if (TREE_CODE (arg1) == REAL_CST)
1868 {
1869 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1870 {
1871 /* We make a copy of ARG1 so that we don't modify an
1872 existing constant tree. */
1873 t = copy_node (arg1);
1874 TREE_TYPE (t) = type;
1875 return t;
1876 }
1877
1878 t = build_real (type,
1879 real_value_truncate (TYPE_MODE (type),
1880 TREE_REAL_CST (arg1)));
1881
1882 TREE_OVERFLOW (t)
1883 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1884 TREE_CONSTANT_OVERFLOW (t)
1885 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1886 return t;
1887 }
1888 }
1889 return NULL_TREE;
1890 }
1891
1892 /* Convert expression ARG to type TYPE. Used by the middle-end for
1893 simple conversions in preference to calling the front-end's convert. */
1894
1895 tree
1896 fold_convert (tree type, tree arg)
1897 {
1898 tree orig = TREE_TYPE (arg);
1899 tree tem;
1900
1901 if (type == orig)
1902 return arg;
1903
1904 if (TREE_CODE (arg) == ERROR_MARK
1905 || TREE_CODE (type) == ERROR_MARK
1906 || TREE_CODE (orig) == ERROR_MARK)
1907 return error_mark_node;
1908
1909 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1910 return fold (build1 (NOP_EXPR, type, arg));
1911
1912 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1913 || TREE_CODE (type) == OFFSET_TYPE)
1914 {
1915 if (TREE_CODE (arg) == INTEGER_CST)
1916 {
1917 tem = fold_convert_const (NOP_EXPR, type, arg);
1918 if (tem != NULL_TREE)
1919 return tem;
1920 }
1921 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1922 || TREE_CODE (orig) == OFFSET_TYPE)
1923 return fold (build1 (NOP_EXPR, type, arg));
1924 if (TREE_CODE (orig) == COMPLEX_TYPE)
1925 {
1926 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1927 return fold_convert (type, tem);
1928 }
1929 if (TREE_CODE (orig) == VECTOR_TYPE
1930 && GET_MODE_SIZE (TYPE_MODE (type))
1931 == GET_MODE_SIZE (TYPE_MODE (orig)))
1932 return fold (build1 (NOP_EXPR, type, arg));
1933 }
1934 else if (TREE_CODE (type) == REAL_TYPE)
1935 {
1936 if (TREE_CODE (arg) == INTEGER_CST)
1937 {
1938 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1939 if (tem != NULL_TREE)
1940 return tem;
1941 }
1942 else if (TREE_CODE (arg) == REAL_CST)
1943 {
1944 tem = fold_convert_const (NOP_EXPR, type, arg);
1945 if (tem != NULL_TREE)
1946 return tem;
1947 }
1948
1949 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1950 return fold (build1 (FLOAT_EXPR, type, arg));
1951 if (TREE_CODE (orig) == REAL_TYPE)
1952 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1953 type, arg));
1954 if (TREE_CODE (orig) == COMPLEX_TYPE)
1955 {
1956 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1957 return fold_convert (type, tem);
1958 }
1959 }
1960 else if (TREE_CODE (type) == COMPLEX_TYPE)
1961 {
1962 if (INTEGRAL_TYPE_P (orig)
1963 || POINTER_TYPE_P (orig)
1964 || TREE_CODE (orig) == REAL_TYPE)
1965 return build2 (COMPLEX_EXPR, type,
1966 fold_convert (TREE_TYPE (type), arg),
1967 fold_convert (TREE_TYPE (type), integer_zero_node));
1968 if (TREE_CODE (orig) == COMPLEX_TYPE)
1969 {
1970 tree rpart, ipart;
1971
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1973 {
1974 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1975 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1976 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1977 }
1978
1979 arg = save_expr (arg);
1980 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1981 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1982 rpart = fold_convert (TREE_TYPE (type), rpart);
1983 ipart = fold_convert (TREE_TYPE (type), ipart);
1984 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1985 }
1986 }
1987 else if (TREE_CODE (type) == VECTOR_TYPE)
1988 {
1989 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1990 && GET_MODE_SIZE (TYPE_MODE (type))
1991 == GET_MODE_SIZE (TYPE_MODE (orig)))
1992 return fold (build1 (NOP_EXPR, type, arg));
1993 if (TREE_CODE (orig) == VECTOR_TYPE
1994 && GET_MODE_SIZE (TYPE_MODE (type))
1995 == GET_MODE_SIZE (TYPE_MODE (orig)))
1996 return fold (build1 (NOP_EXPR, type, arg));
1997 }
1998 else if (VOID_TYPE_P (type))
1999 return fold (build1 (CONVERT_EXPR, type, arg));
2000 abort ();
2001 }
2002 \f
2003 /* Return an expr equal to X but certainly not valid as an lvalue. */
2004
2005 tree
2006 non_lvalue (tree x)
2007 {
2008 /* We only need to wrap lvalue tree codes. */
2009 switch (TREE_CODE (x))
2010 {
2011 case VAR_DECL:
2012 case PARM_DECL:
2013 case RESULT_DECL:
2014 case LABEL_DECL:
2015 case FUNCTION_DECL:
2016 case SSA_NAME:
2017
2018 case COMPONENT_REF:
2019 case INDIRECT_REF:
2020 case ARRAY_REF:
2021 case BIT_FIELD_REF:
2022 case BUFFER_REF:
2023 case ARRAY_RANGE_REF:
2024 case VTABLE_REF:
2025
2026 case REALPART_EXPR:
2027 case IMAGPART_EXPR:
2028 case PREINCREMENT_EXPR:
2029 case PREDECREMENT_EXPR:
2030 case SAVE_EXPR:
2031 case UNSAVE_EXPR:
2032 case TRY_CATCH_EXPR:
2033 case WITH_CLEANUP_EXPR:
2034 case COMPOUND_EXPR:
2035 case MODIFY_EXPR:
2036 case TARGET_EXPR:
2037 case COND_EXPR:
2038 case BIND_EXPR:
2039 case MIN_EXPR:
2040 case MAX_EXPR:
2041 case RTL_EXPR:
2042 break;
2043
2044 default:
2045 /* Assume the worst for front-end tree codes. */
2046 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2047 break;
2048 return x;
2049 }
2050 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2051 }
2052
2053 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2054 Zero means allow extended lvalues. */
2055
2056 int pedantic_lvalues;
2057
2058 /* When pedantic, return an expr equal to X but certainly not valid as a
2059 pedantic lvalue. Otherwise, return X. */
2060
2061 tree
2062 pedantic_non_lvalue (tree x)
2063 {
2064 if (pedantic_lvalues)
2065 return non_lvalue (x);
2066 else
2067 return x;
2068 }
2069 \f
2070 /* Given a tree comparison code, return the code that is the logical inverse
2071 of the given code. It is not safe to do this for floating-point
2072    comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a flag
2073    saying whether NaNs must be honored: if inverting is unsafe, return ERROR_MARK. */
2074
2075 static enum tree_code
2076 invert_tree_comparison (enum tree_code code, bool honor_nans)
2077 {
2078 if (honor_nans && flag_trapping_math)
2079 return ERROR_MARK;
2080
2081 switch (code)
2082 {
2083 case EQ_EXPR:
2084 return NE_EXPR;
2085 case NE_EXPR:
2086 return EQ_EXPR;
2087 case GT_EXPR:
2088 return honor_nans ? UNLE_EXPR : LE_EXPR;
2089 case GE_EXPR:
2090 return honor_nans ? UNLT_EXPR : LT_EXPR;
2091 case LT_EXPR:
2092 return honor_nans ? UNGE_EXPR : GE_EXPR;
2093 case LE_EXPR:
2094 return honor_nans ? UNGT_EXPR : GT_EXPR;
2095 case LTGT_EXPR:
2096 return UNEQ_EXPR;
2097 case UNEQ_EXPR:
2098 return LTGT_EXPR;
2099 case UNGT_EXPR:
2100 return LE_EXPR;
2101 case UNGE_EXPR:
2102 return LT_EXPR;
2103 case UNLT_EXPR:
2104 return GE_EXPR;
2105 case UNLE_EXPR:
2106 return GT_EXPR;
2107 case ORDERED_EXPR:
2108 return UNORDERED_EXPR;
2109 case UNORDERED_EXPR:
2110 return ORDERED_EXPR;
2111 default:
2112 abort ();
2113 }
2114 }
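/* For example, when NaNs need not be honored the inverse of GT_EXPR is
   LE_EXPR, but when they must be it is UNLE_EXPR, since "x > y" and
   "x <= y" are both false if either operand is a NaN.  And if NaNs are
   honored while flag_trapping_math is set, no inversion is attempted at
   all and ERROR_MARK is returned.  */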
2115
2116 /* Similar, but return the comparison that results if the operands are
2117 swapped. This is safe for floating-point. */
2118
2119 enum tree_code
2120 swap_tree_comparison (enum tree_code code)
2121 {
2122 switch (code)
2123 {
2124 case EQ_EXPR:
2125 case NE_EXPR:
2126 return code;
2127 case GT_EXPR:
2128 return LT_EXPR;
2129 case GE_EXPR:
2130 return LE_EXPR;
2131 case LT_EXPR:
2132 return GT_EXPR;
2133 case LE_EXPR:
2134 return GE_EXPR;
2135 default:
2136 abort ();
2137 }
2138 }
2139
2140
2141 /* Convert a comparison tree code from an enum tree_code representation
2142 into a compcode bit-based encoding. This function is the inverse of
2143 compcode_to_comparison. */
2144
2145 static enum comparison_code
2146 comparison_to_compcode (enum tree_code code)
2147 {
2148 switch (code)
2149 {
2150 case LT_EXPR:
2151 return COMPCODE_LT;
2152 case EQ_EXPR:
2153 return COMPCODE_EQ;
2154 case LE_EXPR:
2155 return COMPCODE_LE;
2156 case GT_EXPR:
2157 return COMPCODE_GT;
2158 case NE_EXPR:
2159 return COMPCODE_NE;
2160 case GE_EXPR:
2161 return COMPCODE_GE;
2162 case ORDERED_EXPR:
2163 return COMPCODE_ORD;
2164 case UNORDERED_EXPR:
2165 return COMPCODE_UNORD;
2166 case UNLT_EXPR:
2167 return COMPCODE_UNLT;
2168 case UNEQ_EXPR:
2169 return COMPCODE_UNEQ;
2170 case UNLE_EXPR:
2171 return COMPCODE_UNLE;
2172 case UNGT_EXPR:
2173 return COMPCODE_UNGT;
2174 case LTGT_EXPR:
2175 return COMPCODE_LTGT;
2176 case UNGE_EXPR:
2177 return COMPCODE_UNGE;
2178 default:
2179 abort ();
2180 }
2181 }
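/* In this encoding each of the four bits stands for one primitive
   outcome of a comparison: LT (bit 0), EQ (bit 1), GT (bit 2) and
   UNORD (bit 3), and every comparison code is the OR of the outcomes
   for which it is true.  For example, COMPCODE_LE is
   COMPCODE_LT | COMPCODE_EQ = 3, and COMPCODE_NE is
   COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD = 13.  This is what lets
   combine_comparisons below implement the AND and OR of two
   comparisons as the bitwise AND and OR of their encodings.  */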
2182
2183 /* Convert a compcode bit-based encoding of a comparison operator back
2184 to GCC's enum tree_code representation. This function is the
2185 inverse of comparison_to_compcode. */
2186
2187 static enum tree_code
2188 compcode_to_comparison (enum comparison_code code)
2189 {
2190 switch (code)
2191 {
2192 case COMPCODE_LT:
2193 return LT_EXPR;
2194 case COMPCODE_EQ:
2195 return EQ_EXPR;
2196 case COMPCODE_LE:
2197 return LE_EXPR;
2198 case COMPCODE_GT:
2199 return GT_EXPR;
2200 case COMPCODE_NE:
2201 return NE_EXPR;
2202 case COMPCODE_GE:
2203 return GE_EXPR;
2204 case COMPCODE_ORD:
2205 return ORDERED_EXPR;
2206 case COMPCODE_UNORD:
2207 return UNORDERED_EXPR;
2208 case COMPCODE_UNLT:
2209 return UNLT_EXPR;
2210 case COMPCODE_UNEQ:
2211 return UNEQ_EXPR;
2212 case COMPCODE_UNLE:
2213 return UNLE_EXPR;
2214 case COMPCODE_UNGT:
2215 return UNGT_EXPR;
2216 case COMPCODE_LTGT:
2217 return LTGT_EXPR;
2218 case COMPCODE_UNGE:
2219 return UNGE_EXPR;
2220 default:
2221 abort ();
2222 }
2223 }
2224
2225 /* Return a tree for the comparison which is the combination of
2226 doing the AND or OR (depending on CODE) of the two operations LCODE
2227 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2228 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2229 if this makes the transformation invalid. */
2230
2231 tree
2232 combine_comparisons (enum tree_code code, enum tree_code lcode,
2233 enum tree_code rcode, tree truth_type,
2234 tree ll_arg, tree lr_arg)
2235 {
2236 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2237 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2238 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2239 enum comparison_code compcode;
2240
2241 switch (code)
2242 {
2243 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2244 compcode = lcompcode & rcompcode;
2245 break;
2246
2247 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2248 compcode = lcompcode | rcompcode;
2249 break;
2250
2251 default:
2252 return NULL_TREE;
2253 }
2254
2255 if (!honor_nans)
2256 {
2257 /* Eliminate unordered comparisons, as well as LTGT and ORD
2258 which are not used unless the mode has NaNs. */
2259 compcode &= ~COMPCODE_UNORD;
2260 if (compcode == COMPCODE_LTGT)
2261 compcode = COMPCODE_NE;
2262 else if (compcode == COMPCODE_ORD)
2263 compcode = COMPCODE_TRUE;
2264 }
2265 else if (flag_trapping_math)
2266 {
2267 /* Check that the original operation and the optimized ones will trap
2268 under the same condition. */
2269 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2270 && (lcompcode != COMPCODE_EQ)
2271 && (lcompcode != COMPCODE_ORD);
2272 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2273 && (rcompcode != COMPCODE_EQ)
2274 && (rcompcode != COMPCODE_ORD);
2275 bool trap = (compcode & COMPCODE_UNORD) == 0
2276 && (compcode != COMPCODE_EQ)
2277 && (compcode != COMPCODE_ORD);
2278
2279 /* In a short-circuited boolean expression the LHS might be
2280 such that the RHS, if evaluated, will never trap. For
2281 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2282 if neither x nor y is NaN. (This is a mixed blessing: for
2283 example, the expression above will never trap, hence
2284 optimizing it to x < y would be invalid). */
2285 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2286 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2287 rtrap = false;
2288
2289 /* If the comparison was short-circuited, and only the RHS
2290 trapped, we may now generate a spurious trap. */
2291 if (rtrap && !ltrap
2292 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2293 return NULL_TREE;
2294
2295 /* If we changed the conditions that cause a trap, we lose. */
2296 if ((ltrap || rtrap) != trap)
2297 return NULL_TREE;
2298 }
2299
2300 if (compcode == COMPCODE_TRUE)
2301 return constant_boolean_node (true, truth_type);
2302 else if (compcode == COMPCODE_FALSE)
2303 return constant_boolean_node (false, truth_type);
2304 else
2305 return fold (build2 (compcode_to_comparison (compcode),
2306 truth_type, ll_arg, lr_arg));
2307 }
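/* For example, with integer operands (HONOR_NANS false):
     (x < y) && (x == y): COMPCODE_LT & COMPCODE_EQ = COMPCODE_FALSE,
       so the whole expression folds to constant false;
     (x < y) || (x == y): COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE,
       so the whole expression folds to x <= y.  */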
2308
2309 /* Return nonzero if CODE is a tree code that represents a truth value. */
2310
2311 static int
2312 truth_value_p (enum tree_code code)
2313 {
2314 return (TREE_CODE_CLASS (code) == '<'
2315 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2316 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2317 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2318 }
2319 \f
2320 /* Return nonzero if two operands (typically of the same tree node)
2321 are necessarily equal. If either argument has side-effects this
2322 function returns zero. FLAGS modifies behavior as follows:
2323
2324 If OEP_ONLY_CONST is set, only return nonzero for constants.
2325 This function tests whether the operands are indistinguishable;
2326 it does not test whether they are equal using C's == operation.
2327 The distinction is important for IEEE floating point, because
2328 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2329 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2330
2331 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2332 even though it may hold multiple values during a function.
2333 This is because a GCC tree node guarantees that nothing else is
2334 executed between the evaluation of its "operands" (which may often
2335 be evaluated in arbitrary order). Hence if the operands themselves
2336 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2337 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2338 unset means assuming isochronic (or instantaneous) tree equivalence.
2339 Unless comparing arbitrary expression trees, such as from different
2340 statements, this flag can usually be left unset.
2341
2342 If OEP_PURE_SAME is set, then pure functions with identical arguments
2343 are considered the same. It is used when the caller has other ways
2344 to ensure that global memory is unchanged in between. */
2345
2346 int
2347 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2348 {
2349 /* If either is ERROR_MARK, they aren't equal. */
2350 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2351 return 0;
2352
2353   /* If the two types don't have the same signedness, then we can't consider
2354 them equal. We must check this before the STRIP_NOPS calls
2355 because they may change the signedness of the arguments. */
2356 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2357 return 0;
2358
2359 STRIP_NOPS (arg0);
2360 STRIP_NOPS (arg1);
2361
2362 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2363 /* This is needed for conversions and for COMPONENT_REF.
2364 Might as well play it safe and always test this. */
2365 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2366 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2367 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2368 return 0;
2369
2370 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2371 We don't care about side effects in that case because the SAVE_EXPR
2372 takes care of that for us. In all other cases, two expressions are
2373 equal if they have no side effects. If we have two identical
2374 expressions with side effects that should be treated the same due
2375 to the only side effects being identical SAVE_EXPR's, that will
2376 be detected in the recursive calls below. */
2377 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2378 && (TREE_CODE (arg0) == SAVE_EXPR
2379 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2380 return 1;
2381
2382 /* Next handle constant cases, those for which we can return 1 even
2383 if ONLY_CONST is set. */
2384 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2385 switch (TREE_CODE (arg0))
2386 {
2387 case INTEGER_CST:
2388 return (! TREE_CONSTANT_OVERFLOW (arg0)
2389 && ! TREE_CONSTANT_OVERFLOW (arg1)
2390 && tree_int_cst_equal (arg0, arg1));
2391
2392 case REAL_CST:
2393 return (! TREE_CONSTANT_OVERFLOW (arg0)
2394 && ! TREE_CONSTANT_OVERFLOW (arg1)
2395 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2396 TREE_REAL_CST (arg1)));
2397
2398 case VECTOR_CST:
2399 {
2400 tree v1, v2;
2401
2402 if (TREE_CONSTANT_OVERFLOW (arg0)
2403 || TREE_CONSTANT_OVERFLOW (arg1))
2404 return 0;
2405
2406 v1 = TREE_VECTOR_CST_ELTS (arg0);
2407 v2 = TREE_VECTOR_CST_ELTS (arg1);
2408 while (v1 && v2)
2409 {
2410 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2411 flags))
2412 return 0;
2413 v1 = TREE_CHAIN (v1);
2414 v2 = TREE_CHAIN (v2);
2415 }
2416
2417 return 1;
2418 }
2419
2420 case COMPLEX_CST:
2421 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2422 flags)
2423 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2424 flags));
2425
2426 case STRING_CST:
2427 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2428 && ! memcmp (TREE_STRING_POINTER (arg0),
2429 TREE_STRING_POINTER (arg1),
2430 TREE_STRING_LENGTH (arg0)));
2431
2432 case ADDR_EXPR:
2433 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2434 0);
2435 default:
2436 break;
2437 }
2438
2439 if (flags & OEP_ONLY_CONST)
2440 return 0;
2441
2442 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2443 {
2444 case '1':
2445 /* Two conversions are equal only if signedness and modes match. */
2446 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2447 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2448 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2449 return 0;
2450
2451 return operand_equal_p (TREE_OPERAND (arg0, 0),
2452 TREE_OPERAND (arg1, 0), flags);
2453
2454 case '<':
2455 case '2':
2456 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2457 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2458 0))
2459 return 1;
2460
2461 /* For commutative ops, allow the other order. */
2462 return (commutative_tree_code (TREE_CODE (arg0))
2463 && operand_equal_p (TREE_OPERAND (arg0, 0),
2464 TREE_OPERAND (arg1, 1), flags)
2465 && operand_equal_p (TREE_OPERAND (arg0, 1),
2466 TREE_OPERAND (arg1, 0), flags));
2467
2468 case 'r':
2469 /* If either of the pointer (or reference) expressions we are
2470 dereferencing contain a side effect, these cannot be equal. */
2471 if (TREE_SIDE_EFFECTS (arg0)
2472 || TREE_SIDE_EFFECTS (arg1))
2473 return 0;
2474
2475 switch (TREE_CODE (arg0))
2476 {
2477 case INDIRECT_REF:
2478 return operand_equal_p (TREE_OPERAND (arg0, 0),
2479 TREE_OPERAND (arg1, 0), flags);
2480
2481 case COMPONENT_REF:
2482 case ARRAY_REF:
2483 case ARRAY_RANGE_REF:
2484 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2485 TREE_OPERAND (arg1, 0), flags)
2486 && operand_equal_p (TREE_OPERAND (arg0, 1),
2487 TREE_OPERAND (arg1, 1), flags));
2488
2489 case BIT_FIELD_REF:
2490 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2491 TREE_OPERAND (arg1, 0), flags)
2492 && operand_equal_p (TREE_OPERAND (arg0, 1),
2493 TREE_OPERAND (arg1, 1), flags)
2494 && operand_equal_p (TREE_OPERAND (arg0, 2),
2495 TREE_OPERAND (arg1, 2), flags));
2496 default:
2497 return 0;
2498 }
2499
2500 case 'e':
2501 switch (TREE_CODE (arg0))
2502 {
2503 case ADDR_EXPR:
2504 case TRUTH_NOT_EXPR:
2505 return operand_equal_p (TREE_OPERAND (arg0, 0),
2506 TREE_OPERAND (arg1, 0), flags);
2507
2508 case RTL_EXPR:
2509 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2510
2511 case CALL_EXPR:
2512 /* If the CALL_EXPRs call different functions, then they
2513 	     clearly cannot be equal. */
2514 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2515 TREE_OPERAND (arg1, 0), flags))
2516 return 0;
2517
2518 {
2519 unsigned int cef = call_expr_flags (arg0);
2520 if (flags & OEP_PURE_SAME)
2521 cef &= ECF_CONST | ECF_PURE;
2522 else
2523 cef &= ECF_CONST;
2524 if (!cef)
2525 return 0;
2526 }
2527
2528 /* Now see if all the arguments are the same. operand_equal_p
2529 does not handle TREE_LIST, so we walk the operands here
2530 feeding them to operand_equal_p. */
2531 arg0 = TREE_OPERAND (arg0, 1);
2532 arg1 = TREE_OPERAND (arg1, 1);
2533 while (arg0 && arg1)
2534 {
2535 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2536 flags))
2537 return 0;
2538
2539 arg0 = TREE_CHAIN (arg0);
2540 arg1 = TREE_CHAIN (arg1);
2541 }
2542
2543 /* If we get here and both argument lists are exhausted
2544 then the CALL_EXPRs are equal. */
2545 return ! (arg0 || arg1);
2546
2547 default:
2548 return 0;
2549 }
2550
2551 case 'd':
2552 /* Consider __builtin_sqrt equal to sqrt. */
2553 return (TREE_CODE (arg0) == FUNCTION_DECL
2554 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2555 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2556 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2557
2558 default:
2559 return 0;
2560 }
2561 }
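/* For example, operand_equal_p considers "a + b" equal to "b + a",
   since PLUS_EXPR is commutative, but not "a - b" equal to "b - a";
   and two syntactically identical expressions with side effects, such
   as two distinct calls "f ()", are never considered equal unless the
   call is known to be const or (with OEP_PURE_SAME) pure.  */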
2562 \f
2563 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2564 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2565
2566 When in doubt, return 0. */
2567
2568 static int
2569 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2570 {
2571 int unsignedp1, unsignedpo;
2572 tree primarg0, primarg1, primother;
2573 unsigned int correct_width;
2574
2575 if (operand_equal_p (arg0, arg1, 0))
2576 return 1;
2577
2578 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2579 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2580 return 0;
2581
2582 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2583 and see if the inner values are the same. This removes any
2584 signedness comparison, which doesn't matter here. */
2585 primarg0 = arg0, primarg1 = arg1;
2586 STRIP_NOPS (primarg0);
2587 STRIP_NOPS (primarg1);
2588 if (operand_equal_p (primarg0, primarg1, 0))
2589 return 1;
2590
2591 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2592 actual comparison operand, ARG0.
2593
2594 First throw away any conversions to wider types
2595 already present in the operands. */
2596
2597 primarg1 = get_narrower (arg1, &unsignedp1);
2598 primother = get_narrower (other, &unsignedpo);
2599
2600 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2601 if (unsignedp1 == unsignedpo
2602 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2603 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2604 {
2605 tree type = TREE_TYPE (arg0);
2606
2607 /* Make sure shorter operand is extended the right way
2608 to match the longer operand. */
2609 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2610 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2611
2612 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2613 return 1;
2614 }
2615
2616 return 0;
2617 }
2618 \f
2619 /* See if ARG is an expression that is either a comparison or is performing
2620 arithmetic on comparisons. The comparisons must only be comparing
2621 two different values, which will be stored in *CVAL1 and *CVAL2; if
2622 they are nonzero it means that some operands have already been found.
2623 No variables may be used anywhere else in the expression except in the
2624 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2625 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2626
2627 If this is true, return 1. Otherwise, return zero. */
2628
2629 static int
2630 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2631 {
2632 enum tree_code code = TREE_CODE (arg);
2633 char class = TREE_CODE_CLASS (code);
2634
2635 /* We can handle some of the 'e' cases here. */
2636 if (class == 'e' && code == TRUTH_NOT_EXPR)
2637 class = '1';
2638 else if (class == 'e'
2639 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2640 || code == COMPOUND_EXPR))
2641 class = '2';
2642
2643 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2644 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2645 {
2646 /* If we've already found a CVAL1 or CVAL2, this expression is
2647 	 too complex to handle. */
2648 if (*cval1 || *cval2)
2649 return 0;
2650
2651 class = '1';
2652 *save_p = 1;
2653 }
2654
2655 switch (class)
2656 {
2657 case '1':
2658 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2659
2660 case '2':
2661 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2662 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2663 cval1, cval2, save_p));
2664
2665 case 'c':
2666 return 1;
2667
2668 case 'e':
2669 if (code == COND_EXPR)
2670 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2671 cval1, cval2, save_p)
2672 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2673 cval1, cval2, save_p)
2674 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2675 cval1, cval2, save_p));
2676 return 0;
2677
2678 case '<':
2679 /* First see if we can handle the first operand, then the second. For
2680 the second operand, we know *CVAL1 can't be zero. It must be that
2681 one side of the comparison is each of the values; test for the
2682 case where this isn't true by failing if the two operands
2683 are the same. */
2684
2685 if (operand_equal_p (TREE_OPERAND (arg, 0),
2686 TREE_OPERAND (arg, 1), 0))
2687 return 0;
2688
2689 if (*cval1 == 0)
2690 *cval1 = TREE_OPERAND (arg, 0);
2691 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2692 ;
2693 else if (*cval2 == 0)
2694 *cval2 = TREE_OPERAND (arg, 0);
2695 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2696 ;
2697 else
2698 return 0;
2699
2700 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2701 ;
2702 else if (*cval2 == 0)
2703 *cval2 = TREE_OPERAND (arg, 1);
2704 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2705 ;
2706 else
2707 return 0;
2708
2709 return 1;
2710
2711 default:
2712 return 0;
2713 }
2714 }
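/* For example, twoval_comparison_p succeeds for (a == b) && (b < a),
   setting *CVAL1 = a and *CVAL2 = b, but fails for (a == b) && (a < c)
   because that involves three distinct values.  */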
2715 \f
2716 /* ARG is a tree that is known to contain just arithmetic operations and
2717 comparisons. Evaluate the operations in the tree substituting NEW0 for
2718 any occurrence of OLD0 as an operand of a comparison and likewise for
2719 NEW1 and OLD1. */
2720
2721 static tree
2722 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2723 {
2724 tree type = TREE_TYPE (arg);
2725 enum tree_code code = TREE_CODE (arg);
2726 char class = TREE_CODE_CLASS (code);
2727
2728 /* We can handle some of the 'e' cases here. */
2729 if (class == 'e' && code == TRUTH_NOT_EXPR)
2730 class = '1';
2731 else if (class == 'e'
2732 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2733 class = '2';
2734
2735 switch (class)
2736 {
2737 case '1':
2738 return fold (build1 (code, type,
2739 eval_subst (TREE_OPERAND (arg, 0),
2740 old0, new0, old1, new1)));
2741
2742 case '2':
2743 return fold (build2 (code, type,
2744 eval_subst (TREE_OPERAND (arg, 0),
2745 old0, new0, old1, new1),
2746 eval_subst (TREE_OPERAND (arg, 1),
2747 old0, new0, old1, new1)));
2748
2749 case 'e':
2750 switch (code)
2751 {
2752 case SAVE_EXPR:
2753 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2754
2755 case COMPOUND_EXPR:
2756 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2757
2758 case COND_EXPR:
2759 return fold (build3 (code, type,
2760 eval_subst (TREE_OPERAND (arg, 0),
2761 old0, new0, old1, new1),
2762 eval_subst (TREE_OPERAND (arg, 1),
2763 old0, new0, old1, new1),
2764 eval_subst (TREE_OPERAND (arg, 2),
2765 old0, new0, old1, new1)));
2766 default:
2767 break;
2768 }
2769 /* Fall through - ??? */
2770
2771 case '<':
2772 {
2773 tree arg0 = TREE_OPERAND (arg, 0);
2774 tree arg1 = TREE_OPERAND (arg, 1);
2775
2776 /* We need to check both for exact equality and tree equality. The
2777 former will be true if the operand has a side-effect. In that
2778 case, we know the operand occurred exactly once. */
2779
2780 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2781 arg0 = new0;
2782 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2783 arg0 = new1;
2784
2785 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2786 arg1 = new0;
2787 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2788 arg1 = new1;
2789
2790 return fold (build2 (code, type, arg0, arg1));
2791 }
2792
2793 default:
2794 return arg;
2795 }
2796 }
2797 \f
2798 /* Return a tree for the case when the result of an expression is RESULT
2799 converted to TYPE and OMITTED was previously an operand of the expression
2800 but is now not needed (e.g., we folded OMITTED * 0).
2801
2802 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2803 the conversion of RESULT to TYPE. */
2804
2805 tree
2806 omit_one_operand (tree type, tree result, tree omitted)
2807 {
2808 tree t = fold_convert (type, result);
2809
2810 if (TREE_SIDE_EFFECTS (omitted))
2811 return build2 (COMPOUND_EXPR, type, omitted, t);
2812
2813 return non_lvalue (t);
2814 }
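/* For example, when fold simplifies "f () * 0" the multiplication
   disappears but the call's side effects must be preserved, so
   omit_one_operand (type, integer_zero_node, call) yields the
   COMPOUND_EXPR (f (), 0).  */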
2815
2816 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2817
2818 static tree
2819 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2820 {
2821 tree t = fold_convert (type, result);
2822
2823 if (TREE_SIDE_EFFECTS (omitted))
2824 return build2 (COMPOUND_EXPR, type, omitted, t);
2825
2826 return pedantic_non_lvalue (t);
2827 }
2828
2829 /* Return a tree for the case when the result of an expression is RESULT
2830 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2831 of the expression but are now not needed.
2832
2833 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2834 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2835 evaluated before OMITTED2. Otherwise, if neither has side effects,
2836 just do the conversion of RESULT to TYPE. */
2837
2838 tree
2839 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2840 {
2841 tree t = fold_convert (type, result);
2842
2843 if (TREE_SIDE_EFFECTS (omitted2))
2844 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2845 if (TREE_SIDE_EFFECTS (omitted1))
2846 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2847
2848 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2849 }
2850
2851 \f
2852 /* Return a simplified tree node for the truth-negation of ARG. This
2853 never alters ARG itself. We assume that ARG is an operation that
2854 returns a truth value (0 or 1).
2855
2856 FIXME: one would think we would fold the result, but it causes
2857 problems with the dominator optimizer. */
2858 tree
2859 invert_truthvalue (tree arg)
2860 {
2861 tree type = TREE_TYPE (arg);
2862 enum tree_code code = TREE_CODE (arg);
2863
2864 if (code == ERROR_MARK)
2865 return arg;
2866
2867 /* If this is a comparison, we can simply invert it, except for
2868 floating-point non-equality comparisons, in which case we just
2869 enclose a TRUTH_NOT_EXPR around what we have. */
2870
2871 if (TREE_CODE_CLASS (code) == '<')
2872 {
2873 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2874 if (FLOAT_TYPE_P (op_type)
2875 && flag_trapping_math
2876 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2877 && code != NE_EXPR && code != EQ_EXPR)
2878 return build1 (TRUTH_NOT_EXPR, type, arg);
2879 else
2880 {
2881 code = invert_tree_comparison (code,
2882 HONOR_NANS (TYPE_MODE (op_type)));
2883 if (code == ERROR_MARK)
2884 return build1 (TRUTH_NOT_EXPR, type, arg);
2885 else
2886 return build2 (code, type,
2887 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2888 }
2889 }
2890
2891 switch (code)
2892 {
2893 case INTEGER_CST:
2894 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2895
2896 case TRUTH_AND_EXPR:
2897 return build2 (TRUTH_OR_EXPR, type,
2898 invert_truthvalue (TREE_OPERAND (arg, 0)),
2899 invert_truthvalue (TREE_OPERAND (arg, 1)));
2900
2901 case TRUTH_OR_EXPR:
2902 return build2 (TRUTH_AND_EXPR, type,
2903 invert_truthvalue (TREE_OPERAND (arg, 0)),
2904 invert_truthvalue (TREE_OPERAND (arg, 1)));
2905
2906 case TRUTH_XOR_EXPR:
2907 /* Here we can invert either operand. We invert the first operand
2908 	 unless the second operand is a TRUTH_NOT_EXPR, in which case our
2909 	 result is the XOR of the first operand with the operand of that
2910 	 negation.  */
2911
2912 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2913 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2914 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2915 else
2916 return build2 (TRUTH_XOR_EXPR, type,
2917 invert_truthvalue (TREE_OPERAND (arg, 0)),
2918 TREE_OPERAND (arg, 1));
2919
2920 case TRUTH_ANDIF_EXPR:
2921 return build2 (TRUTH_ORIF_EXPR, type,
2922 invert_truthvalue (TREE_OPERAND (arg, 0)),
2923 invert_truthvalue (TREE_OPERAND (arg, 1)));
2924
2925 case TRUTH_ORIF_EXPR:
2926 return build2 (TRUTH_ANDIF_EXPR, type,
2927 invert_truthvalue (TREE_OPERAND (arg, 0)),
2928 invert_truthvalue (TREE_OPERAND (arg, 1)));
2929
2930 case TRUTH_NOT_EXPR:
2931 return TREE_OPERAND (arg, 0);
2932
2933 case COND_EXPR:
2934 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2935 invert_truthvalue (TREE_OPERAND (arg, 1)),
2936 invert_truthvalue (TREE_OPERAND (arg, 2)));
2937
2938 case COMPOUND_EXPR:
2939 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2940 invert_truthvalue (TREE_OPERAND (arg, 1)));
2941
2942 case NON_LVALUE_EXPR:
2943 return invert_truthvalue (TREE_OPERAND (arg, 0));
2944
2945 case NOP_EXPR:
2946 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2947 break;
2948
2949 case CONVERT_EXPR:
2950 case FLOAT_EXPR:
2951 return build1 (TREE_CODE (arg), type,
2952 invert_truthvalue (TREE_OPERAND (arg, 0)));
2953
2954 case BIT_AND_EXPR:
2955 if (!integer_onep (TREE_OPERAND (arg, 1)))
2956 break;
2957 return build2 (EQ_EXPR, type, arg,
2958 fold_convert (type, integer_zero_node));
2959
2960 case SAVE_EXPR:
2961 return build1 (TRUTH_NOT_EXPR, type, arg);
2962
2963 case CLEANUP_POINT_EXPR:
2964 return build1 (CLEANUP_POINT_EXPR, type,
2965 invert_truthvalue (TREE_OPERAND (arg, 0)));
2966
2967 default:
2968 break;
2969 }
2970 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2971 abort ();
2972 return build1 (TRUTH_NOT_EXPR, type, arg);
2973 }
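/* For example, invert_truthvalue rewrites "a && b" as "!a || !b" and
   "a ? b : c" as "a ? !b : !c", recursing into the operands; plain
   comparisons are inverted directly via invert_tree_comparison, and
   anything else ends up wrapped in a TRUTH_NOT_EXPR.  */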
2974
2975 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2976 operands are another bit-wise operation with a common input. If so,
2977 distribute the bit operations to save an operation and possibly two if
2978 constants are involved. For example, convert
2979 (A | B) & (A | C) into A | (B & C)
2980 Further simplification will occur if B and C are constants.
2981
2982 If this optimization cannot be done, 0 will be returned. */
2983
2984 static tree
2985 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2986 {
2987 tree common;
2988 tree left, right;
2989
2990 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2991 || TREE_CODE (arg0) == code
2992 || (TREE_CODE (arg0) != BIT_AND_EXPR
2993 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2994 return 0;
2995
2996 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2997 {
2998 common = TREE_OPERAND (arg0, 0);
2999 left = TREE_OPERAND (arg0, 1);
3000 right = TREE_OPERAND (arg1, 1);
3001 }
3002 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3003 {
3004 common = TREE_OPERAND (arg0, 0);
3005 left = TREE_OPERAND (arg0, 1);
3006 right = TREE_OPERAND (arg1, 0);
3007 }
3008 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3009 {
3010 common = TREE_OPERAND (arg0, 1);
3011 left = TREE_OPERAND (arg0, 0);
3012 right = TREE_OPERAND (arg1, 1);
3013 }
3014 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3015 {
3016 common = TREE_OPERAND (arg0, 1);
3017 left = TREE_OPERAND (arg0, 0);
3018 right = TREE_OPERAND (arg1, 0);
3019 }
3020 else
3021 return 0;
3022
3023 return fold (build2 (TREE_CODE (arg0), type, common,
3024 fold (build2 (code, type, left, right))));
3025 }
3026 \f
3027 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3028 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3029
3030 static tree
3031 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3032 int unsignedp)
3033 {
3034 tree result = build3 (BIT_FIELD_REF, type, inner,
3035 size_int (bitsize), bitsize_int (bitpos));
3036
3037 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3038
3039 return result;
3040 }
3041
3042 /* Optimize a bit-field compare.
3043
3044 There are two cases: First is a compare against a constant and the
3045 second is a comparison of two items where the fields are at the same
3046 bit position relative to the start of a chunk (byte, halfword, word)
3047 large enough to contain it. In these cases we can avoid the shift
3048 implicit in bitfield extractions.
3049
3050 For constants, we emit a compare of the shifted constant with the
3051 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3052 compared. For two fields at the same position, we do the ANDs with the
3053 similar mask and compare the result of the ANDs.
3054
3055 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3056 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3057 are the left and right operands of the comparison, respectively.
3058
3059 If the optimization described above can be done, we return the resulting
3060 tree. Otherwise we return zero. */
3061
3062 static tree
3063 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3064 tree lhs, tree rhs)
3065 {
3066 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3067 tree type = TREE_TYPE (lhs);
3068 tree signed_type, unsigned_type;
3069 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3070 enum machine_mode lmode, rmode, nmode;
3071 int lunsignedp, runsignedp;
3072 int lvolatilep = 0, rvolatilep = 0;
3073 tree linner, rinner = NULL_TREE;
3074 tree mask;
3075 tree offset;
3076
3077 /* Get all the information about the extractions being done. If the bit size
3078    is the same as the size of the underlying object, we aren't doing an
3079 extraction at all and so can do nothing. We also don't want to
3080 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3081 then will no longer be able to replace it. */
3082 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3083 &lunsignedp, &lvolatilep);
3084 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3085 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3086 return 0;
3087
3088 if (!const_p)
3089 {
3090 /* If this is not a constant, we can only do something if bit positions,
3091 sizes, and signedness are the same. */
3092 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3093 &runsignedp, &rvolatilep);
3094
3095 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3096 || lunsignedp != runsignedp || offset != 0
3097 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3098 return 0;
3099 }
3100
3101 /* See if we can find a mode to refer to this field. We should be able to,
3102 but fail if we can't. */
3103 nmode = get_best_mode (lbitsize, lbitpos,
3104 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3105 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3106 TYPE_ALIGN (TREE_TYPE (rinner))),
3107 word_mode, lvolatilep || rvolatilep);
3108 if (nmode == VOIDmode)
3109 return 0;
3110
3111 /* Set signed and unsigned types of the precision of this mode for the
3112 shifts below. */
3113 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3114 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3115
3116 /* Compute the bit position and size for the new reference and our offset
3117 within it. If the new reference is the same size as the original, we
3118 won't optimize anything, so return zero. */
3119 nbitsize = GET_MODE_BITSIZE (nmode);
3120 nbitpos = lbitpos & ~ (nbitsize - 1);
3121 lbitpos -= nbitpos;
3122 if (nbitsize == lbitsize)
3123 return 0;
3124
3125 if (BYTES_BIG_ENDIAN)
3126 lbitpos = nbitsize - lbitsize - lbitpos;
3127
3128 /* Make the mask to be used against the extracted field. */
3129 mask = build_int_2 (~0, ~0);
3130 TREE_TYPE (mask) = unsigned_type;
3131 force_fit_type (mask, 0);
3132 mask = fold_convert (unsigned_type, mask);
3133 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3134 mask = const_binop (RSHIFT_EXPR, mask,
3135 size_int (nbitsize - lbitsize - lbitpos), 0);
3136
3137 if (! const_p)
3138 /* If not comparing with constant, just rework the comparison
3139 and return. */
3140 return build2 (code, compare_type,
3141 build2 (BIT_AND_EXPR, unsigned_type,
3142 make_bit_field_ref (linner, unsigned_type,
3143 nbitsize, nbitpos, 1),
3144 mask),
3145 build2 (BIT_AND_EXPR, unsigned_type,
3146 make_bit_field_ref (rinner, unsigned_type,
3147 nbitsize, nbitpos, 1),
3148 mask));
3149
3150 /* Otherwise, we are handling the constant case. See if the constant is too
3151      big for the field.  Warn and return a tree for 0 (false) if so.  We do
3152 this not only for its own sake, but to avoid having to test for this
3153 error case below. If we didn't, we might generate wrong code.
3154
3155 For unsigned fields, the constant shifted right by the field length should
3156 be all zero. For signed fields, the high-order bits should agree with
3157 the sign bit. */
3158
3159 if (lunsignedp)
3160 {
3161 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3162 fold_convert (unsigned_type, rhs),
3163 size_int (lbitsize), 0)))
3164 {
3165 warning ("comparison is always %d due to width of bit-field",
3166 code == NE_EXPR);
3167 return constant_boolean_node (code == NE_EXPR, compare_type);
3168 }
3169 }
3170 else
3171 {
3172 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3173 size_int (lbitsize - 1), 0);
3174 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3175 {
3176 warning ("comparison is always %d due to width of bit-field",
3177 code == NE_EXPR);
3178 return constant_boolean_node (code == NE_EXPR, compare_type);
3179 }
3180 }
3181
3182 /* Single-bit compares should always be against zero. */
3183 if (lbitsize == 1 && ! integer_zerop (rhs))
3184 {
3185 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3186 rhs = fold_convert (type, integer_zero_node);
3187 }
3188
3189 /* Make a new bitfield reference, shift the constant over the
3190 appropriate number of bits and mask it with the computed mask
3191 (in case this was a signed field). If we changed it, make a new one. */
3192 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3193 if (lvolatilep)
3194 {
3195 TREE_SIDE_EFFECTS (lhs) = 1;
3196 TREE_THIS_VOLATILE (lhs) = 1;
3197 }
3198
3199 rhs = fold (const_binop (BIT_AND_EXPR,
3200 const_binop (LSHIFT_EXPR,
3201 fold_convert (unsigned_type, rhs),
3202 size_int (lbitpos), 0),
3203 mask, 0));
3204
3205 return build2 (code, compare_type,
3206 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3207 rhs);
3208 }
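/* As an illustrative sketch (the exact masks and shifts depend on the
   mode chosen and on BYTES_BIG_ENDIAN), given

     struct s { unsigned a : 3; unsigned b : 5; } x;

   a test like "x.b == 7" can be folded into a single masked compare of
   the containing unit, roughly

     (unit & 0xf8) == (7 << 3)

   avoiding the shift that an actual bitfield extraction would need.  */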
3209 \f
3210 /* Subroutine for fold_truthop: decode a field reference.
3211
3212 If EXP is a comparison reference, we return the innermost reference.
3213
3214 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3215 set to the starting bit number.
3216
3217 If the innermost field can be completely contained in a mode-sized
3218 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3219
3220    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3221 otherwise it is not changed.
3222
3223 *PUNSIGNEDP is set to the signedness of the field.
3224
3225 *PMASK is set to the mask used. This is either contained in a
3226 BIT_AND_EXPR or derived from the width of the field.
3227
3228 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3229
3230 Return 0 if this is not a component reference or is one that we can't
3231 do anything with. */
3232
3233 static tree
3234 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3235 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3236 int *punsignedp, int *pvolatilep,
3237 tree *pmask, tree *pand_mask)
3238 {
3239 tree outer_type = 0;
3240 tree and_mask = 0;
3241 tree mask, inner, offset;
3242 tree unsigned_type;
3243 unsigned int precision;
3244
3245 /* All the optimizations using this function assume integer fields.
3246 There are problems with FP fields since the type_for_size call
3247 below can fail for, e.g., XFmode. */
3248 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3249 return 0;
3250
3251 /* We are interested in the bare arrangement of bits, so strip everything
3252 that doesn't affect the machine mode. However, record the type of the
3253 outermost expression if it may matter below. */
3254 if (TREE_CODE (exp) == NOP_EXPR
3255 || TREE_CODE (exp) == CONVERT_EXPR
3256 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3257 outer_type = TREE_TYPE (exp);
3258 STRIP_NOPS (exp);
3259
3260 if (TREE_CODE (exp) == BIT_AND_EXPR)
3261 {
3262 and_mask = TREE_OPERAND (exp, 1);
3263 exp = TREE_OPERAND (exp, 0);
3264 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3265 if (TREE_CODE (and_mask) != INTEGER_CST)
3266 return 0;
3267 }
3268
3269 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3270 punsignedp, pvolatilep);
3271 if ((inner == exp && and_mask == 0)
3272 || *pbitsize < 0 || offset != 0
3273 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3274 return 0;
3275
3276 /* If the number of bits in the reference is the same as the bitsize of
3277 the outer type, then the outer type gives the signedness. Otherwise
3278 (in case of a small bitfield) the signedness is unchanged. */
3279 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3280 *punsignedp = TYPE_UNSIGNED (outer_type);
3281
3282 /* Compute the mask to access the bitfield. */
3283 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3284 precision = TYPE_PRECISION (unsigned_type);
3285
3286 mask = build_int_2 (~0, ~0);
3287 TREE_TYPE (mask) = unsigned_type;
3288 force_fit_type (mask, 0);
3289 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3290 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3291
3292 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3293 if (and_mask != 0)
3294 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3295 fold_convert (unsigned_type, and_mask), mask));
3296
3297 *pmask = mask;
3298 *pand_mask = and_mask;
3299 return inner;
3300 }
3301
3302 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3303 bit positions. */
3304
3305 static int
3306 all_ones_mask_p (tree mask, int size)
3307 {
3308 tree type = TREE_TYPE (mask);
3309 unsigned int precision = TYPE_PRECISION (type);
3310 tree tmask;
3311
3312 tmask = build_int_2 (~0, ~0);
3313 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3314 force_fit_type (tmask, 0);
3315 return
3316 tree_int_cst_equal (mask,
3317 const_binop (RSHIFT_EXPR,
3318 const_binop (LSHIFT_EXPR, tmask,
3319 size_int (precision - size),
3320 0),
3321 size_int (precision - size), 0));
3322 }
3323
3324 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3325 represents the sign bit of EXP's type. If EXP represents a sign
3326 or zero extension, also test VAL against the unextended type.
3327 The return value is the (sub)expression whose sign bit is VAL,
3328 or NULL_TREE otherwise. */
3329
3330 static tree
3331 sign_bit_p (tree exp, tree val)
3332 {
3333 unsigned HOST_WIDE_INT mask_lo, lo;
3334 HOST_WIDE_INT mask_hi, hi;
3335 int width;
3336 tree t;
3337
3338 /* Tree EXP must have an integral type. */
3339 t = TREE_TYPE (exp);
3340 if (! INTEGRAL_TYPE_P (t))
3341 return NULL_TREE;
3342
3343 /* Tree VAL must be an integer constant. */
3344 if (TREE_CODE (val) != INTEGER_CST
3345 || TREE_CONSTANT_OVERFLOW (val))
3346 return NULL_TREE;
3347
3348 width = TYPE_PRECISION (t);
3349 if (width > HOST_BITS_PER_WIDE_INT)
3350 {
3351 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3352 lo = 0;
3353
3354 mask_hi = ((unsigned HOST_WIDE_INT) -1
3355 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3356 mask_lo = -1;
3357 }
3358 else
3359 {
3360 hi = 0;
3361 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3362
3363 mask_hi = 0;
3364 mask_lo = ((unsigned HOST_WIDE_INT) -1
3365 >> (HOST_BITS_PER_WIDE_INT - width));
3366 }
3367
3368 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3369 treat VAL as if it were unsigned. */
3370 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3371 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3372 return exp;
3373
3374 /* Handle extension from a narrower type. */
3375 if (TREE_CODE (exp) == NOP_EXPR
3376 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3377 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3378
3379 return NULL_TREE;
3380 }
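/* For example, for a 32-bit int, sign_bit_p (exp, val) returns EXP when
   VAL is 0x80000000; and given EXP = (int) c with c a signed char, it
   also accepts 0x80, the sign bit of the narrower type, returning the
   inner expression.  */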
3381
3382 /* Subroutine for fold_truthop: determine if an operand is simple enough
3383 to be evaluated unconditionally. */
3384
3385 static int
3386 simple_operand_p (tree exp)
3387 {
3388 /* Strip any conversions that don't change the machine mode. */
3389 while ((TREE_CODE (exp) == NOP_EXPR
3390 || TREE_CODE (exp) == CONVERT_EXPR)
3391 && (TYPE_MODE (TREE_TYPE (exp))
3392 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3393 exp = TREE_OPERAND (exp, 0);
3394
3395 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3396 || (DECL_P (exp)
3397 && ! TREE_ADDRESSABLE (exp)
3398 && ! TREE_THIS_VOLATILE (exp)
3399 && ! DECL_NONLOCAL (exp)
3400 /* Don't regard global variables as simple. They may be
3401 allocated in ways unknown to the compiler (shared memory,
3402 #pragma weak, etc). */
3403 && ! TREE_PUBLIC (exp)
3404 && ! DECL_EXTERNAL (exp)
3405 /* Loading a static variable is unduly expensive, but global
3406 registers aren't expensive. */
3407 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3408 }
3409 \f
3410 /* The following functions are subroutines to fold_range_test and allow it to
3411 try to change a logical combination of comparisons into a range test.
3412
3413 For example, both
3414 X == 2 || X == 3 || X == 4 || X == 5
3415 and
3416 X >= 2 && X <= 5
3417 are converted to
3418 (unsigned) (X - 2) <= 3
3419
3420 We describe each set of comparisons as being either inside or outside
3421 a range, using a variable named like IN_P, and then describe the
3422 range with a lower and upper bound. If one of the bounds is omitted,
3423 it represents either the highest or lowest value of the type.
3424
3425 In the comments below, we represent a range by two numbers in brackets
3426 preceded by a "+" to designate being inside that range, or a "-" to
3427 designate being outside that range, so the condition can be inverted by
3428 flipping the prefix. An omitted bound is represented by a "-". For
3429 example, "- [-, 10]" means being outside the range starting at the lowest
3430 possible value and ending at 10, in other words, being greater than 10.
3431 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3432 always false.
3433
3434 We set up things so that the missing bounds are handled in a consistent
3435 manner so neither a missing bound nor "true" and "false" need to be
3436 handled using a special case. */
3437
3438 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3439 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3440 and UPPER1_P are nonzero if the respective argument is an upper bound
3441 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3442 must be specified for a comparison. ARG1 will be converted to ARG0's
3443 type if both are specified. */
3444
3445 static tree
3446 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3447 tree arg1, int upper1_p)
3448 {
3449 tree tem;
3450 int result;
3451 int sgn0, sgn1;
3452
3453 /* If neither arg represents infinity, do the normal operation.
3454 Else, if not a comparison, return infinity. Else handle the special
3455 comparison rules. Note that most of the cases below won't occur, but
3456 are handled for consistency. */
3457
3458 if (arg0 != 0 && arg1 != 0)
3459 {
3460 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3461 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3462 STRIP_NOPS (tem);
3463 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3464 }
3465
3466 if (TREE_CODE_CLASS (code) != '<')
3467 return 0;
3468
3469 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3470      for neither.  In real mathematics, we cannot assume two open-ended
3471      ranges are the same.  But this is computer arithmetic, where numbers
3472      are finite, so we can replace any missing bound with a value Z that is
3473      greater than any representable number, which permits us to treat
3474      unbounded ranges as equal.  */
3475 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3476 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3477 switch (code)
3478 {
3479 case EQ_EXPR:
3480 result = sgn0 == sgn1;
3481 break;
3482 case NE_EXPR:
3483 result = sgn0 != sgn1;
3484 break;
3485 case LT_EXPR:
3486 result = sgn0 < sgn1;
3487 break;
3488 case LE_EXPR:
3489 result = sgn0 <= sgn1;
3490 break;
3491 case GT_EXPR:
3492 result = sgn0 > sgn1;
3493 break;
3494 case GE_EXPR:
3495 result = sgn0 >= sgn1;
3496 break;
3497 default:
3498 abort ();
3499 }
3500
3501 return constant_boolean_node (result, type);
3502 }
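/* For example, with ARG0 a missing lower bound (SGN0 == -1) and ARG1 a
   missing upper bound (SGN1 == 1), LT_EXPR yields true: conceptually
   -Z < +Z.  Likewise two missing upper bounds compare EQ_EXPR true,
   which is what makes ranges with omitted bounds compose cleanly.  */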
3503 \f
3504 /* Given EXP, a logical expression, set the range it is testing into
3505 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3506 actually being tested. *PLOW and *PHIGH will be made of the same type
3507 as the returned expression. If EXP is not a comparison, we will most
3508 likely not be returning a useful value and range. */
3509
3510 static tree
3511 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3512 {
3513 enum tree_code code;
3514 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3515 tree orig_type = NULL_TREE;
3516 int in_p, n_in_p;
3517 tree low, high, n_low, n_high;
3518
3519 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3520 and see if we can refine the range. Some of the cases below may not
3521 happen, but it doesn't seem worth worrying about this. We "continue"
3522 the outer loop when we've changed something; otherwise we "break"
3523 the switch, which will "break" the while. */
3524
3525 in_p = 0;
3526 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3527
3528 while (1)
3529 {
3530 code = TREE_CODE (exp);
3531
3532 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3533 {
3534 if (first_rtl_op (code) > 0)
3535 arg0 = TREE_OPERAND (exp, 0);
3536 if (TREE_CODE_CLASS (code) == '<'
3537 || TREE_CODE_CLASS (code) == '1'
3538 || TREE_CODE_CLASS (code) == '2')
3539 type = TREE_TYPE (arg0);
3540 if (TREE_CODE_CLASS (code) == '2'
3541 || TREE_CODE_CLASS (code) == '<'
3542 || (TREE_CODE_CLASS (code) == 'e'
3543 && TREE_CODE_LENGTH (code) > 1))
3544 arg1 = TREE_OPERAND (exp, 1);
3545 }
3546
3547 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3548 lose a cast by accident. */
3549 if (type != NULL_TREE && orig_type == NULL_TREE)
3550 orig_type = type;
3551
3552 switch (code)
3553 {
3554 case TRUTH_NOT_EXPR:
3555 in_p = ! in_p, exp = arg0;
3556 continue;
3557
3558 case EQ_EXPR: case NE_EXPR:
3559 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3560 /* We can only do something if the range is testing for zero
3561 and if the second operand is an integer constant. Note that
3562 saying something is "in" the range we make is done by
3563 complementing IN_P since it will set in the initial case of
3564 being not equal to zero; "out" is leaving it alone. */
3565 if (low == 0 || high == 0
3566 || ! integer_zerop (low) || ! integer_zerop (high)
3567 || TREE_CODE (arg1) != INTEGER_CST)
3568 break;
3569
3570 switch (code)
3571 {
3572 case NE_EXPR: /* - [c, c] */
3573 low = high = arg1;
3574 break;
3575 case EQ_EXPR: /* + [c, c] */
3576 in_p = ! in_p, low = high = arg1;
3577 break;
3578 case GT_EXPR: /* - [-, c] */
3579 low = 0, high = arg1;
3580 break;
3581 case GE_EXPR: /* + [c, -] */
3582 in_p = ! in_p, low = arg1, high = 0;
3583 break;
3584 case LT_EXPR: /* - [c, -] */
3585 low = arg1, high = 0;
3586 break;
3587 case LE_EXPR: /* + [-, c] */
3588 in_p = ! in_p, low = 0, high = arg1;
3589 break;
3590 default:
3591 abort ();
3592 }
3593
3594 exp = arg0;
3595
3596 /* If this is an unsigned comparison, we also know that EXP is
3597 greater than or equal to zero. We base the range tests we make
3598 on that fact, so we record it here so we can parse existing
3599 range tests. */
3600 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3601 {
3602 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3603 1, fold_convert (type, integer_zero_node),
3604 NULL_TREE))
3605 break;
3606
3607 in_p = n_in_p, low = n_low, high = n_high;
3608
3609 /* If the high bound is missing, but we have a nonzero low
3610 bound, reverse the range so it goes from zero to the low bound
3611 minus 1. */
3612 if (high == 0 && low && ! integer_zerop (low))
3613 {
3614 in_p = ! in_p;
3615 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3616 integer_one_node, 0);
3617 low = fold_convert (type, integer_zero_node);
3618 }
3619 }
3620 continue;
3621
3622 case NEGATE_EXPR:
3623 /* (-x) IN [a,b] -> x in [-b, -a] */
3624 n_low = range_binop (MINUS_EXPR, type,
3625 fold_convert (type, integer_zero_node),
3626 0, high, 1);
3627 n_high = range_binop (MINUS_EXPR, type,
3628 fold_convert (type, integer_zero_node),
3629 0, low, 0);
3630 low = n_low, high = n_high;
3631 exp = arg0;
3632 continue;
3633
3634 case BIT_NOT_EXPR:
3635 /* ~ X -> -X - 1 */
3636 exp = build2 (MINUS_EXPR, type, negate_expr (arg0),
3637 fold_convert (type, integer_one_node));
3638 continue;
3639
3640 case PLUS_EXPR: case MINUS_EXPR:
3641 if (TREE_CODE (arg1) != INTEGER_CST)
3642 break;
3643
3644 /* If EXP is signed, any overflow in the computation is undefined,
3645 so we don't worry about it so long as our computations on
3646 the bounds don't overflow. For unsigned, overflow is defined
3647 and this is exactly the right thing. */
3648 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3649 type, low, 0, arg1, 0);
3650 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3651 type, high, 1, arg1, 0);
3652 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3653 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3654 break;
3655
3656 /* Check for an unsigned range which has wrapped around the maximum
3657 value thus making n_high < n_low, and normalize it. */
3658 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3659 {
3660 low = range_binop (PLUS_EXPR, type, n_high, 0,
3661 integer_one_node, 0);
3662 high = range_binop (MINUS_EXPR, type, n_low, 0,
3663 integer_one_node, 0);
3664
3665 /* If the range is of the form +/- [ x+1, x ], we won't
3666 be able to normalize it. But then, it represents the
3667 whole range or the empty set, so make it
3668 +/- [ -, - ]. */
3669 if (tree_int_cst_equal (n_low, low)
3670 && tree_int_cst_equal (n_high, high))
3671 low = high = 0;
3672 else
3673 in_p = ! in_p;
3674 }
3675 else
3676 low = n_low, high = n_high;
3677
3678 exp = arg0;
3679 continue;
3680
3681 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3682 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3683 break;
3684
3685 if (! INTEGRAL_TYPE_P (type)
3686 || (low != 0 && ! int_fits_type_p (low, type))
3687 || (high != 0 && ! int_fits_type_p (high, type)))
3688 break;
3689
3690 n_low = low, n_high = high;
3691
3692 if (n_low != 0)
3693 n_low = fold_convert (type, n_low);
3694
3695 if (n_high != 0)
3696 n_high = fold_convert (type, n_high);
3697
3698 /* If we're converting from an unsigned to a signed type,
3699 we will be doing the comparison as unsigned. The tests above
3700 have already verified that LOW and HIGH are both positive.
3701
3702 So we have to make sure that the original unsigned value will
3703 be interpreted as positive. */
3704 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3705 {
3706 tree equiv_type = lang_hooks.types.type_for_mode
3707 (TYPE_MODE (type), 1);
3708 tree high_positive;
3709
3710 /* A range without an upper bound is, naturally, unbounded.
3711 Since convert would have cropped a very large value, use
3712 the max value for the destination type. */
3713 high_positive
3714 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3715 : TYPE_MAX_VALUE (type);
3716
3717 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3718 high_positive = fold (build2 (RSHIFT_EXPR, type,
3719 fold_convert (type,
3720 high_positive),
3721 fold_convert (type,
3722 integer_one_node)));
3723
3724 /* If the low bound is specified, "and" the range with the
3725 range for which the original unsigned value will be
3726 positive. */
3727 if (low != 0)
3728 {
3729 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3730 1, n_low, n_high, 1,
3731 fold_convert (type, integer_zero_node),
3732 high_positive))
3733 break;
3734
3735 in_p = (n_in_p == in_p);
3736 }
3737 else
3738 {
3739 /* Otherwise, "or" the range with the range of the input
3740 that will be interpreted as negative. */
3741 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3742 0, n_low, n_high, 1,
3743 fold_convert (type, integer_zero_node),
3744 high_positive))
3745 break;
3746
3747 in_p = (in_p != n_in_p);
3748 }
3749 }
3750
3751 exp = arg0;
3752 low = n_low, high = n_high;
3753 continue;
3754
3755 default:
3756 break;
3757 }
3758
3759 break;
3760 }
3761
3762 /* If EXP is a constant, we can evaluate whether this is true or false. */
3763 if (TREE_CODE (exp) == INTEGER_CST)
3764 {
3765 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3766 exp, 0, low, 0))
3767 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3768 exp, 1, high, 1)));
3769 low = high = 0;
3770 exp = 0;
3771 }
3772
3773 *pin_p = in_p, *plow = low, *phigh = high;
3774 return exp;
3775 }
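
/* Illustrative sketch, not part of fold-const.c: the table encoded by
   the inner comparison switch above, restated for plain ints.  A range
   is (IN_P, LOW, HIGH) with the HAS_* flags playing the role of the
   null bounds; every comparison "x <op> c" maps onto one of six rows.
   All demo_* names here are hypothetical and exist only for this
   example.  */

enum demo_cmp { DEMO_EQ, DEMO_NE, DEMO_LT, DEMO_LE, DEMO_GT, DEMO_GE };

struct demo_range
{
  int in_p;              /* nonzero: test membership; zero: exclusion */
  int has_low, low;      /* lower bound, if any */
  int has_high, high;    /* upper bound, if any */
};

static struct demo_range
demo_make_range (enum demo_cmp op, int c)
{
  struct demo_range r = { 0, 0, 0, 0, 0 };
  switch (op)
    {
    case DEMO_NE:  /* - [c, c] */
      r.has_low = r.has_high = 1, r.low = r.high = c;
      break;
    case DEMO_EQ:  /* + [c, c] */
      r.in_p = 1, r.has_low = r.has_high = 1, r.low = r.high = c;
      break;
    case DEMO_GT:  /* - [-, c] */
      r.has_high = 1, r.high = c;
      break;
    case DEMO_GE:  /* + [c, -] */
      r.in_p = 1, r.has_low = 1, r.low = c;
      break;
    case DEMO_LT:  /* - [c, -] */
      r.has_low = 1, r.low = c;
      break;
    case DEMO_LE:  /* + [-, c] */
      r.in_p = 1, r.has_high = 1, r.high = c;
      break;
    }
  return r;
}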
3776 \f
3777 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3778 type, TYPE, return an expression to test if EXP is in (or out of, depending
3779 on IN_P) the range. */
3780
3781 static tree
3782 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3783 {
3784 tree etype = TREE_TYPE (exp);
3785 tree value;
3786
3787 if (! in_p
3788 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3789 return invert_truthvalue (value);
3790
3791 if (low == 0 && high == 0)
3792 return fold_convert (type, integer_one_node);
3793
3794 if (low == 0)
3795 return fold (build2 (LE_EXPR, type, exp, high));
3796
3797 if (high == 0)
3798 return fold (build2 (GE_EXPR, type, exp, low));
3799
3800 if (operand_equal_p (low, high, 0))
3801 return fold (build2 (EQ_EXPR, type, exp, low));
3802
3803 if (integer_zerop (low))
3804 {
3805 if (! TYPE_UNSIGNED (etype))
3806 {
3807 etype = lang_hooks.types.unsigned_type (etype);
3808 high = fold_convert (etype, high);
3809 exp = fold_convert (etype, exp);
3810 }
3811 return build_range_check (type, exp, 1, 0, high);
3812 }
3813
3814 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3815 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3816 {
3817 unsigned HOST_WIDE_INT lo;
3818 HOST_WIDE_INT hi;
3819 int prec;
3820
3821 prec = TYPE_PRECISION (etype);
3822 if (prec <= HOST_BITS_PER_WIDE_INT)
3823 {
3824 hi = 0;
3825 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3826 }
3827 else
3828 {
3829 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3830 lo = (unsigned HOST_WIDE_INT) -1;
3831 }
3832
3833 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3834 {
3835 if (TYPE_UNSIGNED (etype))
3836 {
3837 etype = lang_hooks.types.signed_type (etype);
3838 exp = fold_convert (etype, exp);
3839 }
3840 return fold (build2 (GT_EXPR, type, exp,
3841 fold_convert (etype, integer_zero_node)));
3842 }
3843 }
3844
3845 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3846 && ! TREE_OVERFLOW (value))
3847 return build_range_check (type,
3848 fold (build2 (MINUS_EXPR, etype, exp, low)),
3849 1, fold_convert (etype, integer_zero_node),
3850 value);
3851
3852 return 0;
3853 }
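
/* Illustrative sketch, not part of fold-const.c: the subtraction trick
   behind the final transformation above, on plain C ints.  Shifting
   the range down to start at zero turns a two-ended test into a single
   unsigned comparison.  demo_in_range is a hypothetical name.  */

static int
demo_in_range (int x, int low, int high)
{
  /* Equivalent to (low <= x && x <= high), assuming low <= high;
     unsigned arithmetic makes the wraparound well defined.  */
  return (unsigned) x - (unsigned) low <= (unsigned) high - (unsigned) low;
}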
3854 \f
3855 /* Given two ranges, see if we can merge them into one. Return 1 if we
3856 can, 0 if we can't. Set the output range into the specified parameters. */
3857
3858 static int
3859 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3860 tree high0, int in1_p, tree low1, tree high1)
3861 {
3862 int no_overlap;
3863 int subset;
3864 int temp;
3865 tree tem;
3866 int in_p;
3867 tree low, high;
3868 int lowequal = ((low0 == 0 && low1 == 0)
3869 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3870 low0, 0, low1, 0)));
3871 int highequal = ((high0 == 0 && high1 == 0)
3872 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3873 high0, 1, high1, 1)));
3874
3875 /* Make range 0 be the range that starts first, or ends last if they
3876 start at the same value. Swap them if that is not the case. */
3877 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3878 low0, 0, low1, 0))
3879 || (lowequal
3880 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3881 high1, 1, high0, 1))))
3882 {
3883 temp = in0_p, in0_p = in1_p, in1_p = temp;
3884 tem = low0, low0 = low1, low1 = tem;
3885 tem = high0, high0 = high1, high1 = tem;
3886 }
3887
3888 /* Now flag two cases, whether the ranges are disjoint or whether the
3889 second range is totally subsumed in the first. Note that the tests
3890 below are simplified by the ones above. */
3891 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3892 high0, 1, low1, 0));
3893 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3894 high1, 1, high0, 1));
3895
3896 /* We now have four cases, depending on whether we are including or
3897 excluding the two ranges. */
3898 if (in0_p && in1_p)
3899 {
3900 /* If they don't overlap, the result is false. If the second range
3901 is a subset it is the result. Otherwise, the range is from the start
3902 of the second to the end of the first. */
3903 if (no_overlap)
3904 in_p = 0, low = high = 0;
3905 else if (subset)
3906 in_p = 1, low = low1, high = high1;
3907 else
3908 in_p = 1, low = low1, high = high0;
3909 }
3910
3911 else if (in0_p && ! in1_p)
3912 {
3913 /* If they don't overlap, the result is the first range. If they are
3914 equal, the result is false. If the second range is a subset of the
3915 first, and the ranges begin at the same place, we go from just after
3916 the end of the first range to the end of the second. If the second
3917 range is not a subset of the first, or if it is a subset and both
3918 ranges end at the same place, the range starts at the start of the
3919 first range and ends just before the second range.
3920 Otherwise, we can't describe this as a single range. */
3921 if (no_overlap)
3922 in_p = 1, low = low0, high = high0;
3923 else if (lowequal && highequal)
3924 in_p = 0, low = high = 0;
3925 else if (subset && lowequal)
3926 {
3927 in_p = 1, high = high0;
3928 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3929 integer_one_node, 0);
3930 }
3931 else if (! subset || highequal)
3932 {
3933 in_p = 1, low = low0;
3934 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3935 integer_one_node, 0);
3936 }
3937 else
3938 return 0;
3939 }
3940
3941 else if (! in0_p && in1_p)
3942 {
3943 /* If they don't overlap, the result is the second range. If the second
3944 is a subset of the first, the result is false. Otherwise,
3945 the range starts just after the first range and ends at the
3946 end of the second. */
3947 if (no_overlap)
3948 in_p = 1, low = low1, high = high1;
3949 else if (subset || highequal)
3950 in_p = 0, low = high = 0;
3951 else
3952 {
3953 in_p = 1, high = high1;
3954 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3955 integer_one_node, 0);
3956 }
3957 }
3958
3959 else
3960 {
3961 /* The case where we are excluding both ranges. Here the complex case
3962 is if they don't overlap. In that case, the only time we have a
3963 range is if they are adjacent. If the second is a subset of the
3964 first, the result is the first. Otherwise, the range to exclude
3965 starts at the beginning of the first range and ends at the end of the
3966 second. */
3967 if (no_overlap)
3968 {
3969 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3970 range_binop (PLUS_EXPR, NULL_TREE,
3971 high0, 1,
3972 integer_one_node, 1),
3973 1, low1, 0)))
3974 in_p = 0, low = low0, high = high1;
3975 else
3976 return 0;
3977 }
3978 else if (subset)
3979 in_p = 0, low = low0, high = high0;
3980 else
3981 in_p = 0, low = low0, high = high1;
3982 }
3983
3984 *pin_p = in_p, *plow = low, *phigh = high;
3985 return 1;
3986 }
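
/* Illustrative sketch, not part of fold-const.c: the in0_p && in1_p
   case above, specialized to bounded int ranges.  Once range 0 is the
   one that starts first, the intersection is empty, all of range 1
   (the subset case), or [low1, high0].  demo_intersect is a
   hypothetical name.  */

static int
demo_intersect (int low0, int high0, int low1, int high1,
                int *plow, int *phigh)
{
  if (low0 > low1)
    {
      /* Make range 0 the range that starts first.  */
      int t;
      t = low0, low0 = low1, low1 = t;
      t = high0, high0 = high1, high1 = t;
    }
  if (high0 < low1)
    return 0;                       /* disjoint: the empty set */
  *plow = low1;
  *phigh = high1 <= high0 ? high1 : high0;
  return 1;
}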
3987 \f
3988 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3989 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3990 #endif
3991
3992 /* EXP is some logical combination of boolean tests. See if we can
3993 merge it into some range test. Return the new tree if so. */
3994
3995 static tree
3996 fold_range_test (tree exp)
3997 {
3998 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3999 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4000 int in0_p, in1_p, in_p;
4001 tree low0, low1, low, high0, high1, high;
4002 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4003 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4004 tree tem;
4005
4006 /* If this is an OR operation, invert both sides; we will invert
4007 again at the end. */
4008 if (or_op)
4009 in0_p = ! in0_p, in1_p = ! in1_p;
4010
4011 /* If both expressions are the same, if we can merge the ranges, and we
4012 can build the range test, return it or it inverted. If one of the
4013 ranges is always true or always false, consider it to be the same
4014 expression as the other. */
4015 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4016 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4017 in1_p, low1, high1)
4018 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4019 lhs != 0 ? lhs
4020 : rhs != 0 ? rhs : integer_zero_node,
4021 in_p, low, high))))
4022 return or_op ? invert_truthvalue (tem) : tem;
4023
4024 /* On machines where the branch cost is expensive, if this is a
4025 short-circuited branch and the underlying object on both sides
4026 is the same, make a non-short-circuit operation. */
4027 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4028 && lhs != 0 && rhs != 0
4029 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4030 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4031 && operand_equal_p (lhs, rhs, 0))
4032 {
4033 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4034 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4035 which cases we can't do this. */
4036 if (simple_operand_p (lhs))
4037 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4038 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4039 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4040 TREE_OPERAND (exp, 1));
4041
4042 else if (lang_hooks.decls.global_bindings_p () == 0
4043 && ! CONTAINS_PLACEHOLDER_P (lhs))
4044 {
4045 tree common = save_expr (lhs);
4046
4047 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4048 or_op ? ! in0_p : in0_p,
4049 low0, high0))
4050 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4051 or_op ? ! in1_p : in1_p,
4052 low1, high1))))
4053 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4054 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4055 TREE_TYPE (exp), lhs, rhs);
4056 }
4057 }
4058
4059 return 0;
4060 }
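
/* Illustrative sketch, not part of fold-const.c: the classic payoff of
   this transformation on plain C.  The two functions below are
   equivalent; the second is the single range check the folder builds,
   with one branch instead of two.  The demo_* names are hypothetical.  */

static int
demo_is_digit_tests (int c)
{
  return c >= '0' && c <= '9';
}

static int
demo_is_digit_range (int c)
{
  return (unsigned) c - '0' <= 9u;
}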
4061 \f
4062 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4063 P-bit value. Arrange things so the extra bits will be set to zero if
4064 and only if C is sign-extended to its full width. If MASK is nonzero,
4065 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4066
4067 static tree
4068 unextend (tree c, int p, int unsignedp, tree mask)
4069 {
4070 tree type = TREE_TYPE (c);
4071 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4072 tree temp;
4073
4074 if (p == modesize || unsignedp)
4075 return c;
4076
4077 /* We work by getting just the sign bit into the low-order bit, then
4078 into the high-order bit, then sign-extend. We then XOR that value
4079 with C. */
4080 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4081 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4082
4083 /* We must use a signed type in order to get an arithmetic right shift.
4084 However, we must also avoid introducing accidental overflows, so that
4085 a subsequent call to integer_zerop will work. Hence we must
4086 do the type conversion here. At this point, the constant is either
4087 zero or one, and the conversion to a signed type can never overflow.
4088 We could get an overflow if this conversion is done anywhere else. */
4089 if (TYPE_UNSIGNED (type))
4090 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4091
4092 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4093 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4094 if (mask != 0)
4095 temp = const_binop (BIT_AND_EXPR, temp,
4096 fold_convert (TREE_TYPE (c), mask), 0);
4097 /* If necessary, convert the type back to match the type of C. */
4098 if (TYPE_UNSIGNED (type))
4099 temp = fold_convert (type, temp);
4100
4101 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4102 }
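
/* Illustrative sketch, not part of fold-const.c: the sign-bit trick
   above on a 32-bit word, for a field width 0 < P < 32 (unextend
   returns C unchanged when P equals the mode size).  Like the folder,
   it relies on a signed type to get an arithmetic right shift.  The
   result has its extra (top) bits all zero iff C arrived
   sign-extended.  demo_unextend is a hypothetical name.  */

#include <stdint.h>

static uint32_t
demo_unextend (uint32_t c, int p)
{
  uint32_t sign = (c >> (p - 1)) & 1;              /* isolate sign bit */
  int32_t ext = (int32_t) (sign << 31) >> (32 - p - 1);
  return c ^ (uint32_t) ext;                       /* cancel the extension */
}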
4103 \f
4104 /* Find ways of folding logical expressions of LHS and RHS:
4105 Try to merge two comparisons to the same innermost item.
4106 Look for range tests like "ch >= '0' && ch <= '9'".
4107 Look for combinations of simple terms on machines with expensive branches
4108 and evaluate the RHS unconditionally.
4109
4110 For example, if we have p->a == 2 && p->b == 4 and we can make an
4111 object large enough to span both A and B, we can do this with a comparison
4112 against the object ANDed with the a mask.
4113
4114 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4115 operations to do this with one comparison.
4116
4117 We check for both normal comparisons and the BIT_AND_EXPRs made by
4118 this function and the one above.
4119
4120 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4121 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4122
4123 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4124 two operands.
4125
4126 We return the simplified tree or 0 if no optimization is possible. */
4127
4128 static tree
4129 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4130 {
4131 /* If this is the "or" of two comparisons, we can do something if
4132 the comparisons are NE_EXPR. If this is the "and", we can do something
4133 if the comparisons are EQ_EXPR. I.e.,
4134 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4135
4136 WANTED_CODE is this operation code. For single bit fields, we can
4137 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4138 comparison for one-bit fields. */
4139
4140 enum tree_code wanted_code;
4141 enum tree_code lcode, rcode;
4142 tree ll_arg, lr_arg, rl_arg, rr_arg;
4143 tree ll_inner, lr_inner, rl_inner, rr_inner;
4144 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4145 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4146 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4147 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4148 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4149 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4150 enum machine_mode lnmode, rnmode;
4151 tree ll_mask, lr_mask, rl_mask, rr_mask;
4152 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4153 tree l_const, r_const;
4154 tree lntype, rntype, result;
4155 int first_bit, end_bit;
4156 int volatilep;
4157
4158 /* Start by getting the comparison codes. Fail if anything is volatile.
4159 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4160 it were surrounded with a NE_EXPR. */
4161
4162 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4163 return 0;
4164
4165 lcode = TREE_CODE (lhs);
4166 rcode = TREE_CODE (rhs);
4167
4168 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4169 {
4170 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4171 lcode = NE_EXPR;
4172 }
4173
4174 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4175 {
4176 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4177 rcode = NE_EXPR;
4178 }
4179
4180 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4181 return 0;
4182
4183 ll_arg = TREE_OPERAND (lhs, 0);
4184 lr_arg = TREE_OPERAND (lhs, 1);
4185 rl_arg = TREE_OPERAND (rhs, 0);
4186 rr_arg = TREE_OPERAND (rhs, 1);
4187
4188 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4189 if (simple_operand_p (ll_arg)
4190 && simple_operand_p (lr_arg))
4191 {
4192 tree result;
4193 if (operand_equal_p (ll_arg, rl_arg, 0)
4194 && operand_equal_p (lr_arg, rr_arg, 0))
4195 {
4196 result = combine_comparisons (code, lcode, rcode,
4197 truth_type, ll_arg, lr_arg);
4198 if (result)
4199 return result;
4200 }
4201 else if (operand_equal_p (ll_arg, rr_arg, 0)
4202 && operand_equal_p (lr_arg, rl_arg, 0))
4203 {
4204 result = combine_comparisons (code, lcode,
4205 swap_tree_comparison (rcode),
4206 truth_type, ll_arg, lr_arg);
4207 if (result)
4208 return result;
4209 }
4210 }
4211
4212 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4213 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4214
4215 /* If the RHS can be evaluated unconditionally and its operands are
4216 simple, it wins to evaluate the RHS unconditionally on machines
4217 with expensive branches. In this case, this isn't a comparison
4218 that can be merged. Avoid doing this if the RHS is a floating-point
4219 comparison since those can trap. */
4220
4221 if (BRANCH_COST >= 2
4222 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4223 && simple_operand_p (rl_arg)
4224 && simple_operand_p (rr_arg))
4225 {
4226 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4227 if (code == TRUTH_OR_EXPR
4228 && lcode == NE_EXPR && integer_zerop (lr_arg)
4229 && rcode == NE_EXPR && integer_zerop (rr_arg)
4230 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4231 return build2 (NE_EXPR, truth_type,
4232 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4233 ll_arg, rl_arg),
4234 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4235
4236 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4237 if (code == TRUTH_AND_EXPR
4238 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4239 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4240 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4241 return build2 (EQ_EXPR, truth_type,
4242 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4243 ll_arg, rl_arg),
4244 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4245
4246 return build2 (code, truth_type, lhs, rhs);
4247 }
4248
4249 /* See if the comparisons can be merged. Then get all the parameters for
4250 each side. */
4251
4252 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4253 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4254 return 0;
4255
4256 volatilep = 0;
4257 ll_inner = decode_field_reference (ll_arg,
4258 &ll_bitsize, &ll_bitpos, &ll_mode,
4259 &ll_unsignedp, &volatilep, &ll_mask,
4260 &ll_and_mask);
4261 lr_inner = decode_field_reference (lr_arg,
4262 &lr_bitsize, &lr_bitpos, &lr_mode,
4263 &lr_unsignedp, &volatilep, &lr_mask,
4264 &lr_and_mask);
4265 rl_inner = decode_field_reference (rl_arg,
4266 &rl_bitsize, &rl_bitpos, &rl_mode,
4267 &rl_unsignedp, &volatilep, &rl_mask,
4268 &rl_and_mask);
4269 rr_inner = decode_field_reference (rr_arg,
4270 &rr_bitsize, &rr_bitpos, &rr_mode,
4271 &rr_unsignedp, &volatilep, &rr_mask,
4272 &rr_and_mask);
4273
4274 /* The inner operation on the lhs of each comparison must be the
4275 same if we are to be able to do anything.
4276 Then see if we have constants. If not, the same must be true for
4277 the rhs's. */
4278 if (volatilep || ll_inner == 0 || rl_inner == 0
4279 || ! operand_equal_p (ll_inner, rl_inner, 0))
4280 return 0;
4281
4282 if (TREE_CODE (lr_arg) == INTEGER_CST
4283 && TREE_CODE (rr_arg) == INTEGER_CST)
4284 l_const = lr_arg, r_const = rr_arg;
4285 else if (lr_inner == 0 || rr_inner == 0
4286 || ! operand_equal_p (lr_inner, rr_inner, 0))
4287 return 0;
4288 else
4289 l_const = r_const = 0;
4290
4291 /* If either comparison code is not correct for our logical operation,
4292 fail. However, we can convert a one-bit comparison against zero into
4293 the opposite comparison against that bit being set in the field. */
4294
4295 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4296 if (lcode != wanted_code)
4297 {
4298 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4299 {
4300 /* Make the left operand unsigned, since we are only interested
4301 in the value of one bit. Otherwise we are doing the wrong
4302 thing below. */
4303 ll_unsignedp = 1;
4304 l_const = ll_mask;
4305 }
4306 else
4307 return 0;
4308 }
4309
4310 /* This is analogous to the code for l_const above. */
4311 if (rcode != wanted_code)
4312 {
4313 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4314 {
4315 rl_unsignedp = 1;
4316 r_const = rl_mask;
4317 }
4318 else
4319 return 0;
4320 }
4321
4322 /* After this point all optimizations will generate bit-field
4323 references, which we might not want. */
4324 if (! lang_hooks.can_use_bit_fields_p ())
4325 return 0;
4326
4327 /* See if we can find a mode that contains both fields being compared on
4328 the left. If we can't, fail. Otherwise, update all constants and masks
4329 to be relative to a field of that size. */
4330 first_bit = MIN (ll_bitpos, rl_bitpos);
4331 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4332 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4333 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4334 volatilep);
4335 if (lnmode == VOIDmode)
4336 return 0;
4337
4338 lnbitsize = GET_MODE_BITSIZE (lnmode);
4339 lnbitpos = first_bit & ~ (lnbitsize - 1);
4340 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4341 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4342
4343 if (BYTES_BIG_ENDIAN)
4344 {
4345 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4346 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4347 }
4348
4349 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4350 size_int (xll_bitpos), 0);
4351 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4352 size_int (xrl_bitpos), 0);
4353
4354 if (l_const)
4355 {
4356 l_const = fold_convert (lntype, l_const);
4357 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4358 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4359 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4360 fold (build1 (BIT_NOT_EXPR,
4361 lntype, ll_mask)),
4362 0)))
4363 {
4364 warning ("comparison is always %d", wanted_code == NE_EXPR);
4365
4366 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4367 }
4368 }
4369 if (r_const)
4370 {
4371 r_const = fold_convert (lntype, r_const);
4372 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4373 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4374 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4375 fold (build1 (BIT_NOT_EXPR,
4376 lntype, rl_mask)),
4377 0)))
4378 {
4379 warning ("comparison is always %d", wanted_code == NE_EXPR);
4380
4381 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4382 }
4383 }
4384
4385 /* If the right sides are not constant, do the same for it. Also,
4386 disallow this optimization if a size or signedness mismatch occurs
4387 between the left and right sides. */
4388 if (l_const == 0)
4389 {
4390 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4391 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4392 /* Make sure the two fields on the right
4393 correspond to the left without being swapped. */
4394 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4395 return 0;
4396
4397 first_bit = MIN (lr_bitpos, rr_bitpos);
4398 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4399 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4400 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4401 volatilep);
4402 if (rnmode == VOIDmode)
4403 return 0;
4404
4405 rnbitsize = GET_MODE_BITSIZE (rnmode);
4406 rnbitpos = first_bit & ~ (rnbitsize - 1);
4407 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4408 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4409
4410 if (BYTES_BIG_ENDIAN)
4411 {
4412 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4413 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4414 }
4415
4416 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4417 size_int (xlr_bitpos), 0);
4418 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4419 size_int (xrr_bitpos), 0);
4420
4421 /* Make a mask that corresponds to both fields being compared.
4422 Do this for both items being compared. If the operands are the
4423 same size and the bits being compared are in the same position
4424 then we can do this by masking both and comparing the masked
4425 results. */
4426 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4427 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4428 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4429 {
4430 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4431 ll_unsignedp || rl_unsignedp);
4432 if (! all_ones_mask_p (ll_mask, lnbitsize))
4433 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4434
4435 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4436 lr_unsignedp || rr_unsignedp);
4437 if (! all_ones_mask_p (lr_mask, rnbitsize))
4438 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4439
4440 return build2 (wanted_code, truth_type, lhs, rhs);
4441 }
4442
4443 /* There is still another way we can do something: If both pairs of
4444 fields being compared are adjacent, we may be able to make a wider
4445 field containing them both.
4446
4447 Note that we still must mask the lhs/rhs expressions. Furthermore,
4448 the mask must be shifted to account for the shift done by
4449 make_bit_field_ref. */
4450 if ((ll_bitsize + ll_bitpos == rl_bitpos
4451 && lr_bitsize + lr_bitpos == rr_bitpos)
4452 || (ll_bitpos == rl_bitpos + rl_bitsize
4453 && lr_bitpos == rr_bitpos + rr_bitsize))
4454 {
4455 tree type;
4456
4457 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4458 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4459 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4460 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4461
4462 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4463 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4464 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4465 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4466
4467 /* Convert to the smaller type before masking out unwanted bits. */
4468 type = lntype;
4469 if (lntype != rntype)
4470 {
4471 if (lnbitsize > rnbitsize)
4472 {
4473 lhs = fold_convert (rntype, lhs);
4474 ll_mask = fold_convert (rntype, ll_mask);
4475 type = rntype;
4476 }
4477 else if (lnbitsize < rnbitsize)
4478 {
4479 rhs = fold_convert (lntype, rhs);
4480 lr_mask = fold_convert (lntype, lr_mask);
4481 type = lntype;
4482 }
4483 }
4484
4485 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4486 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4487
4488 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4489 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4490
4491 return build2 (wanted_code, truth_type, lhs, rhs);
4492 }
4493
4494 return 0;
4495 }
4496
4497 /* Handle the case of comparisons with constants. If there is something in
4498 common between the masks, those bits of the constants must be the same.
4499 If not, the condition is always false. Test for this to avoid generating
4500 incorrect code below. */
4501 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4502 if (! integer_zerop (result)
4503 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4504 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4505 {
4506 if (wanted_code == NE_EXPR)
4507 {
4508 warning ("`or' of unmatched not-equal tests is always 1");
4509 return constant_boolean_node (true, truth_type);
4510 }
4511 else
4512 {
4513 warning ("`and' of mutually exclusive equal-tests is always 0");
4514 return constant_boolean_node (false, truth_type);
4515 }
4516 }
4517
4518 /* Construct the expression we will return. First get the component
4519 reference we will make. Unless the mask is all ones the width of
4520 that field, perform the mask operation. Then compare with the
4521 merged constant. */
4522 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4523 ll_unsignedp || rl_unsignedp);
4524
4525 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4526 if (! all_ones_mask_p (ll_mask, lnbitsize))
4527 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4528
4529 return build2 (wanted_code, truth_type, result,
4530 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4531 }
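
/* Illustrative sketch, not part of fold-const.c: the shape of the
   rewrite fold_truthop aims for, done by hand on a two-byte struct.
   Comparing two adjacent byte fields against constants becomes one
   wide load and one comparison; memcpy stands in for
   make_bit_field_ref, and building the merged constant from a struct
   sidesteps endianness.  No mask is needed because the two fields
   fill the word.  The demo_* names are hypothetical.  */

#include <string.h>

struct demo_pair { unsigned char a, b; };

static int
demo_both_equal (const struct demo_pair *p)
{
  /* The unmerged form would be: p->a == 2 && p->b == 4.  */
  unsigned short word, want;
  struct demo_pair k = { 2, 4 };
  memcpy (&word, p, sizeof word);   /* one 16-bit load spans both */
  memcpy (&want, &k, sizeof want);
  return word == want;              /* one comparison instead of two */
}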
4532 \f
4533 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4534 constant. */
4535
4536 static tree
4537 optimize_minmax_comparison (tree t)
4538 {
4539 tree type = TREE_TYPE (t);
4540 tree arg0 = TREE_OPERAND (t, 0);
4541 enum tree_code op_code;
4542 tree comp_const = TREE_OPERAND (t, 1);
4543 tree minmax_const;
4544 int consts_equal, consts_lt;
4545 tree inner;
4546
4547 STRIP_SIGN_NOPS (arg0);
4548
4549 op_code = TREE_CODE (arg0);
4550 minmax_const = TREE_OPERAND (arg0, 1);
4551 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4552 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4553 inner = TREE_OPERAND (arg0, 0);
4554
4555 /* If something does not permit us to optimize, return the original tree. */
4556 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4557 || TREE_CODE (comp_const) != INTEGER_CST
4558 || TREE_CONSTANT_OVERFLOW (comp_const)
4559 || TREE_CODE (minmax_const) != INTEGER_CST
4560 || TREE_CONSTANT_OVERFLOW (minmax_const))
4561 return t;
4562
4563 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4564 and GT_EXPR, doing the rest with recursive calls using logical
4565 simplifications. */
4566 switch (TREE_CODE (t))
4567 {
4568 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4569 return
4570 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4571
4572 case GE_EXPR:
4573 return
4574 fold (build2 (TRUTH_ORIF_EXPR, type,
4575 optimize_minmax_comparison
4576 (build2 (EQ_EXPR, type, arg0, comp_const)),
4577 optimize_minmax_comparison
4578 (build2 (GT_EXPR, type, arg0, comp_const))));
4579
4580 case EQ_EXPR:
4581 if (op_code == MAX_EXPR && consts_equal)
4582 /* MAX (X, 0) == 0 -> X <= 0 */
4583 return fold (build2 (LE_EXPR, type, inner, comp_const));
4584
4585 else if (op_code == MAX_EXPR && consts_lt)
4586 /* MAX (X, 0) == 5 -> X == 5 */
4587 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4588
4589 else if (op_code == MAX_EXPR)
4590 /* MAX (X, 0) == -1 -> false */
4591 return omit_one_operand (type, integer_zero_node, inner);
4592
4593 else if (consts_equal)
4594 /* MIN (X, 0) == 0 -> X >= 0 */
4595 return fold (build2 (GE_EXPR, type, inner, comp_const));
4596
4597 else if (consts_lt)
4598 /* MIN (X, 0) == 5 -> false */
4599 return omit_one_operand (type, integer_zero_node, inner);
4600
4601 else
4602 /* MIN (X, 0) == -1 -> X == -1 */
4603 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4604
4605 case GT_EXPR:
4606 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4607 /* MAX (X, 0) > 0 -> X > 0
4608 MAX (X, 0) > 5 -> X > 5 */
4609 return fold (build2 (GT_EXPR, type, inner, comp_const));
4610
4611 else if (op_code == MAX_EXPR)
4612 /* MAX (X, 0) > -1 -> true */
4613 return omit_one_operand (type, integer_one_node, inner);
4614
4615 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4616 /* MIN (X, 0) > 0 -> false
4617 MIN (X, 0) > 5 -> false */
4618 return omit_one_operand (type, integer_zero_node, inner);
4619
4620 else
4621 /* MIN (X, 0) > -1 -> X > -1 */
4622 return fold (build2 (GT_EXPR, type, inner, comp_const));
4623
4624 default:
4625 return t;
4626 }
4627 }
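
/* Illustrative sketch, not part of fold-const.c: two of the EQ_EXPR
   and GT_EXPR identities above, spot-checked on ints.  For any X and
   C, MAX (X, C) == C exactly when X <= C, and MAX (X, C) > C exactly
   when X > C.  DEMO_MAX and demo_minmax_identities are hypothetical
   names.  */

#include <assert.h>

#define DEMO_MAX(a, b) ((a) > (b) ? (a) : (b))

static void
demo_minmax_identities (int x, int c)
{
  assert ((DEMO_MAX (x, c) == c) == (x <= c));
  assert ((DEMO_MAX (x, c) > c) == (x > c));
}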
4628 \f
4629 /* T is an integer expression that is being multiplied, divided, or taken a
4630 modulus (CODE says which and what kind of divide or modulus) by a
4631 constant C. See if we can eliminate that operation by folding it with
4632 other operations already in T. WIDE_TYPE, if non-null, is a type that
4633 should be used for the computation if wider than our type.
4634
4635 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4636 (X * 2) + (Y * 4). We must, however, be assured that either the original
4637 expression would not overflow or that overflow is undefined for the type
4638 in the language in question.
4639
4640 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4641 the machine has a multiply-accumulate insn or that this is part of an
4642 addressing calculation.
4643
4644 If we return a non-null expression, it is an equivalent form of the
4645 original computation, but need not be in the original type. */
4646
4647 static tree
4648 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4649 {
4650 /* To avoid exponential search depth, refuse to allow recursion past
4651 three levels. Beyond that (1) it's highly unlikely that we'll find
4652 something interesting and (2) we've probably processed it before
4653 when we built the inner expression. */
4654
4655 static int depth;
4656 tree ret;
4657
4658 if (depth > 3)
4659 return NULL;
4660
4661 depth++;
4662 ret = extract_muldiv_1 (t, c, code, wide_type);
4663 depth--;
4664
4665 return ret;
4666 }
4667
4668 static tree
4669 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4670 {
4671 tree type = TREE_TYPE (t);
4672 enum tree_code tcode = TREE_CODE (t);
4673 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4674 > GET_MODE_SIZE (TYPE_MODE (type)))
4675 ? wide_type : type);
4676 tree t1, t2;
4677 int same_p = tcode == code;
4678 tree op0 = NULL_TREE, op1 = NULL_TREE;
4679
4680 /* Don't deal with constants of zero here; they confuse the code below. */
4681 if (integer_zerop (c))
4682 return NULL_TREE;
4683
4684 if (TREE_CODE_CLASS (tcode) == '1')
4685 op0 = TREE_OPERAND (t, 0);
4686
4687 if (TREE_CODE_CLASS (tcode) == '2')
4688 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4689
4690 /* Note that we need not handle conditional operations here since fold
4691 already handles those cases. So just do arithmetic here. */
4692 switch (tcode)
4693 {
4694 case INTEGER_CST:
4695 /* For a constant, we can always simplify if we are a multiply
4696 or (for divide and modulus) if it is a multiple of our constant. */
4697 if (code == MULT_EXPR
4698 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4699 return const_binop (code, fold_convert (ctype, t),
4700 fold_convert (ctype, c), 0);
4701 break;
4702
4703 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4704 /* If op0 is an expression ... */
4705 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4706 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4707 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4708 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4709 /* ... and is unsigned, and its type is smaller than ctype,
4710 then we cannot pass through as widening. */
4711 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4712 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4713 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4714 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4715 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4716 /* ... or its type is larger than ctype,
4717 then we cannot pass through this truncation. */
4718 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4719 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4720 /* ... or signedness changes for division or modulus,
4721 then we cannot pass through this conversion. */
4722 || (code != MULT_EXPR
4723 && (TYPE_UNSIGNED (ctype)
4724 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4725 break;
4726
4727 /* Pass the constant down and see if we can make a simplification. If
4728 we can, replace this expression with the inner simplification for
4729 possible later conversion to our or some other type. */
4730 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4731 && TREE_CODE (t2) == INTEGER_CST
4732 && ! TREE_CONSTANT_OVERFLOW (t2)
4733 && (0 != (t1 = extract_muldiv (op0, t2, code,
4734 code == MULT_EXPR
4735 ? ctype : NULL_TREE))))
4736 return t1;
4737 break;
4738
4739 case NEGATE_EXPR: case ABS_EXPR:
4740 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4741 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4742 break;
4743
4744 case MIN_EXPR: case MAX_EXPR:
4745 /* If widening the type changes the signedness, then we can't perform
4746 this optimization as that changes the result. */
4747 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4748 break;
4749
4750 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4751 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4752 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4753 {
4754 if (tree_int_cst_sgn (c) < 0)
4755 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4756
4757 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4758 fold_convert (ctype, t2)));
4759 }
4760 break;
4761
4762 case LSHIFT_EXPR: case RSHIFT_EXPR:
4763 /* If the second operand is constant, this is a multiplication
4764 or floor division by a power of two, so we can treat it that
4765 way unless the multiplier or divisor overflows. */
4766 if (TREE_CODE (op1) == INTEGER_CST
4767 /* const_binop may not detect overflow correctly,
4768 so check for it explicitly here. */
4769 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4770 && TREE_INT_CST_HIGH (op1) == 0
4771 && 0 != (t1 = fold_convert (ctype,
4772 const_binop (LSHIFT_EXPR,
4773 size_one_node,
4774 op1, 0)))
4775 && ! TREE_OVERFLOW (t1))
4776 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
4777 ? MULT_EXPR : FLOOR_DIV_EXPR,
4778 ctype, fold_convert (ctype, op0), t1),
4779 c, code, wide_type);
4780 break;
4781
4782 case PLUS_EXPR: case MINUS_EXPR:
4783 /* See if we can eliminate the operation on both sides. If we can, we
4784 can return a new PLUS or MINUS. If we can't, the only remaining
4785 cases where we can do anything are if the second operand is a
4786 constant. */
4787 t1 = extract_muldiv (op0, c, code, wide_type);
4788 t2 = extract_muldiv (op1, c, code, wide_type);
4789 if (t1 != 0 && t2 != 0
4790 && (code == MULT_EXPR
4791 /* If not multiplication, we can only do this if both operands
4792 are divisible by c. */
4793 || (multiple_of_p (ctype, op0, c)
4794 && multiple_of_p (ctype, op1, c))))
4795 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4796 fold_convert (ctype, t2)));
4797
4798 /* If this was a subtraction, negate OP1 and set it to be an addition.
4799 This simplifies the logic below. */
4800 if (tcode == MINUS_EXPR)
4801 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4802
4803 if (TREE_CODE (op1) != INTEGER_CST)
4804 break;
4805
4806 /* If either OP1 or C are negative, this optimization is not safe for
4807 some of the division and remainder types while for others we need
4808 to change the code. */
4809 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4810 {
4811 if (code == CEIL_DIV_EXPR)
4812 code = FLOOR_DIV_EXPR;
4813 else if (code == FLOOR_DIV_EXPR)
4814 code = CEIL_DIV_EXPR;
4815 else if (code != MULT_EXPR
4816 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4817 break;
4818 }
4819
4820 /* If it's a multiply or a division/modulus operation of a multiple
4821 of our constant, do the operation and verify it doesn't overflow. */
4822 if (code == MULT_EXPR
4823 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4824 {
4825 op1 = const_binop (code, fold_convert (ctype, op1),
4826 fold_convert (ctype, c), 0);
4827 /* We allow the constant to overflow with wrapping semantics. */
4828 if (op1 == 0
4829 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4830 break;
4831 }
4832 else
4833 break;
4834
4835 /* If we have an unsigned type that is not a sizetype, we cannot widen
4836 the operation since it will change the result if the original
4837 computation overflowed. */
4838 if (TYPE_UNSIGNED (ctype)
4839 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4840 && ctype != type)
4841 break;
4842
4843 /* If we were able to eliminate our operation from the first side,
4844 apply our operation to the second side and reform the PLUS. */
4845 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4846 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
4847
4848 /* The last case is if we are a multiply. In that case, we can
4849 apply the distributive law to commute the multiply and addition
4850 if the multiplication of the constants doesn't overflow. */
4851 if (code == MULT_EXPR)
4852 return fold (build2 (tcode, ctype,
4853 fold (build2 (code, ctype,
4854 fold_convert (ctype, op0),
4855 fold_convert (ctype, c))),
4856 op1));
4857
4858 break;
4859
4860 case MULT_EXPR:
4861 /* We have a special case here if we are doing something like
4862 (C * 8) % 4 since we know that's zero. */
4863 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4864 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4865 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4866 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4867 return omit_one_operand (type, integer_zero_node, op0);
4868
4869 /* ... fall through ... */
4870
4871 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4872 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4873 /* If we can extract our operation from the LHS, do so and return a
4874 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4875 do something only if the second operand is a constant. */
4876 if (same_p
4877 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4878 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4879 fold_convert (ctype, op1)));
4880 else if (tcode == MULT_EXPR && code == MULT_EXPR
4881 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4882 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4883 fold_convert (ctype, t1)));
4884 else if (TREE_CODE (op1) != INTEGER_CST)
4885 return 0;
4886
4887 /* If these are the same operation types, we can associate them
4888 assuming no overflow. */
4889 if (tcode == code
4890 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4891 fold_convert (ctype, c), 0))
4892 && ! TREE_OVERFLOW (t1))
4893 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
4894
4895 /* If these operations "cancel" each other, we have the main
4896 optimizations of this pass, which occur when either constant is a
4897 multiple of the other, in which case we replace this with an
4898 operation of either CODE or TCODE.
4899
4900 If we have an unsigned type that is not a sizetype, we cannot do
4901 this since it will change the result if the original computation
4902 overflowed. */
4903 if ((! TYPE_UNSIGNED (ctype)
4904 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4905 && ! flag_wrapv
4906 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4907 || (tcode == MULT_EXPR
4908 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4909 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4910 {
4911 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4912 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4913 fold_convert (ctype,
4914 const_binop (TRUNC_DIV_EXPR,
4915 op1, c, 0))));
4916 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4917 return fold (build2 (code, ctype, fold_convert (ctype, op0),
4918 fold_convert (ctype,
4919 const_binop (TRUNC_DIV_EXPR,
4920 c, op1, 0))));
4921 }
4922 break;
4923
4924 default:
4925 break;
4926 }
4927
4928 return 0;
4929 }
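
/* Illustrative sketch, not part of fold-const.c: the transformation
   promised in the comment before extract_muldiv, spot-checked on
   ints.  The rewrite is valid precisely under the stated caveat: the
   original expression must not overflow, or overflow must be
   undefined for the type.  demo_extract_muldiv is a hypothetical
   name.  */

#include <assert.h>

static void
demo_extract_muldiv (int x, int y)
{
  /* Assumes no intermediate overflow.  */
  assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
}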
4930 \f
4931 /* Return a node which has the indicated constant VALUE (either 0 or
4932 1), and is of the indicated TYPE. */
4933
4934 static tree
4935 constant_boolean_node (int value, tree type)
4936 {
4937 if (type == integer_type_node)
4938 return value ? integer_one_node : integer_zero_node;
4939 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4940 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4941 : integer_zero_node);
4942 else
4943 {
4944 tree t = build_int_2 (value, 0);
4945
4946 TREE_TYPE (t) = type;
4947 return t;
4948 }
4949 }
4950
4951 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4952 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4953 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4954 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4955 COND is the first argument to CODE; otherwise (as in the example
4956 given here), it is the second argument. TYPE is the type of the
4957 original expression. Return NULL_TREE if no simplification is
4958 possible. */
4959
4960 static tree
4961 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4962 tree cond, tree arg, int cond_first_p)
4963 {
4964 tree test, true_value, false_value;
4965 tree lhs = NULL_TREE;
4966 tree rhs = NULL_TREE;
4967
4968 /* This transformation is only worthwhile if we don't have to wrap
4969 arg in a SAVE_EXPR, and the operation can be simplified on at least
4970 one of the branches once it's pushed inside the COND_EXPR. */
4971 if (!TREE_CONSTANT (arg))
4972 return NULL_TREE;
4973
4974 if (TREE_CODE (cond) == COND_EXPR)
4975 {
4976 test = TREE_OPERAND (cond, 0);
4977 true_value = TREE_OPERAND (cond, 1);
4978 false_value = TREE_OPERAND (cond, 2);
4979 /* If this operand throws an exception, then it does not make
4980 sense to try to perform a logical or arithmetic operation
4981 involving it. */
4982 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4983 lhs = true_value;
4984 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4985 rhs = false_value;
4986 }
4987 else
4988 {
4989 tree testtype = TREE_TYPE (cond);
4990 test = cond;
4991 true_value = constant_boolean_node (true, testtype);
4992 false_value = constant_boolean_node (false, testtype);
4993 }
4994
4995 if (lhs == 0)
4996 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
4997 : build2 (code, type, arg, true_value));
4998 if (rhs == 0)
4999 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5000 : build2 (code, type, arg, false_value));
5001
5002 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5003 return fold_convert (type, test);
5004 }
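
/* Illustrative sketch, not part of fold-const.c: the distribution this
   function performs, on ints.  Pushing the invariant operand into both
   arms lets each arm be simplified on its own.  The demo_* names are
   hypothetical.  */

static int
demo_cond_before (int b, int a, int x, int y)
{
  return a + (b ? x : y);
}

static int
demo_cond_after (int b, int a, int x, int y)
{
  return b ? (a + x) : (a + y);
}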
5005
5006 \f
5007 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5008
5009 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5010 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5011 ADDEND is the same as X.
5012
5013 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5014 and finite. The problematic cases are when X is zero, and its mode
5015 has signed zeros. In the case of rounding towards -infinity,
5016 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5017 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5018
5019 static bool
5020 fold_real_zero_addition_p (tree type, tree addend, int negate)
5021 {
5022 if (!real_zerop (addend))
5023 return false;
5024
5025 /* Don't allow the fold with -fsignaling-nans. */
5026 if (HONOR_SNANS (TYPE_MODE (type)))
5027 return false;
5028
5029 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5030 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5031 return true;
5032
5033 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5034 if (TREE_CODE (addend) == REAL_CST
5035 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5036 negate = !negate;
5037
5038 /* The mode has signed zeros, and we have to honor their sign.
5039 In this situation, there is only one case we can return true for.
5040 X - 0 is the same as X unless rounding towards -infinity is
5041 supported. */
5042 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5043 }
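
/* Illustrative sketch, not part of fold-const.c: why X + 0.0 is not an
   identity once signed zeros are honored.  Under the default
   round-to-nearest mode, -0.0 + 0.0 is +0.0, so folding X + 0.0 to X
   would lose the sign of zero; X - 0.0 is safe in that mode, which is
   the single case the function returns true for.  demo_signed_zero is
   a hypothetical name.  */

#include <assert.h>
#include <math.h>

static void
demo_signed_zero (void)
{
  double x = -0.0;
  assert (signbit (x));          /* x is the negative zero ... */
  assert (!signbit (x + 0.0));   /* ... but x + 0.0 is the positive one */
  assert (signbit (x - 0.0));    /* x - 0.0 keeps the sign */
}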
5044
5045 /* Subroutine of fold() that checks comparisons of built-in math
5046 functions against real constants.
5047
5048 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5049 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5050 is the type of the result and ARG0 and ARG1 are the operands of the
5051 comparison. ARG1 must be a TREE_REAL_CST.
5052
5053 The function returns the constant folded tree if a simplification
5054 can be made, and NULL_TREE otherwise. */
5055
5056 static tree
5057 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5058 tree type, tree arg0, tree arg1)
5059 {
5060 REAL_VALUE_TYPE c;
5061
5062 if (BUILTIN_SQRT_P (fcode))
5063 {
5064 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5065 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5066
5067 c = TREE_REAL_CST (arg1);
5068 if (REAL_VALUE_NEGATIVE (c))
5069 {
5070 /* sqrt(x) < y is always false, if y is negative. */
5071 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5072 return omit_one_operand (type, integer_zero_node, arg);
5073
5074 /* sqrt(x) > y is always true, if y is negative and we
5075 don't care about NaNs, i.e. negative values of x. */
5076 if (code == NE_EXPR || !HONOR_NANS (mode))
5077 return omit_one_operand (type, integer_one_node, arg);
5078
5079 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5080 return fold (build2 (GE_EXPR, type, arg,
5081 build_real (TREE_TYPE (arg), dconst0)));
5082 }
5083 else if (code == GT_EXPR || code == GE_EXPR)
5084 {
5085 REAL_VALUE_TYPE c2;
5086
5087 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5088 real_convert (&c2, mode, &c2);
5089
5090 if (REAL_VALUE_ISINF (c2))
5091 {
5092 /* sqrt(x) > y is x == +Inf, when y is very large. */
5093 if (HONOR_INFINITIES (mode))
5094 return fold (build2 (EQ_EXPR, type, arg,
5095 build_real (TREE_TYPE (arg), c2)));
5096
5097 /* sqrt(x) > y is always false, when y is very large
5098 and we don't care about infinities. */
5099 return omit_one_operand (type, integer_zero_node, arg);
5100 }
5101
5102 /* sqrt(x) > c is the same as x > c*c. */
5103 return fold (build2 (code, type, arg,
5104 build_real (TREE_TYPE (arg), c2)));
5105 }
5106 else if (code == LT_EXPR || code == LE_EXPR)
5107 {
5108 REAL_VALUE_TYPE c2;
5109
5110 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5111 real_convert (&c2, mode, &c2);
5112
5113 if (REAL_VALUE_ISINF (c2))
5114 {
5115 /* sqrt(x) < y is always true, when y is a very large
5116 value and we don't care about NaNs or Infinities. */
5117 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5118 return omit_one_operand (type, integer_one_node, arg);
5119
5120 /* sqrt(x) < y is x != +Inf when y is very large and we
5121 don't care about NaNs. */
5122 if (! HONOR_NANS (mode))
5123 return fold (build2 (NE_EXPR, type, arg,
5124 build_real (TREE_TYPE (arg), c2)));
5125
5126 /* sqrt(x) < y is x >= 0 when y is very large and we
5127 don't care about Infinities. */
5128 if (! HONOR_INFINITIES (mode))
5129 return fold (build2 (GE_EXPR, type, arg,
5130 build_real (TREE_TYPE (arg), dconst0)));
5131
5132 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5133 if (lang_hooks.decls.global_bindings_p () != 0
5134 || CONTAINS_PLACEHOLDER_P (arg))
5135 return NULL_TREE;
5136
5137 arg = save_expr (arg);
5138 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5139 fold (build2 (GE_EXPR, type, arg,
5140 build_real (TREE_TYPE (arg),
5141 dconst0))),
5142 fold (build2 (NE_EXPR, type, arg,
5143 build_real (TREE_TYPE (arg),
5144 c2)))));
5145 }
5146
5147 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5148 if (! HONOR_NANS (mode))
5149 return fold (build2 (code, type, arg,
5150 build_real (TREE_TYPE (arg), c2)));
5151
5152 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5153 if (lang_hooks.decls.global_bindings_p () == 0
5154 && ! CONTAINS_PLACEHOLDER_P (arg))
5155 {
5156 arg = save_expr (arg);
5157 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5158 fold (build2 (GE_EXPR, type, arg,
5159 build_real (TREE_TYPE (arg),
5160 dconst0))),
5161 fold (build2 (code, type, arg,
5162 build_real (TREE_TYPE (arg),
5163 c2)))));
5164 }
5165 }
5166 }
5167
5168 return NULL_TREE;
5169 }
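
/* Illustrative sketch, not part of fold-const.c: the squaring identity
   used throughout the sqrt cases above, on doubles.  For nonnegative
   finite x and positive c, sqrt(x) > c agrees with x > c*c except
   possibly at rounding boundaries of c*c, one reason folds of this
   kind are typically guarded by the unsafe-math flags.  The demo_*
   names are hypothetical.  */

#include <math.h>

static int
demo_sqrt_cmp_direct (double x, double c)
{
  return sqrt (x) > c;
}

static int
demo_sqrt_cmp_folded (double x, double c)
{
  return x > c * c;
}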
5170
5171 /* Subroutine of fold() that optimizes comparisons against Infinities,
5172 either +Inf or -Inf.
5173
5174 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5175 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5176 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5177
5178 The function returns the constant folded tree if a simplification
5179 can be made, and NULL_TREE otherwise. */
5180
5181 static tree
5182 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5183 {
5184 enum machine_mode mode;
5185 REAL_VALUE_TYPE max;
5186 tree temp;
5187 bool neg;
5188
5189 mode = TYPE_MODE (TREE_TYPE (arg0));
5190
5191 /* For negative infinity, swap the sense of the comparison. */
5192 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5193 if (neg)
5194 code = swap_tree_comparison (code);
5195
5196 switch (code)
5197 {
5198 case GT_EXPR:
5199 /* x > +Inf is always false, if we ignore sNaNs. */
5200 if (HONOR_SNANS (mode))
5201 return NULL_TREE;
5202 return omit_one_operand (type, integer_zero_node, arg0);
5203
5204 case LE_EXPR:
5205 /* x <= +Inf is always true, if we don't care about NaNs. */
5206 if (! HONOR_NANS (mode))
5207 return omit_one_operand (type, integer_one_node, arg0);
5208
5209 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5210 if (lang_hooks.decls.global_bindings_p () == 0
5211 && ! CONTAINS_PLACEHOLDER_P (arg0))
5212 {
5213 arg0 = save_expr (arg0);
5214 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5215 }
5216 break;
5217
5218 case EQ_EXPR:
5219 case GE_EXPR:
5220 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5221 real_maxval (&max, neg, mode);
5222 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5223 arg0, build_real (TREE_TYPE (arg0), max)));
5224
5225 case LT_EXPR:
5226 /* x < +Inf is always equal to x <= DBL_MAX. */
5227 real_maxval (&max, neg, mode);
5228 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5229 arg0, build_real (TREE_TYPE (arg0), max)));
5230
5231 case NE_EXPR:
5232 /* x != +Inf is always equal to !(x > DBL_MAX). */
5233 real_maxval (&max, neg, mode);
5234 if (! HONOR_NANS (mode))
5235 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5236 arg0, build_real (TREE_TYPE (arg0), max)));
5237
5238 /* The transformation below creates non-gimple code and thus is
5239 not appropriate if we are in gimple form. */
5240 if (in_gimple_form)
5241 return NULL_TREE;
5242
5243 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5244 arg0, build_real (TREE_TYPE (arg0), max)));
5245 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5246
5247 default:
5248 break;
5249 }
5250
5251 return NULL_TREE;
5252 }
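
/* Illustrative sketch, not part of GCC: the DBL_MAX trick used by
   fold_inf_compare, as a standalone program.  +Inf is the only double
   greater than DBL_MAX, so "x == +Inf" can be rewritten as "x > DBL_MAX"
   and "x < +Inf" as "x <= DBL_MAX"; both sides are also false for NaN,
   so the rewrite is NaN-safe.

       #include <float.h>
       #include <math.h>
       #include <stdio.h>

       int
       main (void)
       {
         double vals[4] = { 0.0, DBL_MAX, INFINITY, -INFINITY };
         int i;
         for (i = 0; i < 4; i++)
           printf ("%g: eq-inf %d, gt-max %d\n", vals[i],
                   vals[i] == INFINITY, vals[i] > DBL_MAX);
         return 0;
       }  */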
5253
5254 /* Subroutine of fold() that optimizes comparisons of a division by
5255 a nonzero integer constant against an integer constant, i.e.
5256 X/C1 op C2.
5257
5258 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5259 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5260 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5261
5262 The function returns the constant folded tree if a simplification
5263 can be made, and NULL_TREE otherwise. */
5264
5265 static tree
5266 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5267 {
5268 tree prod, tmp, hi, lo;
5269 tree arg00 = TREE_OPERAND (arg0, 0);
5270 tree arg01 = TREE_OPERAND (arg0, 1);
5271 unsigned HOST_WIDE_INT lpart;
5272 HOST_WIDE_INT hpart;
5273 int overflow;
5274
5275 /* We have to do this the hard way to detect unsigned overflow.
5276 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5277 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5278 TREE_INT_CST_HIGH (arg01),
5279 TREE_INT_CST_LOW (arg1),
5280 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5281 prod = build_int_2 (lpart, hpart);
5282 TREE_TYPE (prod) = TREE_TYPE (arg00);
5283 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5284 || TREE_INT_CST_HIGH (prod) != hpart
5285 || TREE_INT_CST_LOW (prod) != lpart;
5286 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5287
5288 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5289 {
5290 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5291 lo = prod;
5292
5293 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5294 overflow = add_double (TREE_INT_CST_LOW (prod),
5295 TREE_INT_CST_HIGH (prod),
5296 TREE_INT_CST_LOW (tmp),
5297 TREE_INT_CST_HIGH (tmp),
5298 &lpart, &hpart);
5299 hi = build_int_2 (lpart, hpart);
5300 TREE_TYPE (hi) = TREE_TYPE (arg00);
5301 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5302 || TREE_INT_CST_HIGH (hi) != hpart
5303 || TREE_INT_CST_LOW (hi) != lpart
5304 || TREE_OVERFLOW (prod);
5305 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5306 }
5307 else if (tree_int_cst_sgn (arg01) >= 0)
5308 {
5309 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5310 switch (tree_int_cst_sgn (arg1))
5311 {
5312 case -1:
5313 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5314 hi = prod;
5315 break;
5316
5317 case 0:
5318 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5319 hi = tmp;
5320 break;
5321
5322 case 1:
5323 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5324 lo = prod;
5325 break;
5326
5327 default:
5328 abort ();
5329 }
5330 }
5331 else
5332 {
5333 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5334 switch (tree_int_cst_sgn (arg1))
5335 {
5336 case -1:
5337 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5338 lo = prod;
5339 break;
5340
5341 case 0:
5342 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5343 lo = tmp;
5344 break;
5345
5346 case 1:
5347 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5348 hi = prod;
5349 break;
5350
5351 default:
5352 abort ();
5353 }
5354 }
5355
5356 switch (code)
5357 {
5358 case EQ_EXPR:
5359 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5360 return omit_one_operand (type, integer_zero_node, arg00);
5361 if (TREE_OVERFLOW (hi))
5362 return fold (build2 (GE_EXPR, type, arg00, lo));
5363 if (TREE_OVERFLOW (lo))
5364 return fold (build2 (LE_EXPR, type, arg00, hi));
5365 return build_range_check (type, arg00, 1, lo, hi);
5366
5367 case NE_EXPR:
5368 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5369 return omit_one_operand (type, integer_one_node, arg00);
5370 if (TREE_OVERFLOW (hi))
5371 return fold (build2 (LT_EXPR, type, arg00, lo));
5372 if (TREE_OVERFLOW (lo))
5373 return fold (build2 (GT_EXPR, type, arg00, hi));
5374 return build_range_check (type, arg00, 0, lo, hi);
5375
5376 case LT_EXPR:
5377 if (TREE_OVERFLOW (lo))
5378 return omit_one_operand (type, integer_zero_node, arg00);
5379 return fold (build2 (LT_EXPR, type, arg00, lo));
5380
5381 case LE_EXPR:
5382 if (TREE_OVERFLOW (hi))
5383 return omit_one_operand (type, integer_one_node, arg00);
5384 return fold (build2 (LE_EXPR, type, arg00, hi));
5385
5386 case GT_EXPR:
5387 if (TREE_OVERFLOW (hi))
5388 return omit_one_operand (type, integer_zero_node, arg00);
5389 return fold (build2 (GT_EXPR, type, arg00, hi));
5390
5391 case GE_EXPR:
5392 if (TREE_OVERFLOW (lo))
5393 return omit_one_operand (type, integer_one_node, arg00);
5394 return fold (build2 (GE_EXPR, type, arg00, lo));
5395
5396 default:
5397 break;
5398 }
5399
5400 return NULL_TREE;
5401 }
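
/* Illustrative sketch, not part of GCC: the range rewrite performed by
   fold_div_compare, on concrete numbers.  With truncating unsigned
   division, "x / 3 == 4" holds exactly for x in [12, 14], so the
   comparison becomes a range check and the division disappears.  The
   brute-force verification below is for demonstration only.

       #include <stdio.h>

       int
       main (void)
       {
         unsigned int x;
         for (x = 0; x < 100; x++)
           if ((x / 3 == 4) != (x >= 12 && x <= 14))
             printf ("mismatch at %u\n", x);
         printf ("done\n");
         return 0;
       }  */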
5402
5403
5404 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5405 equality/inequality test, then return a simplified form of
5406 the test using shifts and logical operations. Otherwise return
5407 NULL_TREE. RESULT_TYPE is the desired result type. */
5408
5409 tree
5410 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5411 tree result_type)
5412 {
5413 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5414 operand 0. */
5415 if (code == TRUTH_NOT_EXPR)
5416 {
5417 code = TREE_CODE (arg0);
5418 if (code != NE_EXPR && code != EQ_EXPR)
5419 return NULL_TREE;
5420
5421 /* Extract the arguments of the EQ/NE. */
5422 arg1 = TREE_OPERAND (arg0, 1);
5423 arg0 = TREE_OPERAND (arg0, 0);
5424
5425 /* This requires us to invert the code. */
5426 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5427 }
5428
5429 /* If this is testing a single bit, we can optimize the test. */
5430 if ((code == NE_EXPR || code == EQ_EXPR)
5431 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5432 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5433 {
5434 tree inner = TREE_OPERAND (arg0, 0);
5435 tree type = TREE_TYPE (arg0);
5436 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5437 enum machine_mode operand_mode = TYPE_MODE (type);
5438 int ops_unsigned;
5439 tree signed_type, unsigned_type, intermediate_type;
5440 tree arg00;
5441
5442 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5443 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5444 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5445 if (arg00 != NULL_TREE
5446 /* This is only a win if casting to a signed type is cheap,
5447 i.e. when arg00's type is not a partial mode. */
5448 && TYPE_PRECISION (TREE_TYPE (arg00))
5449 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5450 {
5451 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5452 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5453 result_type, fold_convert (stype, arg00),
5454 fold_convert (stype, integer_zero_node)));
5455 }
5456
5457 /* Otherwise we have (A & C) != 0 where C is a single bit,
5458 convert that into ((A >> C2) & 1), where C2 = log2(C).
5459 Similarly for (A & C) == 0. */
5460
5461 /* If INNER is a right shift of a constant and it plus BITNUM does
5462 not overflow, adjust BITNUM and INNER. */
5463 if (TREE_CODE (inner) == RSHIFT_EXPR
5464 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5465 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5466 && bitnum < TYPE_PRECISION (type)
5467 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5468 bitnum - TYPE_PRECISION (type)))
5469 {
5470 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5471 inner = TREE_OPERAND (inner, 0);
5472 }
5473
5474 /* If we are going to be able to omit the AND below, we must do our
5475 operations as unsigned. If we must use the AND, we have a choice.
5476 Normally unsigned is faster, but for some machines signed is. */
5477 #ifdef LOAD_EXTEND_OP
5478 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5479 #else
5480 ops_unsigned = 1;
5481 #endif
5482
5483 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5484 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5485 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5486 inner = fold_convert (intermediate_type, inner);
5487
5488 if (bitnum != 0)
5489 inner = build2 (RSHIFT_EXPR, intermediate_type,
5490 inner, size_int (bitnum));
5491
5492 if (code == EQ_EXPR)
5493 inner = build2 (BIT_XOR_EXPR, intermediate_type,
5494 inner, integer_one_node);
5495
5496 /* Put the AND last so it can combine with more things. */
5497 inner = build2 (BIT_AND_EXPR, intermediate_type,
5498 inner, integer_one_node);
5499
5500 /* Make sure to return the proper type. */
5501 inner = fold_convert (result_type, inner);
5502
5503 return inner;
5504 }
5505 return NULL_TREE;
5506 }
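
/* Illustrative sketch, not part of GCC: the two rewrites performed by
   fold_single_bit_test on a 32-bit int.  A test of bit 3 becomes a
   shift-and-mask, and a test of the sign bit becomes a signed compare
   against zero.  Both printf calls below print "1 1".

       #include <stdio.h>

       int
       main (void)
       {
         int a = 12;
         printf ("%d %d\n", (a & 8) != 0, (a >> 3) & 1);
         a = -5;
         printf ("%d %d\n", (a & (1u << 31)) != 0, a < 0);
         return 0;
       }  */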
5507
5508 /* Check whether we are allowed to reorder operands arg0 and arg1,
5509 such that the evaluation of arg1 occurs before arg0. */
5510
5511 static bool
5512 reorder_operands_p (tree arg0, tree arg1)
5513 {
5514 if (! flag_evaluation_order)
5515 return true;
5516 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5517 return true;
5518 return ! TREE_SIDE_EFFECTS (arg0)
5519 && ! TREE_SIDE_EFFECTS (arg1);
5520 }
5521
5522 /* Test whether it is preferable to swap two operands, ARG0 and
5523 ARG1, for example because ARG0 is an integer constant and ARG1
5524 isn't. If REORDER is true, only recommend swapping if we can
5525 evaluate the operands in reverse order. */
5526
5527 bool
5528 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5529 {
5530 STRIP_SIGN_NOPS (arg0);
5531 STRIP_SIGN_NOPS (arg1);
5532
5533 if (TREE_CODE (arg1) == INTEGER_CST)
5534 return 0;
5535 if (TREE_CODE (arg0) == INTEGER_CST)
5536 return 1;
5537
5538 if (TREE_CODE (arg1) == REAL_CST)
5539 return 0;
5540 if (TREE_CODE (arg0) == REAL_CST)
5541 return 1;
5542
5543 if (TREE_CODE (arg1) == COMPLEX_CST)
5544 return 0;
5545 if (TREE_CODE (arg0) == COMPLEX_CST)
5546 return 1;
5547
5548 if (TREE_CONSTANT (arg1))
5549 return 0;
5550 if (TREE_CONSTANT (arg0))
5551 return 1;
5552
5553 if (optimize_size)
5554 return 0;
5555
5556 if (reorder && flag_evaluation_order
5557 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5558 return 0;
5559
5560 if (DECL_P (arg1))
5561 return 0;
5562 if (DECL_P (arg0))
5563 return 1;
5573
5574 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
5575 for commutative and comparison operators. Ensuring a canonical
5576 form allows the optimizers to find additional redundancies without
5577 having to explicitly check for both orderings. */
5578 if (TREE_CODE (arg0) == SSA_NAME
5579 && TREE_CODE (arg1) == SSA_NAME
5580 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5581 return 1;
5582
5583 return 0;
5584 }
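
/* Illustrative sketch, not part of GCC: why a canonical operand order
   helps.  If "a + 5" and "5 + a" are both rewritten with the constant
   second, a value-numbering pass can recognize them as the same
   expression with one structural comparison instead of trying both
   orderings.  A miniature of the precedence ladder above might read:

       static int
       swap_p (int arg0_is_const, int arg1_is_const)
       {
         if (arg1_is_const)
           return 0;   -- constant is already on the right
         if (arg0_is_const)
           return 1;   -- move the constant to the right
         return 0;     -- no preference
       }  */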
5585
5586 /* Perform constant folding and related simplification of EXPR.
5587 The related simplifications include x*1 => x, x*0 => 0, etc.,
5588 and application of the associative law.
5589 NOP_EXPR conversions may be removed freely (as long as we
5590 are careful not to change the type of the overall expression).
5591 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5592 but we can constant-fold them if they have constant operands. */
5593
5594 #ifdef ENABLE_FOLD_CHECKING
5595 # define fold(x) fold_1 (x)
5596 static tree fold_1 (tree);
5597 static
5598 #endif
5599 tree
5600 fold (tree expr)
5601 {
5602 const tree t = expr;
5603 const tree type = TREE_TYPE (expr);
5604 tree t1 = NULL_TREE;
5605 tree tem;
5606 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5607 enum tree_code code = TREE_CODE (t);
5608 int kind = TREE_CODE_CLASS (code);
5609
5610 /* WINS will be nonzero when the switch is done
5611 if all operands are constant. */
5612 int wins = 1;
5613
5614 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5615 Likewise for a SAVE_EXPR that's already been evaluated. */
5616 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5617 return t;
5618
5619 /* Return right away if a constant. */
5620 if (kind == 'c')
5621 return t;
5622
5623 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5624 {
5625 tree subop;
5626
5627 /* Special case for conversion ops that can have fixed point args. */
5628 arg0 = TREE_OPERAND (t, 0);
5629
5630 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5631 if (arg0 != 0)
5632 STRIP_SIGN_NOPS (arg0);
5633
5634 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5635 subop = TREE_REALPART (arg0);
5636 else
5637 subop = arg0;
5638
5639 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5640 && TREE_CODE (subop) != REAL_CST)
5641 /* Note that TREE_CONSTANT isn't enough:
5642 static var addresses are constant but we can't
5643 do arithmetic on them. */
5644 wins = 0;
5645 }
5646 else if (IS_EXPR_CODE_CLASS (kind))
5647 {
5648 int len = first_rtl_op (code);
5649 int i;
5650 for (i = 0; i < len; i++)
5651 {
5652 tree op = TREE_OPERAND (t, i);
5653 tree subop;
5654
5655 if (op == 0)
5656 continue; /* Valid for CALL_EXPR, at least. */
5657
5658 /* Strip any conversions that don't change the mode. This is
5659 safe for every expression, except for a comparison expression
5660 because its signedness is derived from its operands. So, in
5661 the latter case, only strip conversions that don't change the
5662 signedness.
5663
5664 Note that this is done as an internal manipulation within the
5665 constant folder, in order to find the simplest representation
5666 of the arguments so that their form can be studied. In any
5667 case, the appropriate type conversions should be put back in
5668 the tree that will get out of the constant folder. */
5669 if (kind == '<')
5670 STRIP_SIGN_NOPS (op);
5671 else
5672 STRIP_NOPS (op);
5673
5674 if (TREE_CODE (op) == COMPLEX_CST)
5675 subop = TREE_REALPART (op);
5676 else
5677 subop = op;
5678
5679 if (TREE_CODE (subop) != INTEGER_CST
5680 && TREE_CODE (subop) != REAL_CST)
5681 /* Note that TREE_CONSTANT isn't enough:
5682 static var addresses are constant but we can't
5683 do arithmetic on them. */
5684 wins = 0;
5685
5686 if (i == 0)
5687 arg0 = op;
5688 else if (i == 1)
5689 arg1 = op;
5690 }
5691 }
5692
5693 /* If this is a commutative operation, and ARG0 is a constant, move it
5694 to ARG1 to reduce the number of tests below. */
5695 if (commutative_tree_code (code)
5696 && tree_swap_operands_p (arg0, arg1, true))
5697 return fold (build2 (code, type, TREE_OPERAND (t, 1),
5698 TREE_OPERAND (t, 0)));
5699
5700 /* Now WINS is set as described above,
5701 ARG0 is the first operand of EXPR,
5702 and ARG1 is the second operand (if it has more than one operand).
5703
5704 First check for cases where an arithmetic operation is applied to a
5705 compound, conditional, or comparison operation. Push the arithmetic
5706 operation inside the compound or conditional to see if any folding
5707 can then be done. Convert comparison to conditional for this purpose.
5708 This also optimizes non-constant cases that used to be done in
5709 expand_expr.
5710
5711 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
5712 one of the operands is a comparison and the other is a comparison, a
5713 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5714 code below would make the expression more complex. Change it to a
5715 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5716 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5717
5718 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5719 || code == EQ_EXPR || code == NE_EXPR)
5720 && ((truth_value_p (TREE_CODE (arg0))
5721 && (truth_value_p (TREE_CODE (arg1))
5722 || (TREE_CODE (arg1) == BIT_AND_EXPR
5723 && integer_onep (TREE_OPERAND (arg1, 1)))))
5724 || (truth_value_p (TREE_CODE (arg1))
5725 && (truth_value_p (TREE_CODE (arg0))
5726 || (TREE_CODE (arg0) == BIT_AND_EXPR
5727 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5728 {
5729 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5730 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5731 : TRUTH_XOR_EXPR,
5732 type, fold_convert (boolean_type_node, arg0),
5733 fold_convert (boolean_type_node, arg1)));
5734
5735 if (code == EQ_EXPR)
5736 tem = invert_truthvalue (tem);
5737
5738 return tem;
5739 }
5740
5741 if (TREE_CODE_CLASS (code) == '1')
5742 {
5743 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5744 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5745 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5746 else if (TREE_CODE (arg0) == COND_EXPR)
5747 {
5748 tree arg01 = TREE_OPERAND (arg0, 1);
5749 tree arg02 = TREE_OPERAND (arg0, 2);
5750 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5751 arg01 = fold (build1 (code, type, arg01));
5752 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5753 arg02 = fold (build1 (code, type, arg02));
5754 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5755 arg01, arg02));
5756
5757 /* If this was a conversion, and all we did was to move it
5758 inside the COND_EXPR, bring it back out. But leave it if
5759 it is a conversion from integer to integer and the
5760 result precision is no wider than a word since such a
5761 conversion is cheap and may be optimized away by combine,
5762 while it couldn't if it were outside the COND_EXPR. Then return
5763 so we don't get into an infinite recursion loop taking the
5764 conversion out and then back in. */
5765
5766 if ((code == NOP_EXPR || code == CONVERT_EXPR
5767 || code == NON_LVALUE_EXPR)
5768 && TREE_CODE (tem) == COND_EXPR
5769 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5770 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5771 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5772 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5773 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5774 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5775 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5776 && (INTEGRAL_TYPE_P
5777 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5778 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5779 tem = build1 (code, type,
5780 build3 (COND_EXPR,
5781 TREE_TYPE (TREE_OPERAND
5782 (TREE_OPERAND (tem, 1), 0)),
5783 TREE_OPERAND (tem, 0),
5784 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5785 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5786 return tem;
5787 }
5788 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5789 {
5790 if (TREE_CODE (type) == BOOLEAN_TYPE)
5791 {
5792 arg0 = copy_node (arg0);
5793 TREE_TYPE (arg0) = type;
5794 return arg0;
5795 }
5796 else if (TREE_CODE (type) != INTEGER_TYPE)
5797 return fold (build3 (COND_EXPR, type, arg0,
5798 fold (build1 (code, type,
5799 integer_one_node)),
5800 fold (build1 (code, type,
5801 integer_zero_node))));
5802 }
5803 }
5804 else if (TREE_CODE_CLASS (code) == '<'
5805 && TREE_CODE (arg0) == COMPOUND_EXPR)
5806 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5807 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
5808 else if (TREE_CODE_CLASS (code) == '<'
5809 && TREE_CODE (arg1) == COMPOUND_EXPR)
5810 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5811 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
5812 else if (TREE_CODE_CLASS (code) == '2'
5813 || TREE_CODE_CLASS (code) == '<')
5814 {
5815 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5816 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5817 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
5818 arg1)));
5819 if (TREE_CODE (arg1) == COMPOUND_EXPR
5820 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5821 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5822 fold (build2 (code, type,
5823 arg0, TREE_OPERAND (arg1, 1))));
5824
5825 if (TREE_CODE (arg0) == COND_EXPR
5826 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5827 {
5828 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5829 /*cond_first_p=*/1);
5830 if (tem != NULL_TREE)
5831 return tem;
5832 }
5833
5834 if (TREE_CODE (arg1) == COND_EXPR
5835 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5836 {
5837 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5838 /*cond_first_p=*/0);
5839 if (tem != NULL_TREE)
5840 return tem;
5841 }
5842 }
5843
5844 switch (code)
5845 {
5846 case CONST_DECL:
5847 return fold (DECL_INITIAL (t));
5848
5849 case NOP_EXPR:
5850 case FLOAT_EXPR:
5851 case CONVERT_EXPR:
5852 case FIX_TRUNC_EXPR:
5853 case FIX_CEIL_EXPR:
5854 case FIX_FLOOR_EXPR:
5855 case FIX_ROUND_EXPR:
5856 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5857 return TREE_OPERAND (t, 0);
5858
5859 /* Handle cases of two conversions in a row. */
5860 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5861 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5862 {
5863 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5864 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5865 int inside_int = INTEGRAL_TYPE_P (inside_type);
5866 int inside_ptr = POINTER_TYPE_P (inside_type);
5867 int inside_float = FLOAT_TYPE_P (inside_type);
5868 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5869 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5870 int inter_int = INTEGRAL_TYPE_P (inter_type);
5871 int inter_ptr = POINTER_TYPE_P (inter_type);
5872 int inter_float = FLOAT_TYPE_P (inter_type);
5873 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5874 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5875 int final_int = INTEGRAL_TYPE_P (type);
5876 int final_ptr = POINTER_TYPE_P (type);
5877 int final_float = FLOAT_TYPE_P (type);
5878 unsigned int final_prec = TYPE_PRECISION (type);
5879 int final_unsignedp = TYPE_UNSIGNED (type);
5880
5881 /* In addition to the cases of two conversions in a row
5882 handled below, if we are converting something to its own
5883 type via an object of identical or wider precision, neither
5884 conversion is needed. */
5885 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5886 && ((inter_int && final_int) || (inter_float && final_float))
5887 && inter_prec >= final_prec)
5888 return fold (build1 (code, type,
5889 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5890
5891 /* Likewise, if the intermediate and final types are either both
5892 float or both integer, we don't need the middle conversion if
5893 it is wider than the final type and doesn't change the signedness
5894 (for integers). Avoid this if the final type is a pointer
5895 since then we sometimes need the inner conversion. Likewise if
5896 the outer has a precision not equal to the size of its mode. */
5897 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5898 || (inter_float && inside_float))
5899 && inter_prec >= inside_prec
5900 && (inter_float || inter_unsignedp == inside_unsignedp)
5901 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5902 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5903 && ! final_ptr)
5904 return fold (build1 (code, type,
5905 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5906
5907 /* If we have a sign-extension of a zero-extended value, we can
5908 replace that by a single zero-extension. */
5909 if (inside_int && inter_int && final_int
5910 && inside_prec < inter_prec && inter_prec < final_prec
5911 && inside_unsignedp && !inter_unsignedp)
5912 return fold (build1 (code, type,
5913 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5914
5915 /* Two conversions in a row are not needed unless:
5916 - some conversion is floating-point (overstrict for now), or
5917 - the intermediate type is narrower than both initial and
5918 final, or
5919 - the intermediate type and innermost type differ in signedness,
5920 and the outermost type is wider than the intermediate, or
5921 - the initial type is a pointer type and the precisions of the
5922 intermediate and final types differ, or
5923 - the final type is a pointer type and the precisions of the
5924 initial and intermediate types differ. */
5925 if (! inside_float && ! inter_float && ! final_float
5926 && (inter_prec > inside_prec || inter_prec > final_prec)
5927 && ! (inside_int && inter_int
5928 && inter_unsignedp != inside_unsignedp
5929 && inter_prec < final_prec)
5930 && ((inter_unsignedp && inter_prec > inside_prec)
5931 == (final_unsignedp && final_prec > inter_prec))
5932 && ! (inside_ptr && inter_prec != final_prec)
5933 && ! (final_ptr && inside_prec != inter_prec)
5934 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5935 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5936 && ! final_ptr)
5937 return fold (build1 (code, type,
5938 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5939 }
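
      /* Illustrative sketch, not part of GCC: the kind of conversion pair
         the tests above eliminate.  On a typical LP64 target, in
         "(int) (long) i" for an int i the intermediate type is wider and
         of the same signedness, so both casts fold away, whereas in
         "(int) (short) i" the narrowing through short must be kept:

             int i = 100000;
             (int) (long) i    -- folds to plain i
             (int) (short) i   -- truncation, must stay  */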
5940
5941 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5942 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5943 /* Detect assigning a bitfield. */
5944 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5945 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5946 {
5947 /* Don't leave an assignment inside a conversion
5948 unless assigning a bitfield. */
5949 tree prev = TREE_OPERAND (t, 0);
5950 tem = copy_node (t);
5951 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5952 /* First do the assignment, then return converted constant. */
5953 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5954 TREE_NO_WARNING (tem) = 1;
5955 TREE_USED (tem) = 1;
5956 return tem;
5957 }
5958
5959 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5960 constant (if x has signed type, the sign bit cannot be set
5961 in c). This folds extension into the BIT_AND_EXPR. */
5962 if (INTEGRAL_TYPE_P (type)
5963 && TREE_CODE (type) != BOOLEAN_TYPE
5964 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5965 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5966 {
5967 tree and = TREE_OPERAND (t, 0);
5968 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5969 int change = 0;
5970
5971 if (TYPE_UNSIGNED (TREE_TYPE (and))
5972 || (TYPE_PRECISION (type)
5973 <= TYPE_PRECISION (TREE_TYPE (and))))
5974 change = 1;
5975 else if (TYPE_PRECISION (TREE_TYPE (and1))
5976 <= HOST_BITS_PER_WIDE_INT
5977 && host_integerp (and1, 1))
5978 {
5979 unsigned HOST_WIDE_INT cst;
5980
5981 cst = tree_low_cst (and1, 1);
5982 cst &= (HOST_WIDE_INT) -1
5983 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5984 change = (cst == 0);
5985 #ifdef LOAD_EXTEND_OP
5986 if (change
5987 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5988 == ZERO_EXTEND))
5989 {
5990 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5991 and0 = fold_convert (uns, and0);
5992 and1 = fold_convert (uns, and1);
5993 }
5994 #endif
5995 }
5996 if (change)
5997 return fold (build2 (BIT_AND_EXPR, type,
5998 fold_convert (type, and0),
5999 fold_convert (type, and1)));
6000 }
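
      /* Illustrative sketch, not part of GCC: the widening-through-AND
         fold just above, on concrete bits.  Because the mask 0x3f clears
         the high bits, extending before or after the AND gives the same
         value, so the extension merges into the BIT_AND_EXPR:

             unsigned char c = 0xd5;
             (int) (unsigned char) (c & 0x3f)  ==  (int) c & 0x3f
                                               ==  0x15  */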
6001
6002 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6003 T2 being pointers to types of the same size. */
6004 if (POINTER_TYPE_P (TREE_TYPE (t))
6005 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6006 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6007 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6008 {
6009 tree arg00 = TREE_OPERAND (arg0, 0);
6010 tree t0 = TREE_TYPE (t);
6011 tree t1 = TREE_TYPE (arg00);
6012 tree tt0 = TREE_TYPE (t0);
6013 tree tt1 = TREE_TYPE (t1);
6014 tree s0 = TYPE_SIZE (tt0);
6015 tree s1 = TYPE_SIZE (tt1);
6016
6017 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6018 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6019 TREE_OPERAND (arg0, 1));
6020 }
6021
6022 tem = fold_convert_const (code, type, arg0);
6023 return tem ? tem : t;
6024
6025 case VIEW_CONVERT_EXPR:
6026 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6027 return build1 (VIEW_CONVERT_EXPR, type,
6028 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6029 return t;
6030
6031 case COMPONENT_REF:
6032 if (TREE_CODE (arg0) == CONSTRUCTOR
6033 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6034 {
6035 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6036 if (m)
6037 return TREE_VALUE (m);
6038 }
6039 return t;
6040
6041 case RANGE_EXPR:
6042 if (TREE_CONSTANT (t) != wins)
6043 {
6044 tem = copy_node (t);
6045 TREE_CONSTANT (tem) = wins;
6046 TREE_INVARIANT (tem) = wins;
6047 return tem;
6048 }
6049 return t;
6050
6051 case NEGATE_EXPR:
6052 if (negate_expr_p (arg0))
6053 return fold_convert (type, negate_expr (arg0));
6054 return t;
6055
6056 case ABS_EXPR:
6057 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6058 return fold_abs_const (arg0, type);
6059 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6060 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6061 /* Convert fabs((double)float) into (double)fabsf(float). */
6062 else if (TREE_CODE (arg0) == NOP_EXPR
6063 && TREE_CODE (type) == REAL_TYPE)
6064 {
6065 tree targ0 = strip_float_extensions (arg0);
6066 if (targ0 != arg0)
6067 return fold_convert (type, fold (build1 (ABS_EXPR,
6068 TREE_TYPE (targ0),
6069 targ0)));
6070 }
6071 else if (tree_expr_nonnegative_p (arg0))
6072 return arg0;
6073 return t;
6074
6075 case CONJ_EXPR:
6076 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6077 return fold_convert (type, arg0);
6078 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6079 return build2 (COMPLEX_EXPR, type,
6080 TREE_OPERAND (arg0, 0),
6081 negate_expr (TREE_OPERAND (arg0, 1)));
6082 else if (TREE_CODE (arg0) == COMPLEX_CST)
6083 return build_complex (type, TREE_REALPART (arg0),
6084 negate_expr (TREE_IMAGPART (arg0)));
6085 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6086 return fold (build2 (TREE_CODE (arg0), type,
6087 fold (build1 (CONJ_EXPR, type,
6088 TREE_OPERAND (arg0, 0))),
6089 fold (build1 (CONJ_EXPR, type,
6090 TREE_OPERAND (arg0, 1)))));
6091 else if (TREE_CODE (arg0) == CONJ_EXPR)
6092 return TREE_OPERAND (arg0, 0);
6093 return t;
6094
6095 case BIT_NOT_EXPR:
6096 if (TREE_CODE (arg0) == INTEGER_CST)
6097 return fold_not_const (arg0, type);
6098 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6099 return TREE_OPERAND (arg0, 0);
6100 return t;
6101
6102 case PLUS_EXPR:
6103 /* A + (-B) -> A - B */
6104 if (TREE_CODE (arg1) == NEGATE_EXPR)
6105 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6106 /* (-A) + B -> B - A */
6107 if (TREE_CODE (arg0) == NEGATE_EXPR
6108 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6109 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6110 if (! FLOAT_TYPE_P (type))
6111 {
6112 if (integer_zerop (arg1))
6113 return non_lvalue (fold_convert (type, arg0));
6114
6115 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6116 with a constant, and the two constants have no bits in common,
6117 we should treat this as a BIT_IOR_EXPR since this may produce more
6118 simplifications. */
6119 if (TREE_CODE (arg0) == BIT_AND_EXPR
6120 && TREE_CODE (arg1) == BIT_AND_EXPR
6121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6122 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6123 && integer_zerop (const_binop (BIT_AND_EXPR,
6124 TREE_OPERAND (arg0, 1),
6125 TREE_OPERAND (arg1, 1), 0)))
6126 {
6127 code = BIT_IOR_EXPR;
6128 goto bit_ior;
6129 }
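
	  /* Illustrative sketch, not part of GCC: with disjoint masks the
	     sum can never produce a carry, so PLUS and IOR agree bit for
	     bit, e.g. for unsigned a = 0xab, b = 0xcd:

	         (a & 0x0f) + (b & 0xf0)  ==  0x0b + 0xc0  ==  0xcb
	         (a & 0x0f) | (b & 0xf0)  ==  0x0b | 0xc0  ==  0xcb  */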
6130
6131 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6132 (plus (plus (mult) (mult)) (foo)) so that we can
6133 take advantage of the factoring cases below. */
6134 if ((TREE_CODE (arg0) == PLUS_EXPR
6135 && TREE_CODE (arg1) == MULT_EXPR)
6136 || (TREE_CODE (arg1) == PLUS_EXPR
6137 && TREE_CODE (arg0) == MULT_EXPR))
6138 {
6139 tree parg0, parg1, parg, marg;
6140
6141 if (TREE_CODE (arg0) == PLUS_EXPR)
6142 parg = arg0, marg = arg1;
6143 else
6144 parg = arg1, marg = arg0;
6145 parg0 = TREE_OPERAND (parg, 0);
6146 parg1 = TREE_OPERAND (parg, 1);
6147 STRIP_NOPS (parg0);
6148 STRIP_NOPS (parg1);
6149
6150 if (TREE_CODE (parg0) == MULT_EXPR
6151 && TREE_CODE (parg1) != MULT_EXPR)
6152 return fold (build2 (PLUS_EXPR, type,
6153 fold (build2 (PLUS_EXPR, type,
6154 fold_convert (type, parg0),
6155 fold_convert (type, marg))),
6156 fold_convert (type, parg1)));
6157 if (TREE_CODE (parg0) != MULT_EXPR
6158 && TREE_CODE (parg1) == MULT_EXPR)
6159 return fold (build2 (PLUS_EXPR, type,
6160 fold (build2 (PLUS_EXPR, type,
6161 fold_convert (type, parg1),
6162 fold_convert (type, marg))),
6163 fold_convert (type, parg0)));
6164 }
6165
6166 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6167 {
6168 tree arg00, arg01, arg10, arg11;
6169 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6170
6171 /* (A * C) + (B * C) -> (A+B) * C.
6172 We are most concerned about the case where C is a constant,
6173 but other combinations show up during loop reduction. Since
6174 it is not difficult, try all four possibilities. */
6175
6176 arg00 = TREE_OPERAND (arg0, 0);
6177 arg01 = TREE_OPERAND (arg0, 1);
6178 arg10 = TREE_OPERAND (arg1, 0);
6179 arg11 = TREE_OPERAND (arg1, 1);
6180 same = NULL_TREE;
6181
6182 if (operand_equal_p (arg01, arg11, 0))
6183 same = arg01, alt0 = arg00, alt1 = arg10;
6184 else if (operand_equal_p (arg00, arg10, 0))
6185 same = arg00, alt0 = arg01, alt1 = arg11;
6186 else if (operand_equal_p (arg00, arg11, 0))
6187 same = arg00, alt0 = arg01, alt1 = arg10;
6188 else if (operand_equal_p (arg01, arg10, 0))
6189 same = arg01, alt0 = arg00, alt1 = arg11;
6190
6191 /* No identical multiplicands; see if we can find a common
6192 power-of-two factor in non-power-of-two multiplies. This
6193 can help in multi-dimensional array access. */
6194 else if (TREE_CODE (arg01) == INTEGER_CST
6195 && TREE_CODE (arg11) == INTEGER_CST
6196 && TREE_INT_CST_HIGH (arg01) == 0
6197 && TREE_INT_CST_HIGH (arg11) == 0)
6198 {
6199 HOST_WIDE_INT int01, int11, tmp;
6200 int01 = TREE_INT_CST_LOW (arg01);
6201 int11 = TREE_INT_CST_LOW (arg11);
6202
6203 /* Move min of absolute values to int11. */
6204 if ((int01 >= 0 ? int01 : -int01)
6205 < (int11 >= 0 ? int11 : -int11))
6206 {
6207 tmp = int01, int01 = int11, int11 = tmp;
6208 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6209 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6210 }
6211
6212 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6213 {
6214 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6215 build_int_2 (int01 / int11, 0)));
6216 alt1 = arg10;
6217 same = arg11;
6218 }
6219 }
6220
6221 if (same)
6222 return fold (build2 (MULT_EXPR, type,
6223 fold (build2 (PLUS_EXPR, type,
6224 alt0, alt1)),
6225 same));
6226 }
6227 }
6228 else
6229 {
6230 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6231 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6232 return non_lvalue (fold_convert (type, arg0));
6233
6234 /* Likewise if the operands are reversed. */
6235 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6236 return non_lvalue (fold_convert (type, arg1));
6237
6238 /* Convert x+x into x*2.0. */
6239 if (operand_equal_p (arg0, arg1, 0)
6240 && SCALAR_FLOAT_TYPE_P (type))
6241 return fold (build2 (MULT_EXPR, type, arg0,
6242 build_real (type, dconst2)));
6243
6244 /* Convert x*c+x into x*(c+1). */
6245 if (flag_unsafe_math_optimizations
6246 && TREE_CODE (arg0) == MULT_EXPR
6247 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6248 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6249 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6250 {
6251 REAL_VALUE_TYPE c;
6252
6253 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6254 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6255 return fold (build2 (MULT_EXPR, type, arg1,
6256 build_real (type, c)));
6257 }
6258
6259 /* Convert x+x*c into x*(c+1). */
6260 if (flag_unsafe_math_optimizations
6261 && TREE_CODE (arg1) == MULT_EXPR
6262 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6263 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6264 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6265 {
6266 REAL_VALUE_TYPE c;
6267
6268 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6269 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6270 return fold (build2 (MULT_EXPR, type, arg0,
6271 build_real (type, c)));
6272 }
6273
6274 /* Convert x*c1+x*c2 into x*(c1+c2). */
6275 if (flag_unsafe_math_optimizations
6276 && TREE_CODE (arg0) == MULT_EXPR
6277 && TREE_CODE (arg1) == MULT_EXPR
6278 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6279 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6280 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6281 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6282 && operand_equal_p (TREE_OPERAND (arg0, 0),
6283 TREE_OPERAND (arg1, 0), 0))
6284 {
6285 REAL_VALUE_TYPE c1, c2;
6286
6287 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6288 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6289 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6290 return fold (build2 (MULT_EXPR, type,
6291 TREE_OPERAND (arg0, 0),
6292 build_real (type, c1)));
6293 }
6294 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6295 if (flag_unsafe_math_optimizations
6296 && TREE_CODE (arg1) == PLUS_EXPR
6297 && TREE_CODE (arg0) != MULT_EXPR)
6298 {
6299 tree tree10 = TREE_OPERAND (arg1, 0);
6300 tree tree11 = TREE_OPERAND (arg1, 1);
6301 if (TREE_CODE (tree11) == MULT_EXPR
6302 && TREE_CODE (tree10) == MULT_EXPR)
6303 {
6304 tree tree0;
6305 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6306 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6307 }
6308 }
6309 /* Convert (b*c + d*e) + a into b*c + (d*e + a) */
6310 if (flag_unsafe_math_optimizations
6311 && TREE_CODE (arg0) == PLUS_EXPR
6312 && TREE_CODE (arg1) != MULT_EXPR)
6313 {
6314 tree tree00 = TREE_OPERAND (arg0, 0);
6315 tree tree01 = TREE_OPERAND (arg0, 1);
6316 if (TREE_CODE (tree01) == MULT_EXPR
6317 && TREE_CODE (tree00) == MULT_EXPR)
6318 {
6319 tree tree0;
6320 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6321 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6322 }
6323 }
6324 }
6325
6326 bit_rotate:
6327 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6328 is a rotate of A by C1 bits. */
6329 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6330 is a rotate of A by B bits. */
6331 {
6332 enum tree_code code0, code1;
6333 code0 = TREE_CODE (arg0);
6334 code1 = TREE_CODE (arg1);
6335 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6336 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6337 && operand_equal_p (TREE_OPERAND (arg0, 0),
6338 TREE_OPERAND (arg1, 0), 0)
6339 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6340 {
6341 tree tree01, tree11;
6342 enum tree_code code01, code11;
6343
6344 tree01 = TREE_OPERAND (arg0, 1);
6345 tree11 = TREE_OPERAND (arg1, 1);
6346 STRIP_NOPS (tree01);
6347 STRIP_NOPS (tree11);
6348 code01 = TREE_CODE (tree01);
6349 code11 = TREE_CODE (tree11);
6350 if (code01 == INTEGER_CST
6351 && code11 == INTEGER_CST
6352 && TREE_INT_CST_HIGH (tree01) == 0
6353 && TREE_INT_CST_HIGH (tree11) == 0
6354 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6355 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6356 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6357 code0 == LSHIFT_EXPR ? tree01 : tree11);
6358 else if (code11 == MINUS_EXPR)
6359 {
6360 tree tree110, tree111;
6361 tree110 = TREE_OPERAND (tree11, 0);
6362 tree111 = TREE_OPERAND (tree11, 1);
6363 STRIP_NOPS (tree110);
6364 STRIP_NOPS (tree111);
6365 if (TREE_CODE (tree110) == INTEGER_CST
6366 && 0 == compare_tree_int (tree110,
6367 TYPE_PRECISION
6368 (TREE_TYPE (TREE_OPERAND
6369 (arg0, 0))))
6370 && operand_equal_p (tree01, tree111, 0))
6371 return build2 ((code0 == LSHIFT_EXPR
6372 ? LROTATE_EXPR
6373 : RROTATE_EXPR),
6374 type, TREE_OPERAND (arg0, 0), tree01);
6375 }
6376 else if (code01 == MINUS_EXPR)
6377 {
6378 tree tree010, tree011;
6379 tree010 = TREE_OPERAND (tree01, 0);
6380 tree011 = TREE_OPERAND (tree01, 1);
6381 STRIP_NOPS (tree010);
6382 STRIP_NOPS (tree011);
6383 if (TREE_CODE (tree010) == INTEGER_CST
6384 && 0 == compare_tree_int (tree010,
6385 TYPE_PRECISION
6386 (TREE_TYPE (TREE_OPERAND
6387 (arg0, 0))))
6388 && operand_equal_p (tree11, tree011, 0))
6389 return build2 ((code0 != LSHIFT_EXPR
6390 ? LROTATE_EXPR
6391 : RROTATE_EXPR),
6392 type, TREE_OPERAND (arg0, 0), tree11);
6393 }
6394 }
6395 }
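
    /* Illustrative sketch, not part of GCC: the rotate pattern recognized
       above.  For 32-bit unsigned x and 0 < n < 32 the two terms of

           (x << n) + (x >> (32 - n))

       have no overlapping bits, so the addition is a rotate left by n and
       can become a single LROTATE_EXPR (usually one rotate instruction).
       For example, with x = 0x80000001 and n = 4:

           (x << 4) = 0x00000010,  (x >> 28) = 0x00000008,
           sum = 0x00000018 = x rotated left by 4.  */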
6396
6397 associate:
6398 /* In most languages, we can't associate operations on floats through
6399 parentheses. Rather than remember where the parentheses were, we
6400 don't associate floats at all, unless the user has specified
6401 -funsafe-math-optimizations. */
6402
6403 if (! wins
6404 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6405 {
6406 tree var0, con0, lit0, minus_lit0;
6407 tree var1, con1, lit1, minus_lit1;
6408
6409 /* Split both trees into variables, constants, and literals. Then
6410 associate each group together, the constants with literals,
6411 then the result with variables. This increases the chances of
6412 literals being recombined later and of generating relocatable
6413 expressions for the sum of a constant and literal. */
6414 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6415 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6416 code == MINUS_EXPR);
6417
6418 /* Only do something if we found more than two objects. Otherwise,
6419 nothing has changed and we risk infinite recursion. */
6420 if (2 < ((var0 != 0) + (var1 != 0)
6421 + (con0 != 0) + (con1 != 0)
6422 + (lit0 != 0) + (lit1 != 0)
6423 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6424 {
6425 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6426 if (code == MINUS_EXPR)
6427 code = PLUS_EXPR;
6428
6429 var0 = associate_trees (var0, var1, code, type);
6430 con0 = associate_trees (con0, con1, code, type);
6431 lit0 = associate_trees (lit0, lit1, code, type);
6432 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6433
6434 /* Preserve the MINUS_EXPR if the negative part of the literal is
6435 greater than the positive part. Otherwise, the multiplicative
6436 folding code (i.e. extract_muldiv) may be fooled when
6437 unsigned constants are subtracted, like in the following
6438 example: ((X*2 + 4) - 8U)/2. */
6439 if (minus_lit0 && lit0)
6440 {
6441 if (TREE_CODE (lit0) == INTEGER_CST
6442 && TREE_CODE (minus_lit0) == INTEGER_CST
6443 && tree_int_cst_lt (lit0, minus_lit0))
6444 {
6445 minus_lit0 = associate_trees (minus_lit0, lit0,
6446 MINUS_EXPR, type);
6447 lit0 = 0;
6448 }
6449 else
6450 {
6451 lit0 = associate_trees (lit0, minus_lit0,
6452 MINUS_EXPR, type);
6453 minus_lit0 = 0;
6454 }
6455 }
6456 if (minus_lit0)
6457 {
6458 if (con0 == 0)
6459 return fold_convert (type,
6460 associate_trees (var0, minus_lit0,
6461 MINUS_EXPR, type));
6462 else
6463 {
6464 con0 = associate_trees (con0, minus_lit0,
6465 MINUS_EXPR, type);
6466 return fold_convert (type,
6467 associate_trees (var0, con0,
6468 PLUS_EXPR, type));
6469 }
6470 }
6471
6472 con0 = associate_trees (con0, lit0, code, type);
6473 return fold_convert (type, associate_trees (var0, con0,
6474 code, type));
6475 }
6476 }
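
    /* Illustrative sketch, not part of GCC: what splitting and
       reassociating buys.  In "(x + 5) + (y + 7)" the variables and the
       literals are grouped separately, so the literals combine at compile
       time:

           (x + 5) + (y + 7)  -->  (x + y) + 12

       leaving one fewer runtime addition and a literal that later folds
       can see directly.  */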
6477
6478 binary:
6479 if (wins)
6480 t1 = const_binop (code, arg0, arg1, 0);
6481 if (t1 != NULL_TREE)
6482 {
6483 /* The return value should always have
6484 the same type as the original expression. */
6485 if (TREE_TYPE (t1) != type)
6486 t1 = fold_convert (type, t1);
6487
6488 return t1;
6489 }
6490 return t;
6491
6492 case MINUS_EXPR:
6493 /* A - (-B) -> A + B */
6494 if (TREE_CODE (arg1) == NEGATE_EXPR)
6495 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6496 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6497 if (TREE_CODE (arg0) == NEGATE_EXPR
6498 && (FLOAT_TYPE_P (type)
6499 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6500 && negate_expr_p (arg1)
6501 && reorder_operands_p (arg0, arg1))
6502 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6503 TREE_OPERAND (arg0, 0)));
6504
6505 if (! FLOAT_TYPE_P (type))
6506 {
6507 if (! wins && integer_zerop (arg0))
6508 return negate_expr (fold_convert (type, arg1));
6509 if (integer_zerop (arg1))
6510 return non_lvalue (fold_convert (type, arg0));
6511
6512 /* Fold A - (A & B) into ~B & A. */
6513 if (!TREE_SIDE_EFFECTS (arg0)
6514 && TREE_CODE (arg1) == BIT_AND_EXPR)
6515 {
6516 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6517 return fold (build2 (BIT_AND_EXPR, type,
6518 fold (build1 (BIT_NOT_EXPR, type,
6519 TREE_OPERAND (arg1, 0))),
6520 arg0));
6521 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6522 return fold (build2 (BIT_AND_EXPR, type,
6523 fold (build1 (BIT_NOT_EXPR, type,
6524 TREE_OPERAND (arg1, 1))),
6525 arg0));
6526 }
6527
6528 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6529 any power of 2 minus 1. */
6530 if (TREE_CODE (arg0) == BIT_AND_EXPR
6531 && TREE_CODE (arg1) == BIT_AND_EXPR
6532 && operand_equal_p (TREE_OPERAND (arg0, 0),
6533 TREE_OPERAND (arg1, 0), 0))
6534 {
6535 tree mask0 = TREE_OPERAND (arg0, 1);
6536 tree mask1 = TREE_OPERAND (arg1, 1);
6537 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6538
6539 if (operand_equal_p (tem, mask1, 0))
6540 {
6541 tem = fold (build2 (BIT_XOR_EXPR, type,
6542 TREE_OPERAND (arg0, 0), mask1));
6543 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6544 }
6545 }
6546 }
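
      /* Illustrative sketch, not part of GCC: the "A - (A & B)" fold
         above, on concrete bits.  Subtracting the bits common to A and B
         from A leaves exactly the bits of A that are not in B, e.g. for
         unsigned a = 0xf0f0, b = 0x3c3c:

             a & b       ==  0x3030
             a - (a & b) ==  0xc0c0  ==  ~b & a  */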
6547
6548 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6549 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6550 return non_lvalue (fold_convert (type, arg0));
6551
6552 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6553 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6554 (-ARG1 + ARG0) reduces to -ARG1. */
6555 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6556 return negate_expr (fold_convert (type, arg1));
6557
6558 /* Fold &x - &x. This can happen from &x.foo - &x.
6559 This is unsafe for certain floats even in non-IEEE formats.
6560 In IEEE, it is unsafe because it does wrong for NaNs.
6561 Also note that operand_equal_p is always false if an operand
6562 is volatile. */
6563
6564 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6565 && operand_equal_p (arg0, arg1, 0))
6566 return fold_convert (type, integer_zero_node);
6567
6568 /* A - B -> A + (-B) if B is easily negatable. */
6569 if (!wins && negate_expr_p (arg1)
6570 && (FLOAT_TYPE_P (type)
6571 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6572 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6573
6574 if (TREE_CODE (arg0) == MULT_EXPR
6575 && TREE_CODE (arg1) == MULT_EXPR
6576 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6577 {
6578 /* (A * C) - (B * C) -> (A-B) * C. */
6579 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6580 TREE_OPERAND (arg1, 1), 0))
6581 return fold (build2 (MULT_EXPR, type,
6582 fold (build2 (MINUS_EXPR, type,
6583 TREE_OPERAND (arg0, 0),
6584 TREE_OPERAND (arg1, 0))),
6585 TREE_OPERAND (arg0, 1)));
6586 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6587 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6588 TREE_OPERAND (arg1, 0), 0))
6589 return fold (build2 (MULT_EXPR, type,
6590 TREE_OPERAND (arg0, 0),
6591 fold (build2 (MINUS_EXPR, type,
6592 TREE_OPERAND (arg0, 1),
6593 TREE_OPERAND (arg1, 1)))));
6594 }
6595
6596 goto associate;
6597
6598 case MULT_EXPR:
6599 /* (-A) * (-B) -> A * B */
6600 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6601 return fold (build2 (MULT_EXPR, type,
6602 TREE_OPERAND (arg0, 0),
6603 negate_expr (arg1)));
6604 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6605 return fold (build2 (MULT_EXPR, type,
6606 negate_expr (arg0),
6607 TREE_OPERAND (arg1, 0)));
6608
6609 if (! FLOAT_TYPE_P (type))
6610 {
6611 if (integer_zerop (arg1))
6612 return omit_one_operand (type, arg1, arg0);
6613 if (integer_onep (arg1))
6614 return non_lvalue (fold_convert (type, arg0));
6615
6616 /* (a * (1 << b)) is (a << b) */
6617 if (TREE_CODE (arg1) == LSHIFT_EXPR
6618 && integer_onep (TREE_OPERAND (arg1, 0)))
6619 return fold (build2 (LSHIFT_EXPR, type, arg0,
6620 TREE_OPERAND (arg1, 1)));
6621 if (TREE_CODE (arg0) == LSHIFT_EXPR
6622 && integer_onep (TREE_OPERAND (arg0, 0)))
6623 return fold (build2 (LSHIFT_EXPR, type, arg1,
6624 TREE_OPERAND (arg0, 1)));
6625
6626 if (TREE_CODE (arg1) == INTEGER_CST
6627 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6628 fold_convert (type, arg1),
6629 code, NULL_TREE)))
6630 return fold_convert (type, tem);
6631
6632 }
6633 else
6634 {
6635 /* Maybe fold x * 0 to 0. The expressions aren't the same
6636 when x is NaN, since x * 0 is also NaN. Nor are they the
6637 same in modes with signed zeros, since multiplying a
6638 negative value by 0 gives -0, not +0. */
6639 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6640 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6641 && real_zerop (arg1))
6642 return omit_one_operand (type, arg1, arg0);
6643 /* In IEEE floating point, x*1 is not equivalent to x for sNaNs. */
6644 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6645 && real_onep (arg1))
6646 return non_lvalue (fold_convert (type, arg0));
6647
6648 /* Transform x * -1.0 into -x. */
6649 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6650 && real_minus_onep (arg1))
6651 return fold_convert (type, negate_expr (arg0));
6652
6653 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6654 if (flag_unsafe_math_optimizations
6655 && TREE_CODE (arg0) == RDIV_EXPR
6656 && TREE_CODE (arg1) == REAL_CST
6657 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6658 {
6659 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6660 arg1, 0);
6661 if (tem)
6662 return fold (build2 (RDIV_EXPR, type, tem,
6663 TREE_OPERAND (arg0, 1)));
6664 }
6665
6666 if (flag_unsafe_math_optimizations)
6667 {
6668 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6669 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6670
6671 /* Optimizations of root(...)*root(...). */
6672 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6673 {
6674 tree rootfn, arg, arglist;
6675 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6676 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6677
6678 /* Optimize sqrt(x)*sqrt(x) as x. */
6679 if (BUILTIN_SQRT_P (fcode0)
6680 && operand_equal_p (arg00, arg10, 0)
6681 && ! HONOR_SNANS (TYPE_MODE (type)))
6682 return arg00;
6683
6684 /* Optimize root(x)*root(y) as root(x*y). */
6685 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6686 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
6687 arglist = build_tree_list (NULL_TREE, arg);
6688 return build_function_call_expr (rootfn, arglist);
6689 }
6690
6691 /* Optimize expN(x)*expN(y) as expN(x+y). */
6692 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6693 {
6694 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6695 tree arg = build2 (PLUS_EXPR, type,
6696 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6697 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6698 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6699 return build_function_call_expr (expfn, arglist);
6700 }
6701
6702 /* Optimizations of pow(...)*pow(...). */
6703 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6704 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6705 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6706 {
6707 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6708 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6709 1)));
6710 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6711 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6712 1)));
6713
6714 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6715 if (operand_equal_p (arg01, arg11, 0))
6716 {
6717 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6718 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
6719 tree arglist = tree_cons (NULL_TREE, fold (arg),
6720 build_tree_list (NULL_TREE,
6721 arg01));
6722 return build_function_call_expr (powfn, arglist);
6723 }
6724
6725 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6726 if (operand_equal_p (arg00, arg10, 0))
6727 {
6728 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6729 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
6730 tree arglist = tree_cons (NULL_TREE, arg00,
6731 build_tree_list (NULL_TREE,
6732 arg));
6733 return build_function_call_expr (powfn, arglist);
6734 }
6735 }
6736
6737 /* Optimize tan(x)*cos(x) as sin(x). */
6738 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6739 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6740 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6741 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6742 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6743 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6744 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6745 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6746 {
6747 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6748
6749 if (sinfn != NULL_TREE)
6750 return build_function_call_expr (sinfn,
6751 TREE_OPERAND (arg0, 1));
6752 }
6753
6754 /* Optimize x*pow(x,c) as pow(x,c+1). */
6755 if (fcode1 == BUILT_IN_POW
6756 || fcode1 == BUILT_IN_POWF
6757 || fcode1 == BUILT_IN_POWL)
6758 {
6759 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6760 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6761 1)));
6762 if (TREE_CODE (arg11) == REAL_CST
6763 && ! TREE_CONSTANT_OVERFLOW (arg11)
6764 && operand_equal_p (arg0, arg10, 0))
6765 {
6766 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6767 REAL_VALUE_TYPE c;
6768 tree arg, arglist;
6769
6770 c = TREE_REAL_CST (arg11);
6771 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6772 arg = build_real (type, c);
6773 arglist = build_tree_list (NULL_TREE, arg);
6774 arglist = tree_cons (NULL_TREE, arg0, arglist);
6775 return build_function_call_expr (powfn, arglist);
6776 }
6777 }
6778
6779 /* Optimize pow(x,c)*x as pow(x,c+1). */
6780 if (fcode0 == BUILT_IN_POW
6781 || fcode0 == BUILT_IN_POWF
6782 || fcode0 == BUILT_IN_POWL)
6783 {
6784 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6785 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6786 1)));
6787 if (TREE_CODE (arg01) == REAL_CST
6788 && ! TREE_CONSTANT_OVERFLOW (arg01)
6789 && operand_equal_p (arg1, arg00, 0))
6790 {
6791 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6792 REAL_VALUE_TYPE c;
6793 tree arg, arglist;
6794
6795 c = TREE_REAL_CST (arg01);
6796 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6797 arg = build_real (type, c);
6798 arglist = build_tree_list (NULL_TREE, arg);
6799 arglist = tree_cons (NULL_TREE, arg1, arglist);
6800 return build_function_call_expr (powfn, arglist);
6801 }
6802 }
6803
6804 /* Canonicalize x*x as pow(x,2.0), which is expanded back into x*x. */
6805 if (! optimize_size
6806 && operand_equal_p (arg0, arg1, 0))
6807 {
6808 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6809
6810 if (powfn)
6811 {
6812 tree arg = build_real (type, dconst2);
6813 tree arglist = build_tree_list (NULL_TREE, arg);
6814 arglist = tree_cons (NULL_TREE, arg0, arglist);
6815 return build_function_call_expr (powfn, arglist);
6816 }
6817 }
6818 }
6819 }
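
      /* Illustrative sketch, not part of GCC: the -funsafe-math-optimizations
         rewrites above, stated as the identities they rely on.  Each can
         change rounding or NaN/Inf behavior, which is why the flag gates
         them:

             sqrt(x) * sqrt(y)    -->  sqrt(x * y)
             expN(x) * expN(y)    -->  expN(x + y)
             pow(x,a) * pow(x,b)  -->  pow(x, a + b)
             pow(x,a) * pow(y,a)  -->  pow(x * y, a)
             tan(x) * cos(x)      -->  sin(x)
             x * pow(x,c)         -->  pow(x, c + 1)

         e.g. exp(1.0) * exp(2.0) and exp(3.0) both print 20.0855 to four
         decimals, but may differ in the last ulp.  */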
6820 goto associate;
6821
6822 case BIT_IOR_EXPR:
6823 bit_ior:
6824 if (integer_all_onesp (arg1))
6825 return omit_one_operand (type, arg1, arg0);
6826 if (integer_zerop (arg1))
6827 return non_lvalue (fold_convert (type, arg0));
6828 if (operand_equal_p (arg0, arg1, 0))
6829 return non_lvalue (fold_convert (type, arg0));
6830 t1 = distribute_bit_expr (code, type, arg0, arg1);
6831 if (t1 != NULL_TREE)
6832 return t1;
6833
6834 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6835
6836 This results in more efficient code for machines without a NAND
6837 instruction. Combine will canonicalize to the first form
6838 which will allow use of NAND instructions provided by the
6839 backend if they exist. */
6840 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6841 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6842 {
6843 return fold (build1 (BIT_NOT_EXPR, type,
6844 build2 (BIT_AND_EXPR, type,
6845 TREE_OPERAND (arg0, 0),
6846 TREE_OPERAND (arg1, 0))));
6847 }
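/* Editor's sketch (not part of the original source): e.g. with
   a = 5 and b = 3,

     ~a | ~b  =>  ~(a & b)

   both sides evaluate to ~1 == -2; the NOT-of-AND form needs two
   operations instead of three when no NAND instruction exists.  */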
6848
6849 /* See if this can be simplified into a rotate first. If that
6850 	     is unsuccessful, continue in the association code.  */
6851 goto bit_rotate;
6852
6853 case BIT_XOR_EXPR:
6854 if (integer_zerop (arg1))
6855 return non_lvalue (fold_convert (type, arg0));
6856 if (integer_all_onesp (arg1))
6857 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6858 if (operand_equal_p (arg0, arg1, 0))
6859 return omit_one_operand (type, integer_zero_node, arg0);
6860
6861 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6862 with a constant, and the two constants have no bits in common,
6863 we should treat this as a BIT_IOR_EXPR since this may produce more
6864 simplifications. */
6865 if (TREE_CODE (arg0) == BIT_AND_EXPR
6866 && TREE_CODE (arg1) == BIT_AND_EXPR
6867 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6868 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6869 && integer_zerop (const_binop (BIT_AND_EXPR,
6870 TREE_OPERAND (arg0, 1),
6871 TREE_OPERAND (arg1, 1), 0)))
6872 {
6873 code = BIT_IOR_EXPR;
6874 goto bit_ior;
6875 }
6876
6877 /* See if this can be simplified into a rotate first. If that
6878 	     is unsuccessful, continue in the association code.  */
6879 goto bit_rotate;
6880
6881 case BIT_AND_EXPR:
6882 if (integer_all_onesp (arg1))
6883 return non_lvalue (fold_convert (type, arg0));
6884 if (integer_zerop (arg1))
6885 return omit_one_operand (type, arg1, arg0);
6886 if (operand_equal_p (arg0, arg1, 0))
6887 return non_lvalue (fold_convert (type, arg0));
6888 t1 = distribute_bit_expr (code, type, arg0, arg1);
6889 if (t1 != NULL_TREE)
6890 return t1;
6891 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6892 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6893 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6894 {
6895 unsigned int prec
6896 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6897
6898 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6899 && (~TREE_INT_CST_LOW (arg1)
6900 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6901 return fold_convert (type, TREE_OPERAND (arg0, 0));
6902 }
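/* Editor's sketch (not part of the original source): with
   unsigned char c, the mask in (int) c & 0377 covers all 8 value
   bits of c, so the AND is redundant and the whole expression
   folds to just (int) c.  */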
6903
6904 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6905
6906 This results in more efficient code for machines without a NOR
6907 instruction. Combine will canonicalize to the first form
6908 which will allow use of NOR instructions provided by the
6909 backend if they exist. */
6910 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6911 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6912 {
6913 return fold (build1 (BIT_NOT_EXPR, type,
6914 build2 (BIT_IOR_EXPR, type,
6915 TREE_OPERAND (arg0, 0),
6916 TREE_OPERAND (arg1, 0))));
6917 }
6918
6919 goto associate;
6920
6921 case RDIV_EXPR:
6922 /* Don't touch a floating-point divide by zero unless the mode
6923 of the constant can represent infinity. */
6924 if (TREE_CODE (arg1) == REAL_CST
6925 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6926 && real_zerop (arg1))
6927 return t;
6928
6929 /* (-A) / (-B) -> A / B */
6930 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6931 return fold (build2 (RDIV_EXPR, type,
6932 TREE_OPERAND (arg0, 0),
6933 negate_expr (arg1)));
6934 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6935 return fold (build2 (RDIV_EXPR, type,
6936 negate_expr (arg0),
6937 TREE_OPERAND (arg1, 0)));
6938
6939 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6940 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6941 && real_onep (arg1))
6942 return non_lvalue (fold_convert (type, arg0));
6943
6944 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6945 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6946 && real_minus_onep (arg1))
6947 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6948
6949 /* If ARG1 is a constant, we can convert this to a multiply by the
6950 reciprocal. This does not have the same rounding properties,
6951 so only do this if -funsafe-math-optimizations. We can actually
6952 always safely do it if ARG1 is a power of two, but it's hard to
6953 tell if it is or not in a portable manner. */
6954 if (TREE_CODE (arg1) == REAL_CST)
6955 {
6956 if (flag_unsafe_math_optimizations
6957 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6958 arg1, 0)))
6959 return fold (build2 (MULT_EXPR, type, arg0, tem));
6960 /* Find the reciprocal if optimizing and the result is exact. */
6961 if (optimize)
6962 {
6963 REAL_VALUE_TYPE r;
6964 r = TREE_REAL_CST (arg1);
6965 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6966 {
6967 tem = build_real (type, r);
6968 return fold (build2 (MULT_EXPR, type, arg0, tem));
6969 }
6970 }
6971 }
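/* Editor's sketch (not part of the original source): x / 4.0 folds
   to x * 0.25 whenever we are optimizing, because 0.25 is the exact
   binary reciprocal of 4.0; x / 3.0 folds to x * (1.0/3.0) only
   under -funsafe-math-optimizations, since that product can round
   differently from the division.  */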
6972 /* Convert A/B/C to A/(B*C). */
6973 if (flag_unsafe_math_optimizations
6974 && TREE_CODE (arg0) == RDIV_EXPR)
6975 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6976 fold (build2 (MULT_EXPR, type,
6977 TREE_OPERAND (arg0, 1), arg1))));
6978
6979 /* Convert A/(B/C) to (A/B)*C. */
6980 if (flag_unsafe_math_optimizations
6981 && TREE_CODE (arg1) == RDIV_EXPR)
6982 return fold (build2 (MULT_EXPR, type,
6983 fold (build2 (RDIV_EXPR, type, arg0,
6984 TREE_OPERAND (arg1, 0))),
6985 TREE_OPERAND (arg1, 1)));
6986
6987 /* Convert C1/(X*C2) into (C1/C2)/X. */
6988 if (flag_unsafe_math_optimizations
6989 && TREE_CODE (arg1) == MULT_EXPR
6990 && TREE_CODE (arg0) == REAL_CST
6991 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6992 {
6993 tree tem = const_binop (RDIV_EXPR, arg0,
6994 TREE_OPERAND (arg1, 1), 0);
6995 if (tem)
6996 return fold (build2 (RDIV_EXPR, type, tem,
6997 TREE_OPERAND (arg1, 0)));
6998 }
6999
7000 if (flag_unsafe_math_optimizations)
7001 {
7002 enum built_in_function fcode = builtin_mathfn_code (arg1);
7003 /* Optimize x/expN(y) into x*expN(-y). */
7004 if (BUILTIN_EXPONENT_P (fcode))
7005 {
7006 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7007 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7008 tree arglist = build_tree_list (NULL_TREE,
7009 fold_convert (type, arg));
7010 arg1 = build_function_call_expr (expfn, arglist);
7011 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7012 }
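/* Editor's sketch (not part of the original source):

     x / exp (y)  =>  x * exp (-y)

   trades the division for a multiply; the same rewrite applies to
   the other expN-style builtins matched by BUILTIN_EXPONENT_P.  */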
7013
7014 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7015 if (fcode == BUILT_IN_POW
7016 || fcode == BUILT_IN_POWF
7017 || fcode == BUILT_IN_POWL)
7018 {
7019 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7020 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7021 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7022 tree neg11 = fold_convert (type, negate_expr (arg11));
7023 tree arglist = tree_cons (NULL_TREE, arg10,
7024 build_tree_list (NULL_TREE, neg11));
7025 arg1 = build_function_call_expr (powfn, arglist);
7026 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7027 }
7028 }
7029
7030 if (flag_unsafe_math_optimizations)
7031 {
7032 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7033 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7034
7035 /* Optimize sin(x)/cos(x) as tan(x). */
7036 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7037 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7038 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7039 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7040 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7041 {
7042 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7043
7044 if (tanfn != NULL_TREE)
7045 return build_function_call_expr (tanfn,
7046 TREE_OPERAND (arg0, 1));
7047 }
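/* Editor's sketch (not part of the original source): e.g.

     sin (x) / cos (x)  =>  tan (x)

   replacing two libm calls and a division with one call; the
   reciprocal case just below becomes 1.0 / tan (x) instead.  */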
7048
7049 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7050 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7051 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7052 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7053 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7054 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7055 {
7056 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7057
7058 if (tanfn != NULL_TREE)
7059 {
7060 tree tmp = TREE_OPERAND (arg0, 1);
7061 tmp = build_function_call_expr (tanfn, tmp);
7062 return fold (build2 (RDIV_EXPR, type,
7063 build_real (type, dconst1), tmp));
7064 }
7065 }
7066
7067 /* Optimize pow(x,c)/x as pow(x,c-1). */
7068 if (fcode0 == BUILT_IN_POW
7069 || fcode0 == BUILT_IN_POWF
7070 || fcode0 == BUILT_IN_POWL)
7071 {
7072 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7073 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7074 if (TREE_CODE (arg01) == REAL_CST
7075 && ! TREE_CONSTANT_OVERFLOW (arg01)
7076 && operand_equal_p (arg1, arg00, 0))
7077 {
7078 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7079 REAL_VALUE_TYPE c;
7080 tree arg, arglist;
7081
7082 c = TREE_REAL_CST (arg01);
7083 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7084 arg = build_real (type, c);
7085 arglist = build_tree_list (NULL_TREE, arg);
7086 arglist = tree_cons (NULL_TREE, arg1, arglist);
7087 return build_function_call_expr (powfn, arglist);
7088 }
7089 }
7090 }
7091 goto binary;
7092
7093 case TRUNC_DIV_EXPR:
7094 case ROUND_DIV_EXPR:
7095 case FLOOR_DIV_EXPR:
7096 case CEIL_DIV_EXPR:
7097 case EXACT_DIV_EXPR:
7098 if (integer_onep (arg1))
7099 return non_lvalue (fold_convert (type, arg0));
7100 if (integer_zerop (arg1))
7101 return t;
7102 /* X / -1 is -X. */
7103 if (!TYPE_UNSIGNED (type)
7104 && TREE_CODE (arg1) == INTEGER_CST
7105 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7106 && TREE_INT_CST_HIGH (arg1) == -1)
7107 return fold_convert (type, negate_expr (arg0));
7108
7109 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7110 operation, EXACT_DIV_EXPR.
7111
7112 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7113 At one time others generated faster code, but it's not clear whether they
7114 still do after the last round of changes to the DIV code in expmed.c.  */
7115 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7116 && multiple_of_p (type, arg0, arg1))
7117 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7118
7119 if (TREE_CODE (arg1) == INTEGER_CST
7120 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7121 code, NULL_TREE)))
7122 return fold_convert (type, tem);
7123
7124 goto binary;
7125
7126 case CEIL_MOD_EXPR:
7127 case FLOOR_MOD_EXPR:
7128 case ROUND_MOD_EXPR:
7129 case TRUNC_MOD_EXPR:
7130 if (integer_onep (arg1))
7131 return omit_one_operand (type, integer_zero_node, arg0);
7132 if (integer_zerop (arg1))
7133 return t;
7134 /* X % -1 is zero. */
7135 if (!TYPE_UNSIGNED (type)
7136 && TREE_CODE (arg1) == INTEGER_CST
7137 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7138 && TREE_INT_CST_HIGH (arg1) == -1)
7139 return omit_one_operand (type, integer_zero_node, arg0);
7140
7141 if (TREE_CODE (arg1) == INTEGER_CST
7142 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7143 code, NULL_TREE)))
7144 return fold_convert (type, tem);
7145
7146 goto binary;
7147
7148 case LROTATE_EXPR:
7149 case RROTATE_EXPR:
7150 if (integer_all_onesp (arg0))
7151 return omit_one_operand (type, arg0, arg1);
7152 goto shift;
7153
7154 case RSHIFT_EXPR:
7155 /* Optimize -1 >> x for arithmetic right shifts. */
7156 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7157 return omit_one_operand (type, arg0, arg1);
7158 /* ... fall through ... */
7159
7160 case LSHIFT_EXPR:
7161 shift:
7162 if (integer_zerop (arg1))
7163 return non_lvalue (fold_convert (type, arg0));
7164 if (integer_zerop (arg0))
7165 return omit_one_operand (type, arg0, arg1);
7166
7167 /* Since a negative shift count is not well-defined,
7168 don't try to compute it in the compiler. */
7169 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7170 return t;
7171 /* Rewrite an LROTATE_EXPR by a constant into an
7172 RROTATE_EXPR by a new constant. */
7173 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7174 {
7175 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7176 tem = fold_convert (TREE_TYPE (arg1), tem);
7177 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7178 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7179 }
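/* Editor's sketch (not part of the original source): for a 32-bit
   type, a left rotate by 8 becomes a right rotate by 32 - 8 = 24,
   so later passes only ever see the RROTATE_EXPR direction for
   constant rotate counts.  */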
7180
7181 /* If we have a rotate of a bit operation with the rotate count and
7182 the second operand of the bit operation both constant,
7183 permute the two operations. */
7184 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7185 && (TREE_CODE (arg0) == BIT_AND_EXPR
7186 || TREE_CODE (arg0) == BIT_IOR_EXPR
7187 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7188 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7189 return fold (build2 (TREE_CODE (arg0), type,
7190 fold (build2 (code, type,
7191 TREE_OPERAND (arg0, 0), arg1)),
7192 fold (build2 (code, type,
7193 TREE_OPERAND (arg0, 1), arg1))));
7194
7195 /* Two consecutive rotates adding up to the width of the mode can
7196 be ignored. */
7197 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7198 && TREE_CODE (arg0) == RROTATE_EXPR
7199 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7200 && TREE_INT_CST_HIGH (arg1) == 0
7201 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7202 && ((TREE_INT_CST_LOW (arg1)
7203 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7204 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7205 return TREE_OPERAND (arg0, 0);
7206
7207 goto binary;
7208
7209 case MIN_EXPR:
7210 if (operand_equal_p (arg0, arg1, 0))
7211 return omit_one_operand (type, arg0, arg1);
7212 if (INTEGRAL_TYPE_P (type)
7213 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7214 return omit_one_operand (type, arg1, arg0);
7215 goto associate;
7216
7217 case MAX_EXPR:
7218 if (operand_equal_p (arg0, arg1, 0))
7219 return omit_one_operand (type, arg0, arg1);
7220 if (INTEGRAL_TYPE_P (type)
7221 && TYPE_MAX_VALUE (type)
7222 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7223 return omit_one_operand (type, arg1, arg0);
7224 goto associate;
7225
7226 case TRUTH_NOT_EXPR:
7227 /* The argument to invert_truthvalue must have Boolean type. */
7228 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7229 arg0 = fold_convert (boolean_type_node, arg0);
7230
7231 /* Note that the operand of this must be an int
7232 and its values must be 0 or 1.
7233 ("true" is a fixed value perhaps depending on the language,
7234 but we don't handle values other than 1 correctly yet.) */
7235 tem = invert_truthvalue (arg0);
7236 /* Avoid infinite recursion. */
7237 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7238 {
7239 tem = fold_single_bit_test (code, arg0, arg1, type);
7240 if (tem)
7241 return tem;
7242 return t;
7243 }
7244 return fold_convert (type, tem);
7245
7246 case TRUTH_ANDIF_EXPR:
7247 /* Note that the operands of this must be ints
7248 and their values must be 0 or 1.
7249 ("true" is a fixed value perhaps depending on the language.) */
7250 /* If first arg is constant zero, return it. */
7251 if (integer_zerop (arg0))
7252 return fold_convert (type, arg0);
7253 case TRUTH_AND_EXPR:
7254 /* If either arg is constant true, drop it. */
7255 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7256 return non_lvalue (fold_convert (type, arg1));
7257 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7258 /* Preserve sequence points. */
7259 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7260 return non_lvalue (fold_convert (type, arg0));
7261 /* If second arg is constant zero, result is zero, but first arg
7262 must be evaluated. */
7263 if (integer_zerop (arg1))
7264 return omit_one_operand (type, arg1, arg0);
7265 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7266 case will be handled here. */
7267 if (integer_zerop (arg0))
7268 return omit_one_operand (type, arg0, arg1);
7269
7270 truth_andor:
7271 /* We only do these simplifications if we are optimizing. */
7272 if (!optimize)
7273 return t;
7274
7275 /* Check for things like (A || B) && (A || C). We can convert this
7276 to A || (B && C). Note that either operator can be any of the four
7277 truth and/or operations and the transformation will still be
7278 valid.  Also note that we only care about order for the
7279 ANDIF and ORIF operators: if B contains side effects, the
7280 transformation might change the truth-value of A.  */
7281 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7282 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7283 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7284 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7285 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7286 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7287 {
7288 tree a00 = TREE_OPERAND (arg0, 0);
7289 tree a01 = TREE_OPERAND (arg0, 1);
7290 tree a10 = TREE_OPERAND (arg1, 0);
7291 tree a11 = TREE_OPERAND (arg1, 1);
7292 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7293 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7294 && (code == TRUTH_AND_EXPR
7295 || code == TRUTH_OR_EXPR));
7296
7297 if (operand_equal_p (a00, a10, 0))
7298 return fold (build2 (TREE_CODE (arg0), type, a00,
7299 fold (build2 (code, type, a01, a11))));
7300 else if (commutative && operand_equal_p (a00, a11, 0))
7301 return fold (build2 (TREE_CODE (arg0), type, a00,
7302 fold (build2 (code, type, a01, a10))));
7303 else if (commutative && operand_equal_p (a01, a10, 0))
7304 return fold (build2 (TREE_CODE (arg0), type, a01,
7305 fold (build2 (code, type, a00, a11))));
7306
7307 /* This case is tricky because we must either have commutative
7308 operators or else A10 must not have side-effects. */
7309
7310 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7311 && operand_equal_p (a01, a11, 0))
7312 return fold (build2 (TREE_CODE (arg0), type,
7313 fold (build2 (code, type, a00, a10)),
7314 a01));
7315 }
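/* Editor's sketch (not part of the original source): e.g.

     (a || b) && (a || c)  =>  a || (b && c)

   which tests a only once; the commutative checks above also catch
   operand orders such as (b || a) && (a || c).  */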
7316
7317 /* See if we can build a range comparison. */
7318 if (0 != (tem = fold_range_test (t)))
7319 return tem;
7320
7321 /* Check for the possibility of merging component references. If our
7322 lhs is another similar operation, try to merge its rhs with our
7323 rhs. Then try to merge our lhs and rhs. */
7324 if (TREE_CODE (arg0) == code
7325 && 0 != (tem = fold_truthop (code, type,
7326 TREE_OPERAND (arg0, 1), arg1)))
7327 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7328
7329 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7330 return tem;
7331
7332 return t;
7333
7334 case TRUTH_ORIF_EXPR:
7335 /* Note that the operands of this must be ints
7336 and their values must be 0 or true.
7337 ("true" is a fixed value perhaps depending on the language.) */
7338 /* If first arg is constant true, return it. */
7339 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7340 return fold_convert (type, arg0);
7341 case TRUTH_OR_EXPR:
7342 /* If either arg is constant zero, drop it. */
7343 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7344 return non_lvalue (fold_convert (type, arg1));
7345 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7346 /* Preserve sequence points. */
7347 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7348 return non_lvalue (fold_convert (type, arg0));
7349 /* If second arg is constant true, result is true, but we must
7350 evaluate first arg. */
7351 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7352 return omit_one_operand (type, arg1, arg0);
7353 /* Likewise for first arg, but note this only occurs here for
7354 TRUTH_OR_EXPR. */
7355 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7356 return omit_one_operand (type, arg0, arg1);
7357 goto truth_andor;
7358
7359 case TRUTH_XOR_EXPR:
7360 /* If either arg is constant zero, drop it. */
7361 if (integer_zerop (arg0))
7362 return non_lvalue (fold_convert (type, arg1));
7363 if (integer_zerop (arg1))
7364 return non_lvalue (fold_convert (type, arg0));
7365 /* If either arg is constant true, this is a logical inversion. */
7366 if (integer_onep (arg0))
7367 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7368 if (integer_onep (arg1))
7369 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7370 /* Identical arguments cancel to zero. */
7371 if (operand_equal_p (arg0, arg1, 0))
7372 return omit_one_operand (type, integer_zero_node, arg0);
7373 return t;
7374
7375 case EQ_EXPR:
7376 case NE_EXPR:
7377 case LT_EXPR:
7378 case GT_EXPR:
7379 case LE_EXPR:
7380 case GE_EXPR:
7381 /* If one arg is a real or integer constant, put it last. */
7382 if (tree_swap_operands_p (arg0, arg1, true))
7383 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7384
7385 /* If this is an equality comparison of the address of a non-weak
7386 object against zero, then we know the result. */
7387 if ((code == EQ_EXPR || code == NE_EXPR)
7388 && TREE_CODE (arg0) == ADDR_EXPR
7389 && DECL_P (TREE_OPERAND (arg0, 0))
7390 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7391 && integer_zerop (arg1))
7392 return constant_boolean_node (code != EQ_EXPR, type);
7393
7394 /* If this is an equality comparison of the address of two non-weak,
7395 unaliased symbols neither of which are extern (since we do not
7396 have access to attributes for externs), then we know the result. */
7397 if ((code == EQ_EXPR || code == NE_EXPR)
7398 && TREE_CODE (arg0) == ADDR_EXPR
7399 && DECL_P (TREE_OPERAND (arg0, 0))
7400 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7401 && ! lookup_attribute ("alias",
7402 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7403 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7404 && TREE_CODE (arg1) == ADDR_EXPR
7405 && DECL_P (TREE_OPERAND (arg1, 0))
7406 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7407 && ! lookup_attribute ("alias",
7408 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7409 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7410 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7411 ? code == EQ_EXPR : code != EQ_EXPR,
7412 type);
7413
7414 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7415 {
7416 tree targ0 = strip_float_extensions (arg0);
7417 tree targ1 = strip_float_extensions (arg1);
7418 tree newtype = TREE_TYPE (targ0);
7419
7420 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7421 newtype = TREE_TYPE (targ1);
7422
7423 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7424 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7425 return fold (build2 (code, type, fold_convert (newtype, targ0),
7426 fold_convert (newtype, targ1)));
7427
7428 /* (-a) CMP (-b) -> b CMP a */
7429 if (TREE_CODE (arg0) == NEGATE_EXPR
7430 && TREE_CODE (arg1) == NEGATE_EXPR)
7431 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7432 TREE_OPERAND (arg0, 0)));
7433
7434 if (TREE_CODE (arg1) == REAL_CST)
7435 {
7436 REAL_VALUE_TYPE cst;
7437 cst = TREE_REAL_CST (arg1);
7438
7439 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7440 if (TREE_CODE (arg0) == NEGATE_EXPR)
7441 return
7442 fold (build2 (swap_tree_comparison (code), type,
7443 TREE_OPERAND (arg0, 0),
7444 build_real (TREE_TYPE (arg1),
7445 REAL_VALUE_NEGATE (cst))));
7446
7447 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7448 /* a CMP (-0) -> a CMP 0 */
7449 if (REAL_VALUE_MINUS_ZERO (cst))
7450 return fold (build2 (code, type, arg0,
7451 build_real (TREE_TYPE (arg1), dconst0)));
7452
7453 /* x != NaN is always true, other ops are always false. */
7454 if (REAL_VALUE_ISNAN (cst)
7455 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7456 {
7457 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7458 return omit_one_operand (type, tem, arg0);
7459 }
7460
7461 /* Fold comparisons against infinity. */
7462 if (REAL_VALUE_ISINF (cst))
7463 {
7464 tem = fold_inf_compare (code, type, arg0, arg1);
7465 if (tem != NULL_TREE)
7466 return tem;
7467 }
7468 }
7469
7470 /* If this is a comparison of a real constant with a PLUS_EXPR
7471 or a MINUS_EXPR of a real constant, we can convert it into a
7472 comparison with a revised real constant, provided that
7473 flag_unsafe_math_optimizations is set and no overflow occurs.  */
7474 if (flag_unsafe_math_optimizations
7475 && TREE_CODE (arg1) == REAL_CST
7476 && (TREE_CODE (arg0) == PLUS_EXPR
7477 || TREE_CODE (arg0) == MINUS_EXPR)
7478 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7479 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7480 ? MINUS_EXPR : PLUS_EXPR,
7481 arg1, TREE_OPERAND (arg0, 1), 0))
7482 && ! TREE_CONSTANT_OVERFLOW (tem))
7483 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7484
7485 /* Likewise, we can simplify a comparison of a real constant with
7486 a MINUS_EXPR whose first operand is also a real constant, i.e.
7487 (c1 - x) < c2 becomes x > c1-c2. */
7488 if (flag_unsafe_math_optimizations
7489 && TREE_CODE (arg1) == REAL_CST
7490 && TREE_CODE (arg0) == MINUS_EXPR
7491 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7492 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7493 arg1, 0))
7494 && ! TREE_CONSTANT_OVERFLOW (tem))
7495 return fold (build2 (swap_tree_comparison (code), type,
7496 TREE_OPERAND (arg0, 1), tem));
7497
7498 /* Fold comparisons against built-in math functions. */
7499 if (TREE_CODE (arg1) == REAL_CST
7500 && flag_unsafe_math_optimizations
7501 && ! flag_errno_math)
7502 {
7503 enum built_in_function fcode = builtin_mathfn_code (arg0);
7504
7505 if (fcode != END_BUILTINS)
7506 {
7507 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7508 if (tem != NULL_TREE)
7509 return tem;
7510 }
7511 }
7512 }
7513
7514 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7515 if (TREE_CONSTANT (arg1)
7516 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7517 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7518 /* This optimization is invalid for ordered comparisons
7519 if CONST+INCR overflows or if foo+incr might overflow.
7520 This optimization is invalid for floating point due to rounding.
7521 For pointer types we assume overflow doesn't happen. */
7522 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7523 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7524 && (code == EQ_EXPR || code == NE_EXPR))))
7525 {
7526 tree varop, newconst;
7527
7528 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7529 {
7530 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7531 arg1, TREE_OPERAND (arg0, 1)));
7532 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7533 TREE_OPERAND (arg0, 0),
7534 TREE_OPERAND (arg0, 1));
7535 }
7536 else
7537 {
7538 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7539 arg1, TREE_OPERAND (arg0, 1)));
7540 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7541 TREE_OPERAND (arg0, 0),
7542 TREE_OPERAND (arg0, 1));
7543 }
7544
7545
7546 /* If VAROP is a reference to a bitfield, we must mask
7547 the constant by the width of the field. */
7548 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7549 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7550 {
7551 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7552 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7553 tree folded_compare, shift;
7554
7555 /* First check whether the comparison would come out
7556 always the same. If we don't do that we would
7557 change the meaning with the masking. */
7558 folded_compare = fold (build2 (code, type,
7559 TREE_OPERAND (varop, 0),
7560 arg1));
7561 if (integer_zerop (folded_compare)
7562 || integer_onep (folded_compare))
7563 return omit_one_operand (type, folded_compare, varop);
7564
7565 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7566 0);
7567 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7568 newconst, shift));
7569 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7570 newconst, shift));
7571 }
7572
7573 return fold (build2 (code, type, varop, newconst));
7574 }
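/* Editor's sketch (not part of the original source): e.g.

     i++ == 5  =>  ++i == 6

   both compare the same original value of i, but the rewritten form
   leaves the updated i live rather than a saved copy of the old
   value.  */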
7575
7576 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7577 This transformation affects the cases which are handled in later
7578 optimizations involving comparisons with non-negative constants. */
7579 if (TREE_CODE (arg1) == INTEGER_CST
7580 && TREE_CODE (arg0) != INTEGER_CST
7581 && tree_int_cst_sgn (arg1) > 0)
7582 {
7583 switch (code)
7584 {
7585 case GE_EXPR:
7586 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7587 return fold (build2 (GT_EXPR, type, arg0, arg1));
7588
7589 case LT_EXPR:
7590 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7591 return fold (build2 (LE_EXPR, type, arg0, arg1));
7592
7593 default:
7594 break;
7595 }
7596 }
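/* Editor's sketch (not part of the original source): with C > 0,

     x >= 3  =>  x > 2          x < 3  =>  x <= 2

   canonicalizing toward GT_EXPR and LE_EXPR so the bound checks
   that follow see fewer distinct comparison forms.  */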
7597
7598 /* Comparisons with the highest or lowest possible integer of
7599 the specified size will have known values.
7600
7601 This is quite similar to fold_relational_hi_lo; however, my
7602 attempts to share the code have been nothing but trouble.
7603 I give up for now. */
7604 {
7605 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7606
7607 if (TREE_CODE (arg1) == INTEGER_CST
7608 && ! TREE_CONSTANT_OVERFLOW (arg1)
7609 && width <= HOST_BITS_PER_WIDE_INT
7610 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7611 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7612 {
7613 unsigned HOST_WIDE_INT signed_max;
7614 unsigned HOST_WIDE_INT max, min;
7615
7616 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7617
7618 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7619 {
7620 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7621 min = 0;
7622 }
7623 else
7624 {
7625 max = signed_max;
7626 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7627 }
7628
7629 if (TREE_INT_CST_HIGH (arg1) == 0
7630 && TREE_INT_CST_LOW (arg1) == max)
7631 switch (code)
7632 {
7633 case GT_EXPR:
7634 return omit_one_operand (type, integer_zero_node, arg0);
7635
7636 case GE_EXPR:
7637 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7638
7639 case LE_EXPR:
7640 return omit_one_operand (type, integer_one_node, arg0);
7641
7642 case LT_EXPR:
7643 return fold (build2 (NE_EXPR, type, arg0, arg1));
7644
7645 /* The GE_EXPR and LT_EXPR cases above are not normally
7646 reached because of previous transformations. */
7647
7648 default:
7649 break;
7650 }
7651 else if (TREE_INT_CST_HIGH (arg1) == 0
7652 && TREE_INT_CST_LOW (arg1) == max - 1)
7653 switch (code)
7654 {
7655 case GT_EXPR:
7656 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7657 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7658 case LE_EXPR:
7659 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7660 return fold (build2 (NE_EXPR, type, arg0, arg1));
7661 default:
7662 break;
7663 }
7664 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7665 && TREE_INT_CST_LOW (arg1) == min)
7666 switch (code)
7667 {
7668 case LT_EXPR:
7669 return omit_one_operand (type, integer_zero_node, arg0);
7670
7671 case LE_EXPR:
7672 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7673
7674 case GE_EXPR:
7675 return omit_one_operand (type, integer_one_node, arg0);
7676
7677 case GT_EXPR:
7678 return fold (build2 (NE_EXPR, type, arg0, arg1));
7679
7680 default:
7681 break;
7682 }
7683 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7684 && TREE_INT_CST_LOW (arg1) == min + 1)
7685 switch (code)
7686 {
7687 case GE_EXPR:
7688 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7689 return fold (build2 (NE_EXPR, type, arg0, arg1));
7690 case LT_EXPR:
7691 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7692 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7693 default:
7694 break;
7695 }
7696
7697 else if (!in_gimple_form
7698 && TREE_INT_CST_HIGH (arg1) == 0
7699 && TREE_INT_CST_LOW (arg1) == signed_max
7700 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7701 /* signed_type does not work on pointer types. */
7702 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7703 {
7704 /* The following case also applies to X < signed_max+1
7705 and X >= signed_max+1 because of previous transformations.  */
7706 if (code == LE_EXPR || code == GT_EXPR)
7707 {
7708 tree st0, st1;
7709 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7710 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7711 return fold
7712 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7713 type, fold_convert (st0, arg0),
7714 fold_convert (st1, integer_zero_node)));
7715 }
7716 }
7717 }
7718 }
7719
7720 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7721 a MINUS_EXPR of a constant, we can convert it into a comparison with
7722 a revised constant as long as no overflow occurs. */
7723 if ((code == EQ_EXPR || code == NE_EXPR)
7724 && TREE_CODE (arg1) == INTEGER_CST
7725 && (TREE_CODE (arg0) == PLUS_EXPR
7726 || TREE_CODE (arg0) == MINUS_EXPR)
7727 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7728 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7729 ? MINUS_EXPR : PLUS_EXPR,
7730 arg1, TREE_OPERAND (arg0, 1), 0))
7731 && ! TREE_CONSTANT_OVERFLOW (tem))
7732 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7733
7734 /* Similarly for a NEGATE_EXPR. */
7735 else if ((code == EQ_EXPR || code == NE_EXPR)
7736 && TREE_CODE (arg0) == NEGATE_EXPR
7737 && TREE_CODE (arg1) == INTEGER_CST
7738 && 0 != (tem = negate_expr (arg1))
7739 && TREE_CODE (tem) == INTEGER_CST
7740 && ! TREE_CONSTANT_OVERFLOW (tem))
7741 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7742
7743 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7744 for !=. Don't do this for ordered comparisons due to overflow. */
7745 else if ((code == NE_EXPR || code == EQ_EXPR)
7746 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7747 return fold (build2 (code, type,
7748 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7749
7750 /* If we are widening one operand of an integer comparison,
7751 see if the other operand is similarly being widened. Perhaps we
7752 can do the comparison in the narrower type. */
7753 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7754 && TREE_CODE (arg0) == NOP_EXPR
7755 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7756 && (code == EQ_EXPR || code == NE_EXPR
7757 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7758 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7759 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7760 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7761 || (TREE_CODE (t1) == INTEGER_CST
7762 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7763 return fold (build2 (code, type, tem,
7764 fold_convert (TREE_TYPE (tem), t1)));
7765
7766 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7767 constant, we can simplify it. */
7768 else if (TREE_CODE (arg1) == INTEGER_CST
7769 && (TREE_CODE (arg0) == MIN_EXPR
7770 || TREE_CODE (arg0) == MAX_EXPR)
7771 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7772 return optimize_minmax_comparison (t);
7773
7774 /* If we are comparing an ABS_EXPR with a constant, we can
7775 convert all the cases into explicit comparisons, but they may
7776 well not be faster than doing the ABS and one comparison.
7777 But ABS (X) <= C is a range comparison, which becomes a subtraction
7778 and a comparison, and is probably faster. */
7779 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7780 && TREE_CODE (arg0) == ABS_EXPR
7781 && ! TREE_SIDE_EFFECTS (arg0)
7782 && (0 != (tem = negate_expr (arg1)))
7783 && TREE_CODE (tem) == INTEGER_CST
7784 && ! TREE_CONSTANT_OVERFLOW (tem))
7785 return fold (build2 (TRUTH_ANDIF_EXPR, type,
7786 build2 (GE_EXPR, type,
7787 TREE_OPERAND (arg0, 0), tem),
7788 build2 (LE_EXPR, type,
7789 TREE_OPERAND (arg0, 0), arg1)));
7790
7791 /* If this is an EQ or NE comparison with zero and ARG0 is
7792 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7793 two operations, but the latter can be done in one less insn
7794 on machines that have only two-operand insns or on which a
7795 constant cannot be the first operand. */
7796 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7797 && TREE_CODE (arg0) == BIT_AND_EXPR)
7798 {
7799 tree arg00 = TREE_OPERAND (arg0, 0);
7800 tree arg01 = TREE_OPERAND (arg0, 1);
7801 if (TREE_CODE (arg00) == LSHIFT_EXPR
7802 && integer_onep (TREE_OPERAND (arg00, 0)))
7803 return
7804 fold (build2 (code, type,
7805 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7806 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
7807 arg01, TREE_OPERAND (arg00, 1)),
7808 fold_convert (TREE_TYPE (arg0),
7809 integer_one_node)),
7810 arg1));
7811 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7812 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7813 return
7814 fold (build2 (code, type,
7815 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7816 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
7817 arg00, TREE_OPERAND (arg01, 1)),
7818 fold_convert (TREE_TYPE (arg0),
7819 integer_one_node)),
7820 arg1));
7821 }
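/* Editor's sketch (not part of the original source): e.g.

     ((1 << n) & x) == 0  =>  ((x >> n) & 1) == 0

   still a shift and an AND, but the remaining constant is 1 in the
   second operand position, which two-operand machines handle in one
   fewer instruction.  */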
7822
7823 /* If this is an NE or EQ comparison of zero against the result of a
7824 signed MOD operation whose second operand is a power of 2, make
7825 the MOD operation unsigned since it is simpler and equivalent. */
7826 if ((code == NE_EXPR || code == EQ_EXPR)
7827 && integer_zerop (arg1)
7828 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7829 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7830 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7831 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7832 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7833 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7834 {
7835 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7836 tree newmod = build2 (TREE_CODE (arg0), newtype,
7837 fold_convert (newtype,
7838 TREE_OPERAND (arg0, 0)),
7839 fold_convert (newtype,
7840 TREE_OPERAND (arg0, 1)));
7841
7842 return build2 (code, type, newmod, fold_convert (newtype, arg1));
7843 }
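/* Editor's sketch (not part of the original source): for signed x,

     x % 4 == 0  =>  (unsigned) x % 4 == 0

   the zero test is unaffected by the sign, and the unsigned modulus
   by a power of 2 is just a bit mask with no sign fixups.  */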
7844
7845 /* If this is an NE comparison of zero with an AND of one, remove the
7846 comparison since the AND will give the correct value. */
7847 if (code == NE_EXPR && integer_zerop (arg1)
7848 && TREE_CODE (arg0) == BIT_AND_EXPR
7849 && integer_onep (TREE_OPERAND (arg0, 1)))
7850 return fold_convert (type, arg0);
7851
7852 /* If we have (A & C) == C where C is a power of 2, convert this into
7853 (A & C) != 0. Similarly for NE_EXPR. */
7854 if ((code == EQ_EXPR || code == NE_EXPR)
7855 && TREE_CODE (arg0) == BIT_AND_EXPR
7856 && integer_pow2p (TREE_OPERAND (arg0, 1))
7857 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7858 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7859 arg0, integer_zero_node));
7860
7861 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7862 2, then fold the expression into shifts and logical operations. */
7863 tem = fold_single_bit_test (code, arg0, arg1, type);
7864 if (tem)
7865 return tem;
7866
7867 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7868 Similarly for NE_EXPR. */
7869 if ((code == EQ_EXPR || code == NE_EXPR)
7870 && TREE_CODE (arg0) == BIT_AND_EXPR
7871 && TREE_CODE (arg1) == INTEGER_CST
7872 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7873 {
7874 tree dandnotc
7875 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7876 arg1, build1 (BIT_NOT_EXPR,
7877 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7878 TREE_OPERAND (arg0, 1))));
7879 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7880 if (integer_nonzerop (dandnotc))
7881 return omit_one_operand (type, rslt, arg0);
7882 }
7883
7884 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7885 Similarly for NE_EXPR. */
7886 if ((code == EQ_EXPR || code == NE_EXPR)
7887 && TREE_CODE (arg0) == BIT_IOR_EXPR
7888 && TREE_CODE (arg1) == INTEGER_CST
7889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7890 {
7891 tree candnotd
7892 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7893 TREE_OPERAND (arg0, 1),
7894 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7895 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7896 if (integer_nonzerop (candnotd))
7897 return omit_one_operand (type, rslt, arg0);
7898 }
7899
7900 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7901 and similarly for >= into !=. */
7902 if ((code == LT_EXPR || code == GE_EXPR)
7903 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7904 && TREE_CODE (arg1) == LSHIFT_EXPR
7905 && integer_onep (TREE_OPERAND (arg1, 0)))
7906 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7907 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7908 TREE_OPERAND (arg1, 1)),
7909 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7910
7911 else if ((code == LT_EXPR || code == GE_EXPR)
7912 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7913 && (TREE_CODE (arg1) == NOP_EXPR
7914 || TREE_CODE (arg1) == CONVERT_EXPR)
7915 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7916 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7917 return
7918 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7919 fold_convert (TREE_TYPE (arg0),
7920 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7921 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7922 1))),
7923 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7924
7925 /* Simplify comparison of something with itself. (For IEEE
7926 floating-point, we can only do some of these simplifications.) */
7927 if (operand_equal_p (arg0, arg1, 0))
7928 {
7929 switch (code)
7930 {
7931 case EQ_EXPR:
7932 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7933 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7934 return constant_boolean_node (1, type);
7935 break;
7936
7937 case GE_EXPR:
7938 case LE_EXPR:
7939 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7940 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7941 return constant_boolean_node (1, type);
7942 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7943
7944 case NE_EXPR:
7945 /* For NE, we can only do this simplification if integer
7946 or we don't honor IEEE floating point NaNs. */
7947 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7948 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7949 break;
7950 /* ... fall through ... */
7951 case GT_EXPR:
7952 case LT_EXPR:
7953 return constant_boolean_node (0, type);
7954 default:
7955 abort ();
7956 }
7957 }
7958
7959 /* If we are comparing an expression that just has comparisons
7960 of two integer values, arithmetic expressions of those comparisons,
7961 and constants, we can simplify it. There are only three cases
7962 to check: the two values can either be equal, the first can be
7963 greater, or the second can be greater. Fold the expression for
7964 those three values. Since each value must be 0 or 1, we have
7965 eight possibilities, each of which corresponds to the constant 0
7966 or 1 or one of the six possible comparisons.
7967
7968 This handles common cases like (a > b) == 0 but also handles
7969 expressions like ((x > y) - (y > x)) > 0, which supposedly
7970 occur in macroized code. */
7971
7972 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7973 {
7974 tree cval1 = 0, cval2 = 0;
7975 int save_p = 0;
7976
7977 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7978 /* Don't handle degenerate cases here; they should already
7979 have been handled anyway. */
7980 && cval1 != 0 && cval2 != 0
7981 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7982 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7983 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7984 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7985 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7986 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7987 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7988 {
7989 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7990 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7991
7992 /* We can't just pass T to eval_subst in case cval1 or cval2
7993 was the same as ARG1. */
7994
7995 tree high_result
7996 = fold (build2 (code, type,
7997 eval_subst (arg0, cval1, maxval,
7998 cval2, minval),
7999 arg1));
8000 tree equal_result
8001 = fold (build2 (code, type,
8002 eval_subst (arg0, cval1, maxval,
8003 cval2, maxval),
8004 arg1));
8005 tree low_result
8006 = fold (build2 (code, type,
8007 eval_subst (arg0, cval1, minval,
8008 cval2, maxval),
8009 arg1));
8010
8011 /* All three of these results should be 0 or 1. Confirm they
8012 are. Then use those values to select the proper code
8013 to use. */
8014
8015 if ((integer_zerop (high_result)
8016 || integer_onep (high_result))
8017 && (integer_zerop (equal_result)
8018 || integer_onep (equal_result))
8019 && (integer_zerop (low_result)
8020 || integer_onep (low_result)))
8021 {
8022 /* Make a 3-bit mask with the high-order bit being the
8023 value for `>', the next for `=', and the low for `<'.  */
8024 switch ((integer_onep (high_result) * 4)
8025 + (integer_onep (equal_result) * 2)
8026 + integer_onep (low_result))
8027 {
8028 case 0:
8029 /* Always false. */
8030 return omit_one_operand (type, integer_zero_node, arg0);
8031 case 1:
8032 code = LT_EXPR;
8033 break;
8034 case 2:
8035 code = EQ_EXPR;
8036 break;
8037 case 3:
8038 code = LE_EXPR;
8039 break;
8040 case 4:
8041 code = GT_EXPR;
8042 break;
8043 case 5:
8044 code = NE_EXPR;
8045 break;
8046 case 6:
8047 code = GE_EXPR;
8048 break;
8049 case 7:
8050 /* Always true. */
8051 return omit_one_operand (type, integer_one_node, arg0);
8052 }
8053
8054 tem = build2 (code, type, cval1, cval2);
8055 if (save_p)
8056 return save_expr (tem);
8057 else
8058 return fold (tem);
8059 }
8060 }
8061 }
8062
8063 /* If this is a comparison of a field, we may be able to simplify it. */
8064 if (((TREE_CODE (arg0) == COMPONENT_REF
8065 && lang_hooks.can_use_bit_fields_p ())
8066 || TREE_CODE (arg0) == BIT_FIELD_REF)
8067 && (code == EQ_EXPR || code == NE_EXPR)
8068 /* Handle the constant case even without -O
8069 to make sure the warnings are given. */
8070 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8071 {
8072 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8073 if (t1)
8074 return t1;
8075 }
8076
8077 /* If this is a comparison of complex values and either or both sides
8078 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8079 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8080 This may prevent needless evaluations. */
8081 if ((code == EQ_EXPR || code == NE_EXPR)
8082 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8083 && (TREE_CODE (arg0) == COMPLEX_EXPR
8084 || TREE_CODE (arg1) == COMPLEX_EXPR
8085 || TREE_CODE (arg0) == COMPLEX_CST
8086 || TREE_CODE (arg1) == COMPLEX_CST))
8087 {
8088 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8089 tree real0, imag0, real1, imag1;
8090
8091 arg0 = save_expr (arg0);
8092 arg1 = save_expr (arg1);
8093 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8094 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8095 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8096 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8097
8098 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8099 : TRUTH_ORIF_EXPR),
8100 type,
8101 fold (build2 (code, type, real0, real1)),
8102 fold (build2 (code, type, imag0, imag1))));
8103 }
8104
8105 /* Optimize comparisons of strlen vs zero to a compare of the
8106 first character of the string vs zero. To wit,
8107 strlen(ptr) == 0 => *ptr == 0
8108 strlen(ptr) != 0 => *ptr != 0
8109 Other cases should reduce to one of these two (or a constant)
8110 due to the return value of strlen being unsigned. */
8111 if ((code == EQ_EXPR || code == NE_EXPR)
8112 && integer_zerop (arg1)
8113 && TREE_CODE (arg0) == CALL_EXPR)
8114 {
8115 tree fndecl = get_callee_fndecl (arg0);
8116 tree arglist;
8117
8118 if (fndecl
8119 && DECL_BUILT_IN (fndecl)
8120 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8121 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8122 && (arglist = TREE_OPERAND (arg0, 1))
8123 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8124 && ! TREE_CHAIN (arglist))
8125 return fold (build2 (code, type,
8126 build1 (INDIRECT_REF, char_type_node,
8127 TREE_VALUE (arglist)),
8128 integer_zero_node));
8129 }
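/* Editor's sketch (not part of the original source): e.g.

     strlen (p) == 0  =>  *p == 0

   testing only the first character instead of scanning the whole
   string; forms like strlen (p) < 1 reduce to one of the two cases
   above first, since strlen's return type is unsigned.  */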
8130
8131 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8132 into a single range test. */
8133 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8134 && TREE_CODE (arg1) == INTEGER_CST
8135 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8136 && !integer_zerop (TREE_OPERAND (arg0, 1))
8137 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8138 && !TREE_OVERFLOW (arg1))
8139 {
8140 t1 = fold_div_compare (code, type, arg0, arg1);
8141 if (t1 != NULL_TREE)
8142 return t1;
8143 }
8144
8145 /* Both ARG0 and ARG1 are known to be constants at this point. */
8146 t1 = fold_relational_const (code, type, arg0, arg1);
8147 return (t1 == NULL_TREE ? t : t1);
8148
8149 case COND_EXPR:
8150 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8151 so all simple results must be passed through pedantic_non_lvalue. */
8152 if (TREE_CODE (arg0) == INTEGER_CST)
8153 {
8154 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8155 /* Only optimize constant conditions when the selected branch
8156 has the same type as the COND_EXPR. This avoids optimizing
8157 away "c ? x : throw", where the throw has a void type. */
8158 if (! VOID_TYPE_P (TREE_TYPE (tem))
8159 || VOID_TYPE_P (type))
8160 return pedantic_non_lvalue (tem);
8161 return t;
8162 }
8163 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8164 return pedantic_omit_one_operand (type, arg1, arg0);
8165
8166 /* If we have A op B ? A : C, we may be able to convert this to a
8167 simpler expression, depending on the operation and the values
8168 of B and C. Signed zeros prevent all of these transformations,
8169 for reasons given above each one. */
8170
8171 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8172 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8173 arg1, TREE_OPERAND (arg0, 1))
8174 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8175 {
8176 tree arg2 = TREE_OPERAND (t, 2);
8177 enum tree_code comp_code = TREE_CODE (arg0);
8178
8179 STRIP_NOPS (arg2);
8180
8181 /* If we have A op 0 ? A : -A, consider applying the following
8182 transformations:
8183
8184 A == 0? A : -A same as -A
8185 A != 0? A : -A same as A
8186 A >= 0? A : -A same as abs (A)
8187 A > 0? A : -A same as abs (A)
8188 A <= 0? A : -A same as -abs (A)
8189 A < 0? A : -A same as -abs (A)
8190
8191 None of these transformations work for modes with signed
8192 zeros. If A is +/-0, the first two transformations will
8193 change the sign of the result (from +0 to -0, or vice
8194 versa). The last four will fix the sign of the result,
8195 even though the original expressions could be positive or
8196 negative, depending on the sign of A.
8197
8198 Note that all these transformations are correct if A is
8199 NaN, since the two alternatives (A and -A) are also NaNs. */
8200 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8201 ? real_zerop (TREE_OPERAND (arg0, 1))
8202 : integer_zerop (TREE_OPERAND (arg0, 1)))
8203 && TREE_CODE (arg2) == NEGATE_EXPR
8204 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8205 switch (comp_code)
8206 {
8207 case EQ_EXPR:
8208 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8209 tem = fold_convert (type, negate_expr (tem));
8210 return pedantic_non_lvalue (tem);
8211 case NE_EXPR:
8212 return pedantic_non_lvalue (fold_convert (type, arg1));
8213 case GE_EXPR:
8214 case GT_EXPR:
8215 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8216 arg1 = fold_convert (lang_hooks.types.signed_type
8217 (TREE_TYPE (arg1)), arg1);
8218 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8219 return pedantic_non_lvalue (fold_convert (type, arg1));
8220 case LE_EXPR:
8221 case LT_EXPR:
8222 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8223 arg1 = fold_convert (lang_hooks.types.signed_type
8224 (TREE_TYPE (arg1)), arg1);
8225 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8226 arg1 = negate_expr (fold_convert (type, arg1));
8227 return pedantic_non_lvalue (arg1);
8228 default:
8229 abort ();
8230 }
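/* Editor's sketch (not part of the original source): for int x,

     x > 0 ? x : -x  =>  ABS_EXPR <x>
     x < 0 ? x : -x  =>  -ABS_EXPR <x>

   per the table above; the signed-zero guard keeps this from firing
   on IEEE floats unless -funsafe-math-optimizations is in effect.  */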
8231
8232 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8233 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8234 both transformations are correct when A is NaN: A != 0
8235 is then true, and A == 0 is false. */
8236
8237 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8238 {
8239 if (comp_code == NE_EXPR)
8240 return pedantic_non_lvalue (fold_convert (type, arg1));
8241 else if (comp_code == EQ_EXPR)
8242 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8243 }
8244
8245 /* Try some transformations of A op B ? A : B.
8246
8247 A == B? A : B same as B
8248 A != B? A : B same as A
8249 A >= B? A : B same as max (A, B)
8250 A > B? A : B same as max (B, A)
8251 A <= B? A : B same as min (A, B)
8252 A < B? A : B same as min (B, A)
8253
8254 As above, these transformations don't work in the presence
8255 of signed zeros. For example, if A and B are zeros of
8256 opposite sign, the first two transformations will change
8257 the sign of the result. In the last four, the original
8258 expressions give different results for (A=+0, B=-0) and
8259 (A=-0, B=+0), but the transformed expressions do not.
8260
8261 The first two transformations are correct if either A or B
8262 is a NaN. In the first transformation, the condition will
8263 be false, and B will indeed be chosen. In the case of the
8264 second transformation, the condition A != B will be true,
8265 and A will be chosen.
8266
8267 The conversions to max() and min() are not correct if B is
8268 a number and A is not. The conditions in the original
8269 expressions will be false, so all four give B. The min()
8270 and max() versions would give a NaN instead. */
8271 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8272 arg2, TREE_OPERAND (arg0, 0)))
8273 {
8274 tree comp_op0 = TREE_OPERAND (arg0, 0);
8275 tree comp_op1 = TREE_OPERAND (arg0, 1);
8276 tree comp_type = TREE_TYPE (comp_op0);
8277
8278 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8279 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8280 {
8281 comp_type = type;
8282 comp_op0 = arg1;
8283 comp_op1 = arg2;
8284 }
8285
8286 switch (comp_code)
8287 {
8288 case EQ_EXPR:
8289 return pedantic_non_lvalue (fold_convert (type, arg2));
8290 case NE_EXPR:
8291 return pedantic_non_lvalue (fold_convert (type, arg1));
8292 case LE_EXPR:
8293 case LT_EXPR:
8294 /* In C++ a ?: expression can be an lvalue, so put the
8295 operand which will be used if they are equal first,
8296 so that we can convert this back to the
8297 corresponding COND_EXPR. */
8298 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8299 return pedantic_non_lvalue (fold_convert
8300 (type, fold (build2 (MIN_EXPR, comp_type,
8301 (comp_code == LE_EXPR
8302 ? comp_op0 : comp_op1),
8303 (comp_code == LE_EXPR
8304 ? comp_op1 : comp_op0)))));
8305 break;
8306 case GE_EXPR:
8307 case GT_EXPR:
8308 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8309 return pedantic_non_lvalue (fold_convert
8310 (type, fold (build2 (MAX_EXPR, comp_type,
8311 (comp_code == GE_EXPR
8312 ? comp_op0 : comp_op1),
8313 (comp_code == GE_EXPR
8314 ? comp_op1 : comp_op0)))));
8315 break;
8316 default:
8317 abort ();
8318 }
8319 }
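/* Editor's sketch (not part of the original source): for int a, b,

     a < b ? a : b  =>  MIN_EXPR <a, b>
     a > b ? a : b  =>  MAX_EXPR <a, b>

   for floats the HONOR_NANS guard above blocks this, since a NaN
   operand makes the comparison false and selects B, while the
   min/max form could yield the other operand.  */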
8320
8321 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8322 we might still be able to simplify this. For example,
8323 if C1 is one less or one more than C2, this might have started
8324 out as a MIN or MAX and been transformed by this function.
8325 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8326
8327 if (INTEGRAL_TYPE_P (type)
8328 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8329 && TREE_CODE (arg2) == INTEGER_CST)
8330 switch (comp_code)
8331 {
8332 case EQ_EXPR:
8333 /* We can replace A with C1 in this case. */
8334 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8335 return fold (build3 (code, type, TREE_OPERAND (t, 0), arg1,
8336 TREE_OPERAND (t, 2)));
8337
8338 case LT_EXPR:
8339 /* If C1 is C2 + 1, this is min(A, C2). */
8340 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8341 OEP_ONLY_CONST)
8342 && operand_equal_p (TREE_OPERAND (arg0, 1),
8343 const_binop (PLUS_EXPR, arg2,
8344 integer_one_node, 0),
8345 OEP_ONLY_CONST))
8346 return pedantic_non_lvalue
8347 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8348 break;
8349
8350 case LE_EXPR:
8351 /* If C1 is C2 - 1, this is min(A, C2). */
8352 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8353 OEP_ONLY_CONST)
8354 && operand_equal_p (TREE_OPERAND (arg0, 1),
8355 const_binop (MINUS_EXPR, arg2,
8356 integer_one_node, 0),
8357 OEP_ONLY_CONST))
8358 return pedantic_non_lvalue
8359 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8360 break;
8361
8362 case GT_EXPR:
8363 /* If C1 is C2 - 1, this is max(A, C2). */
8364 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8365 OEP_ONLY_CONST)
8366 && operand_equal_p (TREE_OPERAND (arg0, 1),
8367 const_binop (MINUS_EXPR, arg2,
8368 integer_one_node, 0),
8369 OEP_ONLY_CONST))
8370 return pedantic_non_lvalue
8371 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8372 break;
8373
8374 case GE_EXPR:
8375 /* If C1 is C2 + 1, this is max(A, C2). */
8376 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8377 OEP_ONLY_CONST)
8378 && operand_equal_p (TREE_OPERAND (arg0, 1),
8379 const_binop (PLUS_EXPR, arg2,
8380 integer_one_node, 0),
8381 OEP_ONLY_CONST))
8382 return pedantic_non_lvalue
8383 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8384 break;
8385 case NE_EXPR:
8386 break;
8387 default:
8388 abort ();
8389 }
8390 }
8391
8392 /* If the second operand is simpler than the third, swap them
8393 since that produces better jump optimization results. */
8394 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8395 TREE_OPERAND (t, 2), false))
8396 {
8397 /* See if this can be inverted. If it can't, possibly because
8398 it was a floating-point inequality comparison, don't do
8399 anything. */
8400 tem = invert_truthvalue (arg0);
8401
8402 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8403 return fold (build3 (code, type, tem,
8404 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8405 }
8406
8407 /* Convert A ? 1 : 0 to simply A. */
8408 if (integer_onep (TREE_OPERAND (t, 1))
8409 && integer_zerop (TREE_OPERAND (t, 2))
8410 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8411 call to fold will try to move the conversion inside
8412 a COND, which will recurse. In that case, the COND_EXPR
8413 is probably the best choice, so leave it alone. */
8414 && type == TREE_TYPE (arg0))
8415 return pedantic_non_lvalue (arg0);
8416
8417 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8418 over COND_EXPR in cases such as floating point comparisons. */
8419 if (integer_zerop (TREE_OPERAND (t, 1))
8420 && integer_onep (TREE_OPERAND (t, 2))
8421 && truth_value_p (TREE_CODE (arg0)))
8422 return pedantic_non_lvalue (fold_convert (type,
8423 invert_truthvalue (arg0)));
8424
8425 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8426 operation is simply A & 2. */
8427
8428 if (integer_zerop (TREE_OPERAND (t, 2))
8429 && TREE_CODE (arg0) == NE_EXPR
8430 && integer_zerop (TREE_OPERAND (arg0, 1))
8431 && integer_pow2p (arg1)
8432 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8433 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8434 arg1, OEP_ONLY_CONST))
8435 return pedantic_non_lvalue (fold_convert (type,
8436 TREE_OPERAND (arg0, 0)));
8437
8438 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8439 if (integer_zerop (TREE_OPERAND (t, 2))
8440 && truth_value_p (TREE_CODE (arg0))
8441 && truth_value_p (TREE_CODE (arg1)))
8442 return pedantic_non_lvalue (fold (build2 (TRUTH_ANDIF_EXPR, type,
8443 arg0, arg1)));
8444
8445 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8446 if (integer_onep (TREE_OPERAND (t, 2))
8447 && truth_value_p (TREE_CODE (arg0))
8448 && truth_value_p (TREE_CODE (arg1)))
8449 {
8450 /* Only perform transformation if ARG0 is easily inverted. */
8451 tem = invert_truthvalue (arg0);
8452 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8453 return pedantic_non_lvalue (fold (build2 (TRUTH_ORIF_EXPR, type,
8454 tem, arg1)));
8455 }
8456
8457 return t;
8458
8459 case COMPOUND_EXPR:
8460 /* When pedantic, a compound expression can be neither an lvalue
8461 nor an integer constant expression. */
8462 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8463 return t;
8464 /* Don't let (0, 0) be a null pointer constant. */
8465 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8466 : fold_convert (type, arg1);
8467 return pedantic_non_lvalue (tem);
8468
8469 case COMPLEX_EXPR:
8470 if (wins)
8471 return build_complex (type, arg0, arg1);
8472 return t;
8473
8474 case REALPART_EXPR:
8475 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8476 return t;
8477 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8478 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8479 TREE_OPERAND (arg0, 1));
8480 else if (TREE_CODE (arg0) == COMPLEX_CST)
8481 return TREE_REALPART (arg0);
8482 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8483 return fold (build2 (TREE_CODE (arg0), type,
8484 fold (build1 (REALPART_EXPR, type,
8485 TREE_OPERAND (arg0, 0))),
8486 fold (build1 (REALPART_EXPR, type,
8487 TREE_OPERAND (arg0, 1)))));
8488 return t;
8489
8490 case IMAGPART_EXPR:
8491 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8492 return fold_convert (type, integer_zero_node);
8493 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8494 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8495 TREE_OPERAND (arg0, 0));
8496 else if (TREE_CODE (arg0) == COMPLEX_CST)
8497 return TREE_IMAGPART (arg0);
8498 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8499 return fold (build2 (TREE_CODE (arg0), type,
8500 fold (build1 (IMAGPART_EXPR, type,
8501 TREE_OPERAND (arg0, 0))),
8502 fold (build1 (IMAGPART_EXPR, type,
8503 TREE_OPERAND (arg0, 1)))));
8504 return t;
8505
8506 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8507 appropriate. */
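/* For instance (illustrative): CLEANUP_POINT_EXPR <-X> can be
rewritten as -(CLEANUP_POINT_EXPR <X>), and for a binary
operation with one constant operand the cleanup point is
pushed down onto the non-constant operand. */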
8508 case CLEANUP_POINT_EXPR:
8509 if (! has_cleanups (arg0))
8510 return TREE_OPERAND (t, 0);
8511
8512 {
8513 enum tree_code code0 = TREE_CODE (arg0);
8514 int kind0 = TREE_CODE_CLASS (code0);
8515 tree arg00 = TREE_OPERAND (arg0, 0);
8516 tree arg01;
8517
8518 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8519 return fold (build1 (code0, type,
8520 fold (build1 (CLEANUP_POINT_EXPR,
8521 TREE_TYPE (arg00), arg00))));
8522
8523 if (kind0 == '<' || kind0 == '2'
8524 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8525 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8526 || code0 == TRUTH_XOR_EXPR)
8527 {
8528 arg01 = TREE_OPERAND (arg0, 1);
8529
8530 if (TREE_CONSTANT (arg00)
8531 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8532 && ! has_cleanups (arg00)))
8533 return fold (build2 (code0, type, arg00,
8534 fold (build1 (CLEANUP_POINT_EXPR,
8535 TREE_TYPE (arg01), arg01))));
8536
8537 if (TREE_CONSTANT (arg01))
8538 return fold (build2 (code0, type,
8539 fold (build1 (CLEANUP_POINT_EXPR,
8540 TREE_TYPE (arg00), arg00)),
8541 arg01));
8542 }
8543
8544 return t;
8545 }
8546
8547 case CALL_EXPR:
8548 /* Check for a built-in function. */
8549 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8550 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8551 == FUNCTION_DECL)
8552 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8553 {
8554 tree tmp = fold_builtin (t);
8555 if (tmp)
8556 return tmp;
8557 }
8558 return t;
8559
8560 default:
8561 return t;
8562 } /* switch (code) */
8563 }
8564
8565 #ifdef ENABLE_FOLD_CHECKING
8566 #undef fold
8567
8568 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8569 static void fold_check_failed (tree, tree);
8570 void print_fold_checksum (tree);
8571
8572 /* When --enable-checking=fold, compute a digest of expr before
8573 and after the actual fold call to verify that fold did not
8574 accidentally change the original expr. */
8575
8576 tree
8577 fold (tree expr)
8578 {
8579 tree ret;
8580 struct md5_ctx ctx;
8581 unsigned char checksum_before[16], checksum_after[16];
8582 htab_t ht;
8583
8584 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8585 md5_init_ctx (&ctx);
8586 fold_checksum_tree (expr, &ctx, ht);
8587 md5_finish_ctx (&ctx, checksum_before);
8588 htab_empty (ht);
8589
8590 ret = fold_1 (expr);
8591
8592 md5_init_ctx (&ctx);
8593 fold_checksum_tree (expr, &ctx, ht);
8594 md5_finish_ctx (&ctx, checksum_after);
8595 htab_delete (ht);
8596
8597 if (memcmp (checksum_before, checksum_after, 16))
8598 fold_check_failed (expr, ret);
8599
8600 return ret;
8601 }
8602
8603 void
8604 print_fold_checksum (tree expr)
8605 {
8606 struct md5_ctx ctx;
8607 unsigned char checksum[16], cnt;
8608 htab_t ht;
8609
8610 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8611 md5_init_ctx (&ctx);
8612 fold_checksum_tree (expr, &ctx, ht);
8613 md5_finish_ctx (&ctx, checksum);
8614 htab_delete (ht);
8615 for (cnt = 0; cnt < 16; ++cnt)
8616 fprintf (stderr, "%02x", checksum[cnt]);
8617 putc ('\n', stderr);
8618 }
8619
8620 static void
8621 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8622 {
8623 internal_error ("fold check: original tree changed by fold");
8624 }
8625
8626 static void
8627 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8628 {
8629 void **slot;
8630 enum tree_code code;
8631 char buf[sizeof (struct tree_decl)];
8632 int i, len;
8633
8634 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8635 > sizeof (struct tree_decl)
8636 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8637 abort ();
8638 if (expr == NULL)
8639 return;
8640 slot = htab_find_slot (ht, expr, INSERT);
8641 if (*slot != NULL)
8642 return;
8643 *slot = expr;
8644 code = TREE_CODE (expr);
8645 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8646 {
8647 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8648 memcpy (buf, expr, tree_size (expr));
8649 expr = (tree) buf;
8650 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8651 }
8652 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8653 {
8654 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8655 memcpy (buf, expr, tree_size (expr));
8656 expr = (tree) buf;
8657 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8658 }
8659 else if (TREE_CODE_CLASS (code) == 't'
8660 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8661 {
8662 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8663 memcpy (buf, expr, tree_size (expr));
8664 expr = (tree) buf;
8665 TYPE_POINTER_TO (expr) = NULL;
8666 TYPE_REFERENCE_TO (expr) = NULL;
8667 }
8668 md5_process_bytes (expr, tree_size (expr), ctx);
8669 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8670 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8671 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8672 len = TREE_CODE_LENGTH (code);
8673 switch (TREE_CODE_CLASS (code))
8674 {
8675 case 'c':
8676 switch (code)
8677 {
8678 case STRING_CST:
8679 md5_process_bytes (TREE_STRING_POINTER (expr),
8680 TREE_STRING_LENGTH (expr), ctx);
8681 break;
8682 case COMPLEX_CST:
8683 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8684 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8685 break;
8686 case VECTOR_CST:
8687 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8688 break;
8689 default:
8690 break;
8691 }
8692 break;
8693 case 'x':
8694 switch (code)
8695 {
8696 case TREE_LIST:
8697 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8698 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8699 break;
8700 case TREE_VEC:
8701 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8702 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8703 break;
8704 default:
8705 break;
8706 }
8707 break;
8708 case 'e':
8709 switch (code)
8710 {
8711 case SAVE_EXPR: len = 2; break;
8712 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8713 case RTL_EXPR: len = 0; break;
8714 case WITH_CLEANUP_EXPR: len = 2; break;
8715 default: break;
8716 }
8717 /* Fall through. */
8718 case 'r':
8719 case '<':
8720 case '1':
8721 case '2':
8722 case 's':
8723 for (i = 0; i < len; ++i)
8724 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8725 break;
8726 case 'd':
8727 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8728 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8729 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8730 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8731 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8732 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8733 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8734 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8735 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8736 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8737 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8738 break;
8739 case 't':
8740 if (TREE_CODE (expr) == ENUMERAL_TYPE)
8741 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8742 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8743 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8744 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8745 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8746 if (INTEGRAL_TYPE_P (expr)
8747 || SCALAR_FLOAT_TYPE_P (expr))
8748 {
8749 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8750 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8751 }
8752 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8753 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8754 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8755 break;
8756 default:
8757 break;
8758 }
8759 }
8760
8761 #endif
8762
8763 /* Perform constant folding and related simplification of initializer
8764 expression EXPR. This behaves identically to "fold" but ignores
8765 potential run-time traps and exceptions that fold must preserve. */
8766
8767 tree
8768 fold_initializer (tree expr)
8769 {
8770 int saved_signaling_nans = flag_signaling_nans;
8771 int saved_trapping_math = flag_trapping_math;
8772 int saved_trapv = flag_trapv;
8773 tree result;
8774
8775 flag_signaling_nans = 0;
8776 flag_trapping_math = 0;
8777 flag_trapv = 0;
8778
8779 result = fold (expr);
8780
8781 flag_signaling_nans = saved_signaling_nans;
8782 flag_trapping_math = saved_trapping_math;
8783 flag_trapv = saved_trapv;
8784
8785 return result;
8786 }
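/* For example (illustrative): with -ftrapping-math, fold must not
replace a run-time division that could trap, but for a static
initializer such as

static const double inf = 1.0 / 0.0;

evaluation necessarily happens at translation time, so
fold_initializer clears flag_trapping_math (and the related
flags) around the call to fold. */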
8787
8788 /* Determine if the first argument is a multiple of the second argument.
8789 Return 0 if it is not, or if we cannot easily determine it to be.
8790
8791 An example of the sort of thing we care about (at this point; this routine
8792 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8793 fold cases do now) is discovering that
8794
8795 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8796
8797 is a multiple of
8798
8799 SAVE_EXPR (J * 8)
8800
8801 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8802
8803 This code also handles discovering that
8804
8805 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8806
8807 is a multiple of 8 so we don't have to worry about dealing with a
8808 possible remainder.
8809
8810 Note that we *look* inside a SAVE_EXPR only to determine how it was
8811 calculated; it is not safe for fold to do much of anything else with the
8812 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8813 at run time. For example, the latter example above *cannot* be implemented
8814 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8815 evaluation time of the original SAVE_EXPR is not necessarily the same at
8816 the time the new expression is evaluated. The only optimization of this
8817 sort that would be valid is changing
8818
8819 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8820
8821 divided by 8 to
8822
8823 SAVE_EXPR (I) * SAVE_EXPR (J)
8824
8825 (where the same SAVE_EXPR (J) is used in the original and the
8826 transformed version). */
8827
8828 static int
8829 multiple_of_p (tree type, tree top, tree bottom)
8830 {
8831 if (operand_equal_p (top, bottom, 0))
8832 return 1;
8833
8834 if (TREE_CODE (type) != INTEGER_TYPE)
8835 return 0;
8836
8837 switch (TREE_CODE (top))
8838 {
8839 case MULT_EXPR:
8840 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8841 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8842
8843 case PLUS_EXPR:
8844 case MINUS_EXPR:
8845 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8846 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8847
8848 case LSHIFT_EXPR:
8849 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8850 {
8851 tree op1, t1;
8852
8853 op1 = TREE_OPERAND (top, 1);
8854 /* const_binop may not detect overflow correctly,
8855 so check for it explicitly here. */
8856 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8857 > TREE_INT_CST_LOW (op1)
8858 && TREE_INT_CST_HIGH (op1) == 0
8859 && 0 != (t1 = fold_convert (type,
8860 const_binop (LSHIFT_EXPR,
8861 size_one_node,
8862 op1, 0)))
8863 && ! TREE_OVERFLOW (t1))
8864 return multiple_of_p (type, t1, bottom);
8865 }
8866 return 0;
8867
8868 case NOP_EXPR:
8869 /* Can't handle conversions from non-integral or wider integral type. */
8870 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8871 || (TYPE_PRECISION (type)
8872 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8873 return 0;
8874
8875 /* .. fall through ... */
8876
8877 case SAVE_EXPR:
8878 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8879
8880 case INTEGER_CST:
8881 if (TREE_CODE (bottom) != INTEGER_CST
8882 || (TYPE_UNSIGNED (type)
8883 && (tree_int_cst_sgn (top) < 0
8884 || tree_int_cst_sgn (bottom) < 0)))
8885 return 0;
8886 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8887 top, bottom, 0));
8888
8889 default:
8890 return 0;
8891 }
8892 }
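/* Worked example (illustrative): multiple_of_p (type, I * 16 + 32, 8)
returns 1. The PLUS_EXPR case requires both operands to be
multiples of 8: I * 16 qualifies through the MULT_EXPR case since
16 % 8 == 0 (the INTEGER_CST case), and 32 % 8 == 0 directly. */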
8893
8894 /* Return true if `t' is known to be non-negative. */
8895
8896 int
8897 tree_expr_nonnegative_p (tree t)
8898 {
8899 switch (TREE_CODE (t))
8900 {
8901 case ABS_EXPR:
8902 return 1;
8903
8904 case INTEGER_CST:
8905 return tree_int_cst_sgn (t) >= 0;
8906
8907 case REAL_CST:
8908 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8909
8910 case PLUS_EXPR:
8911 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8912 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8913 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8914
8915 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8916 both unsigned and at least 2 bits shorter than the result. */
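/* Illustrative instance: two unsigned char values widened to
32-bit int sum to at most 255 + 255 == 510, which needs only
9 bits; 9 < 32, so the sum cannot be negative. */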
8917 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8918 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8919 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8920 {
8921 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8922 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8923 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8924 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8925 {
8926 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8927 TYPE_PRECISION (inner2)) + 1;
8928 return prec < TYPE_PRECISION (TREE_TYPE (t));
8929 }
8930 }
8931 break;
8932
8933 case MULT_EXPR:
8934 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8935 {
8936 /* x * x for floating point x is always non-negative. */
8937 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8938 return 1;
8939 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8940 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8941 }
8942
8943 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8944 both unsigned and their combined width is less than the result's. */
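/* Illustrative instance: two unsigned char values widened to
32-bit int multiply to at most 255 * 255 == 65025, which fits
in 8 + 8 == 16 bits; 16 < 32, so the product cannot be
negative. */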
8945 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8946 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8947 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8948 {
8949 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8950 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8951 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8952 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8953 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8954 < TYPE_PRECISION (TREE_TYPE (t));
8955 }
8956 return 0;
8957
8958 case TRUNC_DIV_EXPR:
8959 case CEIL_DIV_EXPR:
8960 case FLOOR_DIV_EXPR:
8961 case ROUND_DIV_EXPR:
8962 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8963 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8964
8965 case TRUNC_MOD_EXPR:
8966 case CEIL_MOD_EXPR:
8967 case FLOOR_MOD_EXPR:
8968 case ROUND_MOD_EXPR:
8969 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8970
8971 case RDIV_EXPR:
8972 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8973 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8974
8975 case BIT_AND_EXPR:
8976 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8977 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8978 case BIT_IOR_EXPR:
8979 case BIT_XOR_EXPR:
8980 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8981 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8982
8983 case NOP_EXPR:
8984 {
8985 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8986 tree outer_type = TREE_TYPE (t);
8987
8988 if (TREE_CODE (outer_type) == REAL_TYPE)
8989 {
8990 if (TREE_CODE (inner_type) == REAL_TYPE)
8991 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8992 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8993 {
8994 if (TYPE_UNSIGNED (inner_type))
8995 return 1;
8996 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8997 }
8998 }
8999 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9000 {
9001 if (TREE_CODE (inner_type) == REAL_TYPE)
9002 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9003 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9004 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9005 && TYPE_UNSIGNED (inner_type);
9006 }
9007 }
9008 break;
9009
9010 case COND_EXPR:
9011 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9012 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9013 case COMPOUND_EXPR:
9014 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9015 case MIN_EXPR:
9016 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9017 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9018 case MAX_EXPR:
9019 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9020 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9021 case MODIFY_EXPR:
9022 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9023 case BIND_EXPR:
9024 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9025 case SAVE_EXPR:
9026 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9027 case NON_LVALUE_EXPR:
9028 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9029 case FLOAT_EXPR:
9030 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9031 case RTL_EXPR:
9032 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
9033
9034 case CALL_EXPR:
9035 {
9036 tree fndecl = get_callee_fndecl (t);
9037 tree arglist = TREE_OPERAND (t, 1);
9038 if (fndecl
9039 && DECL_BUILT_IN (fndecl)
9040 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9041 switch (DECL_FUNCTION_CODE (fndecl))
9042 {
9043 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9044 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9045 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9046 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9047
9048 CASE_BUILTIN_F (BUILT_IN_ACOS)
9049 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9050 CASE_BUILTIN_F (BUILT_IN_CABS)
9051 CASE_BUILTIN_F (BUILT_IN_COSH)
9052 CASE_BUILTIN_F (BUILT_IN_ERFC)
9053 CASE_BUILTIN_F (BUILT_IN_EXP)
9054 CASE_BUILTIN_F (BUILT_IN_EXP10)
9055 CASE_BUILTIN_F (BUILT_IN_EXP2)
9056 CASE_BUILTIN_F (BUILT_IN_FABS)
9057 CASE_BUILTIN_F (BUILT_IN_FDIM)
9058 CASE_BUILTIN_F (BUILT_IN_FREXP)
9059 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9060 CASE_BUILTIN_F (BUILT_IN_POW10)
9061 CASE_BUILTIN_I (BUILT_IN_FFS)
9062 CASE_BUILTIN_I (BUILT_IN_PARITY)
9063 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9064 /* Always true. */
9065 return 1;
9066
9067 CASE_BUILTIN_F (BUILT_IN_SQRT)
9068 /* sqrt(-0.0) is -0.0. */
9069 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9070 return 1;
9071 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9072
9073 CASE_BUILTIN_F (BUILT_IN_ASINH)
9074 CASE_BUILTIN_F (BUILT_IN_ATAN)
9075 CASE_BUILTIN_F (BUILT_IN_ATANH)
9076 CASE_BUILTIN_F (BUILT_IN_CBRT)
9077 CASE_BUILTIN_F (BUILT_IN_CEIL)
9078 CASE_BUILTIN_F (BUILT_IN_ERF)
9079 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9080 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9081 CASE_BUILTIN_F (BUILT_IN_FMOD)
9082 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9083 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9084 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9085 CASE_BUILTIN_F (BUILT_IN_LRINT)
9086 CASE_BUILTIN_F (BUILT_IN_LROUND)
9087 CASE_BUILTIN_F (BUILT_IN_MODF)
9088 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9089 CASE_BUILTIN_F (BUILT_IN_POW)
9090 CASE_BUILTIN_F (BUILT_IN_RINT)
9091 CASE_BUILTIN_F (BUILT_IN_ROUND)
9092 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9093 CASE_BUILTIN_F (BUILT_IN_SINH)
9094 CASE_BUILTIN_F (BUILT_IN_TANH)
9095 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9096 /* True if the 1st argument is nonnegative. */
9097 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9098
9099 CASE_BUILTIN_F (BUILT_IN_FMAX)
9100 /* True if the 1st OR 2nd arguments are nonnegative. */
9101 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9102 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9103
9104 CASE_BUILTIN_F (BUILT_IN_FMIN)
9105 /* True if the 1st AND 2nd arguments are nonnegative. */
9106 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9107 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9108
9109 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9110 /* True if the 2nd argument is nonnegative. */
9111 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9112
9113 default:
9114 break;
9115 #undef CASE_BUILTIN_F
9116 #undef CASE_BUILTIN_I
9117 }
9118 }
9119
9120 /* ... fall through ... */
9121
9122 default:
9123 if (truth_value_p (TREE_CODE (t)))
9124 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9125 return 1;
9126 }
9127
9128 /* We don't know the sign of `t', so be conservative and return false. */
9129 return 0;
9130 }
9131
9132 /* Return true when T is an address and is known to be nonzero.
9133 For floating point we further ensure that T is not denormal.
9134 Similar logic is present in nonzero_address in rtlanal.c. */
9135
9136 static bool
9137 tree_expr_nonzero_p (tree t)
9138 {
9139 tree type = TREE_TYPE (t);
9140
9141 /* Doing something useful for floating point would need more work. */
9142 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9143 return false;
9144
9145 switch (TREE_CODE (t))
9146 {
9147 case ABS_EXPR:
9148 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9149 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* ABS_EXPR of zero is zero; do not fall through to the
INTEGER_CST case, which would wrongly claim the value nonzero. */
break;

9151 case INTEGER_CST:
9152 return !integer_zerop (t);
9153
9154 case PLUS_EXPR:
9155 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9156 {
9157 /* In the presence of negative values it is hard
9158 to say anything definite. */
9159 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9160 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9161 return false;
9162 /* One of the operands must be positive and the other non-negative. */
9163 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9164 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9165 }
9166 break;
9167
9168 case MULT_EXPR:
9169 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9170 {
9171 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9172 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9173 }
9174 break;
9175
9176 case NOP_EXPR:
9177 {
9178 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9179 tree outer_type = TREE_TYPE (t);
9180
9181 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9182 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9183 }
9184 break;
9185
9186 case ADDR_EXPR:
9187 /* Weak declarations may link to NULL. */
9188 if (DECL_P (TREE_OPERAND (t, 0)))
9189 return !DECL_WEAK (TREE_OPERAND (t, 0));
9190 /* Constants and all other cases are never weak. */
9191 return true;
9192
9193 case COND_EXPR:
9194 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9195 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9196
9197 case MIN_EXPR:
9198 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9199 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9200
9201 case MAX_EXPR:
9202 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9203 {
9204 /* When both operands are nonzero, then MAX must be too. */
9205 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9206 return true;
9207
9208 /* MAX where operand 0 is positive is positive. */
9209 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9210 }
9211 /* MAX where operand 1 is positive is positive. */
9212 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9213 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9214 return true;
9215 break;
9216
9217 case COMPOUND_EXPR:
9218 case MODIFY_EXPR:
9219 case BIND_EXPR:
9220 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9221
9222 case SAVE_EXPR:
9223 case NON_LVALUE_EXPR:
9224 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9225
9226 case BIT_IOR_EXPR:
9227 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9228 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9229
9230 default:
9231 break;
9232 }
9233 return false;
9234 }
9235
9236 /* Return true if `r' is known to be non-negative.
9237 Only handles constants at the moment. */
9238
9239 int
9240 rtl_expr_nonnegative_p (rtx r)
9241 {
9242 switch (GET_CODE (r))
9243 {
9244 case CONST_INT:
9245 return INTVAL (r) >= 0;
9246
9247 case CONST_DOUBLE:
9248 if (GET_MODE (r) == VOIDmode)
9249 return CONST_DOUBLE_HIGH (r) >= 0;
9250 return 0;
9251
9252 case CONST_VECTOR:
9253 {
9254 int units, i;
9255 rtx elt;
9256
9257 units = CONST_VECTOR_NUNITS (r);
9258
9259 for (i = 0; i < units; ++i)
9260 {
9261 elt = CONST_VECTOR_ELT (r, i);
9262 if (!rtl_expr_nonnegative_p (elt))
9263 return 0;
9264 }
9265
9266 return 1;
9267 }
9268
9269 case SYMBOL_REF:
9270 case LABEL_REF:
9271 /* These are always nonnegative. */
9272 return 1;
9273
9274 default:
9275 return 0;
9276 }
9277 }
9278
9279
9280 /* See if we are applying CODE, a relational operator, to the highest
9281 or lowest possible integer of TYPE. If so, then the result is a
9282 compile-time constant. */
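/* Illustrative examples for unsigned char X compared in its own type:
"X > 255" folds to 0 and "X <= 255" to 1, while "X >= 255" is
rewritten as "X == 255" and "X < 255" as "X != 255". */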
9283
9284 static tree
9285 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9286 tree *op1_p)
9287 {
9288 tree op0 = *op0_p;
9289 tree op1 = *op1_p;
9290 enum tree_code code = *code_p;
9291 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9292
9293 if (TREE_CODE (op1) == INTEGER_CST
9294 && ! TREE_CONSTANT_OVERFLOW (op1)
9295 && width <= HOST_BITS_PER_WIDE_INT
9296 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9297 || POINTER_TYPE_P (TREE_TYPE (op1))))
9298 {
9299 unsigned HOST_WIDE_INT signed_max;
9300 unsigned HOST_WIDE_INT max, min;
9301
9302 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9303
9304 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9305 {
9306 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9307 min = 0;
9308 }
9309 else
9310 {
9311 max = signed_max;
9312 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9313 }
9314
9315 if (TREE_INT_CST_HIGH (op1) == 0
9316 && TREE_INT_CST_LOW (op1) == max)
9317 switch (code)
9318 {
9319 case GT_EXPR:
9320 return omit_one_operand (type, integer_zero_node, op0);
9321
9322 case GE_EXPR:
9323 *code_p = EQ_EXPR;
9324 break;
9325 case LE_EXPR:
9326 return omit_one_operand (type, integer_one_node, op0);
9327
9328 case LT_EXPR:
9329 *code_p = NE_EXPR;
9330 break;
9331
9332 /* The GE_EXPR and LT_EXPR cases above are not normally
9333 reached because of previous transformations. */
9334
9335 default:
9336 break;
9337 }
9338 else if (TREE_INT_CST_HIGH (op1) == 0
9339 && TREE_INT_CST_LOW (op1) == max - 1)
9340 switch (code)
9341 {
9342 case GT_EXPR:
9343 *code_p = EQ_EXPR;
9344 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9345 break;
9346 case LE_EXPR:
9347 *code_p = NE_EXPR;
9348 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9349 break;
9350 default:
9351 break;
9352 }
9353 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9354 && TREE_INT_CST_LOW (op1) == min)
9355 switch (code)
9356 {
9357 case LT_EXPR:
9358 return omit_one_operand (type, integer_zero_node, op0);
9359
9360 case LE_EXPR:
9361 *code_p = EQ_EXPR;
9362 break;
9363
9364 case GE_EXPR:
9365 return omit_one_operand (type, integer_one_node, op0);
9366
9367 case GT_EXPR:
9368 *code_p = NE_EXPR;
9369 break;
9370
9371 default:
9372 break;
9373 }
9374 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9375 && TREE_INT_CST_LOW (op1) == min + 1)
9376 switch (code)
9377 {
9378 case GE_EXPR:
9379 *code_p = NE_EXPR;
9380 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9381 break;
9382 case LT_EXPR:
9383 *code_p = EQ_EXPR;
9384 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9385 break;
9386 default:
9387 break;
9388 }
9389
9390 else if (TREE_INT_CST_HIGH (op1) == 0
9391 && TREE_INT_CST_LOW (op1) == signed_max
9392 && TYPE_UNSIGNED (TREE_TYPE (op1))
9393 /* signed_type does not work on pointer types. */
9394 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9395 {
9396 /* The following case also applies to X < signed_max+1
9397 and X >= signed_max+1 because of previous transformations. */
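/* Illustrative instance: for 32-bit unsigned int X,
"X > 2147483647" becomes "(int) X < 0" and
"X <= 2147483647" becomes "(int) X >= 0". */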
9398 if (code == LE_EXPR || code == GT_EXPR)
9399 {
9400 tree st0, st1, exp, retval;
9401 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9402 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9403
9404 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9405 type,
9406 fold_convert (st0, op0),
9407 fold_convert (st1, integer_zero_node));
9408
9409 retval
9410 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9411 TREE_TYPE (exp),
9412 TREE_OPERAND (exp, 0),
9413 TREE_OPERAND (exp, 1));
9414
9415 /* If we are in gimple form, then returning EXP would create
9416 non-gimple expressions. Clearing it is safe and ensures
9417 we do not allow a non-gimple expression to escape. */
9418 if (in_gimple_form)
9419 exp = NULL;
9420
9421 return (retval ? retval : exp);
9422 }
9423 }
9424 }
9425
9426 return NULL_TREE;
9427 }
9428
9429
9430 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9431 attempt to fold the expression to a constant without modifying TYPE,
9432 OP0 or OP1.
9433
9434 If the expression could be simplified to a constant, then return
9435 the constant. If the expression would not be simplified to a
9436 constant, then return NULL_TREE.
9437
9438 Note this is primarily designed to be called after gimplification
9439 of the tree structures and when at least one operand is a constant.
9440 As a result of those simplifying assumptions this routine is far
9441 simpler than the generic fold routine. */
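/* Usage sketch (illustrative): given INTEGER_CSTs for 2 and 3,
nondestructive_fold_binary_to_constant (PLUS_EXPR, type, two, three)
returns the INTEGER_CST 5; if either operand is a non-constant
SSA name it returns NULL_TREE, and op0/op1 are never modified. */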
9442
9443 tree
9444 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9445 tree op0, tree op1)
9446 {
9447 int wins = 1;
9448 tree subop0;
9449 tree subop1;
9450 tree tem;
9451
9452 /* If this is a commutative operation, and ARG0 is a constant, move it
9453 to ARG1 to reduce the number of tests below. */
9454 if (commutative_tree_code (code)
9455 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9456 {
9457 tem = op0;
9458 op0 = op1;
9459 op1 = tem;
9460 }
9461
9462 /* If either operand is a complex type, extract its real component. */
9463 if (TREE_CODE (op0) == COMPLEX_CST)
9464 subop0 = TREE_REALPART (op0);
9465 else
9466 subop0 = op0;
9467
9468 if (TREE_CODE (op1) == COMPLEX_CST)
9469 subop1 = TREE_REALPART (op1);
9470 else
9471 subop1 = op1;
9472
9473 /* Note if either argument is not a real or integer constant.
9474 With a few exceptions, simplification is limited to cases
9475 where both arguments are constants. */
9476 if ((TREE_CODE (subop0) != INTEGER_CST
9477 && TREE_CODE (subop0) != REAL_CST)
9478 || (TREE_CODE (subop1) != INTEGER_CST
9479 && TREE_CODE (subop1) != REAL_CST))
9480 wins = 0;
9481
9482 switch (code)
9483 {
9484 case PLUS_EXPR:
9485 /* (plus (address) (const_int)) is a constant. */
9486 if (TREE_CODE (op0) == PLUS_EXPR
9487 && TREE_CODE (op1) == INTEGER_CST
9488 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9489 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9490 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9491 == ADDR_EXPR)))
9492 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9493 {
9494 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9495 const_binop (PLUS_EXPR, op1,
9496 TREE_OPERAND (op0, 1), 0));
9497 }
/* Fall through. */
9498 case BIT_XOR_EXPR:
9499
9500 binary:
9501 if (!wins)
9502 return NULL_TREE;
9503
9504 /* Both arguments are constants. Simplify. */
9505 tem = const_binop (code, op0, op1, 0);
9506 if (tem != NULL_TREE)
9507 {
9508 /* The return value should always have the same type as
9509 the original expression. */
9510 if (TREE_TYPE (tem) != type)
9511 tem = fold_convert (type, tem);
9512
9513 return tem;
9514 }
9515 return NULL_TREE;
9516
9517 case MINUS_EXPR:
9518 /* Fold &x - &x. This can happen from &x.foo - &x.
9519 This is unsafe for certain floats even in non-IEEE formats.
9520 In IEEE, it is unsafe because it does wrong for NaNs.
9521 Also note that operand_equal_p is always false if an
9522 operand is volatile. */
9523 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9524 return fold_convert (type, integer_zero_node);
9525
9526 goto binary;
9527
9528 case MULT_EXPR:
9529 case BIT_AND_EXPR:
9530 /* Special case multiplication or bitwise AND where one argument
9531 is zero. */
9532 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9533 return omit_one_operand (type, op1, op0);
9534 else if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9535 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9536 && real_zerop (op1))
9538 return omit_one_operand (type, op1, op0);
9539
9540 goto binary;
9541
9542 case BIT_IOR_EXPR:
9543 /* Special case when we know the result will be all ones. */
9544 if (integer_all_onesp (op1))
9545 return omit_one_operand (type, op1, op0);
9546
9547 goto binary;
9548
9549 case TRUNC_DIV_EXPR:
9550 case ROUND_DIV_EXPR:
9551 case FLOOR_DIV_EXPR:
9552 case CEIL_DIV_EXPR:
9553 case EXACT_DIV_EXPR:
9554 case TRUNC_MOD_EXPR:
9555 case ROUND_MOD_EXPR:
9556 case FLOOR_MOD_EXPR:
9557 case CEIL_MOD_EXPR:
9558 case RDIV_EXPR:
9559 /* Division by zero is undefined. */
9560 if (integer_zerop (op1))
9561 return NULL_TREE;
9562
9563 if (TREE_CODE (op1) == REAL_CST
9564 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9565 && real_zerop (op1))
9566 return NULL_TREE;
9567
9568 goto binary;
9569
9570 case MIN_EXPR:
9571 if (INTEGRAL_TYPE_P (type)
9572 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9573 return omit_one_operand (type, op1, op0);
9574
9575 goto binary;
9576
9577 case MAX_EXPR:
9578 if (INTEGRAL_TYPE_P (type)
9579 && TYPE_MAX_VALUE (type)
9580 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9581 return omit_one_operand (type, op1, op0);
9582
9583 goto binary;
9584
9585 case RSHIFT_EXPR:
9586 /* Optimize -1 >> x for arithmetic right shifts. */
9587 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9588 return omit_one_operand (type, op0, op1);
9589 /* ... fall through ... */
9590
9591 case LSHIFT_EXPR:
9592 if (integer_zerop (op0))
9593 return omit_one_operand (type, op0, op1);
9594
9595 /* Since a negative shift count is not well-defined, don't
9596 try to compute it in the compiler. */
9597 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9598 return NULL_TREE;
9599
9600 goto binary;
9601
9602 case LROTATE_EXPR:
9603 case RROTATE_EXPR:
9604 /* -1 rotated either direction by any amount is still -1. */
9605 if (integer_all_onesp (op0))
9606 return omit_one_operand (type, op0, op1);
9607
9608 /* 0 rotated either direction by any amount is still zero. */
9609 if (integer_zerop (op0))
9610 return omit_one_operand (type, op0, op1);
9611
9612 goto binary;
9613
9614 case COMPLEX_EXPR:
9615 if (wins)
9616 return build_complex (type, op0, op1);
9617 return NULL_TREE;
9618
9619 case LT_EXPR:
9620 case LE_EXPR:
9621 case GT_EXPR:
9622 case GE_EXPR:
9623 case EQ_EXPR:
9624 case NE_EXPR:
9625 /* If one arg is a real or integer constant, put it last. */
9626 if ((TREE_CODE (op0) == INTEGER_CST
9627 && TREE_CODE (op1) != INTEGER_CST)
9628 || (TREE_CODE (op0) == REAL_CST
9629 && TREE_CODE (op1) != REAL_CST))
9630 {
9631 tree temp;
9632
9633 temp = op0;
9634 op0 = op1;
9635 op1 = temp;
9636 code = swap_tree_comparison (code);
9637 }
9638
9639 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9640 This transformation affects the cases which are handled in later
9641 optimizations involving comparisons with non-negative constants. */
9642 if (TREE_CODE (op1) == INTEGER_CST
9643 && TREE_CODE (op0) != INTEGER_CST
9644 && tree_int_cst_sgn (op1) > 0)
9645 {
9646 switch (code)
9647 {
9648 case GE_EXPR:
9649 code = GT_EXPR;
9650 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9651 break;
9652
9653 case LT_EXPR:
9654 code = LE_EXPR;
9655 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9656 break;
9657
9658 default:
9659 break;
9660 }
9661 }
9662
9663 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
9664 if (tem)
9665 return tem;
9666
9667 if (!wins)
9668 return NULL_TREE;
9669
9670 return fold_relational_const (code, type, op0, op1);
9671
9672 case RANGE_EXPR:
9673 /* This could probably be handled. */
9674 return NULL_TREE;
9675
9676 case TRUTH_AND_EXPR:
9677 /* If second arg is constant zero, result is zero, but first arg
9678 must be evaluated. */
9679 if (integer_zerop (op1))
9680 return omit_one_operand (type, op1, op0);
9681 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9682 case will be handled here. */
9683 if (integer_zerop (op0))
9684 return omit_one_operand (type, op0, op1);
9685 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9686 return constant_boolean_node (true, type);
9687 return NULL_TREE;
9688
9689 case TRUTH_OR_EXPR:
9690 /* If second arg is constant true, result is true, but we must
9691 evaluate first arg. */
9692 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
9693 return omit_one_operand (type, op1, op0);
9694 /* Likewise for first arg, but note this only occurs here for
9695 TRUTH_OR_EXPR. */
9696 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
9697 return omit_one_operand (type, op0, op1);
9698 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9699 return constant_boolean_node (false, type);
9700 return NULL_TREE;
9701
9702 case TRUTH_XOR_EXPR:
9703 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9704 {
9705 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
9706 return constant_boolean_node (x, type);
9707 }
9708 return NULL_TREE;
9709
9710 default:
9711 return NULL_TREE;
9712 }
9713 }
9714
9715 /* Given the components of a unary expression CODE, TYPE and OP0,
9716 attempt to fold the expression to a constant without modifying
9717 TYPE or OP0.
9718
9719 If the expression could be simplified to a constant, then return
9720 the constant. If the expression would not be simplified to a
9721 constant, then return NULL_TREE.
9722
9723 Note this is primarily designed to be called after gimplification
9724 of the tree structures and when op0 is a constant. As a result
9725 of those simplifying assumptions this routine is far simpler than
9726 the generic fold routine. */
9727
9728 tree
9729 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
9730 tree op0)
9731 {
9732 /* Make sure we have a suitable constant argument. */
9733 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
9734 {
9735 tree subop;
9736
9737 if (TREE_CODE (op0) == COMPLEX_CST)
9738 subop = TREE_REALPART (op0);
9739 else
9740 subop = op0;
9741
9742 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
9743 return NULL_TREE;
9744 }
9745
9746 switch (code)
9747 {
9748 case NOP_EXPR:
9749 case FLOAT_EXPR:
9750 case CONVERT_EXPR:
9751 case FIX_TRUNC_EXPR:
9752 case FIX_FLOOR_EXPR:
9753 case FIX_CEIL_EXPR:
9754 return fold_convert_const (code, type, op0);
9755
9756 case NEGATE_EXPR:
9757 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9758 return fold_negate_const (op0, type);
9759 else
9760 return NULL_TREE;
9761
9762 case ABS_EXPR:
9763 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9764 return fold_abs_const (op0, type);
9765 else
9766 return NULL_TREE;
9767
9768 case BIT_NOT_EXPR:
9769 if (TREE_CODE (op0) == INTEGER_CST)
9770 return fold_not_const (op0, type);
9771 else
9772 return NULL_TREE;
9773
9774 case REALPART_EXPR:
9775 if (TREE_CODE (op0) == COMPLEX_CST)
9776 return TREE_REALPART (op0);
9777 else
9778 return NULL_TREE;
9779
9780 case IMAGPART_EXPR:
9781 if (TREE_CODE (op0) == COMPLEX_CST)
9782 return TREE_IMAGPART (op0);
9783 else
9784 return NULL_TREE;
9785
9786 case CONJ_EXPR:
9787 if (TREE_CODE (op0) == COMPLEX_CST
9788 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
9789 return build_complex (type, TREE_REALPART (op0),
9790 negate_expr (TREE_IMAGPART (op0)));
9791 return NULL_TREE;
9792
9793 default:
9794 return NULL_TREE;
9795 }
9796 }
9797
9798 /* If EXP represents referencing an element in a constant string
9799 (either via pointer arithmetic or array indexing), return the
9800 tree representing the value accessed, otherwise return NULL. */
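/* For example (illustrative): an ARRAY_REF such as "hello"[1], or the
equivalent INDIRECT_REF *("hello" + 1), is folded to the character
constant 'e', provided the element type is a single-byte integer
mode and the index lies within the string. */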
9801
9802 tree
9803 fold_read_from_constant_string (tree exp)
9804 {
9805 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
9806 {
9807 tree exp1 = TREE_OPERAND (exp, 0);
9808 tree index;
9809 tree string;
9810
9811 if (TREE_CODE (exp) == INDIRECT_REF)
9812 {
9813 string = string_constant (exp1, &index);
9814 }
9815 else
9816 {
9817 tree domain = TYPE_DOMAIN (TREE_TYPE (exp1));
9818 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
9819 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
9820
9821 /* Optimize the special-case of a zero lower bound.
9822
9823 We convert the low_bound to sizetype to avoid some problems
9824 with constant folding. (E.g. suppose the lower bound is 1,
9825 and its mode is QI. Without the conversion,l (ARRAY
9826 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9827 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
9828 if (! integer_zerop (low_bound))
9829 index = size_diffop (index, fold_convert (sizetype, low_bound));
9830
9831 string = exp1;
9832 }
9833
9834 if (string
9835 && TREE_CODE (string) == STRING_CST
9836 && TREE_CODE (index) == INTEGER_CST
9837 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
9838 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
9839 == MODE_INT)
9840 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
9841 return fold_convert (TREE_TYPE (exp),
9842 build_int_2 ((TREE_STRING_POINTER (string)
9843 [TREE_INT_CST_LOW (index)]), 0));
9844 }
9845 return NULL;
9846 }
9847
9848 /* Return the tree for neg (ARG0) when ARG0 is known to be either
9849 an integer constant or real constant.
9850
9851 TYPE is the type of the result. */
9852
9853 static tree
9854 fold_negate_const (tree arg0, tree type)
9855 {
9856 tree t = NULL_TREE;
9857
9858 if (TREE_CODE (arg0) == INTEGER_CST)
9859 {
9860 unsigned HOST_WIDE_INT low;
9861 HOST_WIDE_INT high;
9862 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9863 TREE_INT_CST_HIGH (arg0),
9864 &low, &high);
9865 t = build_int_2 (low, high);
9866 TREE_TYPE (t) = type;
9867 TREE_OVERFLOW (t)
9868 = (TREE_OVERFLOW (arg0)
9869 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
9870 TREE_CONSTANT_OVERFLOW (t)
9871 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9872 }
9873 else if (TREE_CODE (arg0) == REAL_CST)
9874 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9875 #ifdef ENABLE_CHECKING
9876 else
9877 abort ();
9878 #endif
9879
9880 return t;
9881 }
9882
9883 /* Return the tree for abs (ARG0) when ARG0 is known to be either
9884 an integer constant or real constant.
9885
9886 TYPE is the type of the result. */
9887
9888 tree
9889 fold_abs_const (tree arg0, tree type)
9890 {
9891 tree t = NULL_TREE;
9892
9893 if (TREE_CODE (arg0) == INTEGER_CST)
9894 {
9895 /* If the value is unsigned, then the absolute value is
9896 the same as the ordinary value. */
9897 if (TYPE_UNSIGNED (type))
9898 return arg0;
9899 /* Similarly, if the value is non-negative. */
9900 else if (INT_CST_LT (integer_minus_one_node, arg0))
9901 return arg0;
9902 /* If the value is negative, then the absolute value is
9903 its negation. */
9904 else
9905 {
9906 unsigned HOST_WIDE_INT low;
9907 HOST_WIDE_INT high;
9908 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9909 TREE_INT_CST_HIGH (arg0),
9910 &low, &high);
9911 t = build_int_2 (low, high);
9912 TREE_TYPE (t) = type;
9913 TREE_OVERFLOW (t)
9914 = (TREE_OVERFLOW (arg0)
9915 | force_fit_type (t, overflow));
9916 TREE_CONSTANT_OVERFLOW (t)
9917 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9918 return t;
9919 }
9920 }
9921 else if (TREE_CODE (arg0) == REAL_CST)
9922 {
9923 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
9924 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9925 else
9926 return arg0;
9927 }
9928 #ifdef ENABLE_CHECKING
9929 else
9930 abort ();
9931 #endif
9932
9933 return t;
9934 }
9935
9936 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
9937 constant. TYPE is the type of the result. */
9938
9939 static tree
9940 fold_not_const (tree arg0, tree type)
9941 {
9942 tree t = NULL_TREE;
9943
9944 if (TREE_CODE (arg0) == INTEGER_CST)
9945 {
9946 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
9947 ~ TREE_INT_CST_HIGH (arg0));
9948 TREE_TYPE (t) = type;
9949 force_fit_type (t, 0);
9950 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
9951 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
9952 }
9953 #ifdef ENABLE_CHECKING
9954 else
9955 abort ();
9956 #endif
9957
9958 return t;
9959 }
9960
9961 /* Given CODE, a relational operator, the target type TYPE, and two
9962 constant operands OP0 and OP1, return the result of the
9963 relational operation. If the result is not a compile time
9964 constant, then return NULL_TREE. */
9965
9966 static tree
9967 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
9968 {
9969 tree tem;
9970 int invert;
9971
9972 /* From here on, the only cases we handle are when the result is
9973 known to be a constant.
9974
9975 To compute GT, swap the arguments and do LT.
9976 To compute GE, do LT and invert the result.
9977 To compute LE, swap the arguments, do LT and invert the result.
9978 To compute NE, do EQ and invert the result.
9979
9980 Therefore, the code below must handle only EQ and LT. */
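/* Worked example (illustrative): "5 >= 3" is handled as the
inversion of "5 < 3", i.e. !0 == 1; "5 <= 3" is first swapped to
"3 >= 5" and then computed as the inversion of "3 < 5",
i.e. !1 == 0. */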
9981
9982 if (code == LE_EXPR || code == GT_EXPR)
9983 {
9984 tem = op0, op0 = op1, op1 = tem;
9985 code = swap_tree_comparison (code);
9986 }
9987
9988 /* Note that it is safe to invert for real values here because we
9989 will check below in the one case where it matters. */
9990
9991 tem = NULL_TREE;
9992 invert = 0;
9993 if (code == NE_EXPR || code == GE_EXPR)
9994 {
9995 invert = 1;
9996 code = invert_tree_comparison (code, false);
9997 }
9998
9999 /* Compute a result for LT or EQ if args permit;
10000 otherwise return NULL_TREE. */
10001 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10002 {
10003 if (code == EQ_EXPR)
10004 tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
10005 else
10006 tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
10007 ? INT_CST_LT_UNSIGNED (op0, op1)
10008 : INT_CST_LT (op0, op1)),
10009 0);
10010 }
10011
10012 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
10013 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
10014 tem = build_int_2 (0, 0);
10015
10016 /* Two real constants can be compared explicitly. */
10017 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10018 {
10019 /* If either operand is a NaN, the result is false with two
10020 exceptions: First, an NE_EXPR is true on NaNs, but that case
10021 is already handled correctly since we will be inverting the
10022 result for NE_EXPR. Second, if we had inverted a LE_EXPR
10023 or a GE_EXPR into a LT_EXPR, we must return true so that it
10024 will be inverted into false. */
10025
10026 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
10027 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
10028 tem = build_int_2 (invert && code == LT_EXPR, 0);
10029
10030 else if (code == EQ_EXPR)
10031 tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
10032 TREE_REAL_CST (op1)),
10033 0);
10034 else
10035 tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
10036 TREE_REAL_CST (op1)),
10037 0);
10038 }
10039
10040 if (tem == NULL_TREE)
10041 return NULL_TREE;
10042
10043 if (invert)
10044 TREE_INT_CST_LOW (tem) ^= 1;
10045
10046 TREE_TYPE (tem) = type;
10047 if (TREE_CODE (type) == BOOLEAN_TYPE)
10048 return lang_hooks.truthvalue_conversion (tem);
10049 return tem;
10050 }
10051
10052 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10053 avoid confusing the gimplify process. */
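/* For instance (illustrative): applied to INDIRECT_REF <p> this
returns p itself (wrapped in a NOP_EXPR if PTRTYPE differs),
rather than building ADDR_EXPR <INDIRECT_REF <p>>. */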
10054
10055 tree
10056 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10057 {
10058 if (TREE_CODE (t) == INDIRECT_REF)
10059 {
10060 t = TREE_OPERAND (t, 0);
10061 if (TREE_TYPE (t) != ptrtype)
10062 t = build1 (NOP_EXPR, ptrtype, t);
10063 }
10064 else
10065 {
10066 tree base = t;
10067 while (TREE_CODE (base) == COMPONENT_REF
10068 || TREE_CODE (base) == ARRAY_REF)
10069 base = TREE_OPERAND (base, 0);
10070 if (DECL_P (base))
10071 TREE_ADDRESSABLE (base) = 1;
10072
10073 t = build1 (ADDR_EXPR, ptrtype, t);
10074 }
10075
10076 return t;
10077 }
10078
10079 tree
10080 build_fold_addr_expr (tree t)
10081 {
10082 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10083 }
10084
10085 /* Builds an expression for an indirection through T, simplifying some
10086 cases. */
10087
10088 tree
10089 build_fold_indirect_ref (tree t)
10090 {
10091 tree type = TREE_TYPE (TREE_TYPE (t));
10092 tree sub = t;
10093 tree subtype;
10094
10095 STRIP_NOPS (sub);
10096 if (TREE_CODE (sub) == ADDR_EXPR)
10097 {
10098 tree op = TREE_OPERAND (sub, 0);
10099 tree optype = TREE_TYPE (op);
10100 /* *&p => p */
10101 if (lang_hooks.types_compatible_p (type, optype))
10102 return op;
10103 /* *(foo *)&fooarray => fooarray[0] */
10104 else if (TREE_CODE (optype) == ARRAY_TYPE
10105 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10106 return build2 (ARRAY_REF, type, op, size_zero_node);
10107 }
10108
10109 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10110 subtype = TREE_TYPE (sub);
10111 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10112 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10113 {
10114 sub = build_fold_indirect_ref (sub);
10115 return build2 (ARRAY_REF, type, sub, size_zero_node);
10116 }
10117
10118 return build1 (INDIRECT_REF, type, t);
10119 }
10120
10121 #include "gt-fold-const.h"