1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
60
61 /* The following constants represent a bit-based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
65 COMPCODE_FALSE = 0,
66 COMPCODE_LT = 1,
67 COMPCODE_EQ = 2,
68 COMPCODE_LE = 3,
69 COMPCODE_GT = 4,
70 COMPCODE_LTGT = 5,
71 COMPCODE_GE = 6,
72 COMPCODE_ORD = 7,
73 COMPCODE_UNORD = 8,
74 COMPCODE_UNLT = 9,
75 COMPCODE_UNEQ = 10,
76 COMPCODE_UNLE = 11,
77 COMPCODE_UNGT = 12,
78 COMPCODE_NE = 13,
79 COMPCODE_UNGE = 14,
80 COMPCODE_TRUE = 15
81 };
82
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum tree_code swap_tree_comparison (enum tree_code);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
110 tree *, tree *);
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree make_range (tree, int *, tree *, tree *);
116 static tree build_range_check (tree, tree, int, tree, tree);
117 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
118 tree);
119 static tree fold_range_test (tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
128 tree, int);
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
131 tree, tree, tree);
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static bool tree_swap_operands_p (tree, tree, bool);
136
137 static tree fold_negate_const (tree, tree);
138 static tree fold_not_const (tree, tree);
139 static tree fold_relational_const (enum tree_code, tree, tree, tree);
140 static tree fold_relational_hi_lo (enum tree_code *, const tree,
141 tree *, tree *);
142
143 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
144 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
145 and SUM1. Then this yields nonzero if overflow occurred during the
146 addition.
147
148 Overflow occurs if A and B have the same sign, but A and SUM differ in
149 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
150 sign. */
151 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
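/* For instance, with 8-bit values for brevity (the macro itself
   operates on HOST_WIDE_INT): 100 + 100 wraps to -56 in signed 8-bit
   arithmetic.  A and B agree in sign, so ~(A ^ B) has the sign bit
   set; A and SUM disagree, so (A ^ SUM) has the sign bit set too;
   their AND is therefore negative and overflow is reported.
   (Editorial illustration, not part of the original sources.)  */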
152 \f
153 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
154 We do that by representing the two-word integer in 4 words, with only
155 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
156 number. The value of each original word is LOWPART + HIGHPART * BASE. */
157
158 #define LOWPART(x) \
159 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
160 #define HIGHPART(x) \
161 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
162 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
163
164 /* Unpack a two-word integer into 4 words.
165 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
166 WORDS points to the array of HOST_WIDE_INTs. */
167
168 static void
169 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
170 {
171 words[0] = LOWPART (low);
172 words[1] = HIGHPART (low);
173 words[2] = LOWPART (hi);
174 words[3] = HIGHPART (hi);
175 }
176
177 /* Pack an array of 4 words into a two-word integer.
178 WORDS points to the array of words.
179 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
180
181 static void
182 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
183 HOST_WIDE_INT *hi)
184 {
185 *low = words[0] + words[1] * BASE;
186 *hi = words[2] + words[3] * BASE;
187 }
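/* Round-trip sketch (editorial illustration, assuming a 64-bit
   HOST_WIDE_INT, so BASE == 2^32): encoding LOW == 0x123456789abcdef0,
   HI == 0 yields words[0] == 0x9abcdef0, words[1] == 0x12345678 and
   words[2] == words[3] == 0; decode then reassembles the original
   pair, since each original word equals LOWPART + HIGHPART * BASE.  */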
188 \f
189 /* Make the integer constant T valid for its type by setting to 0 or 1 all
190 the bits in the constant that don't belong in the type.
191
192 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
193 nonzero, a signed overflow has already occurred in calculating T, so
194 propagate it. */
195
196 int
197 force_fit_type (tree t, int overflow)
198 {
199 unsigned HOST_WIDE_INT low;
200 HOST_WIDE_INT high;
201 unsigned int prec;
202
203 if (TREE_CODE (t) == REAL_CST)
204 {
205 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
206 Consider doing it via real_convert now. */
207 return overflow;
208 }
209
210 else if (TREE_CODE (t) != INTEGER_CST)
211 return overflow;
212
213 low = TREE_INT_CST_LOW (t);
214 high = TREE_INT_CST_HIGH (t);
215
216 if (POINTER_TYPE_P (TREE_TYPE (t))
217 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
218 prec = POINTER_SIZE;
219 else
220 prec = TYPE_PRECISION (TREE_TYPE (t));
221
222 /* First clear all bits that are beyond the type's precision. */
223
224 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
225 ;
226 else if (prec > HOST_BITS_PER_WIDE_INT)
227 TREE_INT_CST_HIGH (t)
228 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
229 else
230 {
231 TREE_INT_CST_HIGH (t) = 0;
232 if (prec < HOST_BITS_PER_WIDE_INT)
233 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
234 }
235
236 /* Unsigned types do not suffer sign extension or overflow unless they
237 are a sizetype. */
238 if (TYPE_UNSIGNED (TREE_TYPE (t))
239 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
240 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
241 return overflow;
242
243 /* If the value's sign bit is set, extend the sign. */
244 if (prec != 2 * HOST_BITS_PER_WIDE_INT
245 && (prec > HOST_BITS_PER_WIDE_INT
246 ? 0 != (TREE_INT_CST_HIGH (t)
247 & ((HOST_WIDE_INT) 1
248 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
249 : 0 != (TREE_INT_CST_LOW (t)
250 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
251 {
252 /* Value is negative:
253 set to 1 all the bits that are outside this type's precision. */
254 if (prec > HOST_BITS_PER_WIDE_INT)
255 TREE_INT_CST_HIGH (t)
256 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
257 else
258 {
259 TREE_INT_CST_HIGH (t) = -1;
260 if (prec < HOST_BITS_PER_WIDE_INT)
261 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
262 }
263 }
264
265 /* Return nonzero if signed overflow occurred. */
266 return
267 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
268 != 0);
269 }
270 \f
271 /* Add two doubleword integers with doubleword result.
272 Each argument is given as two `HOST_WIDE_INT' pieces.
273 One argument is L1 and H1; the other, L2 and H2.
274 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
275
276 int
277 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
278 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
279 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
280 {
281 unsigned HOST_WIDE_INT l;
282 HOST_WIDE_INT h;
283
284 l = l1 + l2;
285 h = h1 + h2 + (l < l1);
286
287 *lv = l;
288 *hv = h;
289 return OVERFLOW_SUM_SIGN (h1, h2, h);
290 }
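/* Editorial usage sketch, not part of the original file; it uses only
   the declarations above and is kept out of the build with #if 0.
   It shows the carry from the low word propagating into the high
   word without any signed overflow being reported.  */
#if 0
static void
example_add_double (void)
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int ovf;

  /* (2^HOST_BITS_PER_WIDE_INT - 1) + 1: the low word wraps to zero
     and carries 1 into the high word.  */
  ovf = add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lo, &hi);
  /* Now lo == 0, hi == 1 and ovf == 0.  */
}
#endif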
291
292 /* Negate a doubleword integer with doubleword result.
293 Return nonzero if the operation overflows, assuming it's signed.
294 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
295 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
296
297 int
298 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
299 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
300 {
301 if (l1 == 0)
302 {
303 *lv = 0;
304 *hv = - h1;
305 return (*hv & h1) < 0;
306 }
307 else
308 {
309 *lv = -l1;
310 *hv = ~h1;
311 return 0;
312 }
313 }
314 \f
315 /* Multiply two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows, assuming it's signed.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320
321 int
322 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
325 {
326 HOST_WIDE_INT arg1[4];
327 HOST_WIDE_INT arg2[4];
328 HOST_WIDE_INT prod[4 * 2];
329 unsigned HOST_WIDE_INT carry;
330 int i, j, k;
331 unsigned HOST_WIDE_INT toplow, neglow;
332 HOST_WIDE_INT tophigh, neghigh;
333
334 encode (arg1, l1, h1);
335 encode (arg2, l2, h2);
336
337 memset (prod, 0, sizeof prod);
338
339 for (i = 0; i < 4; i++)
340 {
341 carry = 0;
342 for (j = 0; j < 4; j++)
343 {
344 k = i + j;
345 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
346 carry += arg1[i] * arg2[j];
347 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
348 carry += prod[k];
349 prod[k] = LOWPART (carry);
350 carry = HIGHPART (carry);
351 }
352 prod[i + 4] = carry;
353 }
354
355 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
356
357 /* Check for overflow by calculating the top half of the answer in full;
358 it should agree with the low half's sign bit. */
359 decode (prod + 4, &toplow, &tophigh);
360 if (h1 < 0)
361 {
362 neg_double (l2, h2, &neglow, &neghigh);
363 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
364 }
365 if (h2 < 0)
366 {
367 neg_double (l1, h1, &neglow, &neghigh);
368 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
369 }
370 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
371 }
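/* Editorial usage sketch (kept out of the build with #if 0; assumes a
   64-bit HOST_WIDE_INT so the shifts below are well defined).  The
   product 2^32 * 2^32 == 2^64 still fits in the signed doubleword, so
   no overflow is reported.  */
#if 0
static void
example_mul_double (void)
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int ovf;

  ovf = mul_double ((unsigned HOST_WIDE_INT) 1 << 32, 0,
		    (unsigned HOST_WIDE_INT) 1 << 32, 0, &lo, &hi);
  /* Now lo == 0, hi == 1 and ovf == 0.  */
}
#endif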
372 \f
373 /* Shift the doubleword integer in L1, H1 left by COUNT places
374 keeping only PREC bits of result.
375 Shift right if COUNT is negative.
376 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
377 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
378
379 void
380 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
381 HOST_WIDE_INT count, unsigned int prec,
382 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
383 {
384 unsigned HOST_WIDE_INT signmask;
385
386 if (count < 0)
387 {
388 rshift_double (l1, h1, -count, prec, lv, hv, arith);
389 return;
390 }
391
392 if (SHIFT_COUNT_TRUNCATED)
393 count %= prec;
394
395 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
396 {
397 /* Shifting by the host word size is undefined according to the
398 ANSI standard, so we must handle this as a special case. */
399 *hv = 0;
400 *lv = 0;
401 }
402 else if (count >= HOST_BITS_PER_WIDE_INT)
403 {
404 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
405 *lv = 0;
406 }
407 else
408 {
409 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
410 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
411 *lv = l1 << count;
412 }
413
414 /* Sign extend all bits that are beyond the precision. */
415
416 signmask = -((prec > HOST_BITS_PER_WIDE_INT
417 ? ((unsigned HOST_WIDE_INT) *hv
418 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
419 : (*lv >> (prec - 1))) & 1);
420
421 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
422 ;
423 else if (prec >= HOST_BITS_PER_WIDE_INT)
424 {
425 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
426 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
427 }
428 else
429 {
430 *hv = signmask;
431 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
432 *lv |= signmask << prec;
433 }
434 }
435
436 /* Shift the doubleword integer in L1, H1 right by COUNT places
437 keeping only PREC bits of result. COUNT must be positive.
438 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
439 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
440
441 void
442 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
443 HOST_WIDE_INT count, unsigned int prec,
444 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
445 int arith)
446 {
447 unsigned HOST_WIDE_INT signmask;
448
449 signmask = (arith
450 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
451 : 0);
452
453 if (SHIFT_COUNT_TRUNCATED)
454 count %= prec;
455
456 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
457 {
458 /* Shifting by the host word size is undefined according to the
459 ANSI standard, so we must handle this as a special case. */
460 *hv = 0;
461 *lv = 0;
462 }
463 else if (count >= HOST_BITS_PER_WIDE_INT)
464 {
465 *hv = 0;
466 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
467 }
468 else
469 {
470 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
471 *lv = ((l1 >> count)
472 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
473 }
474
475 /* Zero / sign extend all bits that are beyond the precision. */
476
477 if (count >= (HOST_WIDE_INT)prec)
478 {
479 *hv = signmask;
480 *lv = signmask;
481 }
482 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
483 ;
484 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
485 {
486 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
487 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
488 }
489 else
490 {
491 *hv = signmask;
492 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
493 *lv |= signmask << (prec - count);
494 }
495 }
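/* For example (editorial illustration, with PREC ==
   2 * HOST_BITS_PER_WIDE_INT): shifting the doubleword value -2 right
   by one place yields -1 when ARITH is nonzero, because copies of the
   sign bit shift in from the top; with ARITH zero the same shift
   yields the maximal positive doubleword value, because zeros shift
   in instead.  */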
496 \f
497 /* Rotate the doubleword integer in L1, H1 left by COUNT places
498 keeping only PREC bits of result.
499 Rotate right if COUNT is negative.
500 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
501
502 void
503 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
504 HOST_WIDE_INT count, unsigned int prec,
505 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
506 {
507 unsigned HOST_WIDE_INT s1l, s2l;
508 HOST_WIDE_INT s1h, s2h;
509
510 count %= prec;
511 if (count < 0)
512 count += prec;
513
514 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
515 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
516 *lv = s1l | s2l;
517 *hv = s1h | s2h;
518 }
519
520 /* Rotate the doubleword integer in L1, H1 right by COUNT places
521 keeping only PREC bits of result. Rotate left if COUNT is negative.
522 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
523
524 void
525 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
526 HOST_WIDE_INT count, unsigned int prec,
527 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
528 {
529 unsigned HOST_WIDE_INT s1l, s2l;
530 HOST_WIDE_INT s1h, s2h;
531
532 count %= prec;
533 if (count < 0)
534 count += prec;
535
536 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
537 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
538 *lv = s1l | s2l;
539 *hv = s1h | s2h;
540 }
541 \f
542 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
543 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
544 CODE is a tree code for a kind of division, one of
545 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
546 or EXACT_DIV_EXPR
547 It controls how the quotient is rounded to an integer.
548 Return nonzero if the operation overflows.
549 UNS nonzero says do unsigned division. */
550
551 int
552 div_and_round_double (enum tree_code code, int uns,
553 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
554 HOST_WIDE_INT hnum_orig,
555 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
556 HOST_WIDE_INT hden_orig,
557 unsigned HOST_WIDE_INT *lquo,
558 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
559 HOST_WIDE_INT *hrem)
560 {
561 int quo_neg = 0;
562 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
563 HOST_WIDE_INT den[4], quo[4];
564 int i, j;
565 unsigned HOST_WIDE_INT work;
566 unsigned HOST_WIDE_INT carry = 0;
567 unsigned HOST_WIDE_INT lnum = lnum_orig;
568 HOST_WIDE_INT hnum = hnum_orig;
569 unsigned HOST_WIDE_INT lden = lden_orig;
570 HOST_WIDE_INT hden = hden_orig;
571 int overflow = 0;
572
573 if (hden == 0 && lden == 0)
574 overflow = 1, lden = 1;
575
576 /* Calculate quotient sign and convert operands to unsigned. */
577 if (!uns)
578 {
579 if (hnum < 0)
580 {
581 quo_neg = ~ quo_neg;
582 /* (minimum integer) / (-1) is the only overflow case. */
583 if (neg_double (lnum, hnum, &lnum, &hnum)
584 && ((HOST_WIDE_INT) lden & hden) == -1)
585 overflow = 1;
586 }
587 if (hden < 0)
588 {
589 quo_neg = ~ quo_neg;
590 neg_double (lden, hden, &lden, &hden);
591 }
592 }
593
594 if (hnum == 0 && hden == 0)
595 { /* single precision */
596 *hquo = *hrem = 0;
597 /* This unsigned division rounds toward zero. */
598 *lquo = lnum / lden;
599 goto finish_up;
600 }
601
602 if (hnum == 0)
603 { /* trivial case: dividend < divisor */
604 /* hden != 0 already checked. */
605 *hquo = *lquo = 0;
606 *hrem = hnum;
607 *lrem = lnum;
608 goto finish_up;
609 }
610
611 memset (quo, 0, sizeof quo);
612
613 memset (num, 0, sizeof num); /* to zero the extra (5th) element */
614 memset (den, 0, sizeof den);
615
616 encode (num, lnum, hnum);
617 encode (den, lden, hden);
618
619 /* Special code for when the divisor < BASE. */
620 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
621 {
622 /* hnum != 0 already checked. */
623 for (i = 4 - 1; i >= 0; i--)
624 {
625 work = num[i] + carry * BASE;
626 quo[i] = work / lden;
627 carry = work % lden;
628 }
629 }
630 else
631 {
632 /* Full double precision division,
633 with thanks to Don Knuth's "Seminumerical Algorithms". */
634 int num_hi_sig, den_hi_sig;
635 unsigned HOST_WIDE_INT quo_est, scale;
636
637 /* Find the highest nonzero divisor digit. */
638 for (i = 4 - 1;; i--)
639 if (den[i] != 0)
640 {
641 den_hi_sig = i;
642 break;
643 }
644
645 Ensure that the first digit of the divisor is at least BASE/2.
646 This is required by the quotient digit estimation algorithm. */
647
648 scale = BASE / (den[den_hi_sig] + 1);
649 if (scale > 1)
650 { /* scale divisor and dividend */
651 carry = 0;
652 for (i = 0; i <= 4 - 1; i++)
653 {
654 work = (num[i] * scale) + carry;
655 num[i] = LOWPART (work);
656 carry = HIGHPART (work);
657 }
658
659 num[4] = carry;
660 carry = 0;
661 for (i = 0; i <= 4 - 1; i++)
662 {
663 work = (den[i] * scale) + carry;
664 den[i] = LOWPART (work);
665 carry = HIGHPART (work);
666 if (den[i] != 0) den_hi_sig = i;
667 }
668 }
669
670 num_hi_sig = 4;
671
672 /* Main loop */
673 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
674 {
675 /* Guess the next quotient digit, quo_est, by dividing the first
676 two remaining dividend digits by the high order quotient digit.
677 quo_est is never low and is at most 2 high. */
678 unsigned HOST_WIDE_INT tmp;
679
680 num_hi_sig = i + den_hi_sig + 1;
681 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
682 if (num[num_hi_sig] != den[den_hi_sig])
683 quo_est = work / den[den_hi_sig];
684 else
685 quo_est = BASE - 1;
686
687 /* Refine quo_est so it's usually correct, and at most one high. */
688 tmp = work - quo_est * den[den_hi_sig];
689 if (tmp < BASE
690 && (den[den_hi_sig - 1] * quo_est
691 > (tmp * BASE + num[num_hi_sig - 2])))
692 quo_est--;
693
694 /* Try QUO_EST as the quotient digit, by multiplying the
695 divisor by QUO_EST and subtracting from the remaining dividend.
696 Keep in mind that QUO_EST is the I - 1st digit. */
697
698 carry = 0;
699 for (j = 0; j <= den_hi_sig; j++)
700 {
701 work = quo_est * den[j] + carry;
702 carry = HIGHPART (work);
703 work = num[i + j] - LOWPART (work);
704 num[i + j] = LOWPART (work);
705 carry += HIGHPART (work) != 0;
706 }
707
708 /* If quo_est was high by one, then num[i] went negative and
709 we need to correct things. */
710 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
711 {
712 quo_est--;
713 carry = 0; /* add divisor back in */
714 for (j = 0; j <= den_hi_sig; j++)
715 {
716 work = num[i + j] + den[j] + carry;
717 carry = HIGHPART (work);
718 num[i + j] = LOWPART (work);
719 }
720
721 num [num_hi_sig] += carry;
722 }
723
724 /* Store the quotient digit. */
725 quo[i] = quo_est;
726 }
727 }
728
729 decode (quo, lquo, hquo);
730
731 finish_up:
732 /* If result is negative, make it so. */
733 if (quo_neg)
734 neg_double (*lquo, *hquo, lquo, hquo);
735
736 /* Compute trial remainder: rem = num - (quo * den) */
737 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
738 neg_double (*lrem, *hrem, lrem, hrem);
739 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
740
741 switch (code)
742 {
743 case TRUNC_DIV_EXPR:
744 case TRUNC_MOD_EXPR: /* round toward zero */
745 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
746 return overflow;
747
748 case FLOOR_DIV_EXPR:
749 case FLOOR_MOD_EXPR: /* round toward negative infinity */
750 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
751 {
752 /* quo = quo - 1; */
753 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
754 lquo, hquo);
755 }
756 else
757 return overflow;
758 break;
759
760 case CEIL_DIV_EXPR:
761 case CEIL_MOD_EXPR: /* round toward positive infinity */
762 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
763 {
764 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
765 lquo, hquo);
766 }
767 else
768 return overflow;
769 break;
770
771 case ROUND_DIV_EXPR:
772 case ROUND_MOD_EXPR: /* round to closest integer */
773 {
774 unsigned HOST_WIDE_INT labs_rem = *lrem;
775 HOST_WIDE_INT habs_rem = *hrem;
776 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
777 HOST_WIDE_INT habs_den = hden, htwice;
778
779 /* Get absolute values. */
780 if (*hrem < 0)
781 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
782 if (hden < 0)
783 neg_double (lden, hden, &labs_den, &habs_den);
784
785 /* If (2 * abs (lrem) >= abs (lden)) */
786 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
787 labs_rem, habs_rem, &ltwice, &htwice);
788
789 if (((unsigned HOST_WIDE_INT) habs_den
790 < (unsigned HOST_WIDE_INT) htwice)
791 || (((unsigned HOST_WIDE_INT) habs_den
792 == (unsigned HOST_WIDE_INT) htwice)
793 && (labs_den < ltwice)))
794 {
795 if (*hquo < 0)
796 /* quo = quo - 1; */
797 add_double (*lquo, *hquo,
798 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
799 else
800 /* quo = quo + 1; */
801 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
802 lquo, hquo);
803 }
804 else
805 return overflow;
806 }
807 break;
808
809 default:
810 abort ();
811 }
812
813 /* Compute true remainder: rem = num - (quo * den) */
814 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
815 neg_double (*lrem, *hrem, lrem, hrem);
816 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
817 return overflow;
818 }
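/* A worked example of the rounding modes (editorial illustration):
   dividing -7 by 2 yields quotient -3, remainder -1 under
   TRUNC_DIV_EXPR; quotient -4, remainder 1 under FLOOR_DIV_EXPR;
   quotient -3, remainder -1 under CEIL_DIV_EXPR; and quotient -4,
   remainder 1 under ROUND_DIV_EXPR, since -3.5 rounds away from zero.
   In every case num == quo * den + rem.  */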
819 \f
820 /* Return true if the built-in mathematical function specified by CODE
821 is odd, i.e. -f(x) == f(-x). */
822
823 static bool
824 negate_mathfn_p (enum built_in_function code)
825 {
826 switch (code)
827 {
828 case BUILT_IN_ASIN:
829 case BUILT_IN_ASINF:
830 case BUILT_IN_ASINL:
831 case BUILT_IN_ATAN:
832 case BUILT_IN_ATANF:
833 case BUILT_IN_ATANL:
834 case BUILT_IN_SIN:
835 case BUILT_IN_SINF:
836 case BUILT_IN_SINL:
837 case BUILT_IN_TAN:
838 case BUILT_IN_TANF:
839 case BUILT_IN_TANL:
840 return true;
841
842 default:
843 break;
844 }
845 return false;
846 }
847
848 /* Determine whether an expression T can be cheaply negated using
849 the function negate_expr. */
850
851 static bool
852 negate_expr_p (tree t)
853 {
854 unsigned HOST_WIDE_INT val;
855 unsigned int prec;
856 tree type;
857
858 if (t == 0)
859 return false;
860
861 type = TREE_TYPE (t);
862
863 STRIP_SIGN_NOPS (t);
864 switch (TREE_CODE (t))
865 {
866 case INTEGER_CST:
867 if (TYPE_UNSIGNED (type) || ! flag_trapv)
868 return true;
869
870 /* Check that -CST will not overflow type. */
871 prec = TYPE_PRECISION (type);
872 if (prec > HOST_BITS_PER_WIDE_INT)
873 {
874 if (TREE_INT_CST_LOW (t) != 0)
875 return true;
876 prec -= HOST_BITS_PER_WIDE_INT;
877 val = TREE_INT_CST_HIGH (t);
878 }
879 else
880 val = TREE_INT_CST_LOW (t);
881 if (prec < HOST_BITS_PER_WIDE_INT)
882 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
883 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
884
885 case REAL_CST:
886 case NEGATE_EXPR:
887 return true;
888
889 case COMPLEX_CST:
890 return negate_expr_p (TREE_REALPART (t))
891 && negate_expr_p (TREE_IMAGPART (t));
892
893 case PLUS_EXPR:
894 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
895 return false;
896 /* -(A + B) -> (-B) - A. */
897 if (negate_expr_p (TREE_OPERAND (t, 1))
898 && reorder_operands_p (TREE_OPERAND (t, 0),
899 TREE_OPERAND (t, 1)))
900 return true;
901 /* -(A + B) -> (-A) - B. */
902 return negate_expr_p (TREE_OPERAND (t, 0));
903
904 case MINUS_EXPR:
905 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
906 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
907 && reorder_operands_p (TREE_OPERAND (t, 0),
908 TREE_OPERAND (t, 1));
909
910 case MULT_EXPR:
911 if (TYPE_UNSIGNED (TREE_TYPE (t)))
912 break;
913
914 /* Fall through. */
915
916 case RDIV_EXPR:
917 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
918 return negate_expr_p (TREE_OPERAND (t, 1))
919 || negate_expr_p (TREE_OPERAND (t, 0));
920 break;
921
922 case NOP_EXPR:
923 /* Negate -((double)float) as (double)(-float). */
924 if (TREE_CODE (type) == REAL_TYPE)
925 {
926 tree tem = strip_float_extensions (t);
927 if (tem != t)
928 return negate_expr_p (tem);
929 }
930 break;
931
932 case CALL_EXPR:
933 /* Negate -f(x) as f(-x). */
934 if (negate_mathfn_p (builtin_mathfn_code (t)))
935 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
936 break;
937
938 case RSHIFT_EXPR:
939 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
940 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
941 {
942 tree op1 = TREE_OPERAND (t, 1);
943 if (TREE_INT_CST_HIGH (op1) == 0
944 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
945 == TREE_INT_CST_LOW (op1))
946 return true;
947 }
948 break;
949
950 default:
951 break;
952 }
953 return false;
954 }
955
956 /* Given T, an expression, return the negation of T. Allow for T to be
957 null, in which case return null. */
958
959 static tree
960 negate_expr (tree t)
961 {
962 tree type;
963 tree tem;
964
965 if (t == 0)
966 return 0;
967
968 type = TREE_TYPE (t);
969 STRIP_SIGN_NOPS (t);
970
971 switch (TREE_CODE (t))
972 {
973 case INTEGER_CST:
974 tem = fold_negate_const (t, type);
975 if (! TREE_OVERFLOW (tem)
976 || TYPE_UNSIGNED (type)
977 || ! flag_trapv)
978 return tem;
979 break;
980
981 case REAL_CST:
982 tem = fold_negate_const (t, type);
983 /* Two's complement FP formats, such as c4x, may overflow. */
984 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
985 return fold_convert (type, tem);
986 break;
987
988 case COMPLEX_CST:
989 {
990 tree rpart = negate_expr (TREE_REALPART (t));
991 tree ipart = negate_expr (TREE_IMAGPART (t));
992
993 if ((TREE_CODE (rpart) == REAL_CST
994 && TREE_CODE (ipart) == REAL_CST)
995 || (TREE_CODE (rpart) == INTEGER_CST
996 && TREE_CODE (ipart) == INTEGER_CST))
997 return build_complex (type, rpart, ipart);
998 }
999 break;
1000
1001 case NEGATE_EXPR:
1002 return fold_convert (type, TREE_OPERAND (t, 0));
1003
1004 case PLUS_EXPR:
1005 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1006 {
1007 /* -(A + B) -> (-B) - A. */
1008 if (negate_expr_p (TREE_OPERAND (t, 1))
1009 && reorder_operands_p (TREE_OPERAND (t, 0),
1010 TREE_OPERAND (t, 1)))
1011 {
1012 tem = negate_expr (TREE_OPERAND (t, 1));
1013 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1014 tem, TREE_OPERAND (t, 0)));
1015 return fold_convert (type, tem);
1016 }
1017
1018 /* -(A + B) -> (-A) - B. */
1019 if (negate_expr_p (TREE_OPERAND (t, 0)))
1020 {
1021 tem = negate_expr (TREE_OPERAND (t, 0));
1022 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1023 tem, TREE_OPERAND (t, 1)));
1024 return fold_convert (type, tem);
1025 }
1026 }
1027 break;
1028
1029 case MINUS_EXPR:
1030 /* - (A - B) -> B - A */
1031 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1032 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1033 return fold_convert (type,
1034 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1035 TREE_OPERAND (t, 1),
1036 TREE_OPERAND (t, 0))));
1037 break;
1038
1039 case MULT_EXPR:
1040 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1041 break;
1042
1043 /* Fall through. */
1044
1045 case RDIV_EXPR:
1046 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1047 {
1048 tem = TREE_OPERAND (t, 1);
1049 if (negate_expr_p (tem))
1050 return fold_convert (type,
1051 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1052 TREE_OPERAND (t, 0),
1053 negate_expr (tem))));
1054 tem = TREE_OPERAND (t, 0);
1055 if (negate_expr_p (tem))
1056 return fold_convert (type,
1057 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1058 negate_expr (tem),
1059 TREE_OPERAND (t, 1))));
1060 }
1061 break;
1062
1063 case NOP_EXPR:
1064 /* Convert -((double)float) into (double)(-float). */
1065 if (TREE_CODE (type) == REAL_TYPE)
1066 {
1067 tem = strip_float_extensions (t);
1068 if (tem != t && negate_expr_p (tem))
1069 return fold_convert (type, negate_expr (tem));
1070 }
1071 break;
1072
1073 case CALL_EXPR:
1074 /* Negate -f(x) as f(-x). */
1075 if (negate_mathfn_p (builtin_mathfn_code (t))
1076 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1077 {
1078 tree fndecl, arg, arglist;
1079
1080 fndecl = get_callee_fndecl (t);
1081 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1082 arglist = build_tree_list (NULL_TREE, arg);
1083 return build_function_call_expr (fndecl, arglist);
1084 }
1085 break;
1086
1087 case RSHIFT_EXPR:
1088 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1089 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1090 {
1091 tree op1 = TREE_OPERAND (t, 1);
1092 if (TREE_INT_CST_HIGH (op1) == 0
1093 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1094 == TREE_INT_CST_LOW (op1))
1095 {
1096 tree ntype = TYPE_UNSIGNED (type)
1097 ? lang_hooks.types.signed_type (type)
1098 : lang_hooks.types.unsigned_type (type);
1099 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1100 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1101 return fold_convert (type, temp);
1102 }
1103 }
1104 break;
1105
1106 default:
1107 break;
1108 }
1109
1110 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1111 return fold_convert (type, tem);
1112 }
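/* For instance (editorial note), negating the integral expression
   (a - b) through this routine yields (b - a) rather than -(a - b),
   and negating the constant 5 folds directly to the constant -5; only
   when none of the cheaper forms above applies does the final build1
   wrap T in an explicit NEGATE_EXPR.  */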
1113 \f
1114 /* Split a tree IN into constant, literal and variable parts that could be
1115 combined with CODE to make IN. "constant" means an expression with
1116 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1117 commutative arithmetic operation. Store the constant part into *CONP,
1118 the literal in *LITP and return the variable part. If a part isn't
1119 present, set it to null. If the tree does not decompose in this way,
1120 return the entire tree as the variable part and the other parts as null.
1121
1122 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1123 case, we negate an operand that was subtracted. Except if it is a
1124 literal for which we use *MINUS_LITP instead.
1125
1126 If NEGATE_P is true, we are negating all of IN, again except a literal
1127 for which we use *MINUS_LITP instead.
1128
1129 If IN is itself a literal or constant, return it as appropriate.
1130
1131 Note that we do not guarantee that any of the three values will be the
1132 same type as IN, but they will have the same signedness and mode. */
1133
1134 static tree
1135 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1136 tree *minus_litp, int negate_p)
1137 {
1138 tree var = 0;
1139
1140 *conp = 0;
1141 *litp = 0;
1142 *minus_litp = 0;
1143
1144 /* Strip any conversions that don't change the machine mode or signedness. */
1145 STRIP_SIGN_NOPS (in);
1146
1147 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1148 *litp = in;
1149 else if (TREE_CODE (in) == code
1150 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1151 /* We can associate addition and subtraction together (even
1152 though the C standard doesn't say so) for integers because
1153 the value is not affected. For reals, the value might be
1154 affected, so we can't. */
1155 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1156 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1157 {
1158 tree op0 = TREE_OPERAND (in, 0);
1159 tree op1 = TREE_OPERAND (in, 1);
1160 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1161 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1162
1163 /* First see if either of the operands is a literal, then a constant. */
1164 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1165 *litp = op0, op0 = 0;
1166 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1167 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1168
1169 if (op0 != 0 && TREE_CONSTANT (op0))
1170 *conp = op0, op0 = 0;
1171 else if (op1 != 0 && TREE_CONSTANT (op1))
1172 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1173
1174 /* If we haven't dealt with either operand, this is not a case we can
1175 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1176 if (op0 != 0 && op1 != 0)
1177 var = in;
1178 else if (op0 != 0)
1179 var = op0;
1180 else
1181 var = op1, neg_var_p = neg1_p;
1182
1183 /* Now do any needed negations. */
1184 if (neg_litp_p)
1185 *minus_litp = *litp, *litp = 0;
1186 if (neg_conp_p)
1187 *conp = negate_expr (*conp);
1188 if (neg_var_p)
1189 var = negate_expr (var);
1190 }
1191 else if (TREE_CONSTANT (in))
1192 *conp = in;
1193 else
1194 var = in;
1195
1196 if (negate_p)
1197 {
1198 if (*litp)
1199 *minus_litp = *litp, *litp = 0;
1200 else if (*minus_litp)
1201 *litp = *minus_litp, *minus_litp = 0;
1202 *conp = negate_expr (*conp);
1203 var = negate_expr (var);
1204 }
1205
1206 return var;
1207 }
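/* Illustration (editorial): splitting IN == a - 5 with CODE ==
   PLUS_EXPR finds the literal 5 on the subtracted side, so the
   routine returns the variable part a, sets *MINUS_LITP to 5, and
   leaves *LITP and *CONP zero; the caller can then reassociate the
   pieces to reconstruct IN.  */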
1208
1209 /* Re-associate trees split by the above function. T1 and T2 are either
1210 expressions to associate or null. Return the new expression, if any. If
1211 we build an operation, do it in TYPE and with CODE. */
1212
1213 static tree
1214 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1215 {
1216 if (t1 == 0)
1217 return t2;
1218 else if (t2 == 0)
1219 return t1;
1220
1221 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1222 try to fold this since we will have infinite recursion. But do
1223 deal with any NEGATE_EXPRs. */
1224 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1225 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1226 {
1227 if (code == PLUS_EXPR)
1228 {
1229 if (TREE_CODE (t1) == NEGATE_EXPR)
1230 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1231 fold_convert (type, TREE_OPERAND (t1, 0)));
1232 else if (TREE_CODE (t2) == NEGATE_EXPR)
1233 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1234 fold_convert (type, TREE_OPERAND (t2, 0)));
1235 }
1236 return build2 (code, type, fold_convert (type, t1),
1237 fold_convert (type, t2));
1238 }
1239
1240 return fold (build2 (code, type, fold_convert (type, t1),
1241 fold_convert (type, t2)));
1242 }
1243 \f
1244 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1245 to produce a new constant.
1246
1247 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1248
1249 tree
1250 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1251 {
1252 unsigned HOST_WIDE_INT int1l, int2l;
1253 HOST_WIDE_INT int1h, int2h;
1254 unsigned HOST_WIDE_INT low;
1255 HOST_WIDE_INT hi;
1256 unsigned HOST_WIDE_INT garbagel;
1257 HOST_WIDE_INT garbageh;
1258 tree t;
1259 tree type = TREE_TYPE (arg1);
1260 int uns = TYPE_UNSIGNED (type);
1261 int is_sizetype
1262 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1263 int overflow = 0;
1264 int no_overflow = 0;
1265
1266 int1l = TREE_INT_CST_LOW (arg1);
1267 int1h = TREE_INT_CST_HIGH (arg1);
1268 int2l = TREE_INT_CST_LOW (arg2);
1269 int2h = TREE_INT_CST_HIGH (arg2);
1270
1271 switch (code)
1272 {
1273 case BIT_IOR_EXPR:
1274 low = int1l | int2l, hi = int1h | int2h;
1275 break;
1276
1277 case BIT_XOR_EXPR:
1278 low = int1l ^ int2l, hi = int1h ^ int2h;
1279 break;
1280
1281 case BIT_AND_EXPR:
1282 low = int1l & int2l, hi = int1h & int2h;
1283 break;
1284
1285 case RSHIFT_EXPR:
1286 int2l = -int2l;
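      /* ... fall through ... */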
1287 case LSHIFT_EXPR:
1288 /* It's unclear from the C standard whether shifts can overflow.
1289 The following code ignores overflow; perhaps a C standard
1290 interpretation ruling is needed. */
1291 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1292 &low, &hi, !uns);
1293 no_overflow = 1;
1294 break;
1295
1296 case RROTATE_EXPR:
1297 int2l = - int2l;
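      /* ... fall through ... */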
1298 case LROTATE_EXPR:
1299 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1300 &low, &hi);
1301 break;
1302
1303 case PLUS_EXPR:
1304 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1305 break;
1306
1307 case MINUS_EXPR:
1308 neg_double (int2l, int2h, &low, &hi);
1309 add_double (int1l, int1h, low, hi, &low, &hi);
1310 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1311 break;
1312
1313 case MULT_EXPR:
1314 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1315 break;
1316
1317 case TRUNC_DIV_EXPR:
1318 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1319 case EXACT_DIV_EXPR:
1320 /* This is a shortcut for a common special case. */
1321 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1322 && ! TREE_CONSTANT_OVERFLOW (arg1)
1323 && ! TREE_CONSTANT_OVERFLOW (arg2)
1324 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1325 {
1326 if (code == CEIL_DIV_EXPR)
1327 int1l += int2l - 1;
1328
1329 low = int1l / int2l, hi = 0;
1330 break;
1331 }
1332
1333 /* ... fall through ... */
1334
1335 case ROUND_DIV_EXPR:
1336 if (int2h == 0 && int2l == 1)
1337 {
1338 low = int1l, hi = int1h;
1339 break;
1340 }
1341 if (int1l == int2l && int1h == int2h
1342 && ! (int1l == 0 && int1h == 0))
1343 {
1344 low = 1, hi = 0;
1345 break;
1346 }
1347 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1348 &low, &hi, &garbagel, &garbageh);
1349 break;
1350
1351 case TRUNC_MOD_EXPR:
1352 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1353 /* This is a shortcut for a common special case. */
1354 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1355 && ! TREE_CONSTANT_OVERFLOW (arg1)
1356 && ! TREE_CONSTANT_OVERFLOW (arg2)
1357 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1358 {
1359 if (code == CEIL_MOD_EXPR)
1360 int1l += int2l - 1;
1361 low = int1l % int2l, hi = 0;
1362 break;
1363 }
1364
1365 /* ... fall through ... */
1366
1367 case ROUND_MOD_EXPR:
1368 overflow = div_and_round_double (code, uns,
1369 int1l, int1h, int2l, int2h,
1370 &garbagel, &garbageh, &low, &hi);
1371 break;
1372
1373 case MIN_EXPR:
1374 case MAX_EXPR:
1375 if (uns)
1376 low = (((unsigned HOST_WIDE_INT) int1h
1377 < (unsigned HOST_WIDE_INT) int2h)
1378 || (((unsigned HOST_WIDE_INT) int1h
1379 == (unsigned HOST_WIDE_INT) int2h)
1380 && int1l < int2l));
1381 else
1382 low = (int1h < int2h
1383 || (int1h == int2h && int1l < int2l));
1384
1385 if (low == (code == MIN_EXPR))
1386 low = int1l, hi = int1h;
1387 else
1388 low = int2l, hi = int2h;
1389 break;
1390
1391 default:
1392 abort ();
1393 }
1394
1395 /* If this is for a sizetype, can be represented as one (signed)
1396 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1397 constants. */
1398 if (is_sizetype
1399 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1400 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1401 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1402 return size_int_type_wide (low, type);
1403 else
1404 {
1405 t = build_int_2 (low, hi);
1406 TREE_TYPE (t) = TREE_TYPE (arg1);
1407 }
1408
1409 TREE_OVERFLOW (t)
1410 = ((notrunc
1411 ? (!uns || is_sizetype) && overflow
1412 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1413 && ! no_overflow))
1414 | TREE_OVERFLOW (arg1)
1415 | TREE_OVERFLOW (arg2));
1416
1417 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1418 So check if force_fit_type truncated the value. */
1419 if (is_sizetype
1420 && ! TREE_OVERFLOW (t)
1421 && (TREE_INT_CST_HIGH (t) != hi
1422 || TREE_INT_CST_LOW (t) != low))
1423 TREE_OVERFLOW (t) = 1;
1424
1425 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1426 | TREE_CONSTANT_OVERFLOW (arg1)
1427 | TREE_CONSTANT_OVERFLOW (arg2));
1428 return t;
1429 }
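/* Editorial usage sketch (kept out of the build with #if 0): folding
   7 + 3 in a signed integer type produces the INTEGER_CST 10 with no
   overflow flags set.  */
#if 0
static void
example_int_const_binop (void)
{
  tree seven, three, sum;

  seven = build_int_2 (7, 0);
  TREE_TYPE (seven) = integer_type_node;
  three = build_int_2 (3, 0);
  TREE_TYPE (three) = integer_type_node;
  sum = int_const_binop (PLUS_EXPR, seven, three, 0);
  /* Now TREE_INT_CST_LOW (sum) == 10 and TREE_OVERFLOW (sum) is
     clear.  */
}
#endif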
1430
1431 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1432 constant. We assume ARG1 and ARG2 have the same data type, or at least
1433 are the same kind of constant and the same machine mode.
1434
1435 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1436
1437 static tree
1438 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1439 {
1440 STRIP_NOPS (arg1);
1441 STRIP_NOPS (arg2);
1442
1443 if (TREE_CODE (arg1) == INTEGER_CST)
1444 return int_const_binop (code, arg1, arg2, notrunc);
1445
1446 if (TREE_CODE (arg1) == REAL_CST)
1447 {
1448 enum machine_mode mode;
1449 REAL_VALUE_TYPE d1;
1450 REAL_VALUE_TYPE d2;
1451 REAL_VALUE_TYPE value;
1452 tree t, type;
1453
1454 d1 = TREE_REAL_CST (arg1);
1455 d2 = TREE_REAL_CST (arg2);
1456
1457 type = TREE_TYPE (arg1);
1458 mode = TYPE_MODE (type);
1459
1460 /* Don't perform operation if we honor signaling NaNs and
1461 either operand is a NaN. */
1462 if (HONOR_SNANS (mode)
1463 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1464 return NULL_TREE;
1465
1466 /* Don't perform operation if it would raise a division
1467 by zero exception. */
1468 if (code == RDIV_EXPR
1469 && REAL_VALUES_EQUAL (d2, dconst0)
1470 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1471 return NULL_TREE;
1472
1473 /* If either operand is a NaN, just return it. Otherwise, set up
1474 for floating-point trap; we return an overflow. */
1475 if (REAL_VALUE_ISNAN (d1))
1476 return arg1;
1477 else if (REAL_VALUE_ISNAN (d2))
1478 return arg2;
1479
1480 REAL_ARITHMETIC (value, code, d1, d2);
1481
1482 t = build_real (type, real_value_truncate (mode, value));
1483
1484 TREE_OVERFLOW (t)
1485 = (force_fit_type (t, 0)
1486 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1487 TREE_CONSTANT_OVERFLOW (t)
1488 = TREE_OVERFLOW (t)
1489 | TREE_CONSTANT_OVERFLOW (arg1)
1490 | TREE_CONSTANT_OVERFLOW (arg2);
1491 return t;
1492 }
1493 if (TREE_CODE (arg1) == COMPLEX_CST)
1494 {
1495 tree type = TREE_TYPE (arg1);
1496 tree r1 = TREE_REALPART (arg1);
1497 tree i1 = TREE_IMAGPART (arg1);
1498 tree r2 = TREE_REALPART (arg2);
1499 tree i2 = TREE_IMAGPART (arg2);
1500 tree t;
1501
1502 switch (code)
1503 {
1504 case PLUS_EXPR:
1505 t = build_complex (type,
1506 const_binop (PLUS_EXPR, r1, r2, notrunc),
1507 const_binop (PLUS_EXPR, i1, i2, notrunc));
1508 break;
1509
1510 case MINUS_EXPR:
1511 t = build_complex (type,
1512 const_binop (MINUS_EXPR, r1, r2, notrunc),
1513 const_binop (MINUS_EXPR, i1, i2, notrunc));
1514 break;
1515
1516 case MULT_EXPR:
1517 t = build_complex (type,
1518 const_binop (MINUS_EXPR,
1519 const_binop (MULT_EXPR,
1520 r1, r2, notrunc),
1521 const_binop (MULT_EXPR,
1522 i1, i2, notrunc),
1523 notrunc),
1524 const_binop (PLUS_EXPR,
1525 const_binop (MULT_EXPR,
1526 r1, i2, notrunc),
1527 const_binop (MULT_EXPR,
1528 i1, r2, notrunc),
1529 notrunc));
1530 break;
1531
1532 case RDIV_EXPR:
1533 {
1534 tree magsquared
1535 = const_binop (PLUS_EXPR,
1536 const_binop (MULT_EXPR, r2, r2, notrunc),
1537 const_binop (MULT_EXPR, i2, i2, notrunc),
1538 notrunc);
1539
1540 t = build_complex (type,
1541 const_binop
1542 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1543 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1544 const_binop (PLUS_EXPR,
1545 const_binop (MULT_EXPR, r1, r2,
1546 notrunc),
1547 const_binop (MULT_EXPR, i1, i2,
1548 notrunc),
1549 notrunc),
1550 magsquared, notrunc),
1551 const_binop
1552 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1553 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1554 const_binop (MINUS_EXPR,
1555 const_binop (MULT_EXPR, i1, r2,
1556 notrunc),
1557 const_binop (MULT_EXPR, r1, i2,
1558 notrunc),
1559 notrunc),
1560 magsquared, notrunc));
1561 }
1562 break;
1563
1564 default:
1565 abort ();
1566 }
1567 return t;
1568 }
1569 return 0;
1570 }
1571
1572 /* These are the hash table functions for the hash table of INTEGER_CST
1573 nodes of a sizetype. */
1574
1575 /* Return the hash code for X, an INTEGER_CST. */
1576
1577 static hashval_t
1578 size_htab_hash (const void *x)
1579 {
1580 tree t = (tree) x;
1581
1582 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1583 ^ htab_hash_pointer (TREE_TYPE (t))
1584 ^ (TREE_OVERFLOW (t) << 20));
1585 }
1586
1587 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1588 is the same as that given by *Y, also an INTEGER_CST tree node. */
1589
1590 static int
1591 size_htab_eq (const void *x, const void *y)
1592 {
1593 tree xt = (tree) x;
1594 tree yt = (tree) y;
1595
1596 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1597 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1598 && TREE_TYPE (xt) == TREE_TYPE (yt)
1599 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1600 }
1601 \f
1602 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits
1603 are given by NUMBER, with the sizetype represented by KIND. */
1604
1605 tree
1606 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1607 {
1608 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1609 }
1610
1611 /* Likewise, but the desired type is specified explicitly. */
1612
1613 static GTY (()) tree new_const;
1614 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1615 htab_t size_htab;
1616
1617 tree
1618 size_int_type_wide (HOST_WIDE_INT number, tree type)
1619 {
1620 void **slot;
1621
1622 if (size_htab == 0)
1623 {
1624 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1625 new_const = make_node (INTEGER_CST);
1626 }
1627
1628 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1629 hash table, we return the value from the hash table. Otherwise, we
1630 place that in the hash table and make a new node for the next time. */
1631 TREE_INT_CST_LOW (new_const) = number;
1632 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1633 TREE_TYPE (new_const) = type;
1634 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1635 = force_fit_type (new_const, 0);
1636
1637 slot = htab_find_slot (size_htab, new_const, INSERT);
1638 if (*slot == 0)
1639 {
1640 tree t = new_const;
1641
1642 *slot = new_const;
1643 new_const = make_node (INTEGER_CST);
1644 return t;
1645 }
1646 else
1647 return (tree) *slot;
1648 }
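/* Because the result is cached in SIZE_HTAB, repeated calls with the
   same NUMBER and TYPE return the identical node, so e.g.
   size_int_type_wide (4, sizetype) == size_int_type_wide (4, sizetype)
   holds as a pointer comparison.  (Editorial note.)  */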
1649
1650 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1651 is a tree code. The type of the result is taken from the operands.
1652 Both must have the same integer type, and it must be a size type.
1653 If the operands are constant, so is the result. */
1654
1655 tree
1656 size_binop (enum tree_code code, tree arg0, tree arg1)
1657 {
1658 tree type = TREE_TYPE (arg0);
1659
1660 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1661 || type != TREE_TYPE (arg1))
1662 abort ();
1663
1664 /* Handle the special case of two integer constants faster. */
1665 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1666 {
1667 /* And some specific cases even faster than that. */
1668 if (code == PLUS_EXPR && integer_zerop (arg0))
1669 return arg1;
1670 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1671 && integer_zerop (arg1))
1672 return arg0;
1673 else if (code == MULT_EXPR && integer_onep (arg0))
1674 return arg1;
1675
1676 /* Handle general case of two integer constants. */
1677 return int_const_binop (code, arg0, arg1, 0);
1678 }
1679
1680 if (arg0 == error_mark_node || arg1 == error_mark_node)
1681 return error_mark_node;
1682
1683 return fold (build2 (code, type, arg0, arg1));
1684 }
1685
1686 /* Given two values, either both of sizetype or both of bitsizetype,
1687 compute the difference between the two values. Return the value
1688 in signed type corresponding to the type of the operands. */
1689
1690 tree
1691 size_diffop (tree arg0, tree arg1)
1692 {
1693 tree type = TREE_TYPE (arg0);
1694 tree ctype;
1695
1696 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1697 || type != TREE_TYPE (arg1))
1698 abort ();
1699
1700 /* If the type is already signed, just do the simple thing. */
1701 if (!TYPE_UNSIGNED (type))
1702 return size_binop (MINUS_EXPR, arg0, arg1);
1703
1704 ctype = (type == bitsizetype || type == ubitsizetype
1705 ? sbitsizetype : ssizetype);
1706
1707 /* If either operand is not a constant, do the conversions to the signed
1708 type and subtract. The hardware will do the right thing with any
1709 overflow in the subtraction. */
1710 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1711 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1712 fold_convert (ctype, arg1));
1713
1714 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1715 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1716 overflow) and negate (which can't either). Special-case a result
1717 of zero while we're here. */
1718 if (tree_int_cst_equal (arg0, arg1))
1719 return fold_convert (ctype, integer_zero_node);
1720 else if (tree_int_cst_lt (arg1, arg0))
1721 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1722 else
1723 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1724 fold_convert (ctype, size_binop (MINUS_EXPR,
1725 arg1, arg0)));
1726 }
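/* For example (editorial note, assuming the usual unsigned sizetype):
   size_diffop (size_int (2), size_int (5)) computes 5 - 2 == 3 in the
   unsigned type, converts it to the signed ssizetype, and negates,
   yielding -3 instead of the huge unsigned value that a direct 2 - 5
   would produce.  */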
1727 \f
1728
1729 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1730 type TYPE. If no simplification can be done return NULL_TREE. */
1731
1732 static tree
1733 fold_convert_const (enum tree_code code, tree type, tree arg1)
1734 {
1735 int overflow = 0;
1736 tree t;
1737
1738 if (TREE_TYPE (arg1) == type)
1739 return arg1;
1740
1741 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1742 {
1743 if (TREE_CODE (arg1) == INTEGER_CST)
1744 {
1745 /* If we would build a constant wider than GCC supports,
1746 leave the conversion unfolded. */
1747 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1748 return NULL_TREE;
1749
1750 /* If we are trying to make a sizetype for a small integer, use
1751 size_int to pick up cached types to reduce duplicate nodes. */
1752 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1753 && !TREE_CONSTANT_OVERFLOW (arg1)
1754 && compare_tree_int (arg1, 10000) < 0)
1755 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1756
1757 /* Given an integer constant, make new constant with new type,
1758 appropriately sign-extended or truncated. */
1759 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1760 TREE_INT_CST_HIGH (arg1));
1761 TREE_TYPE (t) = type;
1762 /* Indicate an overflow if (1) ARG1 already overflowed,
1763 or (2) force_fit_type indicates an overflow.
1764 Tell force_fit_type that an overflow has already occurred
1765 if ARG1 is a too-large unsigned value and T is signed.
1766 But don't indicate an overflow if converting a pointer. */
1767 TREE_OVERFLOW (t)
1768 = ((force_fit_type (t,
1769 (TREE_INT_CST_HIGH (arg1) < 0
1770 && (TYPE_UNSIGNED (type)
1771 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1772 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1773 || TREE_OVERFLOW (arg1));
1774 TREE_CONSTANT_OVERFLOW (t)
1775 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1776 return t;
1777 }
1778 else if (TREE_CODE (arg1) == REAL_CST)
1779 {
1780 /* The following code implements the floating point to integer
1781 conversion rules required by the Java Language Specification,
1782 that IEEE NaNs are mapped to zero and values that overflow
1783 the target precision saturate, i.e. values greater than
1784 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1785 are mapped to INT_MIN. These semantics are allowed by the
1786 C and C++ standards that simply state that the behavior of
1787 FP-to-integer conversion is unspecified upon overflow. */
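	 /* Concretely (editorial illustration, assuming a 32-bit int
	    target): the constant 1e30 converts to INT_MAX, -1e30 to
	    INT_MIN, and a NaN to zero, with TREE_OVERFLOW set on the
	    result in each case.  */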
1788
1789 HOST_WIDE_INT high, low;
1790
1791 REAL_VALUE_TYPE r;
1792 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1793
1794 switch (code)
1795 {
1796 case FIX_TRUNC_EXPR:
1797 real_trunc (&r, VOIDmode, &x);
1798 break;
1799
1800 case FIX_CEIL_EXPR:
1801 real_ceil (&r, VOIDmode, &x);
1802 break;
1803
1804 case FIX_FLOOR_EXPR:
1805 real_floor (&r, VOIDmode, &x);
1806 break;
1807
1808 case FIX_ROUND_EXPR:
1809 real_round (&r, VOIDmode, &x);
1810 break;
1811
1812 default:
1813 abort ();
1814 }
1815
1816 /* If R is NaN, return zero and show we have an overflow. */
1817 if (REAL_VALUE_ISNAN (r))
1818 {
1819 overflow = 1;
1820 high = 0;
1821 low = 0;
1822 }
1823
1824 /* See if R is less than the lower bound or greater than the
1825 upper bound. */
1826
1827 if (! overflow)
1828 {
1829 tree lt = TYPE_MIN_VALUE (type);
1830 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1831 if (REAL_VALUES_LESS (r, l))
1832 {
1833 overflow = 1;
1834 high = TREE_INT_CST_HIGH (lt);
1835 low = TREE_INT_CST_LOW (lt);
1836 }
1837 }
1838
1839 if (! overflow)
1840 {
1841 tree ut = TYPE_MAX_VALUE (type);
1842 if (ut)
1843 {
1844 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1845 if (REAL_VALUES_LESS (u, r))
1846 {
1847 overflow = 1;
1848 high = TREE_INT_CST_HIGH (ut);
1849 low = TREE_INT_CST_LOW (ut);
1850 }
1851 }
1852 }
1853
1854 if (! overflow)
1855 REAL_VALUE_TO_INT (&low, &high, r);
1856
1857 t = build_int_2 (low, high);
1858 TREE_TYPE (t) = type;
1859 TREE_OVERFLOW (t)
1860 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1861 TREE_CONSTANT_OVERFLOW (t)
1862 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1863 return t;
1864 }
1865 }
1866 else if (TREE_CODE (type) == REAL_TYPE)
1867 {
1868 if (TREE_CODE (arg1) == INTEGER_CST)
1869 return build_real_from_int_cst (type, arg1);
1870 if (TREE_CODE (arg1) == REAL_CST)
1871 {
1872 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1873 {
1874 /* We make a copy of ARG1 so that we don't modify an
1875 existing constant tree. */
1876 t = copy_node (arg1);
1877 TREE_TYPE (t) = type;
1878 return t;
1879 }
1880
1881 t = build_real (type,
1882 real_value_truncate (TYPE_MODE (type),
1883 TREE_REAL_CST (arg1)));
1884
1885 TREE_OVERFLOW (t)
1886 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1887 TREE_CONSTANT_OVERFLOW (t)
1888 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1889 return t;
1890 }
1891 }
1892 return NULL_TREE;
1893 }
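
/* Usage sketch (illustrative only, not part of the original file):
   folding the constant conversion (unsigned char) 260 from inside
   this file. build_int_2 gives the constant type integer_type_node,
   which the example then converts; 260 is expected to wrap to 4
   under the truncation applied by force_fit_type.  */

static tree
fold_convert_const_example (void)
{
  tree cst = build_int_2 (260, 0);

  /* Returns an INTEGER_CST of value 4 with type unsigned char,
     or NULL_TREE if the conversion could not be folded.  */
  return fold_convert_const (NOP_EXPR, unsigned_char_type_node, cst);
}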
1894
1895 /* Convert expression ARG to type TYPE. Used by the middle-end for
1896 simple conversions in preference to calling the front-end's convert. */
1897
1898 tree
1899 fold_convert (tree type, tree arg)
1900 {
1901 tree orig = TREE_TYPE (arg);
1902 tree tem;
1903
1904 if (type == orig)
1905 return arg;
1906
1907 if (TREE_CODE (arg) == ERROR_MARK
1908 || TREE_CODE (type) == ERROR_MARK
1909 || TREE_CODE (orig) == ERROR_MARK)
1910 return error_mark_node;
1911
1912 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1913 return fold (build1 (NOP_EXPR, type, arg));
1914
1915 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1916 || TREE_CODE (type) == OFFSET_TYPE)
1917 {
1918 if (TREE_CODE (arg) == INTEGER_CST)
1919 {
1920 tem = fold_convert_const (NOP_EXPR, type, arg);
1921 if (tem != NULL_TREE)
1922 return tem;
1923 }
1924 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1925 || TREE_CODE (orig) == OFFSET_TYPE)
1926 return fold (build1 (NOP_EXPR, type, arg));
1927 if (TREE_CODE (orig) == COMPLEX_TYPE)
1928 {
1929 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1930 return fold_convert (type, tem);
1931 }
1932 if (TREE_CODE (orig) == VECTOR_TYPE
1933 && GET_MODE_SIZE (TYPE_MODE (type))
1934 == GET_MODE_SIZE (TYPE_MODE (orig)))
1935 return fold (build1 (NOP_EXPR, type, arg));
1936 }
1937 else if (TREE_CODE (type) == REAL_TYPE)
1938 {
1939 if (TREE_CODE (arg) == INTEGER_CST)
1940 {
1941 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1943 return tem;
1944 }
1945 else if (TREE_CODE (arg) == REAL_CST)
1946 {
1947 tem = fold_convert_const (NOP_EXPR, type, arg);
1948 if (tem != NULL_TREE)
1949 return tem;
1950 }
1951
1952 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1953 return fold (build1 (FLOAT_EXPR, type, arg));
1954 if (TREE_CODE (orig) == REAL_TYPE)
1955 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1956 type, arg));
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1958 {
1959 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1960 return fold_convert (type, tem);
1961 }
1962 }
1963 else if (TREE_CODE (type) == COMPLEX_TYPE)
1964 {
1965 if (INTEGRAL_TYPE_P (orig)
1966 || POINTER_TYPE_P (orig)
1967 || TREE_CODE (orig) == REAL_TYPE)
1968 return build2 (COMPLEX_EXPR, type,
1969 fold_convert (TREE_TYPE (type), arg),
1970 fold_convert (TREE_TYPE (type), integer_zero_node));
1971 if (TREE_CODE (orig) == COMPLEX_TYPE)
1972 {
1973 tree rpart, ipart;
1974
1975 if (TREE_CODE (arg) == COMPLEX_EXPR)
1976 {
1977 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1978 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1979 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1980 }
1981
1982 arg = save_expr (arg);
1983 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1984 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1985 rpart = fold_convert (TREE_TYPE (type), rpart);
1986 ipart = fold_convert (TREE_TYPE (type), ipart);
1987 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1988 }
1989 }
1990 else if (TREE_CODE (type) == VECTOR_TYPE)
1991 {
1992 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1993 && GET_MODE_SIZE (TYPE_MODE (type))
1994 == GET_MODE_SIZE (TYPE_MODE (orig)))
1995 return fold (build1 (NOP_EXPR, type, arg));
1996 if (TREE_CODE (orig) == VECTOR_TYPE
1997 && GET_MODE_SIZE (TYPE_MODE (type))
1998 == GET_MODE_SIZE (TYPE_MODE (orig)))
1999 return fold (build1 (NOP_EXPR, type, arg));
2000 }
2001 else if (VOID_TYPE_P (type))
2002 return fold (build1 (CONVERT_EXPR, type, arg));
2003 abort ();
2004 }
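
/* Illustrative sketch (not part of the original file) of the dispatch
   above, using the standard double_type_node global from tree.h: an
   INTEGER_CST operand folds immediately through fold_convert_const,
   an integral source becomes a FLOAT_EXPR, a REAL_TYPE source a
   NOP_EXPR (or CONVERT_EXPR under flag_float_store), and a
   COMPLEX_TYPE operand reduces to converting its REALPART_EXPR.  */

static tree
fold_convert_to_double_example (tree arg)
{
  return fold_convert (double_type_node, arg);
}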
2005 \f
2006 /* Return an expr equal to X but certainly not valid as an lvalue. */
2007
2008 tree
2009 non_lvalue (tree x)
2010 {
2011 /* We only need to wrap lvalue tree codes. */
2012 switch (TREE_CODE (x))
2013 {
2014 case VAR_DECL:
2015 case PARM_DECL:
2016 case RESULT_DECL:
2017 case LABEL_DECL:
2018 case FUNCTION_DECL:
2019 case SSA_NAME:
2020
2021 case COMPONENT_REF:
2022 case INDIRECT_REF:
2023 case ARRAY_REF:
2024 case BIT_FIELD_REF:
2025 case BUFFER_REF:
2026 case ARRAY_RANGE_REF:
2027 case VTABLE_REF:
2028
2029 case REALPART_EXPR:
2030 case IMAGPART_EXPR:
2031 case PREINCREMENT_EXPR:
2032 case PREDECREMENT_EXPR:
2033 case SAVE_EXPR:
2034 case UNSAVE_EXPR:
2035 case TRY_CATCH_EXPR:
2036 case WITH_CLEANUP_EXPR:
2037 case COMPOUND_EXPR:
2038 case MODIFY_EXPR:
2039 case TARGET_EXPR:
2040 case COND_EXPR:
2041 case BIND_EXPR:
2042 case MIN_EXPR:
2043 case MAX_EXPR:
2044 case RTL_EXPR:
2045 break;
2046
2047 default:
2048 /* Assume the worst for front-end tree codes. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2050 break;
2051 return x;
2052 }
2053 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 }
2055
2056 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2057 Zero means allow extended lvalues. */
2058
2059 int pedantic_lvalues;
2060
2061 /* When pedantic, return an expr equal to X but certainly not valid as a
2062 pedantic lvalue. Otherwise, return X. */
2063
2064 tree
2065 pedantic_non_lvalue (tree x)
2066 {
2067 if (pedantic_lvalues)
2068 return non_lvalue (x);
2069 else
2070 return x;
2071 }
2072 \f
2073 /* Given a tree comparison code, return the code that is the logical inverse
2074 of the given code. It is not safe to do this for floating-point
2075 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2076 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2077
2078 static enum tree_code
2079 invert_tree_comparison (enum tree_code code, bool honor_nans)
2080 {
2081 if (honor_nans && flag_trapping_math)
2082 return ERROR_MARK;
2083
2084 switch (code)
2085 {
2086 case EQ_EXPR:
2087 return NE_EXPR;
2088 case NE_EXPR:
2089 return EQ_EXPR;
2090 case GT_EXPR:
2091 return honor_nans ? UNLE_EXPR : LE_EXPR;
2092 case GE_EXPR:
2093 return honor_nans ? UNLT_EXPR : LT_EXPR;
2094 case LT_EXPR:
2095 return honor_nans ? UNGE_EXPR : GE_EXPR;
2096 case LE_EXPR:
2097 return honor_nans ? UNGT_EXPR : GT_EXPR;
2098 case LTGT_EXPR:
2099 return UNEQ_EXPR;
2100 case UNEQ_EXPR:
2101 return LTGT_EXPR;
2102 case UNGT_EXPR:
2103 return LE_EXPR;
2104 case UNGE_EXPR:
2105 return LT_EXPR;
2106 case UNLT_EXPR:
2107 return GE_EXPR;
2108 case UNLE_EXPR:
2109 return GT_EXPR;
2110 case ORDERED_EXPR:
2111 return UNORDERED_EXPR;
2112 case UNORDERED_EXPR:
2113 return ORDERED_EXPR;
2114 default:
2115 abort ();
2116 }
2117 }
2118
2119 /* Similar, but return the comparison that results if the operands are
2120 swapped. This is safe for floating-point. */
2121
2122 static enum tree_code
2123 swap_tree_comparison (enum tree_code code)
2124 {
2125 switch (code)
2126 {
2127 case EQ_EXPR:
2128 case NE_EXPR:
2129 return code;
2130 case GT_EXPR:
2131 return LT_EXPR;
2132 case GE_EXPR:
2133 return LE_EXPR;
2134 case LT_EXPR:
2135 return GT_EXPR;
2136 case LE_EXPR:
2137 return GE_EXPR;
2138 default:
2139 abort ();
2140 }
2141 }
2142
2143
2144 /* Convert a comparison tree code from an enum tree_code representation
2145 into a compcode bit-based encoding. This function is the inverse of
2146 compcode_to_comparison. */
2147
2148 static enum comparison_code
2149 comparison_to_compcode (enum tree_code code)
2150 {
2151 switch (code)
2152 {
2153 case LT_EXPR:
2154 return COMPCODE_LT;
2155 case EQ_EXPR:
2156 return COMPCODE_EQ;
2157 case LE_EXPR:
2158 return COMPCODE_LE;
2159 case GT_EXPR:
2160 return COMPCODE_GT;
2161 case NE_EXPR:
2162 return COMPCODE_NE;
2163 case GE_EXPR:
2164 return COMPCODE_GE;
2165 case ORDERED_EXPR:
2166 return COMPCODE_ORD;
2167 case UNORDERED_EXPR:
2168 return COMPCODE_UNORD;
2169 case UNLT_EXPR:
2170 return COMPCODE_UNLT;
2171 case UNEQ_EXPR:
2172 return COMPCODE_UNEQ;
2173 case UNLE_EXPR:
2174 return COMPCODE_UNLE;
2175 case UNGT_EXPR:
2176 return COMPCODE_UNGT;
2177 case LTGT_EXPR:
2178 return COMPCODE_LTGT;
2179 case UNGE_EXPR:
2180 return COMPCODE_UNGE;
2181 default:
2182 abort ();
2183 }
2184 }
2185
2186 /* Convert a compcode bit-based encoding of a comparison operator back
2187 to GCC's enum tree_code representation. This function is the
2188 inverse of comparison_to_compcode. */
2189
2190 static enum tree_code
2191 compcode_to_comparison (enum comparison_code code)
2192 {
2193 switch (code)
2194 {
2195 case COMPCODE_LT:
2196 return LT_EXPR;
2197 case COMPCODE_EQ:
2198 return EQ_EXPR;
2199 case COMPCODE_LE:
2200 return LE_EXPR;
2201 case COMPCODE_GT:
2202 return GT_EXPR;
2203 case COMPCODE_NE:
2204 return NE_EXPR;
2205 case COMPCODE_GE:
2206 return GE_EXPR;
2207 case COMPCODE_ORD:
2208 return ORDERED_EXPR;
2209 case COMPCODE_UNORD:
2210 return UNORDERED_EXPR;
2211 case COMPCODE_UNLT:
2212 return UNLT_EXPR;
2213 case COMPCODE_UNEQ:
2214 return UNEQ_EXPR;
2215 case COMPCODE_UNLE:
2216 return UNLE_EXPR;
2217 case COMPCODE_UNGT:
2218 return UNGT_EXPR;
2219 case COMPCODE_LTGT:
2220 return LTGT_EXPR;
2221 case COMPCODE_UNGE:
2222 return UNGE_EXPR;
2223 default:
2224 abort ();
2225 }
2226 }
2227
2228 /* Return a tree for the comparison which is the combination of
2229 doing the AND or OR (depending on CODE) of the two operations LCODE
2230 and RCODE on the identical operands LL_ARG and LR_ARG; the result has
2231 type TRUTH_TYPE. Take into account the possibility of trapping if the
2232 mode has NaNs, and return NULL_TREE if this makes the transformation invalid. */
2233
2234 tree
2235 combine_comparisons (enum tree_code code, enum tree_code lcode,
2236 enum tree_code rcode, tree truth_type,
2237 tree ll_arg, tree lr_arg)
2238 {
2239 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2240 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2241 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2242 enum comparison_code compcode;
2243
2244 switch (code)
2245 {
2246 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2247 compcode = lcompcode & rcompcode;
2248 break;
2249
2250 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2251 compcode = lcompcode | rcompcode;
2252 break;
2253
2254 default:
2255 return NULL_TREE;
2256 }
2257
2258 if (!honor_nans)
2259 {
2260 /* Eliminate unordered comparisons, as well as LTGT and ORD
2261 which are not used unless the mode has NaNs. */
2262 compcode &= ~COMPCODE_UNORD;
2263 if (compcode == COMPCODE_LTGT)
2264 compcode = COMPCODE_NE;
2265 else if (compcode == COMPCODE_ORD)
2266 compcode = COMPCODE_TRUE;
2267 }
2268 else if (flag_trapping_math)
2269 {
2270 /* Check that the original operation and the optimized ones will trap
2271 under the same condition. */
2272 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2273 && (lcompcode != COMPCODE_EQ)
2274 && (lcompcode != COMPCODE_ORD);
2275 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2276 && (rcompcode != COMPCODE_EQ)
2277 && (rcompcode != COMPCODE_ORD);
2278 bool trap = (compcode & COMPCODE_UNORD) == 0
2279 && (compcode != COMPCODE_EQ)
2280 && (compcode != COMPCODE_ORD);
2281
2282 /* In a short-circuited boolean expression the LHS might be
2283 such that the RHS, if evaluated, will never trap. For
2284 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2285 if neither x nor y is NaN. (This is a mixed blessing: for
2286 example, the expression above will never trap, hence
2287 optimizing it to x < y would be invalid). */
2288 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2289 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2290 rtrap = false;
2291
2292 /* If the comparison was short-circuited, and only the RHS
2293 trapped, we may now generate a spurious trap. */
2294 if (rtrap && !ltrap
2295 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2296 return NULL_TREE;
2297
2298 /* If we changed the conditions that cause a trap, we lose. */
2299 if ((ltrap || rtrap) != trap)
2300 return NULL_TREE;
2301 }
2302
2303 if (compcode == COMPCODE_TRUE)
2304 return constant_boolean_node (true, truth_type);
2305 else if (compcode == COMPCODE_FALSE)
2306 return constant_boolean_node (false, truth_type);
2307 else
2308 return fold (build2 (compcode_to_comparison (compcode),
2309 truth_type, ll_arg, lr_arg));
2310 }
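
/* Worked example (illustrative, not from the original source): for
   (x < y) || (x == y) the encoding gives COMPCODE_LT | COMPCODE_EQ
   = 1 | 2 = 3 = COMPCODE_LE, so the combination folds to x <= y.
   For (x < y) && (x == y) it gives COMPCODE_LT & COMPCODE_EQ = 0
   = COMPCODE_FALSE, i.e. constant false, assuming the NaN and
   trapping checks above permit the transformation.  */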
2311
2312 /* Return nonzero if CODE is a tree code that represents a truth value. */
2313
2314 static int
2315 truth_value_p (enum tree_code code)
2316 {
2317 return (TREE_CODE_CLASS (code) == '<'
2318 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2319 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2320 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2321 }
2322 \f
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2326
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2333
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 have no side effects, the VAR_DECLs, PARM_DECLs, etc. must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2344
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2348
2349 int
2350 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2351 {
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2354 return 0;
2355
2356 /* If the two types don't have the same signedness, then we can't consider
2357 them equal. We must check this before the STRIP_NOPS calls
2358 because they may change the signedness of the arguments. */
2359 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2360 return 0;
2361
2362 STRIP_NOPS (arg0);
2363 STRIP_NOPS (arg1);
2364
2365 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2366 /* This is needed for conversions and for COMPONENT_REF.
2367 Might as well play it safe and always test this. */
2368 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2369 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2370 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2371 return 0;
2372
2373 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2374 We don't care about side effects in that case because the SAVE_EXPR
2375 takes care of that for us. In all other cases, two expressions are
2376 equal if they have no side effects. If we have two identical
2377 expressions with side effects that should be treated the same due
2378 to the only side effects being identical SAVE_EXPR's, that will
2379 be detected in the recursive calls below. */
2380 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2381 && (TREE_CODE (arg0) == SAVE_EXPR
2382 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2383 return 1;
2384
2385 /* Next handle constant cases, those for which we can return 1 even
2386 if OEP_ONLY_CONST is set. */
2387 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2388 switch (TREE_CODE (arg0))
2389 {
2390 case INTEGER_CST:
2391 return (! TREE_CONSTANT_OVERFLOW (arg0)
2392 && ! TREE_CONSTANT_OVERFLOW (arg1)
2393 && tree_int_cst_equal (arg0, arg1));
2394
2395 case REAL_CST:
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2399 TREE_REAL_CST (arg1)));
2400
2401 case VECTOR_CST:
2402 {
2403 tree v1, v2;
2404
2405 if (TREE_CONSTANT_OVERFLOW (arg0)
2406 || TREE_CONSTANT_OVERFLOW (arg1))
2407 return 0;
2408
2409 v1 = TREE_VECTOR_CST_ELTS (arg0);
2410 v2 = TREE_VECTOR_CST_ELTS (arg1);
2411 while (v1 && v2)
2412 {
2413 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2414 flags))
2415 return 0;
2416 v1 = TREE_CHAIN (v1);
2417 v2 = TREE_CHAIN (v2);
2418 }
2419
2420 return 1;
2421 }
2422
2423 case COMPLEX_CST:
2424 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2425 flags)
2426 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2427 flags));
2428
2429 case STRING_CST:
2430 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2431 && ! memcmp (TREE_STRING_POINTER (arg0),
2432 TREE_STRING_POINTER (arg1),
2433 TREE_STRING_LENGTH (arg0)));
2434
2435 case ADDR_EXPR:
2436 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2437 0);
2438 default:
2439 break;
2440 }
2441
2442 if (flags & OEP_ONLY_CONST)
2443 return 0;
2444
2445 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2446 {
2447 case '1':
2448 /* Two conversions are equal only if signedness and modes match. */
2449 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2450 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2451 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2452 return 0;
2453
2454 return operand_equal_p (TREE_OPERAND (arg0, 0),
2455 TREE_OPERAND (arg1, 0), flags);
2456
2457 case '<':
2458 case '2':
2459 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2460 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2461 0))
2462 return 1;
2463
2464 /* For commutative ops, allow the other order. */
2465 return (commutative_tree_code (TREE_CODE (arg0))
2466 && operand_equal_p (TREE_OPERAND (arg0, 0),
2467 TREE_OPERAND (arg1, 1), flags)
2468 && operand_equal_p (TREE_OPERAND (arg0, 1),
2469 TREE_OPERAND (arg1, 0), flags));
2470
2471 case 'r':
2472 /* If either of the pointer (or reference) expressions we are
2473 dereferencing contain a side effect, these cannot be equal. */
2474 if (TREE_SIDE_EFFECTS (arg0)
2475 || TREE_SIDE_EFFECTS (arg1))
2476 return 0;
2477
2478 switch (TREE_CODE (arg0))
2479 {
2480 case INDIRECT_REF:
2481 return operand_equal_p (TREE_OPERAND (arg0, 0),
2482 TREE_OPERAND (arg1, 0), flags);
2483
2484 case COMPONENT_REF:
2485 case ARRAY_REF:
2486 case ARRAY_RANGE_REF:
2487 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2488 TREE_OPERAND (arg1, 0), flags)
2489 && operand_equal_p (TREE_OPERAND (arg0, 1),
2490 TREE_OPERAND (arg1, 1), flags));
2491
2492 case BIT_FIELD_REF:
2493 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2494 TREE_OPERAND (arg1, 0), flags)
2495 && operand_equal_p (TREE_OPERAND (arg0, 1),
2496 TREE_OPERAND (arg1, 1), flags)
2497 && operand_equal_p (TREE_OPERAND (arg0, 2),
2498 TREE_OPERAND (arg1, 2), flags));
2499 default:
2500 return 0;
2501 }
2502
2503 case 'e':
2504 switch (TREE_CODE (arg0))
2505 {
2506 case ADDR_EXPR:
2507 case TRUTH_NOT_EXPR:
2508 return operand_equal_p (TREE_OPERAND (arg0, 0),
2509 TREE_OPERAND (arg1, 0), flags);
2510
2511 case RTL_EXPR:
2512 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2513
2514 case CALL_EXPR:
2515 /* If the CALL_EXPRs call different functions, then they
2516 clearly cannot be equal. */
2517 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2518 TREE_OPERAND (arg1, 0), flags))
2519 return 0;
2520
2521 {
2522 unsigned int cef = call_expr_flags (arg0);
2523 if (flags & OEP_PURE_SAME)
2524 cef &= ECF_CONST | ECF_PURE;
2525 else
2526 cef &= ECF_CONST;
2527 if (!cef)
2528 return 0;
2529 }
2530
2531 /* Now see if all the arguments are the same. operand_equal_p
2532 does not handle TREE_LIST, so we walk the operands here
2533 feeding them to operand_equal_p. */
2534 arg0 = TREE_OPERAND (arg0, 1);
2535 arg1 = TREE_OPERAND (arg1, 1);
2536 while (arg0 && arg1)
2537 {
2538 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2539 flags))
2540 return 0;
2541
2542 arg0 = TREE_CHAIN (arg0);
2543 arg1 = TREE_CHAIN (arg1);
2544 }
2545
2546 /* If we get here and both argument lists are exhausted
2547 then the CALL_EXPRs are equal. */
2548 return ! (arg0 || arg1);
2549
2550 default:
2551 return 0;
2552 }
2553
2554 case 'd':
2555 /* Consider __builtin_sqrt equal to sqrt. */
2556 return (TREE_CODE (arg0) == FUNCTION_DECL
2557 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2558 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2559 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2560
2561 default:
2562 return 0;
2563 }
2564 }
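
/* Example of the IEEE distinction documented above (illustrative,
   not part of the original source): REAL_CSTs for -0.0 and +0.0 are
   not operand_equal_p, because REAL_VALUES_IDENTICAL distinguishes
   the two zeros even though they compare equal at run time, while
   two bit-identical NaN constants are operand_equal_p even though
   NaN != NaN at run time.  */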
2565 \f
2566 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2567 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2568
2569 When in doubt, return 0. */
2570
2571 static int
2572 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2573 {
2574 int unsignedp1, unsignedpo;
2575 tree primarg0, primarg1, primother;
2576 unsigned int correct_width;
2577
2578 if (operand_equal_p (arg0, arg1, 0))
2579 return 1;
2580
2581 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2582 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2583 return 0;
2584
2585 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2586 and see if the inner values are the same. This removes any
2587 signedness comparison, which doesn't matter here. */
2588 primarg0 = arg0, primarg1 = arg1;
2589 STRIP_NOPS (primarg0);
2590 STRIP_NOPS (primarg1);
2591 if (operand_equal_p (primarg0, primarg1, 0))
2592 return 1;
2593
2594 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2595 actual comparison operand, ARG0.
2596
2597 First throw away any conversions to wider types
2598 already present in the operands. */
2599
2600 primarg1 = get_narrower (arg1, &unsignedp1);
2601 primother = get_narrower (other, &unsignedpo);
2602
2603 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2604 if (unsignedp1 == unsignedpo
2605 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2606 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2607 {
2608 tree type = TREE_TYPE (arg0);
2609
2610 /* Make sure shorter operand is extended the right way
2611 to match the longer operand. */
2612 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2613 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2614
2615 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2616 return 1;
2617 }
2618
2619 return 0;
2620 }
2621 \f
2622 /* See if ARG is an expression that is either a comparison or is performing
2623 arithmetic on comparisons. The comparisons must only be comparing
2624 two different values, which will be stored in *CVAL1 and *CVAL2; if
2625 they are nonzero it means that some operands have already been found.
2626 No variables may be used anywhere else in the expression except in the
2627 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2628 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2629
2630 If this is true, return 1. Otherwise, return zero. */
2631
2632 static int
2633 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2634 {
2635 enum tree_code code = TREE_CODE (arg);
2636 char class = TREE_CODE_CLASS (code);
2637
2638 /* We can handle some of the 'e' cases here. */
2639 if (class == 'e' && code == TRUTH_NOT_EXPR)
2640 class = '1';
2641 else if (class == 'e'
2642 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2643 || code == COMPOUND_EXPR))
2644 class = '2';
2645
2646 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2647 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2648 {
2649 /* If we've already found a CVAL1 or CVAL2, this expression is
2650 too complex to handle. */
2651 if (*cval1 || *cval2)
2652 return 0;
2653
2654 class = '1';
2655 *save_p = 1;
2656 }
2657
2658 switch (class)
2659 {
2660 case '1':
2661 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2662
2663 case '2':
2664 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2665 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2666 cval1, cval2, save_p));
2667
2668 case 'c':
2669 return 1;
2670
2671 case 'e':
2672 if (code == COND_EXPR)
2673 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2674 cval1, cval2, save_p)
2675 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2676 cval1, cval2, save_p)
2677 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2678 cval1, cval2, save_p));
2679 return 0;
2680
2681 case '<':
2682 /* First see if we can handle the first operand, then the second. For
2683 the second operand, we know *CVAL1 can't be zero. It must be that
2684 one side of the comparison is each of the values; test for the
2685 case where this isn't true by failing if the two operands
2686 are the same. */
2687
2688 if (operand_equal_p (TREE_OPERAND (arg, 0),
2689 TREE_OPERAND (arg, 1), 0))
2690 return 0;
2691
2692 if (*cval1 == 0)
2693 *cval1 = TREE_OPERAND (arg, 0);
2694 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2695 ;
2696 else if (*cval2 == 0)
2697 *cval2 = TREE_OPERAND (arg, 0);
2698 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2699 ;
2700 else
2701 return 0;
2702
2703 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2704 ;
2705 else if (*cval2 == 0)
2706 *cval2 = TREE_OPERAND (arg, 1);
2707 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2708 ;
2709 else
2710 return 0;
2711
2712 return 1;
2713
2714 default:
2715 return 0;
2716 }
2717 }
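
/* Illustrative example (not part of the original source): for
   ARG = (x < y) | (x == y), the first comparison records *CVAL1 = x
   and *CVAL2 = y, the second matches the same pair, and the function
   returns 1. For (x < y) | (x == z), the third distinct value Z
   makes it return 0.  */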
2718 \f
2719 /* ARG is a tree that is known to contain just arithmetic operations and
2720 comparisons. Evaluate the operations in the tree substituting NEW0 for
2721 any occurrence of OLD0 as an operand of a comparison and likewise for
2722 NEW1 and OLD1. */
2723
2724 static tree
2725 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2726 {
2727 tree type = TREE_TYPE (arg);
2728 enum tree_code code = TREE_CODE (arg);
2729 char class = TREE_CODE_CLASS (code);
2730
2731 /* We can handle some of the 'e' cases here. */
2732 if (class == 'e' && code == TRUTH_NOT_EXPR)
2733 class = '1';
2734 else if (class == 'e'
2735 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2736 class = '2';
2737
2738 switch (class)
2739 {
2740 case '1':
2741 return fold (build1 (code, type,
2742 eval_subst (TREE_OPERAND (arg, 0),
2743 old0, new0, old1, new1)));
2744
2745 case '2':
2746 return fold (build2 (code, type,
2747 eval_subst (TREE_OPERAND (arg, 0),
2748 old0, new0, old1, new1),
2749 eval_subst (TREE_OPERAND (arg, 1),
2750 old0, new0, old1, new1)));
2751
2752 case 'e':
2753 switch (code)
2754 {
2755 case SAVE_EXPR:
2756 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2757
2758 case COMPOUND_EXPR:
2759 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2760
2761 case COND_EXPR:
2762 return fold (build3 (code, type,
2763 eval_subst (TREE_OPERAND (arg, 0),
2764 old0, new0, old1, new1),
2765 eval_subst (TREE_OPERAND (arg, 1),
2766 old0, new0, old1, new1),
2767 eval_subst (TREE_OPERAND (arg, 2),
2768 old0, new0, old1, new1)));
2769 default:
2770 break;
2771 }
2772 /* Fall through - ??? */
2773
2774 case '<':
2775 {
2776 tree arg0 = TREE_OPERAND (arg, 0);
2777 tree arg1 = TREE_OPERAND (arg, 1);
2778
2779 /* We need to check both for exact equality and tree equality. The
2780 former will be true if the operand has a side-effect. In that
2781 case, we know the operand occurred exactly once. */
2782
2783 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2784 arg0 = new0;
2785 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2786 arg0 = new1;
2787
2788 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2789 arg1 = new0;
2790 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2791 arg1 = new1;
2792
2793 return fold (build2 (code, type, arg0, arg1));
2794 }
2795
2796 default:
2797 return arg;
2798 }
2799 }
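
/* Illustrative example (not part of the original source): with
   OLD0 = x, NEW0 = 0, OLD1 = y, NEW1 = 1, the expression
   (x < y) && (y == x) is rewritten as (0 < 1) && (1 == 0), which the
   fold calls above then collapse to a constant.  */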
2800 \f
2801 /* Return a tree for the case when the result of an expression is RESULT
2802 converted to TYPE and OMITTED was previously an operand of the expression
2803 but is now not needed (e.g., we folded OMITTED * 0).
2804
2805 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2806 the conversion of RESULT to TYPE. */
2807
2808 tree
2809 omit_one_operand (tree type, tree result, tree omitted)
2810 {
2811 tree t = fold_convert (type, result);
2812
2813 if (TREE_SIDE_EFFECTS (omitted))
2814 return build2 (COMPOUND_EXPR, type, omitted, t);
2815
2816 return non_lvalue (t);
2817 }
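
/* Illustrative example (not part of the original source): when
   folding f () * 0, the call OMITTED has side effects, so
   omit_one_operand (type, integer_zero_node, call) yields
   COMPOUND_EXPR <f (), 0>, keeping the call while discarding its
   value; for a side-effect-free OMITTED the result is simply
   non_lvalue (0).  */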
2818
2819 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2820
2821 static tree
2822 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2823 {
2824 tree t = fold_convert (type, result);
2825
2826 if (TREE_SIDE_EFFECTS (omitted))
2827 return build2 (COMPOUND_EXPR, type, omitted, t);
2828
2829 return pedantic_non_lvalue (t);
2830 }
2831
2832 /* Return a tree for the case when the result of an expression is RESULT
2833 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2834 of the expression but are now not needed.
2835
2836 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2837 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2838 evaluated before OMITTED2. Otherwise, if neither has side effects,
2839 just do the conversion of RESULT to TYPE. */
2840
2841 tree
2842 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2843 {
2844 tree t = fold_convert (type, result);
2845
2846 if (TREE_SIDE_EFFECTS (omitted2))
2847 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2848 if (TREE_SIDE_EFFECTS (omitted1))
2849 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2850
2851 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2852 }
2853
2854 \f
2855 /* Return a simplified tree node for the truth-negation of ARG. This
2856 never alters ARG itself. We assume that ARG is an operation that
2857 returns a truth value (0 or 1).
2858
2859 FIXME: one would think we would fold the result, but it causes
2860 problems with the dominator optimizer. */
2861 tree
2862 invert_truthvalue (tree arg)
2863 {
2864 tree type = TREE_TYPE (arg);
2865 enum tree_code code = TREE_CODE (arg);
2866
2867 if (code == ERROR_MARK)
2868 return arg;
2869
2870 /* If this is a comparison, we can simply invert it, except for
2871 floating-point non-equality comparisons, in which case we just
2872 enclose a TRUTH_NOT_EXPR around what we have. */
2873
2874 if (TREE_CODE_CLASS (code) == '<')
2875 {
2876 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2877 if (FLOAT_TYPE_P (op_type)
2878 && flag_trapping_math
2879 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2880 && code != NE_EXPR && code != EQ_EXPR)
2881 return build1 (TRUTH_NOT_EXPR, type, arg);
2882 else
2883 {
2884 code = invert_tree_comparison (code,
2885 HONOR_NANS (TYPE_MODE (op_type)));
2886 if (code == ERROR_MARK)
2887 return build1 (TRUTH_NOT_EXPR, type, arg);
2888 else
2889 return build2 (code, type,
2890 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2891 }
2892 }
2893
2894 switch (code)
2895 {
2896 case INTEGER_CST:
2897 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2898
2899 case TRUTH_AND_EXPR:
2900 return build2 (TRUTH_OR_EXPR, type,
2901 invert_truthvalue (TREE_OPERAND (arg, 0)),
2902 invert_truthvalue (TREE_OPERAND (arg, 1)));
2903
2904 case TRUTH_OR_EXPR:
2905 return build2 (TRUTH_AND_EXPR, type,
2906 invert_truthvalue (TREE_OPERAND (arg, 0)),
2907 invert_truthvalue (TREE_OPERAND (arg, 1)));
2908
2909 case TRUTH_XOR_EXPR:
2910 /* Here we can invert either operand. We invert the first operand
2911 unless the second operand is a TRUTH_NOT_EXPR in which case our
2912 result is the XOR of the first operand with the inside of the
2913 negation of the second operand. */
2914
2915 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2916 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2917 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2918 else
2919 return build2 (TRUTH_XOR_EXPR, type,
2920 invert_truthvalue (TREE_OPERAND (arg, 0)),
2921 TREE_OPERAND (arg, 1));
2922
2923 case TRUTH_ANDIF_EXPR:
2924 return build2 (TRUTH_ORIF_EXPR, type,
2925 invert_truthvalue (TREE_OPERAND (arg, 0)),
2926 invert_truthvalue (TREE_OPERAND (arg, 1)));
2927
2928 case TRUTH_ORIF_EXPR:
2929 return build2 (TRUTH_ANDIF_EXPR, type,
2930 invert_truthvalue (TREE_OPERAND (arg, 0)),
2931 invert_truthvalue (TREE_OPERAND (arg, 1)));
2932
2933 case TRUTH_NOT_EXPR:
2934 return TREE_OPERAND (arg, 0);
2935
2936 case COND_EXPR:
2937 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2938 invert_truthvalue (TREE_OPERAND (arg, 1)),
2939 invert_truthvalue (TREE_OPERAND (arg, 2)));
2940
2941 case COMPOUND_EXPR:
2942 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2943 invert_truthvalue (TREE_OPERAND (arg, 1)));
2944
2945 case NON_LVALUE_EXPR:
2946 return invert_truthvalue (TREE_OPERAND (arg, 0));
2947
2948 case NOP_EXPR:
2949 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2950 break;
2951
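/* Otherwise fall through to handle like CONVERT_EXPR.  */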
2952 case CONVERT_EXPR:
2953 case FLOAT_EXPR:
2954 return build1 (TREE_CODE (arg), type,
2955 invert_truthvalue (TREE_OPERAND (arg, 0)));
2956
2957 case BIT_AND_EXPR:
2958 if (!integer_onep (TREE_OPERAND (arg, 1)))
2959 break;
2960 return build2 (EQ_EXPR, type, arg,
2961 fold_convert (type, integer_zero_node));
2962
2963 case SAVE_EXPR:
2964 return build1 (TRUTH_NOT_EXPR, type, arg);
2965
2966 case CLEANUP_POINT_EXPR:
2967 return build1 (CLEANUP_POINT_EXPR, type,
2968 invert_truthvalue (TREE_OPERAND (arg, 0)));
2969
2970 default:
2971 break;
2972 }
2973 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2974 abort ();
2975 return build1 (TRUTH_NOT_EXPR, type, arg);
2976 }
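
/* Illustrative example (not part of the original source): for
   integer operands, inverting (a < b) || (c == 0) produces
   (a >= b) && (c != 0). For floating-point operands under
   -ftrapping-math, the a < b half is instead wrapped in a
   TRUTH_NOT_EXPR, since the inverse UNGE_EXPR would not trap
   on NaN while a < b does.  */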
2977
2978 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2979 operands are another bit-wise operation with a common input. If so,
2980 distribute the bit operations to save an operation and possibly two if
2981 constants are involved. For example, convert
2982 (A | B) & (A | C) into A | (B & C)
2983 Further simplification will occur if B and C are constants.
2984
2985 If this optimization cannot be done, 0 will be returned. */
2986
2987 static tree
2988 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2989 {
2990 tree common;
2991 tree left, right;
2992
2993 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2994 || TREE_CODE (arg0) == code
2995 || (TREE_CODE (arg0) != BIT_AND_EXPR
2996 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2997 return 0;
2998
2999 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3000 {
3001 common = TREE_OPERAND (arg0, 0);
3002 left = TREE_OPERAND (arg0, 1);
3003 right = TREE_OPERAND (arg1, 1);
3004 }
3005 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3006 {
3007 common = TREE_OPERAND (arg0, 0);
3008 left = TREE_OPERAND (arg0, 1);
3009 right = TREE_OPERAND (arg1, 0);
3010 }
3011 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3012 {
3013 common = TREE_OPERAND (arg0, 1);
3014 left = TREE_OPERAND (arg0, 0);
3015 right = TREE_OPERAND (arg1, 1);
3016 }
3017 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3018 {
3019 common = TREE_OPERAND (arg0, 1);
3020 left = TREE_OPERAND (arg0, 0);
3021 right = TREE_OPERAND (arg1, 0);
3022 }
3023 else
3024 return 0;
3025
3026 return fold (build2 (TREE_CODE (arg0), type, common,
3027 fold (build2 (code, type, left, right))));
3028 }
3029 \f
3030 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3031 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3032
3033 static tree
3034 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3035 int unsignedp)
3036 {
3037 tree result = build3 (BIT_FIELD_REF, type, inner,
3038 size_int (bitsize), bitsize_int (bitpos));
3039
3040 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3041
3042 return result;
3043 }
3044
3045 /* Optimize a bit-field compare.
3046
3047 There are two cases: First is a compare against a constant and the
3048 second is a comparison of two items where the fields are at the same
3049 bit position relative to the start of a chunk (byte, halfword, word)
3050 large enough to contain it. In these cases we can avoid the shift
3051 implicit in bitfield extractions.
3052
3053 For constants, we emit a compare of the shifted constant with the
3054 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3055 compared. For two fields at the same position, we do the ANDs with the
3056 similar mask and compare the result of the ANDs.
3057
3058 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3059 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3060 are the left and right operands of the comparison, respectively.
3061
3062 If the optimization described above can be done, we return the resulting
3063 tree. Otherwise we return zero. */
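
/* Worked example (illustrative, not from the original source): for
   struct s { unsigned f : 3; } x; the test x.f == 5 can become
   (WORD & MASK) == (5 << SHIFT), where WORD is a word-sized
   BIT_FIELD_REF containing the field, MASK has the field's three
   bits set in place, and SHIFT moves the constant to the field's
   position, avoiding the shift implicit in extracting x.f.  */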
3064
3065 static tree
3066 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3067 tree lhs, tree rhs)
3068 {
3069 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3070 tree type = TREE_TYPE (lhs);
3071 tree signed_type, unsigned_type;
3072 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3073 enum machine_mode lmode, rmode, nmode;
3074 int lunsignedp, runsignedp;
3075 int lvolatilep = 0, rvolatilep = 0;
3076 tree linner, rinner = NULL_TREE;
3077 tree mask;
3078 tree offset;
3079
3080 /* Get all the information about the extractions being done. If the bit size
3081 is the same as the size of the underlying object, we aren't doing an
3082 extraction at all and so can do nothing. We also don't want to
3083 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3084 then will no longer be able to replace it. */
3085 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3086 &lunsignedp, &lvolatilep);
3087 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3088 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3089 return 0;
3090
3091 if (!const_p)
3092 {
3093 /* If this is not a constant, we can only do something if bit positions,
3094 sizes, and signedness are the same. */
3095 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3096 &runsignedp, &rvolatilep);
3097
3098 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3099 || lunsignedp != runsignedp || offset != 0
3100 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3101 return 0;
3102 }
3103
3104 /* See if we can find a mode to refer to this field. We should be able to,
3105 but fail if we can't. */
3106 nmode = get_best_mode (lbitsize, lbitpos,
3107 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3108 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3109 TYPE_ALIGN (TREE_TYPE (rinner))),
3110 word_mode, lvolatilep || rvolatilep);
3111 if (nmode == VOIDmode)
3112 return 0;
3113
3114 /* Set signed and unsigned types of the precision of this mode for the
3115 shifts below. */
3116 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3117 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3118
3119 /* Compute the bit position and size for the new reference and our offset
3120 within it. If the new reference is the same size as the original, we
3121 won't optimize anything, so return zero. */
3122 nbitsize = GET_MODE_BITSIZE (nmode);
3123 nbitpos = lbitpos & ~ (nbitsize - 1);
3124 lbitpos -= nbitpos;
3125 if (nbitsize == lbitsize)
3126 return 0;
3127
3128 if (BYTES_BIG_ENDIAN)
3129 lbitpos = nbitsize - lbitsize - lbitpos;
3130
3131 /* Make the mask to be used against the extracted field. */
3132 mask = build_int_2 (~0, ~0);
3133 TREE_TYPE (mask) = unsigned_type;
3134 force_fit_type (mask, 0);
3135 mask = fold_convert (unsigned_type, mask);
3136 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3137 mask = const_binop (RSHIFT_EXPR, mask,
3138 size_int (nbitsize - lbitsize - lbitpos), 0);
3139
3140 if (! const_p)
3141 /* If not comparing with constant, just rework the comparison
3142 and return. */
3143 return build2 (code, compare_type,
3144 build2 (BIT_AND_EXPR, unsigned_type,
3145 make_bit_field_ref (linner, unsigned_type,
3146 nbitsize, nbitpos, 1),
3147 mask),
3148 build2 (BIT_AND_EXPR, unsigned_type,
3149 make_bit_field_ref (rinner, unsigned_type,
3150 nbitsize, nbitpos, 1),
3151 mask));
3152
3153 /* Otherwise, we are handling the constant case. See if the constant is too
3154 big for the field. Warn and return a tree for 0 (false) if so. We do
3155 this not only for its own sake, but to avoid having to test for this
3156 error case below. If we didn't, we might generate wrong code.
3157
3158 For unsigned fields, the constant shifted right by the field length should
3159 be all zero. For signed fields, the high-order bits should agree with
3160 the sign bit. */
3161
3162 if (lunsignedp)
3163 {
3164 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3165 fold_convert (unsigned_type, rhs),
3166 size_int (lbitsize), 0)))
3167 {
3168 warning ("comparison is always %d due to width of bit-field",
3169 code == NE_EXPR);
3170 return constant_boolean_node (code == NE_EXPR, compare_type);
3171 }
3172 }
3173 else
3174 {
3175 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3176 size_int (lbitsize - 1), 0);
3177 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3178 {
3179 warning ("comparison is always %d due to width of bit-field",
3180 code == NE_EXPR);
3181 return constant_boolean_node (code == NE_EXPR, compare_type);
3182 }
3183 }
3184
3185 /* Single-bit compares should always be against zero. */
3186 if (lbitsize == 1 && ! integer_zerop (rhs))
3187 {
3188 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3189 rhs = fold_convert (type, integer_zero_node);
3190 }
3191
3192 /* Make a new bitfield reference, shift the constant over the
3193 appropriate number of bits and mask it with the computed mask
3194 (in case this was a signed field). If we changed it, make a new one. */
3195 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3196 if (lvolatilep)
3197 {
3198 TREE_SIDE_EFFECTS (lhs) = 1;
3199 TREE_THIS_VOLATILE (lhs) = 1;
3200 }
3201
3202 rhs = fold (const_binop (BIT_AND_EXPR,
3203 const_binop (LSHIFT_EXPR,
3204 fold_convert (unsigned_type, rhs),
3205 size_int (lbitpos), 0),
3206 mask, 0));
3207
3208 return build2 (code, compare_type,
3209 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3210 rhs);
3211 }
3212 \f
3213 /* Subroutine for fold_truthop: decode a field reference.
3214
3215 If EXP is a comparison reference, we return the innermost reference.
3216
3217 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3218 set to the starting bit number.
3219
3220 If the innermost field can be completely contained in a mode-sized
3221 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3222
3223 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3224 otherwise it is not changed.
3225
3226 *PUNSIGNEDP is set to the signedness of the field.
3227
3228 *PMASK is set to the mask used. This is either contained in a
3229 BIT_AND_EXPR or derived from the width of the field.
3230
3231 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3232
3233 Return 0 if this is not a component reference or is one that we can't
3234 do anything with. */
3235
3236 static tree
3237 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3238 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3239 int *punsignedp, int *pvolatilep,
3240 tree *pmask, tree *pand_mask)
3241 {
3242 tree outer_type = 0;
3243 tree and_mask = 0;
3244 tree mask, inner, offset;
3245 tree unsigned_type;
3246 unsigned int precision;
3247
3248 /* All the optimizations using this function assume integer fields.
3249 There are problems with FP fields since the type_for_size call
3250 below can fail for, e.g., XFmode. */
3251 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3252 return 0;
3253
3254 /* We are interested in the bare arrangement of bits, so strip everything
3255 that doesn't affect the machine mode. However, record the type of the
3256 outermost expression if it may matter below. */
3257 if (TREE_CODE (exp) == NOP_EXPR
3258 || TREE_CODE (exp) == CONVERT_EXPR
3259 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3260 outer_type = TREE_TYPE (exp);
3261 STRIP_NOPS (exp);
3262
3263 if (TREE_CODE (exp) == BIT_AND_EXPR)
3264 {
3265 and_mask = TREE_OPERAND (exp, 1);
3266 exp = TREE_OPERAND (exp, 0);
3267 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3268 if (TREE_CODE (and_mask) != INTEGER_CST)
3269 return 0;
3270 }
3271
3272 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3273 punsignedp, pvolatilep);
3274 if ((inner == exp && and_mask == 0)
3275 || *pbitsize < 0 || offset != 0
3276 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3277 return 0;
3278
3279 /* If the number of bits in the reference is the same as the bitsize of
3280 the outer type, then the outer type gives the signedness. Otherwise
3281 (in case of a small bitfield) the signedness is unchanged. */
3282 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3283 *punsignedp = TYPE_UNSIGNED (outer_type);
3284
3285 /* Compute the mask to access the bitfield. */
3286 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3287 precision = TYPE_PRECISION (unsigned_type);
3288
3289 mask = build_int_2 (~0, ~0);
3290 TREE_TYPE (mask) = unsigned_type;
3291 force_fit_type (mask, 0);
3292 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3293 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3294
3295 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3296 if (and_mask != 0)
3297 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3298 fold_convert (unsigned_type, and_mask), mask));
3299
3300 *pmask = mask;
3301 *pand_mask = and_mask;
3302 return inner;
3303 }
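
/* Illustrative example (not part of the original source): for
   EXP = x.f & 3 with an 8-bit field F, the function returns the
   containing object, sets *PBITSIZE to 8, sets *PAND_MASK to 3,
   and sets *PMASK to the 8-bit field mask merged with that
   constant, i.e. 3.  */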
3304
3305 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3306 bit positions. */
3307
3308 static int
3309 all_ones_mask_p (tree mask, int size)
3310 {
3311 tree type = TREE_TYPE (mask);
3312 unsigned int precision = TYPE_PRECISION (type);
3313 tree tmask;
3314
3315 tmask = build_int_2 (~0, ~0);
3316 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3317 force_fit_type (tmask, 0);
3318 return
3319 tree_int_cst_equal (mask,
3320 const_binop (RSHIFT_EXPR,
3321 const_binop (LSHIFT_EXPR, tmask,
3322 size_int (precision - size),
3323 0),
3324 size_int (precision - size), 0));
3325 }
3326
3327 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3328 represents the sign bit of EXP's type. If EXP represents a sign
3329 or zero extension, also test VAL against the unextended type.
3330 The return value is the (sub)expression whose sign bit is VAL,
3331 or NULL_TREE otherwise. */
3332
3333 static tree
3334 sign_bit_p (tree exp, tree val)
3335 {
3336 unsigned HOST_WIDE_INT mask_lo, lo;
3337 HOST_WIDE_INT mask_hi, hi;
3338 int width;
3339 tree t;
3340
3341 /* Tree EXP must have an integral type. */
3342 t = TREE_TYPE (exp);
3343 if (! INTEGRAL_TYPE_P (t))
3344 return NULL_TREE;
3345
3346 /* Tree VAL must be an integer constant. */
3347 if (TREE_CODE (val) != INTEGER_CST
3348 || TREE_CONSTANT_OVERFLOW (val))
3349 return NULL_TREE;
3350
3351 width = TYPE_PRECISION (t);
3352 if (width > HOST_BITS_PER_WIDE_INT)
3353 {
3354 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3355 lo = 0;
3356
3357 mask_hi = ((unsigned HOST_WIDE_INT) -1
3358 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3359 mask_lo = -1;
3360 }
3361 else
3362 {
3363 hi = 0;
3364 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3365
3366 mask_hi = 0;
3367 mask_lo = ((unsigned HOST_WIDE_INT) -1
3368 >> (HOST_BITS_PER_WIDE_INT - width));
3369 }
3370
3371 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3372 treat VAL as if it were unsigned. */
3373 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3374 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3375 return exp;
3376
3377 /* Handle extension from a narrower type. */
3378 if (TREE_CODE (exp) == NOP_EXPR
3379 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3380 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3381
3382 return NULL_TREE;
3383 }
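
/* Illustrative example (not part of the original source): for a
   32-bit signed EXP, VAL matches when its low 32 bits are 0x80000000;
   the masking above makes the test independent of whether VAL was
   sign- or zero-extended into its HOST_WIDE_INT representation.  */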
3384
3385 /* Subroutine for fold_truthop: determine if an operand is simple enough
3386 to be evaluated unconditionally. */
3387
3388 static int
3389 simple_operand_p (tree exp)
3390 {
3391 /* Strip any conversions that don't change the machine mode. */
3392 while ((TREE_CODE (exp) == NOP_EXPR
3393 || TREE_CODE (exp) == CONVERT_EXPR)
3394 && (TYPE_MODE (TREE_TYPE (exp))
3395 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3396 exp = TREE_OPERAND (exp, 0);
3397
3398 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3399 || (DECL_P (exp)
3400 && ! TREE_ADDRESSABLE (exp)
3401 && ! TREE_THIS_VOLATILE (exp)
3402 && ! DECL_NONLOCAL (exp)
3403 /* Don't regard global variables as simple. They may be
3404 allocated in ways unknown to the compiler (shared memory,
3405 #pragma weak, etc). */
3406 && ! TREE_PUBLIC (exp)
3407 && ! DECL_EXTERNAL (exp)
3408 /* Loading a static variable is unduly expensive, but global
3409 registers aren't expensive. */
3410 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3411 }
3412 \f
3413 /* The following functions are subroutines to fold_range_test and allow it to
3414 try to change a logical combination of comparisons into a range test.
3415
3416 For example, both
3417 X == 2 || X == 3 || X == 4 || X == 5
3418 and
3419 X >= 2 && X <= 5
3420 are converted to
3421 (unsigned) (X - 2) <= 3
3422
3423 We describe each set of comparisons as being either inside or outside
3424 a range, using a variable named like IN_P, and then describe the
3425 range with a lower and upper bound. If one of the bounds is omitted,
3426 it represents either the highest or lowest value of the type.
3427
3428 In the comments below, we represent a range by two numbers in brackets
3429 preceded by a "+" to designate being inside that range, or a "-" to
3430 designate being outside that range, so the condition can be inverted by
3431 flipping the prefix. An omitted bound is represented by a "-". For
3432 example, "- [-, 10]" means being outside the range starting at the lowest
3433 possible value and ending at 10, in other words, being greater than 10.
3434 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3435 always false.
3436
3437 We set things up so that the missing bounds are handled in a consistent
3438 manner, so that neither a missing bound nor "true" and "false" needs to
3439 be handled as a special case. */
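
/* Worked example (illustrative, not from the original source): the
   transformation quoted above is justified because X >= 2 && X <= 5
   says X is in + [2, 5]; subtracting the low bound gives X - 2 in
   + [0, 3], and since unsigned subtraction wraps any X < 2 around to
   a large value, the single test (unsigned) (X - 2) <= 3 covers the
   whole range.  */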
3440
3441 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3442 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3443 and UPPER1_P are nonzero if the respective argument is an upper bound
3444 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3445 must be specified for a comparison. ARG1 will be converted to ARG0's
3446 type if both are specified. */
3447
3448 static tree
3449 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3450 tree arg1, int upper1_p)
3451 {
3452 tree tem;
3453 int result;
3454 int sgn0, sgn1;
3455
3456 /* If neither arg represents infinity, do the normal operation.
3457 Else, if not a comparison, return infinity. Else handle the special
3458 comparison rules. Note that most of the cases below won't occur, but
3459 are handled for consistency. */
3460
3461 if (arg0 != 0 && arg1 != 0)
3462 {
3463 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3464 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3465 STRIP_NOPS (tem);
3466 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3467 }
3468
3469 if (TREE_CODE_CLASS (code) != '<')
3470 return 0;
3471
3472 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3473 for neither. In real maths, we cannot assume open-ended ranges are
3474 the same. But this is computer arithmetic, where numbers are finite.
3475 We can therefore substitute for an omitted bound a value Z beyond
3476 every representable number (+Z for an upper bound, -Z for a lower
3477 one). This permits us to treat unbounded ranges as equal. */
3478 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3479 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3480 switch (code)
3481 {
3482 case EQ_EXPR:
3483 result = sgn0 == sgn1;
3484 break;
3485 case NE_EXPR:
3486 result = sgn0 != sgn1;
3487 break;
3488 case LT_EXPR:
3489 result = sgn0 < sgn1;
3490 break;
3491 case LE_EXPR:
3492 result = sgn0 <= sgn1;
3493 break;
3494 case GT_EXPR:
3495 result = sgn0 > sgn1;
3496 break;
3497 case GE_EXPR:
3498 result = sgn0 >= sgn1;
3499 break;
3500 default:
3501 abort ();
3502 }
3503
3504 return constant_boolean_node (result, type);
3505 }
3506 \f
3507 /* Given EXP, a logical expression, set the range it is testing into
3508 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3509 actually being tested. *PLOW and *PHIGH will be made of the same type
3510 as the returned expression. If EXP is not a comparison, we will most
3511 likely not be returning a useful value and range. */
3512
3513 static tree
3514 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3515 {
3516 enum tree_code code;
3517 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3518 tree orig_type = NULL_TREE;
3519 int in_p, n_in_p;
3520 tree low, high, n_low, n_high;
3521
3522 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3523 and see if we can refine the range. Some of the cases below may not
3524 happen, but it doesn't seem worth worrying about this. We "continue"
3525 the outer loop when we've changed something; otherwise we "break"
3526 the switch, which will "break" the while. */
3527
3528 in_p = 0;
3529 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3530
3531 while (1)
3532 {
3533 code = TREE_CODE (exp);
3534
3535 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3536 {
3537 if (first_rtl_op (code) > 0)
3538 arg0 = TREE_OPERAND (exp, 0);
3539 if (TREE_CODE_CLASS (code) == '<'
3540 || TREE_CODE_CLASS (code) == '1'
3541 || TREE_CODE_CLASS (code) == '2')
3542 type = TREE_TYPE (arg0);
3543 if (TREE_CODE_CLASS (code) == '2'
3544 || TREE_CODE_CLASS (code) == '<'
3545 || (TREE_CODE_CLASS (code) == 'e'
3546 && TREE_CODE_LENGTH (code) > 1))
3547 arg1 = TREE_OPERAND (exp, 1);
3548 }
3549
3550 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3551 lose a cast by accident. */
3552 if (type != NULL_TREE && orig_type == NULL_TREE)
3553 orig_type = type;
3554
3555 switch (code)
3556 {
3557 case TRUTH_NOT_EXPR:
3558 in_p = ! in_p, exp = arg0;
3559 continue;
3560
3561 case EQ_EXPR: case NE_EXPR:
3562 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3563 /* We can only do something if the range is testing for zero
3564 and if the second operand is an integer constant. Note that
3565 saying something is "in" the range we make is done by
3566 complementing IN_P since it will set in the initial case of
3567 being not equal to zero; "out" is leaving it alone. */
3568 if (low == 0 || high == 0
3569 || ! integer_zerop (low) || ! integer_zerop (high)
3570 || TREE_CODE (arg1) != INTEGER_CST)
3571 break;
3572
3573 switch (code)
3574 {
3575 case NE_EXPR: /* - [c, c] */
3576 low = high = arg1;
3577 break;
3578 case EQ_EXPR: /* + [c, c] */
3579 in_p = ! in_p, low = high = arg1;
3580 break;
3581 case GT_EXPR: /* - [-, c] */
3582 low = 0, high = arg1;
3583 break;
3584 case GE_EXPR: /* + [c, -] */
3585 in_p = ! in_p, low = arg1, high = 0;
3586 break;
3587 case LT_EXPR: /* - [c, -] */
3588 low = arg1, high = 0;
3589 break;
3590 case LE_EXPR: /* + [-, c] */
3591 in_p = ! in_p, low = 0, high = arg1;
3592 break;
3593 default:
3594 abort ();
3595 }
3596
3597 exp = arg0;
3598
3599 /* If this is an unsigned comparison, we also know that EXP is
3600 greater than or equal to zero. We base the range tests we make
3601 on that fact, so we record it here so we can parse existing
3602 range tests. */
3603 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3604 {
3605 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3606 1, fold_convert (type, integer_zero_node),
3607 NULL_TREE))
3608 break;
3609
3610 in_p = n_in_p, low = n_low, high = n_high;
3611
3612 /* If the high bound is missing, but we have a nonzero low
3613 bound, reverse the range so it goes from zero to the low bound
3614 minus 1. */
3615 if (high == 0 && low && ! integer_zerop (low))
3616 {
3617 in_p = ! in_p;
3618 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3619 integer_one_node, 0);
3620 low = fold_convert (type, integer_zero_node);
3621 }
3622 }
3623 continue;
3624
3625 case NEGATE_EXPR:
3626 /* (-x) IN [a,b] -> x in [-b, -a] */
3627 n_low = range_binop (MINUS_EXPR, type,
3628 fold_convert (type, integer_zero_node),
3629 0, high, 1);
3630 n_high = range_binop (MINUS_EXPR, type,
3631 fold_convert (type, integer_zero_node),
3632 0, low, 0);
3633 low = n_low, high = n_high;
3634 exp = arg0;
3635 continue;
3636
3637 case BIT_NOT_EXPR:
3638 /* ~ X -> -X - 1 */
3639 exp = build2 (MINUS_EXPR, type, negate_expr (arg0),
3640 fold_convert (type, integer_one_node));
3641 continue;
3642
3643 case PLUS_EXPR: case MINUS_EXPR:
3644 if (TREE_CODE (arg1) != INTEGER_CST)
3645 break;
3646
3647 /* If EXP is signed, any overflow in the computation is undefined,
3648 so we don't worry about it so long as our computations on
3649 the bounds don't overflow. For unsigned, overflow is defined
3650 and this is exactly the right thing. */
3651 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3652 type, low, 0, arg1, 0);
3653 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3654 type, high, 1, arg1, 0);
3655 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3656 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3657 break;
3658
3659 /* Check for an unsigned range which has wrapped around the maximum
3660 value thus making n_high < n_low, and normalize it. */
3661 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3662 {
3663 low = range_binop (PLUS_EXPR, type, n_high, 0,
3664 integer_one_node, 0);
3665 high = range_binop (MINUS_EXPR, type, n_low, 0,
3666 integer_one_node, 0);
3667
3668 /* If the range is of the form +/- [ x+1, x ], we won't
3669 be able to normalize it. But then, it represents the
3670 whole range or the empty set, so make it
3671 +/- [ -, - ]. */
3672 if (tree_int_cst_equal (n_low, low)
3673 && tree_int_cst_equal (n_high, high))
3674 low = high = 0;
3675 else
3676 in_p = ! in_p;
3677 }
3678 else
3679 low = n_low, high = n_high;
3680
3681 exp = arg0;
3682 continue;
3683
3684 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3685 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3686 break;
3687
3688 if (! INTEGRAL_TYPE_P (type)
3689 || (low != 0 && ! int_fits_type_p (low, type))
3690 || (high != 0 && ! int_fits_type_p (high, type)))
3691 break;
3692
3693 n_low = low, n_high = high;
3694
3695 if (n_low != 0)
3696 n_low = fold_convert (type, n_low);
3697
3698 if (n_high != 0)
3699 n_high = fold_convert (type, n_high);
3700
3701 /* If we're converting from an unsigned to a signed type,
3702 we will be doing the comparison as unsigned. The tests above
3703 have already verified that LOW and HIGH are both positive.
3704
3705 So we have to make sure that the original unsigned value will
3706 be interpreted as positive. */
3707 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3708 {
3709 tree equiv_type = lang_hooks.types.type_for_mode
3710 (TYPE_MODE (type), 1);
3711 tree high_positive;
3712
3713 /* A range without an upper bound is, naturally, unbounded.
3714 Since convert would have cropped a very large value, use
3715 the max value for the destination type. */
3716 high_positive
3717 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3718 : TYPE_MAX_VALUE (type);
3719
3720 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3721 high_positive = fold (build2 (RSHIFT_EXPR, type,
3722 fold_convert (type,
3723 high_positive),
3724 fold_convert (type,
3725 integer_one_node)));
3726
3727 /* If the low bound is specified, "and" the range with the
3728 range for which the original unsigned value will be
3729 positive. */
3730 if (low != 0)
3731 {
3732 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3733 1, n_low, n_high, 1,
3734 fold_convert (type, integer_zero_node),
3735 high_positive))
3736 break;
3737
3738 in_p = (n_in_p == in_p);
3739 }
3740 else
3741 {
3742 /* Otherwise, "or" the range with the range of the input
3743 that will be interpreted as negative. */
3744 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3745 0, n_low, n_high, 1,
3746 fold_convert (type, integer_zero_node),
3747 high_positive))
3748 break;
3749
3750 in_p = (in_p != n_in_p);
3751 }
3752 }
3753
3754 exp = arg0;
3755 low = n_low, high = n_high;
3756 continue;
3757
3758 default:
3759 break;
3760 }
3761
3762 break;
3763 }
3764
3765 /* If EXP is a constant, we can evaluate whether this is true or false. */
3766 if (TREE_CODE (exp) == INTEGER_CST)
3767 {
3768 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3769 exp, 0, low, 0))
3770 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3771 exp, 1, high, 1)));
3772 low = high = 0;
3773 exp = 0;
3774 }
3775
3776 *pin_p = in_p, *plow = low, *phigh = high;
3777 return exp;
3778 }
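
/* Editorial example (not part of the original source): given
     EXP = (unsigned int) x - 5 <= 10
   the LE_EXPR case above records the range +[0, 10] for
   "(unsigned int) x - 5", and the MINUS_EXPR case then shifts both
   bounds by 5. The function returns "(unsigned int) x" with
   *PIN_P = 1, *PLOW = 5 and *PHIGH = 15, i.e. "x is in [5, 15]".  */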
3779 \f
3780 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3781 type, TYPE, return an expression to test if EXP is in (or out of, depending
3782 on IN_P) the range. */
3783
3784 static tree
3785 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3786 {
3787 tree etype = TREE_TYPE (exp);
3788 tree value;
3789
3790 if (! in_p
3791 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3792 return invert_truthvalue (value);
3793
3794 if (low == 0 && high == 0)
3795 return fold_convert (type, integer_one_node);
3796
3797 if (low == 0)
3798 return fold (build2 (LE_EXPR, type, exp, high));
3799
3800 if (high == 0)
3801 return fold (build2 (GE_EXPR, type, exp, low));
3802
3803 if (operand_equal_p (low, high, 0))
3804 return fold (build2 (EQ_EXPR, type, exp, low));
3805
3806 if (integer_zerop (low))
3807 {
3808 if (! TYPE_UNSIGNED (etype))
3809 {
3810 etype = lang_hooks.types.unsigned_type (etype);
3811 high = fold_convert (etype, high);
3812 exp = fold_convert (etype, exp);
3813 }
3814 return build_range_check (type, exp, 1, 0, high);
3815 }
3816
3817 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3818 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3819 {
3820 unsigned HOST_WIDE_INT lo;
3821 HOST_WIDE_INT hi;
3822 int prec;
3823
3824 prec = TYPE_PRECISION (etype);
3825 if (prec <= HOST_BITS_PER_WIDE_INT)
3826 {
3827 hi = 0;
3828 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3829 }
3830 else
3831 {
3832 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3833 lo = (unsigned HOST_WIDE_INT) -1;
3834 }
3835
3836 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3837 {
3838 if (TYPE_UNSIGNED (etype))
3839 {
3840 etype = lang_hooks.types.signed_type (etype);
3841 exp = fold_convert (etype, exp);
3842 }
3843 return fold (build2 (GT_EXPR, type, exp,
3844 fold_convert (etype, integer_zero_node)));
3845 }
3846 }
3847
3848 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3849 && ! TREE_OVERFLOW (value))
3850 return build_range_check (type,
3851 fold (build2 (MINUS_EXPR, etype, exp, low)),
3852 1, fold_convert (etype, integer_zero_node),
3853 value);
3854
3855 return 0;
3856 }
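
/* Editorial sketch (not part of the original source): the final
   recursion above is the classic single-comparison range check.
   Assuming a 32-bit int, a source-level test such as

     c >= '0' && c <= '9'

   is in effect rewritten as

     (unsigned int) (c - '0') <= 9U

   trading two comparisons and a branch for one subtract and one
   unsigned compare.  */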
3857 \f
3858 /* Given two ranges, see if we can merge them into one. Return 1 if we
3859 can, 0 if we can't. Set the output range into the specified parameters. */
3860
3861 static int
3862 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3863 tree high0, int in1_p, tree low1, tree high1)
3864 {
3865 int no_overlap;
3866 int subset;
3867 int temp;
3868 tree tem;
3869 int in_p;
3870 tree low, high;
3871 int lowequal = ((low0 == 0 && low1 == 0)
3872 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3873 low0, 0, low1, 0)));
3874 int highequal = ((high0 == 0 && high1 == 0)
3875 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3876 high0, 1, high1, 1)));
3877
3878 /* Make range 0 be the range that starts first, or ends last if they
3879 start at the same value. Swap them if this is not already so. */
3880 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3881 low0, 0, low1, 0))
3882 || (lowequal
3883 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3884 high1, 1, high0, 1))))
3885 {
3886 temp = in0_p, in0_p = in1_p, in1_p = temp;
3887 tem = low0, low0 = low1, low1 = tem;
3888 tem = high0, high0 = high1, high1 = tem;
3889 }
3890
3891 /* Now flag two cases, whether the ranges are disjoint or whether the
3892 second range is totally subsumed in the first. Note that the tests
3893 below are simplified by the ones above. */
3894 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3895 high0, 1, low1, 0));
3896 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3897 high1, 1, high0, 1));
3898
3899 /* We now have four cases, depending on whether we are including or
3900 excluding the two ranges. */
3901 if (in0_p && in1_p)
3902 {
3903 /* If they don't overlap, the result is false. If the second range
3904 is a subset it is the result. Otherwise, the range is from the start
3905 of the second to the end of the first. */
3906 if (no_overlap)
3907 in_p = 0, low = high = 0;
3908 else if (subset)
3909 in_p = 1, low = low1, high = high1;
3910 else
3911 in_p = 1, low = low1, high = high0;
3912 }
3913
3914 else if (in0_p && ! in1_p)
3915 {
3916 /* If they don't overlap, the result is the first range. If they are
3917 equal, the result is false. If the second range is a subset of the
3918 first, and the ranges begin at the same place, we go from just after
3919 the end of the first range to the end of the second. If the second
3920 range is not a subset of the first, or if it is a subset and both
3921 ranges end at the same place, the range starts at the start of the
3922 first range and ends just before the second range.
3923 Otherwise, we can't describe this as a single range. */
3924 if (no_overlap)
3925 in_p = 1, low = low0, high = high0;
3926 else if (lowequal && highequal)
3927 in_p = 0, low = high = 0;
3928 else if (subset && lowequal)
3929 {
3930 in_p = 1, high = high0;
3931 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3932 integer_one_node, 0);
3933 }
3934 else if (! subset || highequal)
3935 {
3936 in_p = 1, low = low0;
3937 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3938 integer_one_node, 0);
3939 }
3940 else
3941 return 0;
3942 }
3943
3944 else if (! in0_p && in1_p)
3945 {
3946 /* If they don't overlap, the result is the second range. If the second
3947 is a subset of the first, the result is false. Otherwise,
3948 the range starts just after the first range and ends at the
3949 end of the second. */
3950 if (no_overlap)
3951 in_p = 1, low = low1, high = high1;
3952 else if (subset || highequal)
3953 in_p = 0, low = high = 0;
3954 else
3955 {
3956 in_p = 1, high = high1;
3957 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3958 integer_one_node, 0);
3959 }
3960 }
3961
3962 else
3963 {
3964 /* The case where we are excluding both ranges. Here the complex case
3965 is if they don't overlap. In that case, the only time we have a
3966 range is if they are adjacent. If the second is a subset of the
3967 first, the result is the first. Otherwise, the range to exclude
3968 starts at the beginning of the first range and ends at the end of the
3969 second. */
3970 if (no_overlap)
3971 {
3972 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3973 range_binop (PLUS_EXPR, NULL_TREE,
3974 high0, 1,
3975 integer_one_node, 1),
3976 1, low1, 0)))
3977 in_p = 0, low = low0, high = high1;
3978 else
3979 return 0;
3980 }
3981 else if (subset)
3982 in_p = 0, low = low0, high = high0;
3983 else
3984 in_p = 0, low = low0, high = high1;
3985 }
3986
3987 *pin_p = in_p, *plow = low, *phigh = high;
3988 return 1;
3989 }
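
/* Editorial example (not part of the original source): for
   "x >= 5 && x <= 9" the two operands give the ranges +[5, -] and
   +[-, 9]. The initial swap makes range 0 the one that starts first,
   +[-, 9]; the ranges overlap and neither is a subset of the other,
   so the in0_p && in1_p case merges them into +[5, 9].  */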
3990 \f
3991 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3992 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3993 #endif
3994
3995 /* EXP is some logical combination of boolean tests. See if we can
3996 merge it into some range test. Return the new tree if so. */
3997
3998 static tree
3999 fold_range_test (tree exp)
4000 {
4001 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4002 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4003 int in0_p, in1_p, in_p;
4004 tree low0, low1, low, high0, high1, high;
4005 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4006 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4007 tree tem;
4008
4009 /* If this is an OR operation, invert both sides; we will invert
4010 again at the end. */
4011 if (or_op)
4012 in0_p = ! in0_p, in1_p = ! in1_p;
4013
4014 /* If both expressions are the same, if we can merge the ranges, and we
4015 can build the range test, return it or it inverted. If one of the
4016 ranges is always true or always false, consider it to be the same
4017 expression as the other. */
4018 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4019 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4020 in1_p, low1, high1)
4021 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4022 lhs != 0 ? lhs
4023 : rhs != 0 ? rhs : integer_zero_node,
4024 in_p, low, high))))
4025 return or_op ? invert_truthvalue (tem) : tem;
4026
4027 /* On machines where branches are expensive, if this is a
4028 short-circuited branch and the underlying object on both sides
4029 is the same, make a non-short-circuit operation. */
4030 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4031 && lhs != 0 && rhs != 0
4032 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4033 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4034 && operand_equal_p (lhs, rhs, 0))
4035 {
4036 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4037 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4038 which cases we can't do this. */
4039 if (simple_operand_p (lhs))
4040 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4041 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4042 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4043 TREE_OPERAND (exp, 1));
4044
4045 else if (lang_hooks.decls.global_bindings_p () == 0
4046 && ! CONTAINS_PLACEHOLDER_P (lhs))
4047 {
4048 tree common = save_expr (lhs);
4049
4050 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4051 or_op ? ! in0_p : in0_p,
4052 low0, high0))
4053 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4054 or_op ? ! in1_p : in1_p,
4055 low1, high1))))
4056 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4057 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4058 TREE_TYPE (exp), lhs, rhs);
4059 }
4060 }
4061
4062 return 0;
4063 }
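
/* Editorial sketch (not part of the original source): given
   "x == 0 || x == 1", the OR inverts both ranges to -[0, 0] and
   -[1, 1]; being adjacent, they merge to -[0, 1], and inverting back
   yields the single test built by build_range_check, effectively
   "(unsigned) x <= 1".  */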
4064 \f
4065 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4066 bit value. Arrange things so the extra bits will be set to zero if and
4067 only if C is sign-extended to its full width. If MASK is nonzero,
4068 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4069
4070 static tree
4071 unextend (tree c, int p, int unsignedp, tree mask)
4072 {
4073 tree type = TREE_TYPE (c);
4074 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4075 tree temp;
4076
4077 if (p == modesize || unsignedp)
4078 return c;
4079
4080 /* We work by getting just the sign bit into the low-order bit, then
4081 into the high-order bit, then sign-extend. We then XOR that value
4082 with C. */
4083 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4084 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4085
4086 /* We must use a signed type in order to get an arithmetic right shift.
4087 However, we must also avoid introducing accidental overflows, so that
4088 a subsequent call to integer_zerop will work. Hence we must
4089 do the type conversion here. At this point, the constant is either
4090 zero or one, and the conversion to a signed type can never overflow.
4091 We could get an overflow if this conversion is done anywhere else. */
4092 if (TYPE_UNSIGNED (type))
4093 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4094
4095 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4096 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4097 if (mask != 0)
4098 temp = const_binop (BIT_AND_EXPR, temp,
4099 fold_convert (TREE_TYPE (c), mask), 0);
4100 /* If necessary, convert the type back to match the type of C. */
4101 if (TYPE_UNSIGNED (type))
4102 temp = fold_convert (type, temp);
4103
4104 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4105 }
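
/* Editorial trace (not part of the original source), assuming a
   32-bit mode and P = 8:
     C = 0xffffff80 (-128, properly sign-extended): TEMP computes to
     0xffffff00 and C ^ TEMP = 0x00000080 -- extra bits all zero.
     C = 0x00000080 (not sign-extended): TEMP is the same, and
     C ^ TEMP = 0xffffff80 -- extra bits all ones.  */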
4106 \f
4107 /* Find ways of folding logical expressions of LHS and RHS:
4108 Try to merge two comparisons to the same innermost item.
4109 Look for range tests like "ch >= '0' && ch <= '9'".
4110 Look for combinations of simple terms on machines with expensive branches
4111 and evaluate the RHS unconditionally.
4112
4113 For example, if we have p->a == 2 && p->b == 4 and we can make an
4114 object large enough to span both A and B, we can do this with a comparison
4115 against the object ANDed with a mask.
4116
4117 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4118 operations to do this with one comparison.
4119
4120 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4121 function and the one above.
4122
4123 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4124 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4125
4126 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
4127 two operands.
4128
4129 We return the simplified tree or 0 if no optimization is possible. */
4130
4131 static tree
4132 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4133 {
4134 /* If this is the "or" of two comparisons, we can do something if
4135 the comparisons are NE_EXPR. If this is the "and", we can do something
4136 if the comparisons are EQ_EXPR. I.e.,
4137 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4138
4139 WANTED_CODE is this operation code. For single bit fields, we can
4140 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4141 comparison for one-bit fields. */
4142
4143 enum tree_code wanted_code;
4144 enum tree_code lcode, rcode;
4145 tree ll_arg, lr_arg, rl_arg, rr_arg;
4146 tree ll_inner, lr_inner, rl_inner, rr_inner;
4147 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4148 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4149 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4150 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4151 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4152 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4153 enum machine_mode lnmode, rnmode;
4154 tree ll_mask, lr_mask, rl_mask, rr_mask;
4155 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4156 tree l_const, r_const;
4157 tree lntype, rntype, result;
4158 int first_bit, end_bit;
4159 int volatilep;
4160
4161 /* Start by getting the comparison codes. Fail if anything is volatile.
4162 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4163 it were surrounded with a NE_EXPR. */
4164
4165 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4166 return 0;
4167
4168 lcode = TREE_CODE (lhs);
4169 rcode = TREE_CODE (rhs);
4170
4171 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4172 {
4173 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4174 lcode = NE_EXPR;
4175 }
4176
4177 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4178 {
4179 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4180 rcode = NE_EXPR;
4181 }
4182
4183 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4184 return 0;
4185
4186 ll_arg = TREE_OPERAND (lhs, 0);
4187 lr_arg = TREE_OPERAND (lhs, 1);
4188 rl_arg = TREE_OPERAND (rhs, 0);
4189 rr_arg = TREE_OPERAND (rhs, 1);
4190
4191 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4192 if (simple_operand_p (ll_arg)
4193 && simple_operand_p (lr_arg))
4194 {
4195 tree result;
4196 if (operand_equal_p (ll_arg, rl_arg, 0)
4197 && operand_equal_p (lr_arg, rr_arg, 0))
4198 {
4199 result = combine_comparisons (code, lcode, rcode,
4200 truth_type, ll_arg, lr_arg);
4201 if (result)
4202 return result;
4203 }
4204 else if (operand_equal_p (ll_arg, rr_arg, 0)
4205 && operand_equal_p (lr_arg, rl_arg, 0))
4206 {
4207 result = combine_comparisons (code, lcode,
4208 swap_tree_comparison (rcode),
4209 truth_type, ll_arg, lr_arg);
4210 if (result)
4211 return result;
4212 }
4213 }
4214
4215 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4216 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4217
4218 /* If the RHS can be evaluated unconditionally and its operands are
4219 simple, it wins to evaluate the RHS unconditionally on machines
4220 with expensive branches. In this case, this isn't a comparison
4221 that can be merged. Avoid doing this if the RHS is a floating-point
4222 comparison since those can trap. */
4223
4224 if (BRANCH_COST >= 2
4225 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4226 && simple_operand_p (rl_arg)
4227 && simple_operand_p (rr_arg))
4228 {
4229 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4230 if (code == TRUTH_OR_EXPR
4231 && lcode == NE_EXPR && integer_zerop (lr_arg)
4232 && rcode == NE_EXPR && integer_zerop (rr_arg)
4233 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4234 return build2 (NE_EXPR, truth_type,
4235 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4236 ll_arg, rl_arg),
4237 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4238
4239 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4240 if (code == TRUTH_AND_EXPR
4241 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4242 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4243 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4244 return build2 (EQ_EXPR, truth_type,
4245 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4246 ll_arg, rl_arg),
4247 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4248
4249 return build2 (code, truth_type, lhs, rhs);
4250 }
4251
4252 /* See if the comparisons can be merged. Then get all the parameters for
4253 each side. */
4254
4255 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4256 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4257 return 0;
4258
4259 volatilep = 0;
4260 ll_inner = decode_field_reference (ll_arg,
4261 &ll_bitsize, &ll_bitpos, &ll_mode,
4262 &ll_unsignedp, &volatilep, &ll_mask,
4263 &ll_and_mask);
4264 lr_inner = decode_field_reference (lr_arg,
4265 &lr_bitsize, &lr_bitpos, &lr_mode,
4266 &lr_unsignedp, &volatilep, &lr_mask,
4267 &lr_and_mask);
4268 rl_inner = decode_field_reference (rl_arg,
4269 &rl_bitsize, &rl_bitpos, &rl_mode,
4270 &rl_unsignedp, &volatilep, &rl_mask,
4271 &rl_and_mask);
4272 rr_inner = decode_field_reference (rr_arg,
4273 &rr_bitsize, &rr_bitpos, &rr_mode,
4274 &rr_unsignedp, &volatilep, &rr_mask,
4275 &rr_and_mask);
4276
4277 /* The inner operation on the lhs of each comparison must be the
4278 same if we are to be able to do anything. Then see if we have
4279 constants. If not, the same must be true for
4280 the rhs's. */
4281 if (volatilep || ll_inner == 0 || rl_inner == 0
4282 || ! operand_equal_p (ll_inner, rl_inner, 0))
4283 return 0;
4284
4285 if (TREE_CODE (lr_arg) == INTEGER_CST
4286 && TREE_CODE (rr_arg) == INTEGER_CST)
4287 l_const = lr_arg, r_const = rr_arg;
4288 else if (lr_inner == 0 || rr_inner == 0
4289 || ! operand_equal_p (lr_inner, rr_inner, 0))
4290 return 0;
4291 else
4292 l_const = r_const = 0;
4293
4294 /* If either comparison code is not correct for our logical operation,
4295 fail. However, we can convert a one-bit comparison against zero into
4296 the opposite comparison against that bit being set in the field. */
4297
4298 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4299 if (lcode != wanted_code)
4300 {
4301 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4302 {
4303 /* Make the left operand unsigned, since we are only interested
4304 in the value of one bit. Otherwise we are doing the wrong
4305 thing below. */
4306 ll_unsignedp = 1;
4307 l_const = ll_mask;
4308 }
4309 else
4310 return 0;
4311 }
4312
4313 /* This is analogous to the code for l_const above. */
4314 if (rcode != wanted_code)
4315 {
4316 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4317 {
4318 rl_unsignedp = 1;
4319 r_const = rl_mask;
4320 }
4321 else
4322 return 0;
4323 }
4324
4325 /* After this point all optimizations will generate bit-field
4326 references, which we might not want. */
4327 if (! lang_hooks.can_use_bit_fields_p ())
4328 return 0;
4329
4330 /* See if we can find a mode that contains both fields being compared on
4331 the left. If we can't, fail. Otherwise, update all constants and masks
4332 to be relative to a field of that size. */
4333 first_bit = MIN (ll_bitpos, rl_bitpos);
4334 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4335 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4336 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4337 volatilep);
4338 if (lnmode == VOIDmode)
4339 return 0;
4340
4341 lnbitsize = GET_MODE_BITSIZE (lnmode);
4342 lnbitpos = first_bit & ~ (lnbitsize - 1);
4343 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4344 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4345
4346 if (BYTES_BIG_ENDIAN)
4347 {
4348 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4349 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4350 }
4351
4352 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4353 size_int (xll_bitpos), 0);
4354 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4355 size_int (xrl_bitpos), 0);
4356
4357 if (l_const)
4358 {
4359 l_const = fold_convert (lntype, l_const);
4360 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4361 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4362 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4363 fold (build1 (BIT_NOT_EXPR,
4364 lntype, ll_mask)),
4365 0)))
4366 {
4367 warning ("comparison is always %d", wanted_code == NE_EXPR);
4368
4369 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4370 }
4371 }
4372 if (r_const)
4373 {
4374 r_const = fold_convert (lntype, r_const);
4375 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4376 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4377 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4378 fold (build1 (BIT_NOT_EXPR,
4379 lntype, rl_mask)),
4380 0)))
4381 {
4382 warning ("comparison is always %d", wanted_code == NE_EXPR);
4383
4384 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4385 }
4386 }
4387
4388 /* If the right sides are not constant, do the same for them. Also,
4389 disallow this optimization if a size or signedness mismatch occurs
4390 between the left and right sides. */
4391 if (l_const == 0)
4392 {
4393 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4394 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4395 /* Make sure the two fields on the right
4396 correspond to the left without being swapped. */
4397 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4398 return 0;
4399
4400 first_bit = MIN (lr_bitpos, rr_bitpos);
4401 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4402 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4403 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4404 volatilep);
4405 if (rnmode == VOIDmode)
4406 return 0;
4407
4408 rnbitsize = GET_MODE_BITSIZE (rnmode);
4409 rnbitpos = first_bit & ~ (rnbitsize - 1);
4410 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4411 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4412
4413 if (BYTES_BIG_ENDIAN)
4414 {
4415 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4416 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4417 }
4418
4419 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4420 size_int (xlr_bitpos), 0);
4421 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4422 size_int (xrr_bitpos), 0);
4423
4424 /* Make a mask that corresponds to both fields being compared.
4425 Do this for both items being compared. If the operands are the
4426 same size and the bits being compared are in the same position
4427 then we can do this by masking both and comparing the masked
4428 results. */
4429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4430 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4431 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4432 {
4433 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4434 ll_unsignedp || rl_unsignedp);
4435 if (! all_ones_mask_p (ll_mask, lnbitsize))
4436 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4437
4438 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4439 lr_unsignedp || rr_unsignedp);
4440 if (! all_ones_mask_p (lr_mask, rnbitsize))
4441 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4442
4443 return build2 (wanted_code, truth_type, lhs, rhs);
4444 }
4445
4446 /* There is still another way we can do something: If both pairs of
4447 fields being compared are adjacent, we may be able to make a wider
4448 field containing them both.
4449
4450 Note that we still must mask the lhs/rhs expressions. Furthermore,
4451 the mask must be shifted to account for the shift done by
4452 make_bit_field_ref. */
4453 if ((ll_bitsize + ll_bitpos == rl_bitpos
4454 && lr_bitsize + lr_bitpos == rr_bitpos)
4455 || (ll_bitpos == rl_bitpos + rl_bitsize
4456 && lr_bitpos == rr_bitpos + rr_bitsize))
4457 {
4458 tree type;
4459
4460 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4461 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4462 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4463 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4464
4465 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4466 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4467 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4468 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4469
4470 /* Convert to the smaller type before masking out unwanted bits. */
4471 type = lntype;
4472 if (lntype != rntype)
4473 {
4474 if (lnbitsize > rnbitsize)
4475 {
4476 lhs = fold_convert (rntype, lhs);
4477 ll_mask = fold_convert (rntype, ll_mask);
4478 type = rntype;
4479 }
4480 else if (lnbitsize < rnbitsize)
4481 {
4482 rhs = fold_convert (lntype, rhs);
4483 lr_mask = fold_convert (lntype, lr_mask);
4484 type = lntype;
4485 }
4486 }
4487
4488 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4489 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4490
4491 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4492 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4493
4494 return build2 (wanted_code, truth_type, lhs, rhs);
4495 }
4496
4497 return 0;
4498 }
4499
4500 /* Handle the case of comparisons with constants. If there is something in
4501 common between the masks, those bits of the constants must be the same.
4502 If not, the condition is always false. Test for this to avoid generating
4503 incorrect code below. */
4504 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4505 if (! integer_zerop (result)
4506 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4507 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4508 {
4509 if (wanted_code == NE_EXPR)
4510 {
4511 warning ("`or' of unmatched not-equal tests is always 1");
4512 return constant_boolean_node (true, truth_type);
4513 }
4514 else
4515 {
4516 warning ("`and' of mutually exclusive equal-tests is always 0");
4517 return constant_boolean_node (false, truth_type);
4518 }
4519 }
4520
4521 /* Construct the expression we will return. First get the component
4522 reference we will make. Unless the mask is all ones the width of
4523 that field, perform the mask operation. Then compare with the
4524 merged constant. */
4525 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4526 ll_unsignedp || rl_unsignedp);
4527
4528 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4529 if (! all_ones_mask_p (ll_mask, lnbitsize))
4530 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4531
4532 return build2 (wanted_code, truth_type, result,
4533 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4534 }
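
/* Editorial sketch (not part of the original source): the shape of
   the result, for a hypothetical little-endian layout

     struct s { unsigned char a, b; } *p;
     ... p->a == 2 && p->b == 4 ...

   is a single 16-bit load, mask and compare, roughly

     (*(unsigned short *) p & 0xffff) == 0x0402

   with the two constants merged by the BIT_IOR_EXPR at the end.  */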
4535 \f
4536 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4537 constant. */
4538
4539 static tree
4540 optimize_minmax_comparison (tree t)
4541 {
4542 tree type = TREE_TYPE (t);
4543 tree arg0 = TREE_OPERAND (t, 0);
4544 enum tree_code op_code;
4545 tree comp_const = TREE_OPERAND (t, 1);
4546 tree minmax_const;
4547 int consts_equal, consts_lt;
4548 tree inner;
4549
4550 STRIP_SIGN_NOPS (arg0);
4551
4552 op_code = TREE_CODE (arg0);
4553 minmax_const = TREE_OPERAND (arg0, 1);
4554 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4555 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4556 inner = TREE_OPERAND (arg0, 0);
4557
4558 /* If something does not permit us to optimize, return the original tree. */
4559 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4560 || TREE_CODE (comp_const) != INTEGER_CST
4561 || TREE_CONSTANT_OVERFLOW (comp_const)
4562 || TREE_CODE (minmax_const) != INTEGER_CST
4563 || TREE_CONSTANT_OVERFLOW (minmax_const))
4564 return t;
4565
4566 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4567 and GT_EXPR, doing the rest with recursive calls using logical
4568 simplifications. */
4569 switch (TREE_CODE (t))
4570 {
4571 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4572 return
4573 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4574
4575 case GE_EXPR:
4576 return
4577 fold (build2 (TRUTH_ORIF_EXPR, type,
4578 optimize_minmax_comparison
4579 (build2 (EQ_EXPR, type, arg0, comp_const)),
4580 optimize_minmax_comparison
4581 (build2 (GT_EXPR, type, arg0, comp_const))));
4582
4583 case EQ_EXPR:
4584 if (op_code == MAX_EXPR && consts_equal)
4585 /* MAX (X, 0) == 0 -> X <= 0 */
4586 return fold (build2 (LE_EXPR, type, inner, comp_const));
4587
4588 else if (op_code == MAX_EXPR && consts_lt)
4589 /* MAX (X, 0) == 5 -> X == 5 */
4590 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4591
4592 else if (op_code == MAX_EXPR)
4593 /* MAX (X, 0) == -1 -> false */
4594 return omit_one_operand (type, integer_zero_node, inner);
4595
4596 else if (consts_equal)
4597 /* MIN (X, 0) == 0 -> X >= 0 */
4598 return fold (build2 (GE_EXPR, type, inner, comp_const));
4599
4600 else if (consts_lt)
4601 /* MIN (X, 0) == 5 -> false */
4602 return omit_one_operand (type, integer_zero_node, inner);
4603
4604 else
4605 /* MIN (X, 0) == -1 -> X == -1 */
4606 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4607
4608 case GT_EXPR:
4609 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4610 /* MAX (X, 0) > 0 -> X > 0
4611 MAX (X, 0) > 5 -> X > 5 */
4612 return fold (build2 (GT_EXPR, type, inner, comp_const));
4613
4614 else if (op_code == MAX_EXPR)
4615 /* MAX (X, 0) > -1 -> true */
4616 return omit_one_operand (type, integer_one_node, inner);
4617
4618 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4619 /* MIN (X, 0) > 0 -> false
4620 MIN (X, 0) > 5 -> false */
4621 return omit_one_operand (type, integer_zero_node, inner);
4622
4623 else
4624 /* MIN (X, 0) > -1 -> X > -1 */
4625 return fold (build2 (GT_EXPR, type, inner, comp_const));
4626
4627 default:
4628 return t;
4629 }
4630 }
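
/* Editorial example (not part of the original source): GE_EXPR is
   handled by the recursive decomposition above, e.g.

     MAX (x, 3) >= 3
       -> MAX (x, 3) == 3 || MAX (x, 3) > 3
       -> x <= 3 || x > 3

   which the ordinary logical folders then reduce to true.  */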
4631 \f
4632 /* T is an integer expression that is being multiplied, divided, or taken a
4633 modulus (CODE says which and what kind of divide or modulus) by a
4634 constant C. See if we can eliminate that operation by folding it with
4635 other operations already in T. WIDE_TYPE, if non-null, is a type that
4636 should be used for the computation if wider than our type.
4637
4638 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4639 (X * 2) + (Y * 4). We must, however, be assured that either the original
4640 expression would not overflow or that overflow is undefined for the type
4641 in the language in question.
4642
4643 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4644 the machine has a multiply-accumulate insn or that this is part of an
4645 addressing calculation.
4646
4647 If we return a non-null expression, it is an equivalent form of the
4648 original computation, but need not be in the original type. */
4649
4650 static tree
4651 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4652 {
4653 /* To avoid exponential search depth, refuse to allow recursion past
4654 three levels. Beyond that (1) it's highly unlikely that we'll find
4655 something interesting and (2) we've probably processed it before
4656 when we built the inner expression. */
4657
4658 static int depth;
4659 tree ret;
4660
4661 if (depth > 3)
4662 return NULL;
4663
4664 depth++;
4665 ret = extract_muldiv_1 (t, c, code, wide_type);
4666 depth--;
4667
4668 return ret;
4669 }
4670
4671 static tree
4672 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4673 {
4674 tree type = TREE_TYPE (t);
4675 enum tree_code tcode = TREE_CODE (t);
4676 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4677 > GET_MODE_SIZE (TYPE_MODE (type)))
4678 ? wide_type : type);
4679 tree t1, t2;
4680 int same_p = tcode == code;
4681 tree op0 = NULL_TREE, op1 = NULL_TREE;
4682
4683 /* Don't deal with constants of zero here; they confuse the code below. */
4684 if (integer_zerop (c))
4685 return NULL_TREE;
4686
4687 if (TREE_CODE_CLASS (tcode) == '1')
4688 op0 = TREE_OPERAND (t, 0);
4689
4690 if (TREE_CODE_CLASS (tcode) == '2')
4691 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4692
4693 /* Note that we need not handle conditional operations here since fold
4694 already handles those cases. So just do arithmetic here. */
4695 switch (tcode)
4696 {
4697 case INTEGER_CST:
4698 /* For a constant, we can always simplify if we are a multiply
4699 or (for divide and modulus) if it is a multiple of our constant. */
4700 if (code == MULT_EXPR
4701 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4702 return const_binop (code, fold_convert (ctype, t),
4703 fold_convert (ctype, c), 0);
4704 break;
4705
4706 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4707 /* If op0 is an expression ... */
4708 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4709 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4710 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4711 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4712 /* ... and is unsigned, and its type is smaller than ctype,
4713 then we cannot pass through as widening. */
4714 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4715 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4716 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4717 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4718 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4719 /* ... or its type is larger than ctype,
4720 then we cannot pass through this truncation. */
4721 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4722 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4723 /* ... or signedness changes for division or modulus,
4724 then we cannot pass through this conversion. */
4725 || (code != MULT_EXPR
4726 && (TYPE_UNSIGNED (ctype)
4727 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4728 break;
4729
4730 /* Pass the constant down and see if we can make a simplification. If
4731 we can, replace this expression with the inner simplification for
4732 possible later conversion to our or some other type. */
4733 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4734 && TREE_CODE (t2) == INTEGER_CST
4735 && ! TREE_CONSTANT_OVERFLOW (t2)
4736 && (0 != (t1 = extract_muldiv (op0, t2, code,
4737 code == MULT_EXPR
4738 ? ctype : NULL_TREE))))
4739 return t1;
4740 break;
4741
4742 case NEGATE_EXPR: case ABS_EXPR:
4743 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4744 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4745 break;
4746
4747 case MIN_EXPR: case MAX_EXPR:
4748 /* If widening the type changes the signedness, then we can't perform
4749 this optimization as that changes the result. */
4750 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4751 break;
4752
4753 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4754 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4755 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4756 {
4757 if (tree_int_cst_sgn (c) < 0)
4758 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4759
4760 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4761 fold_convert (ctype, t2)));
4762 }
4763 break;
4764
4765 case LSHIFT_EXPR: case RSHIFT_EXPR:
4766 /* If the second operand is constant, this is a multiplication
4767 or floor division by a power of two, so we can treat it that
4768 way unless the multiplier or divisor overflows. */
4769 if (TREE_CODE (op1) == INTEGER_CST
4770 /* const_binop may not detect overflow correctly,
4771 so check for it explicitly here. */
4772 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4773 && TREE_INT_CST_HIGH (op1) == 0
4774 && 0 != (t1 = fold_convert (ctype,
4775 const_binop (LSHIFT_EXPR,
4776 size_one_node,
4777 op1, 0)))
4778 && ! TREE_OVERFLOW (t1))
4779 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
4780 ? MULT_EXPR : FLOOR_DIV_EXPR,
4781 ctype, fold_convert (ctype, op0), t1),
4782 c, code, wide_type);
4783 break;
4784
4785 case PLUS_EXPR: case MINUS_EXPR:
4786 /* See if we can eliminate the operation on both sides. If we can, we
4787 can return a new PLUS or MINUS. If we can't, the only remaining
4788 cases where we can do anything are if the second operand is a
4789 constant. */
4790 t1 = extract_muldiv (op0, c, code, wide_type);
4791 t2 = extract_muldiv (op1, c, code, wide_type);
4792 if (t1 != 0 && t2 != 0
4793 && (code == MULT_EXPR
4794 /* If not multiplication, we can only do this if both operands
4795 are divisible by c. */
4796 || (multiple_of_p (ctype, op0, c)
4797 && multiple_of_p (ctype, op1, c))))
4798 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4799 fold_convert (ctype, t2)));
4800
4801 /* If this was a subtraction, negate OP1 and set it to be an addition.
4802 This simplifies the logic below. */
4803 if (tcode == MINUS_EXPR)
4804 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4805
4806 if (TREE_CODE (op1) != INTEGER_CST)
4807 break;
4808
4809 /* If either OP1 or C are negative, this optimization is not safe for
4810 some of the division and remainder types while for others we need
4811 to change the code. */
4812 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4813 {
4814 if (code == CEIL_DIV_EXPR)
4815 code = FLOOR_DIV_EXPR;
4816 else if (code == FLOOR_DIV_EXPR)
4817 code = CEIL_DIV_EXPR;
4818 else if (code != MULT_EXPR
4819 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4820 break;
4821 }
4822
4823 /* If it's a multiply or a division/modulus operation of a multiple
4824 of our constant, do the operation and verify it doesn't overflow. */
4825 if (code == MULT_EXPR
4826 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4827 {
4828 op1 = const_binop (code, fold_convert (ctype, op1),
4829 fold_convert (ctype, c), 0);
4830 /* We allow the constant to overflow with wrapping semantics. */
4831 if (op1 == 0
4832 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4833 break;
4834 }
4835 else
4836 break;
4837
4838 /* If we have an unsigned type that is not a sizetype, we cannot widen
4839 the operation since it will change the result if the original
4840 computation overflowed. */
4841 if (TYPE_UNSIGNED (ctype)
4842 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4843 && ctype != type)
4844 break;
4845
4846 /* If we were able to eliminate our operation from the first side,
4847 apply our operation to the second side and reform the PLUS. */
4848 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4849 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
4850
4851 /* The last case is if we are a multiply. In that case, we can
4852 apply the distributive law to commute the multiply and addition
4853 if the multiplication of the constants doesn't overflow. */
4854 if (code == MULT_EXPR)
4855 return fold (build2 (tcode, ctype,
4856 fold (build2 (code, ctype,
4857 fold_convert (ctype, op0),
4858 fold_convert (ctype, c))),
4859 op1));
4860
4861 break;
4862
4863 case MULT_EXPR:
4864 /* We have a special case here if we are doing something like
4865 (C * 8) % 4 since we know that's zero. */
4866 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4867 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4868 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4869 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4870 return omit_one_operand (type, integer_zero_node, op0);
4871
4872 /* ... fall through ... */
4873
4874 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4875 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4876 /* If we can extract our operation from the LHS, do so and return a
4877 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4878 do something only if the second operand is a constant. */
4879 if (same_p
4880 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4881 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4882 fold_convert (ctype, op1)));
4883 else if (tcode == MULT_EXPR && code == MULT_EXPR
4884 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4885 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4886 fold_convert (ctype, t1)));
4887 else if (TREE_CODE (op1) != INTEGER_CST)
4888 return 0;
4889
4890 /* If these are the same operation types, we can associate them
4891 assuming no overflow. */
4892 if (tcode == code
4893 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4894 fold_convert (ctype, c), 0))
4895 && ! TREE_OVERFLOW (t1))
4896 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
4897
4898 /* If these operations "cancel" each other, we have the main
4899 optimizations of this pass, which occur when either constant is a
4900 multiple of the other, in which case we replace this with an
4901 operation of either CODE or TCODE.
4902
4903 If we have an unsigned type that is not a sizetype, we cannot do
4904 this since it will change the result if the original computation
4905 overflowed. */
4906 if ((! TYPE_UNSIGNED (ctype)
4907 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4908 && ! flag_wrapv
4909 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4910 || (tcode == MULT_EXPR
4911 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4912 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4913 {
4914 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4915 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4916 fold_convert (ctype,
4917 const_binop (TRUNC_DIV_EXPR,
4918 op1, c, 0))));
4919 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4920 return fold (build2 (code, ctype, fold_convert (ctype, op0),
4921 fold_convert (ctype,
4922 const_binop (TRUNC_DIV_EXPR,
4923 c, op1, 0))));
4924 }
4925 break;
4926
4927 default:
4928 break;
4929 }
4930
4931 return 0;
4932 }
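
/* Editorial example (not part of the original source): for a signed
   x, where overflow is undefined,

     (x << 3) / 4

   first has its shift rewritten to x * 8 by the LSHIFT_EXPR case,
   and the MULT/DIV cancellation case then folds the whole
   expression to x * 2.  */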
4933 \f
4934 /* Return a node which has the indicated constant VALUE (either 0 or
4935 1), and is of the indicated TYPE. */
4936
4937 static tree
4938 constant_boolean_node (int value, tree type)
4939 {
4940 if (type == integer_type_node)
4941 return value ? integer_one_node : integer_zero_node;
4942 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4943 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4944 : integer_zero_node);
4945 else
4946 {
4947 tree t = build_int_2 (value, 0);
4948
4949 TREE_TYPE (t) = type;
4950 return t;
4951 }
4952 }
4953
4954 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4955 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4956 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4957 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4958 COND is the first argument to CODE; otherwise (as in the example
4959 given here), it is the second argument. TYPE is the type of the
4960 original expression. Return NULL_TREE if no simplification is
4961 possible. */
4962
4963 static tree
4964 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4965 tree cond, tree arg, int cond_first_p)
4966 {
4967 tree test, true_value, false_value;
4968 tree lhs = NULL_TREE;
4969 tree rhs = NULL_TREE;
4970
4971 /* This transformation is only worthwhile if we don't have to wrap
4972 arg in a SAVE_EXPR, and the operation can be simplified on at least
4973 one of the branches once it is pushed inside the COND_EXPR. */
4974 if (!TREE_CONSTANT (arg))
4975 return NULL_TREE;
4976
4977 if (TREE_CODE (cond) == COND_EXPR)
4978 {
4979 test = TREE_OPERAND (cond, 0);
4980 true_value = TREE_OPERAND (cond, 1);
4981 false_value = TREE_OPERAND (cond, 2);
4982 /* If this operand throws an exception, then it does not make
4983 sense to try to perform a logical or arithmetic operation
4984 involving it. */
4985 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4986 lhs = true_value;
4987 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4988 rhs = false_value;
4989 }
4990 else
4991 {
4992 tree testtype = TREE_TYPE (cond);
4993 test = cond;
4994 true_value = constant_boolean_node (true, testtype);
4995 false_value = constant_boolean_node (false, testtype);
4996 }
4997
4998 if (lhs == 0)
4999 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5000 : build2 (code, type, arg, true_value));
5001 if (rhs == 0)
5002 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5003 : build2 (code, type, arg, false_value));
5004
5005 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5006 return fold_convert (type, test);
5007 }
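
/* Editorial example (not part of the original source): with a
   constant ARG the transformation is a clear win, e.g.

     10 + (flag ? 1 : 0)  ->  flag ? 11 : 10

   since both arms fold to constants and nothing is duplicated.  */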
5008
5009 \f
5010 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5011
5012 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5013 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5014 ADDEND is the same as X.
5015
5016 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5017 and finite. The problematic cases are when X is zero, and its mode
5018 has signed zeros. In the case of rounding towards -infinity,
5019 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5020 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5021
5022 static bool
5023 fold_real_zero_addition_p (tree type, tree addend, int negate)
5024 {
5025 if (!real_zerop (addend))
5026 return false;
5027
5028 /* Don't allow the fold with -fsignaling-nans. */
5029 if (HONOR_SNANS (TYPE_MODE (type)))
5030 return false;
5031
5032 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5033 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5034 return true;
5035
5036 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5037 if (TREE_CODE (addend) == REAL_CST
5038 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5039 negate = !negate;
5040
5041 /* The mode has signed zeros, and we have to honor their sign.
5042 In this situation, there is only one case we can return true for.
5043 X - 0 is the same as X unless rounding towards -infinity is
5044 supported. */
5045 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5046 }
5047
5048 /* Subroutine of fold() that checks comparisons of built-in math
5049 functions against real constants.
5050
5051 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5052 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5053 is the type of the result and ARG0 and ARG1 are the operands of the
5054 comparison. ARG1 must be a TREE_REAL_CST.
5055
5056 The function returns the constant folded tree if a simplification
5057 can be made, and NULL_TREE otherwise. */
5058
5059 static tree
5060 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5061 tree type, tree arg0, tree arg1)
5062 {
5063 REAL_VALUE_TYPE c;
5064
5065 if (BUILTIN_SQRT_P (fcode))
5066 {
5067 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5068 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5069
5070 c = TREE_REAL_CST (arg1);
5071 if (REAL_VALUE_NEGATIVE (c))
5072 {
5073 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false, if y is negative. */
5074 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5075 return omit_one_operand (type, integer_zero_node, arg);
5076
5077 /* sqrt(x) > y is always true, if y is negative and we
5078 don't care about NaNs, i.e. negative values of x. */
5079 if (code == NE_EXPR || !HONOR_NANS (mode))
5080 return omit_one_operand (type, integer_one_node, arg);
5081
5082 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5083 return fold (build2 (GE_EXPR, type, arg,
5084 build_real (TREE_TYPE (arg), dconst0)));
5085 }
5086 else if (code == GT_EXPR || code == GE_EXPR)
5087 {
5088 REAL_VALUE_TYPE c2;
5089
5090 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5091 real_convert (&c2, mode, &c2);
5092
5093 if (REAL_VALUE_ISINF (c2))
5094 {
5095 /* sqrt(x) > y is x == +Inf, when y is very large. */
5096 if (HONOR_INFINITIES (mode))
5097 return fold (build2 (EQ_EXPR, type, arg,
5098 build_real (TREE_TYPE (arg), c2)));
5099
5100 /* sqrt(x) > y is always false, when y is very large
5101 and we don't care about infinities. */
5102 return omit_one_operand (type, integer_zero_node, arg);
5103 }
5104
5105 /* sqrt(x) > c is the same as x > c*c. */
5106 return fold (build2 (code, type, arg,
5107 build_real (TREE_TYPE (arg), c2)));
5108 }
5109 else if (code == LT_EXPR || code == LE_EXPR)
5110 {
5111 REAL_VALUE_TYPE c2;
5112
5113 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5114 real_convert (&c2, mode, &c2);
5115
5116 if (REAL_VALUE_ISINF (c2))
5117 {
5118 /* sqrt(x) < y is always true, when y is a very large
5119 value and we don't care about NaNs or Infinities. */
5120 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5121 return omit_one_operand (type, integer_one_node, arg);
5122
5123 /* sqrt(x) < y is x != +Inf when y is very large and we
5124 don't care about NaNs. */
5125 if (! HONOR_NANS (mode))
5126 return fold (build2 (NE_EXPR, type, arg,
5127 build_real (TREE_TYPE (arg), c2)));
5128
5129 /* sqrt(x) < y is x >= 0 when y is very large and we
5130 don't care about Infinities. */
5131 if (! HONOR_INFINITIES (mode))
5132 return fold (build2 (GE_EXPR, type, arg,
5133 build_real (TREE_TYPE (arg), dconst0)));
5134
5135 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5136 if (lang_hooks.decls.global_bindings_p () != 0
5137 || CONTAINS_PLACEHOLDER_P (arg))
5138 return NULL_TREE;
5139
5140 arg = save_expr (arg);
5141 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5142 fold (build2 (GE_EXPR, type, arg,
5143 build_real (TREE_TYPE (arg),
5144 dconst0))),
5145 fold (build2 (NE_EXPR, type, arg,
5146 build_real (TREE_TYPE (arg),
5147 c2)))));
5148 }
5149
5150 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5151 if (! HONOR_NANS (mode))
5152 return fold (build2 (code, type, arg,
5153 build_real (TREE_TYPE (arg), c2)));
5154
5155 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5156 if (lang_hooks.decls.global_bindings_p () == 0
5157 && ! CONTAINS_PLACEHOLDER_P (arg))
5158 {
5159 arg = save_expr (arg);
5160 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5161 fold (build2 (GE_EXPR, type, arg,
5162 build_real (TREE_TYPE (arg),
5163 dconst0))),
5164 fold (build2 (code, type, arg,
5165 build_real (TREE_TYPE (arg),
5166 c2)))));
5167 }
5168 }
5169 }
5170
5171 return NULL_TREE;
5172 }
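
/* Editorial example (not part of the original source): when NaNs
   need not be honored this folds

     sqrt (x) < 3.0  ->  x < 9.0

   while honoring NaNs it instead builds, via the save_expr path,

     x >= 0.0 && x < 9.0  */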
5173
5174 /* Subroutine of fold() that optimizes comparisons against Infinities,
5175 either +Inf or -Inf.
5176
5177 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5178 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5179 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5180
5181 The function returns the constant folded tree if a simplification
5182 can be made, and NULL_TREE otherwise. */
5183
5184 static tree
5185 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5186 {
5187 enum machine_mode mode;
5188 REAL_VALUE_TYPE max;
5189 tree temp;
5190 bool neg;
5191
5192 mode = TYPE_MODE (TREE_TYPE (arg0));
5193
5194   /* For negative infinity, swap the sense of the comparison.  */
5195 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5196 if (neg)
5197 code = swap_tree_comparison (code);
5198
5199 switch (code)
5200 {
5201 case GT_EXPR:
5202       /* x > +Inf is always false, if we ignore sNaNs.  */
5203 if (HONOR_SNANS (mode))
5204 return NULL_TREE;
5205 return omit_one_operand (type, integer_zero_node, arg0);
5206
5207 case LE_EXPR:
5208       /* x <= +Inf is always true, if we don't care about NaNs.  */
5209 if (! HONOR_NANS (mode))
5210 return omit_one_operand (type, integer_one_node, arg0);
5211
5212 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5213 if (lang_hooks.decls.global_bindings_p () == 0
5214 && ! CONTAINS_PLACEHOLDER_P (arg0))
5215 {
5216 arg0 = save_expr (arg0);
5217 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5218 }
5219 break;
5220
5221 case EQ_EXPR:
5222 case GE_EXPR:
5223 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5224 real_maxval (&max, neg, mode);
5225 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5226 arg0, build_real (TREE_TYPE (arg0), max)));
5227
5228 case LT_EXPR:
5229 /* x < +Inf is always equal to x <= DBL_MAX. */
5230 real_maxval (&max, neg, mode);
5231 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5232 arg0, build_real (TREE_TYPE (arg0), max)));
5233
5234 case NE_EXPR:
5235 /* x != +Inf is always equal to !(x > DBL_MAX). */
5236 real_maxval (&max, neg, mode);
5237 if (! HONOR_NANS (mode))
5238 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5239 arg0, build_real (TREE_TYPE (arg0), max)));
5240
5241 /* The transformation below creates non-gimple code and thus is
5242 not appropriate if we are in gimple form. */
5243 if (in_gimple_form)
5244 return NULL_TREE;
5245
5246 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5247 arg0, build_real (TREE_TYPE (arg0), max)));
5248 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5249
5250 default:
5251 break;
5252 }
5253
5254 return NULL_TREE;
5255 }
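/* For example, for a double X: X < +Inf folds to X <= DBL_MAX
   above, and X != +Inf folds to !(X > DBL_MAX), which further
   simplifies to X <= DBL_MAX when NaNs need not be honored.  */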
5256
5257 /* Subroutine of fold() that optimizes comparisons of a division by
5258 a nonzero integer constant against an integer constant, i.e.
5259 X/C1 op C2.
5260
5261 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5262 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5263    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5264
5265 The function returns the constant folded tree if a simplification
5266 can be made, and NULL_TREE otherwise. */
5267
5268 static tree
5269 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5270 {
5271 tree prod, tmp, hi, lo;
5272 tree arg00 = TREE_OPERAND (arg0, 0);
5273 tree arg01 = TREE_OPERAND (arg0, 1);
5274 unsigned HOST_WIDE_INT lpart;
5275 HOST_WIDE_INT hpart;
5276 int overflow;
5277
5278 /* We have to do this the hard way to detect unsigned overflow.
5279 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5280 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5281 TREE_INT_CST_HIGH (arg01),
5282 TREE_INT_CST_LOW (arg1),
5283 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5284 prod = build_int_2 (lpart, hpart);
5285 TREE_TYPE (prod) = TREE_TYPE (arg00);
5286 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5287 || TREE_INT_CST_HIGH (prod) != hpart
5288 || TREE_INT_CST_LOW (prod) != lpart;
5289 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5290
5291 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5292 {
5293 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5294 lo = prod;
5295
5296 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5297 overflow = add_double (TREE_INT_CST_LOW (prod),
5298 TREE_INT_CST_HIGH (prod),
5299 TREE_INT_CST_LOW (tmp),
5300 TREE_INT_CST_HIGH (tmp),
5301 &lpart, &hpart);
5302 hi = build_int_2 (lpart, hpart);
5303 TREE_TYPE (hi) = TREE_TYPE (arg00);
5304 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5305 || TREE_INT_CST_HIGH (hi) != hpart
5306 || TREE_INT_CST_LOW (hi) != lpart
5307 || TREE_OVERFLOW (prod);
5308 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5309 }
5310 else if (tree_int_cst_sgn (arg01) >= 0)
5311 {
5312 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5313 switch (tree_int_cst_sgn (arg1))
5314 {
5315 case -1:
5316 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5317 hi = prod;
5318 break;
5319
5320 case 0:
5321 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5322 hi = tmp;
5323 break;
5324
5325 case 1:
5326 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5327 lo = prod;
5328 break;
5329
5330 default:
5331 abort ();
5332 }
5333 }
5334 else
5335 {
5336 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5337 switch (tree_int_cst_sgn (arg1))
5338 {
5339 case -1:
5340 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5341 lo = prod;
5342 break;
5343
5344 case 0:
5345 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5346 lo = tmp;
5347 break;
5348
5349 case 1:
5350 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5351 hi = prod;
5352 break;
5353
5354 default:
5355 abort ();
5356 }
5357 }
5358
5359 switch (code)
5360 {
5361 case EQ_EXPR:
5362 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5363 return omit_one_operand (type, integer_zero_node, arg00);
5364 if (TREE_OVERFLOW (hi))
5365 return fold (build2 (GE_EXPR, type, arg00, lo));
5366 if (TREE_OVERFLOW (lo))
5367 return fold (build2 (LE_EXPR, type, arg00, hi));
5368 return build_range_check (type, arg00, 1, lo, hi);
5369
5370 case NE_EXPR:
5371 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5372 return omit_one_operand (type, integer_one_node, arg00);
5373 if (TREE_OVERFLOW (hi))
5374 return fold (build2 (LT_EXPR, type, arg00, lo));
5375 if (TREE_OVERFLOW (lo))
5376 return fold (build2 (GT_EXPR, type, arg00, hi));
5377 return build_range_check (type, arg00, 0, lo, hi);
5378
5379 case LT_EXPR:
5380 if (TREE_OVERFLOW (lo))
5381 return omit_one_operand (type, integer_zero_node, arg00);
5382 return fold (build2 (LT_EXPR, type, arg00, lo));
5383
5384 case LE_EXPR:
5385 if (TREE_OVERFLOW (hi))
5386 return omit_one_operand (type, integer_one_node, arg00);
5387 return fold (build2 (LE_EXPR, type, arg00, hi));
5388
5389 case GT_EXPR:
5390 if (TREE_OVERFLOW (hi))
5391 return omit_one_operand (type, integer_zero_node, arg00);
5392 return fold (build2 (GT_EXPR, type, arg00, hi));
5393
5394 case GE_EXPR:
5395 if (TREE_OVERFLOW (lo))
5396 return omit_one_operand (type, integer_one_node, arg00);
5397 return fold (build2 (GE_EXPR, type, arg00, lo));
5398
5399 default:
5400 break;
5401 }
5402
5403 return NULL_TREE;
5404 }
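/* A worked example of the range computation above: for unsigned X,
   X/3 == 2 gives PROD = 3*2 = 6 and TMP = 3-1 = 2, hence LO = 6
   and HI = 8, so the comparison folds to the range check
   6 <= X && X <= 8.  */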
5405
5406
5407 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5408 equality/inequality test, then return a simplified form of
5409 the test using shifts and logical operations. Otherwise return
5410    NULL.  RESULT_TYPE is the desired result type.  */
5411
5412 tree
5413 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5414 tree result_type)
5415 {
5416 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5417 operand 0. */
5418 if (code == TRUTH_NOT_EXPR)
5419 {
5420 code = TREE_CODE (arg0);
5421 if (code != NE_EXPR && code != EQ_EXPR)
5422 return NULL_TREE;
5423
5424 /* Extract the arguments of the EQ/NE. */
5425 arg1 = TREE_OPERAND (arg0, 1);
5426 arg0 = TREE_OPERAND (arg0, 0);
5427
5428 /* This requires us to invert the code. */
5429 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5430 }
5431
5432 /* If this is testing a single bit, we can optimize the test. */
5433 if ((code == NE_EXPR || code == EQ_EXPR)
5434 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5435 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5436 {
5437 tree inner = TREE_OPERAND (arg0, 0);
5438 tree type = TREE_TYPE (arg0);
5439 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5440 enum machine_mode operand_mode = TYPE_MODE (type);
5441 int ops_unsigned;
5442 tree signed_type, unsigned_type, intermediate_type;
5443 tree arg00;
5444
5445 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5446 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5447 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5448 if (arg00 != NULL_TREE
5449 /* This is only a win if casting to a signed type is cheap,
5450 i.e. when arg00's type is not a partial mode. */
5451 && TYPE_PRECISION (TREE_TYPE (arg00))
5452 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5453 {
5454 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5455 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5456 result_type, fold_convert (stype, arg00),
5457 fold_convert (stype, integer_zero_node)));
5458 }
5459
5460 /* Otherwise we have (A & C) != 0 where C is a single bit,
5461 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
5462 Similarly for (A & C) == 0. */
5463
5464       /* If INNER is a right shift by a constant and it plus BITNUM does
5465 	 not overflow, adjust BITNUM and INNER.  */
5466 if (TREE_CODE (inner) == RSHIFT_EXPR
5467 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5468 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5469 && bitnum < TYPE_PRECISION (type)
5470 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5471 bitnum - TYPE_PRECISION (type)))
5472 {
5473 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5474 inner = TREE_OPERAND (inner, 0);
5475 }
5476
5477 /* If we are going to be able to omit the AND below, we must do our
5478 operations as unsigned. If we must use the AND, we have a choice.
5479 Normally unsigned is faster, but for some machines signed is. */
5480 #ifdef LOAD_EXTEND_OP
5481 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5482 #else
5483 ops_unsigned = 1;
5484 #endif
5485
5486 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5487 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5488 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5489 inner = fold_convert (intermediate_type, inner);
5490
5491 if (bitnum != 0)
5492 inner = build2 (RSHIFT_EXPR, intermediate_type,
5493 inner, size_int (bitnum));
5494
5495 if (code == EQ_EXPR)
5496 inner = build2 (BIT_XOR_EXPR, intermediate_type,
5497 inner, integer_one_node);
5498
5499 /* Put the AND last so it can combine with more things. */
5500 inner = build2 (BIT_AND_EXPR, intermediate_type,
5501 inner, integer_one_node);
5502
5503 /* Make sure to return the proper type. */
5504 inner = fold_convert (result_type, inner);
5505
5506 return inner;
5507 }
5508 return NULL_TREE;
5509 }
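/* For example, (X & 8) != 0 folds to ((X >> 3) & 1), and
   (X & 8) == 0 to (((X >> 3) ^ 1) & 1); when the mask is the
   sign bit of X, (X & C) != 0 folds to X < 0 instead.  */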
5510
5511 /* Check whether we are allowed to reorder operands arg0 and arg1,
5512 such that the evaluation of arg1 occurs before arg0. */
5513
5514 static bool
5515 reorder_operands_p (tree arg0, tree arg1)
5516 {
5517 if (! flag_evaluation_order)
5518 return true;
5519 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5520 return true;
5521 return ! TREE_SIDE_EFFECTS (arg0)
5522 && ! TREE_SIDE_EFFECTS (arg1);
5523 }
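/* E.g. when flag_evaluation_order is set (the language defines an
   evaluation order), F () + G () may only be reordered if one
   operand is constant or neither call has side effects.  */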
5524
5525 /* Test whether it is preferable to swap two operands, ARG0 and
5526 ARG1, for example because ARG0 is an integer constant and ARG1
5527 isn't. If REORDER is true, only recommend swapping if we can
5528 evaluate the operands in reverse order. */
5529
5530 static bool
5531 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5532 {
5533 STRIP_SIGN_NOPS (arg0);
5534 STRIP_SIGN_NOPS (arg1);
5535
5536 if (TREE_CODE (arg1) == INTEGER_CST)
5537 return 0;
5538 if (TREE_CODE (arg0) == INTEGER_CST)
5539 return 1;
5540
5541 if (TREE_CODE (arg1) == REAL_CST)
5542 return 0;
5543 if (TREE_CODE (arg0) == REAL_CST)
5544 return 1;
5545
5546 if (TREE_CODE (arg1) == COMPLEX_CST)
5547 return 0;
5548 if (TREE_CODE (arg0) == COMPLEX_CST)
5549 return 1;
5550
5551 if (TREE_CONSTANT (arg1))
5552 return 0;
5553 if (TREE_CONSTANT (arg0))
5554 return 1;
5555
5556 if (optimize_size)
5557 return 0;
5558
5559 if (reorder && flag_evaluation_order
5560 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5561 return 0;
5562
5563 if (DECL_P (arg1))
5564 return 0;
5565 if (DECL_P (arg0))
5566 return 1;
5567
5577 return 0;
5578 }
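/* The net effect is a canonical operand order for commutative
   codes: constants and other "simpler" operands are moved to the
   second position, so 1 + X is rewritten as X + 1 and later
   patterns only need to check arg1 for a constant.  */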
5579
5580 /* Perform constant folding and related simplification of EXPR.
5581 The related simplifications include x*1 => x, x*0 => 0, etc.,
5582 and application of the associative law.
5583 NOP_EXPR conversions may be removed freely (as long as we
5584 are careful not to change the type of the overall expression).
5585 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5586 but we can constant-fold them if they have constant operands. */
5587
5588 #ifdef ENABLE_FOLD_CHECKING
5589 # define fold(x) fold_1 (x)
5590 static tree fold_1 (tree);
5591 static
5592 #endif
5593 tree
5594 fold (tree expr)
5595 {
5596 const tree t = expr;
5597 const tree type = TREE_TYPE (expr);
5598 tree t1 = NULL_TREE;
5599 tree tem;
5600 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5601 enum tree_code code = TREE_CODE (t);
5602 int kind = TREE_CODE_CLASS (code);
5603
5604 /* WINS will be nonzero when the switch is done
5605 if all operands are constant. */
5606 int wins = 1;
5607
5608 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5609 Likewise for a SAVE_EXPR that's already been evaluated. */
5610 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5611 return t;
5612
5613 /* Return right away if a constant. */
5614 if (kind == 'c')
5615 return t;
5616
5617 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5618 {
5619 tree subop;
5620
5621 /* Special case for conversion ops that can have fixed point args. */
5622 arg0 = TREE_OPERAND (t, 0);
5623
5624 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5625 if (arg0 != 0)
5626 STRIP_SIGN_NOPS (arg0);
5627
5628 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5629 subop = TREE_REALPART (arg0);
5630 else
5631 subop = arg0;
5632
5633 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5634 && TREE_CODE (subop) != REAL_CST)
5635 /* Note that TREE_CONSTANT isn't enough:
5636 static var addresses are constant but we can't
5637 do arithmetic on them. */
5638 wins = 0;
5639 }
5640 else if (IS_EXPR_CODE_CLASS (kind))
5641 {
5642 int len = first_rtl_op (code);
5643 int i;
5644 for (i = 0; i < len; i++)
5645 {
5646 tree op = TREE_OPERAND (t, i);
5647 tree subop;
5648
5649 if (op == 0)
5650 continue; /* Valid for CALL_EXPR, at least. */
5651
5652 /* Strip any conversions that don't change the mode. This is
5653 safe for every expression, except for a comparison expression
5654 because its signedness is derived from its operands. So, in
5655 the latter case, only strip conversions that don't change the
5656 signedness.
5657
5658 Note that this is done as an internal manipulation within the
5659 constant folder, in order to find the simplest representation
5660 	     of the arguments so that their form can be studied.  In any
5661 	     case, the appropriate type conversions should be put back in
5662 the tree that will get out of the constant folder. */
5663 if (kind == '<')
5664 STRIP_SIGN_NOPS (op);
5665 else
5666 STRIP_NOPS (op);
5667
5668 if (TREE_CODE (op) == COMPLEX_CST)
5669 subop = TREE_REALPART (op);
5670 else
5671 subop = op;
5672
5673 if (TREE_CODE (subop) != INTEGER_CST
5674 && TREE_CODE (subop) != REAL_CST)
5675 /* Note that TREE_CONSTANT isn't enough:
5676 static var addresses are constant but we can't
5677 do arithmetic on them. */
5678 wins = 0;
5679
5680 if (i == 0)
5681 arg0 = op;
5682 else if (i == 1)
5683 arg1 = op;
5684 }
5685 }
5686
5687 /* If this is a commutative operation, and ARG0 is a constant, move it
5688 to ARG1 to reduce the number of tests below. */
5689 if (commutative_tree_code (code)
5690 && tree_swap_operands_p (arg0, arg1, true))
5691 return fold (build2 (code, type, TREE_OPERAND (t, 1),
5692 TREE_OPERAND (t, 0)));
5693
5694 /* Now WINS is set as described above,
5695 ARG0 is the first operand of EXPR,
5696 and ARG1 is the second operand (if it has more than one operand).
5697
5698 First check for cases where an arithmetic operation is applied to a
5699 compound, conditional, or comparison operation. Push the arithmetic
5700 operation inside the compound or conditional to see if any folding
5701 can then be done. Convert comparison to conditional for this purpose.
5702      This also optimizes non-constant cases that used to be done in
5703 expand_expr.
5704
5705 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5706 one of the operands is a comparison and the other is a comparison, a
5707 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5708 code below would make the expression more complex. Change it to a
5709 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5710 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5711
5712 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5713 || code == EQ_EXPR || code == NE_EXPR)
5714 && ((truth_value_p (TREE_CODE (arg0))
5715 && (truth_value_p (TREE_CODE (arg1))
5716 || (TREE_CODE (arg1) == BIT_AND_EXPR
5717 && integer_onep (TREE_OPERAND (arg1, 1)))))
5718 || (truth_value_p (TREE_CODE (arg1))
5719 && (truth_value_p (TREE_CODE (arg0))
5720 || (TREE_CODE (arg0) == BIT_AND_EXPR
5721 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5722 {
5723 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5724 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5725 : TRUTH_XOR_EXPR,
5726 type, fold_convert (boolean_type_node, arg0),
5727 fold_convert (boolean_type_node, arg1)));
5728
5729 if (code == EQ_EXPR)
5730 tem = invert_truthvalue (tem);
5731
5732 return tem;
5733 }
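/* For example, (a < b) & (c < d) becomes a TRUTH_AND_EXPR of the
   two comparisons, (a < b) != (c < d) becomes a TRUTH_XOR_EXPR,
   and (a < b) == (c < d) becomes the inverted TRUTH_XOR_EXPR.  */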
5734
5735 if (TREE_CODE_CLASS (code) == '1')
5736 {
5737 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5738 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5739 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5740 else if (TREE_CODE (arg0) == COND_EXPR)
5741 {
5742 tree arg01 = TREE_OPERAND (arg0, 1);
5743 tree arg02 = TREE_OPERAND (arg0, 2);
5744 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5745 arg01 = fold (build1 (code, type, arg01));
5746 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5747 arg02 = fold (build1 (code, type, arg02));
5748 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5749 arg01, arg02));
5750
5751 	  /* If this was a conversion, and all we did was to move it
5752 	     inside the COND_EXPR, bring it back out.  But leave it if
5753 it is a conversion from integer to integer and the
5754 result precision is no wider than a word since such a
5755 conversion is cheap and may be optimized away by combine,
5756 while it couldn't if it were outside the COND_EXPR. Then return
5757 so we don't get into an infinite recursion loop taking the
5758 conversion out and then back in. */
5759
5760 if ((code == NOP_EXPR || code == CONVERT_EXPR
5761 || code == NON_LVALUE_EXPR)
5762 && TREE_CODE (tem) == COND_EXPR
5763 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5764 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5765 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5766 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5767 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5768 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5769 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5770 && (INTEGRAL_TYPE_P
5771 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5772 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5773 tem = build1 (code, type,
5774 build3 (COND_EXPR,
5775 TREE_TYPE (TREE_OPERAND
5776 (TREE_OPERAND (tem, 1), 0)),
5777 TREE_OPERAND (tem, 0),
5778 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5779 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5780 return tem;
5781 }
5782 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5783 {
5784 if (TREE_CODE (type) == BOOLEAN_TYPE)
5785 {
5786 arg0 = copy_node (arg0);
5787 TREE_TYPE (arg0) = type;
5788 return arg0;
5789 }
5790 else if (TREE_CODE (type) != INTEGER_TYPE)
5791 return fold (build3 (COND_EXPR, type, arg0,
5792 fold (build1 (code, type,
5793 integer_one_node)),
5794 fold (build1 (code, type,
5795 integer_zero_node))));
5796 }
5797 }
5798 else if (TREE_CODE_CLASS (code) == '<'
5799 && TREE_CODE (arg0) == COMPOUND_EXPR)
5800 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5801 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
5802 else if (TREE_CODE_CLASS (code) == '<'
5803 && TREE_CODE (arg1) == COMPOUND_EXPR)
5804 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5805 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
5806 else if (TREE_CODE_CLASS (code) == '2'
5807 || TREE_CODE_CLASS (code) == '<')
5808 {
5809 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5810 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5811 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
5812 arg1)));
5813 if (TREE_CODE (arg1) == COMPOUND_EXPR
5814 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5815 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5816 fold (build2 (code, type,
5817 arg0, TREE_OPERAND (arg1, 1))));
5818
5819 if (TREE_CODE (arg0) == COND_EXPR
5820 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5821 {
5822 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5823 /*cond_first_p=*/1);
5824 if (tem != NULL_TREE)
5825 return tem;
5826 }
5827
5828 if (TREE_CODE (arg1) == COND_EXPR
5829 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5830 {
5831 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5832 /*cond_first_p=*/0);
5833 if (tem != NULL_TREE)
5834 return tem;
5835 }
5836 }
5837
5838 switch (code)
5839 {
5840 case CONST_DECL:
5841 return fold (DECL_INITIAL (t));
5842
5843 case NOP_EXPR:
5844 case FLOAT_EXPR:
5845 case CONVERT_EXPR:
5846 case FIX_TRUNC_EXPR:
5847 case FIX_CEIL_EXPR:
5848 case FIX_FLOOR_EXPR:
5849 case FIX_ROUND_EXPR:
5850 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5851 return TREE_OPERAND (t, 0);
5852
5853 /* Handle cases of two conversions in a row. */
5854 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5855 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5856 {
5857 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5858 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5859 int inside_int = INTEGRAL_TYPE_P (inside_type);
5860 int inside_ptr = POINTER_TYPE_P (inside_type);
5861 int inside_float = FLOAT_TYPE_P (inside_type);
5862 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5863 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5864 int inter_int = INTEGRAL_TYPE_P (inter_type);
5865 int inter_ptr = POINTER_TYPE_P (inter_type);
5866 int inter_float = FLOAT_TYPE_P (inter_type);
5867 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5868 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5869 int final_int = INTEGRAL_TYPE_P (type);
5870 int final_ptr = POINTER_TYPE_P (type);
5871 int final_float = FLOAT_TYPE_P (type);
5872 unsigned int final_prec = TYPE_PRECISION (type);
5873 int final_unsignedp = TYPE_UNSIGNED (type);
5874
5875 /* In addition to the cases of two conversions in a row
5876 handled below, if we are converting something to its own
5877 type via an object of identical or wider precision, neither
5878 conversion is needed. */
5879 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5880 && ((inter_int && final_int) || (inter_float && final_float))
5881 && inter_prec >= final_prec)
5882 return fold (build1 (code, type,
5883 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5884
5885 /* Likewise, if the intermediate and final types are either both
5886 float or both integer, we don't need the middle conversion if
5887 it is wider than the final type and doesn't change the signedness
5888 (for integers). Avoid this if the final type is a pointer
5889 since then we sometimes need the inner conversion. Likewise if
5890 the outer has a precision not equal to the size of its mode. */
5891 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5892 || (inter_float && inside_float))
5893 && inter_prec >= inside_prec
5894 && (inter_float || inter_unsignedp == inside_unsignedp)
5895 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5896 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5897 && ! final_ptr)
5898 return fold (build1 (code, type,
5899 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5900
5901 /* If we have a sign-extension of a zero-extended value, we can
5902 replace that by a single zero-extension. */
5903 if (inside_int && inter_int && final_int
5904 && inside_prec < inter_prec && inter_prec < final_prec
5905 && inside_unsignedp && !inter_unsignedp)
5906 return fold (build1 (code, type,
5907 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5908
5909 /* Two conversions in a row are not needed unless:
5910 - some conversion is floating-point (overstrict for now), or
5911 - the intermediate type is narrower than both initial and
5912 final, or
5913 - the intermediate type and innermost type differ in signedness,
5914 and the outermost type is wider than the intermediate, or
5915 - the initial type is a pointer type and the precisions of the
5916 intermediate and final types differ, or
5917 - the final type is a pointer type and the precisions of the
5918 initial and intermediate types differ. */
5919 if (! inside_float && ! inter_float && ! final_float
5920 && (inter_prec > inside_prec || inter_prec > final_prec)
5921 && ! (inside_int && inter_int
5922 && inter_unsignedp != inside_unsignedp
5923 && inter_prec < final_prec)
5924 && ((inter_unsignedp && inter_prec > inside_prec)
5925 == (final_unsignedp && final_prec > inter_prec))
5926 && ! (inside_ptr && inter_prec != final_prec)
5927 && ! (final_ptr && inside_prec != inter_prec)
5928 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5929 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5930 && ! final_ptr)
5931 return fold (build1 (code, type,
5932 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5933 }
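/* For instance, on a target with 8-bit unsigned char, 16-bit short
   and 32-bit int: (float)(double)F for a float F drops both
   conversions, and (int)(short)X for an unsigned char X becomes
   the single zero-extension (int)X.  */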
5934
5935 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5936 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5937 /* Detect assigning a bitfield. */
5938 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5939 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5940 {
5941 /* Don't leave an assignment inside a conversion
5942 unless assigning a bitfield. */
5943 tree prev = TREE_OPERAND (t, 0);
5944 tem = copy_node (t);
5945 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5946 /* First do the assignment, then return converted constant. */
5947 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5948 TREE_NO_WARNING (tem) = 1;
5949 TREE_USED (tem) = 1;
5950 return tem;
5951 }
5952
5953 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5954 	 constant (if x has signed type, the sign bit cannot be set
5955 in c). This folds extension into the BIT_AND_EXPR. */
5956 if (INTEGRAL_TYPE_P (type)
5957 && TREE_CODE (type) != BOOLEAN_TYPE
5958 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5959 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5960 {
5961 tree and = TREE_OPERAND (t, 0);
5962 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5963 int change = 0;
5964
5965 if (TYPE_UNSIGNED (TREE_TYPE (and))
5966 || (TYPE_PRECISION (type)
5967 <= TYPE_PRECISION (TREE_TYPE (and))))
5968 change = 1;
5969 else if (TYPE_PRECISION (TREE_TYPE (and1))
5970 <= HOST_BITS_PER_WIDE_INT
5971 && host_integerp (and1, 1))
5972 {
5973 unsigned HOST_WIDE_INT cst;
5974
5975 cst = tree_low_cst (and1, 1);
5976 cst &= (HOST_WIDE_INT) -1
5977 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5978 change = (cst == 0);
5979 #ifdef LOAD_EXTEND_OP
5980 if (change
5981 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5982 == ZERO_EXTEND))
5983 {
5984 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5985 and0 = fold_convert (uns, and0);
5986 and1 = fold_convert (uns, and1);
5987 }
5988 #endif
5989 }
5990 if (change)
5991 return fold (build2 (BIT_AND_EXPR, type,
5992 fold_convert (type, and0),
5993 fold_convert (type, and1)));
5994 }
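/* For example, for an unsigned char X, (unsigned int)(X & 0x7f)
   becomes (unsigned int)X & 0x7f, folding the extension into the
   BIT_AND_EXPR.  */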
5995
5996 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
5997 T2 being pointers to types of the same size. */
5998 if (POINTER_TYPE_P (TREE_TYPE (t))
5999 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6000 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6001 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6002 {
6003 tree arg00 = TREE_OPERAND (arg0, 0);
6004 tree t0 = TREE_TYPE (t);
6005 tree t1 = TREE_TYPE (arg00);
6006 tree tt0 = TREE_TYPE (t0);
6007 tree tt1 = TREE_TYPE (t1);
6008 tree s0 = TYPE_SIZE (tt0);
6009 tree s1 = TYPE_SIZE (tt1);
6010
6011 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6012 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6013 TREE_OPERAND (arg0, 1));
6014 }
6015
6016 tem = fold_convert_const (code, type, arg0);
6017 return tem ? tem : t;
6018
6019 case VIEW_CONVERT_EXPR:
6020 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6021 return build1 (VIEW_CONVERT_EXPR, type,
6022 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6023 return t;
6024
6025 case COMPONENT_REF:
6026 if (TREE_CODE (arg0) == CONSTRUCTOR
6027 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6028 {
6029 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6030 if (m)
6031 return TREE_VALUE (m);
6032 }
6033 return t;
6034
6035 case RANGE_EXPR:
6036 if (TREE_CONSTANT (t) != wins)
6037 {
6038 tem = copy_node (t);
6039 TREE_CONSTANT (tem) = wins;
6040 TREE_INVARIANT (tem) = wins;
6041 return tem;
6042 }
6043 return t;
6044
6045 case NEGATE_EXPR:
6046 if (negate_expr_p (arg0))
6047 return fold_convert (type, negate_expr (arg0));
6048 return t;
6049
6050 case ABS_EXPR:
6051 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6052 return fold_abs_const (arg0, type);
6053 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6054 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6055 /* Convert fabs((double)float) into (double)fabsf(float). */
6056 else if (TREE_CODE (arg0) == NOP_EXPR
6057 && TREE_CODE (type) == REAL_TYPE)
6058 {
6059 tree targ0 = strip_float_extensions (arg0);
6060 if (targ0 != arg0)
6061 return fold_convert (type, fold (build1 (ABS_EXPR,
6062 TREE_TYPE (targ0),
6063 targ0)));
6064 }
6065 else if (tree_expr_nonnegative_p (arg0))
6066 return arg0;
6067 return t;
6068
6069 case CONJ_EXPR:
6070 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6071 return fold_convert (type, arg0);
6072 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6073 return build2 (COMPLEX_EXPR, type,
6074 TREE_OPERAND (arg0, 0),
6075 negate_expr (TREE_OPERAND (arg0, 1)));
6076 else if (TREE_CODE (arg0) == COMPLEX_CST)
6077 return build_complex (type, TREE_REALPART (arg0),
6078 negate_expr (TREE_IMAGPART (arg0)));
6079 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6080 return fold (build2 (TREE_CODE (arg0), type,
6081 fold (build1 (CONJ_EXPR, type,
6082 TREE_OPERAND (arg0, 0))),
6083 fold (build1 (CONJ_EXPR, type,
6084 TREE_OPERAND (arg0, 1)))));
6085 else if (TREE_CODE (arg0) == CONJ_EXPR)
6086 return TREE_OPERAND (arg0, 0);
6087 return t;
6088
6089 case BIT_NOT_EXPR:
6090 if (TREE_CODE (arg0) == INTEGER_CST)
6091 return fold_not_const (arg0, type);
6092 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6093 return TREE_OPERAND (arg0, 0);
6094 return t;
6095
6096 case PLUS_EXPR:
6097 /* A + (-B) -> A - B */
6098 if (TREE_CODE (arg1) == NEGATE_EXPR)
6099 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6100 /* (-A) + B -> B - A */
6101 if (TREE_CODE (arg0) == NEGATE_EXPR
6102 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6103 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6104 if (! FLOAT_TYPE_P (type))
6105 {
6106 if (integer_zerop (arg1))
6107 return non_lvalue (fold_convert (type, arg0));
6108
6109 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6110 with a constant, and the two constants have no bits in common,
6111 we should treat this as a BIT_IOR_EXPR since this may produce more
6112 simplifications. */
6113 if (TREE_CODE (arg0) == BIT_AND_EXPR
6114 && TREE_CODE (arg1) == BIT_AND_EXPR
6115 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6116 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6117 && integer_zerop (const_binop (BIT_AND_EXPR,
6118 TREE_OPERAND (arg0, 1),
6119 TREE_OPERAND (arg1, 1), 0)))
6120 {
6121 code = BIT_IOR_EXPR;
6122 goto bit_ior;
6123 }
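/* For example, (X & 0xF0) + (Y & 0x0F) is treated as
   (X & 0xF0) | (Y & 0x0F): the masked values have no bits in
   common, so no carry can occur.  */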
6124
6125 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6126 (plus (plus (mult) (mult)) (foo)) so that we can
6127 take advantage of the factoring cases below. */
6128 if ((TREE_CODE (arg0) == PLUS_EXPR
6129 && TREE_CODE (arg1) == MULT_EXPR)
6130 || (TREE_CODE (arg1) == PLUS_EXPR
6131 && TREE_CODE (arg0) == MULT_EXPR))
6132 {
6133 tree parg0, parg1, parg, marg;
6134
6135 if (TREE_CODE (arg0) == PLUS_EXPR)
6136 parg = arg0, marg = arg1;
6137 else
6138 parg = arg1, marg = arg0;
6139 parg0 = TREE_OPERAND (parg, 0);
6140 parg1 = TREE_OPERAND (parg, 1);
6141 STRIP_NOPS (parg0);
6142 STRIP_NOPS (parg1);
6143
6144 if (TREE_CODE (parg0) == MULT_EXPR
6145 && TREE_CODE (parg1) != MULT_EXPR)
6146 return fold (build2 (PLUS_EXPR, type,
6147 fold (build2 (PLUS_EXPR, type,
6148 fold_convert (type, parg0),
6149 fold_convert (type, marg))),
6150 fold_convert (type, parg1)));
6151 if (TREE_CODE (parg0) != MULT_EXPR
6152 && TREE_CODE (parg1) == MULT_EXPR)
6153 return fold (build2 (PLUS_EXPR, type,
6154 fold (build2 (PLUS_EXPR, type,
6155 fold_convert (type, parg1),
6156 fold_convert (type, marg))),
6157 fold_convert (type, parg0)));
6158 }
6159
6160 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6161 {
6162 tree arg00, arg01, arg10, arg11;
6163 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6164
6165 /* (A * C) + (B * C) -> (A+B) * C.
6166 We are most concerned about the case where C is a constant,
6167 but other combinations show up during loop reduction. Since
6168 it is not difficult, try all four possibilities. */
6169
6170 arg00 = TREE_OPERAND (arg0, 0);
6171 arg01 = TREE_OPERAND (arg0, 1);
6172 arg10 = TREE_OPERAND (arg1, 0);
6173 arg11 = TREE_OPERAND (arg1, 1);
6174 same = NULL_TREE;
6175
6176 if (operand_equal_p (arg01, arg11, 0))
6177 same = arg01, alt0 = arg00, alt1 = arg10;
6178 else if (operand_equal_p (arg00, arg10, 0))
6179 same = arg00, alt0 = arg01, alt1 = arg11;
6180 else if (operand_equal_p (arg00, arg11, 0))
6181 same = arg00, alt0 = arg01, alt1 = arg10;
6182 else if (operand_equal_p (arg01, arg10, 0))
6183 same = arg01, alt0 = arg00, alt1 = arg11;
6184
6185 /* No identical multiplicands; see if we can find a common
6186 power-of-two factor in non-power-of-two multiplies. This
6187 can help in multi-dimensional array access. */
6188 else if (TREE_CODE (arg01) == INTEGER_CST
6189 && TREE_CODE (arg11) == INTEGER_CST
6190 && TREE_INT_CST_HIGH (arg01) == 0
6191 && TREE_INT_CST_HIGH (arg11) == 0)
6192 {
6193 HOST_WIDE_INT int01, int11, tmp;
6194 int01 = TREE_INT_CST_LOW (arg01);
6195 int11 = TREE_INT_CST_LOW (arg11);
6196
6197 /* Move min of absolute values to int11. */
6198 if ((int01 >= 0 ? int01 : -int01)
6199 < (int11 >= 0 ? int11 : -int11))
6200 {
6201 tmp = int01, int01 = int11, int11 = tmp;
6202 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6203 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6204 }
6205
6206 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6207 {
6208 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6209 build_int_2 (int01 / int11, 0)));
6210 alt1 = arg10;
6211 same = arg11;
6212 }
6213 }
6214
6215 if (same)
6216 return fold (build2 (MULT_EXPR, type,
6217 fold (build2 (PLUS_EXPR, type,
6218 alt0, alt1)),
6219 same));
6220 }
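/* Two concrete cases of the factoring above: A*C + B*C becomes
   (A + B)*C directly, while I*12 + J*4 shares only the
   power-of-two factor 4 and becomes (I*3 + J)*4.  */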
6221 }
6222 else
6223 {
6224 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6225 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6226 return non_lvalue (fold_convert (type, arg0));
6227
6228 /* Likewise if the operands are reversed. */
6229 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6230 return non_lvalue (fold_convert (type, arg1));
6231
6232 /* Convert x+x into x*2.0. */
6233 if (operand_equal_p (arg0, arg1, 0)
6234 && SCALAR_FLOAT_TYPE_P (type))
6235 return fold (build2 (MULT_EXPR, type, arg0,
6236 build_real (type, dconst2)));
6237
6238 /* Convert x*c+x into x*(c+1). */
6239 if (flag_unsafe_math_optimizations
6240 && TREE_CODE (arg0) == MULT_EXPR
6241 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6242 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6243 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6244 {
6245 REAL_VALUE_TYPE c;
6246
6247 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6248 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6249 return fold (build2 (MULT_EXPR, type, arg1,
6250 build_real (type, c)));
6251 }
6252
6253 /* Convert x+x*c into x*(c+1). */
6254 if (flag_unsafe_math_optimizations
6255 && TREE_CODE (arg1) == MULT_EXPR
6256 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6257 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6258 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6259 {
6260 REAL_VALUE_TYPE c;
6261
6262 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6263 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6264 return fold (build2 (MULT_EXPR, type, arg0,
6265 build_real (type, c)));
6266 }
6267
6268 /* Convert x*c1+x*c2 into x*(c1+c2). */
6269 if (flag_unsafe_math_optimizations
6270 && TREE_CODE (arg0) == MULT_EXPR
6271 && TREE_CODE (arg1) == MULT_EXPR
6272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6273 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6274 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6275 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6276 && operand_equal_p (TREE_OPERAND (arg0, 0),
6277 TREE_OPERAND (arg1, 0), 0))
6278 {
6279 REAL_VALUE_TYPE c1, c2;
6280
6281 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6282 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6283 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6284 return fold (build2 (MULT_EXPR, type,
6285 TREE_OPERAND (arg0, 0),
6286 build_real (type, c1)));
6287 }
6288 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6289 if (flag_unsafe_math_optimizations
6290 && TREE_CODE (arg1) == PLUS_EXPR
6291 && TREE_CODE (arg0) != MULT_EXPR)
6292 {
6293 tree tree10 = TREE_OPERAND (arg1, 0);
6294 tree tree11 = TREE_OPERAND (arg1, 1);
6295 if (TREE_CODE (tree11) == MULT_EXPR
6296 && TREE_CODE (tree10) == MULT_EXPR)
6297 {
6298 tree tree0;
6299 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6300 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6301 }
6302 }
6303 	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
6304 if (flag_unsafe_math_optimizations
6305 && TREE_CODE (arg0) == PLUS_EXPR
6306 && TREE_CODE (arg1) != MULT_EXPR)
6307 {
6308 tree tree00 = TREE_OPERAND (arg0, 0);
6309 tree tree01 = TREE_OPERAND (arg0, 1);
6310 if (TREE_CODE (tree01) == MULT_EXPR
6311 && TREE_CODE (tree00) == MULT_EXPR)
6312 {
6313 tree tree0;
6314 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6315 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6316 }
6317 }
6318 }
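/* For example, X + X folds to X*2.0 above, and with
   flag_unsafe_math_optimizations X*2.0 + X folds to X*3.0 and
   X*2.0 + X*3.0 to X*5.0.  */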
6319
6320 bit_rotate:
6321       /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
6322 	 is a rotate of A by C1 bits.  */
6323       /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
6324 	 is a rotate of A by B bits.  */
6325 {
6326 enum tree_code code0, code1;
6327 code0 = TREE_CODE (arg0);
6328 code1 = TREE_CODE (arg1);
6329 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6330 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6331 && operand_equal_p (TREE_OPERAND (arg0, 0),
6332 TREE_OPERAND (arg1, 0), 0)
6333 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6334 {
6335 tree tree01, tree11;
6336 enum tree_code code01, code11;
6337
6338 tree01 = TREE_OPERAND (arg0, 1);
6339 tree11 = TREE_OPERAND (arg1, 1);
6340 STRIP_NOPS (tree01);
6341 STRIP_NOPS (tree11);
6342 code01 = TREE_CODE (tree01);
6343 code11 = TREE_CODE (tree11);
6344 if (code01 == INTEGER_CST
6345 && code11 == INTEGER_CST
6346 && TREE_INT_CST_HIGH (tree01) == 0
6347 && TREE_INT_CST_HIGH (tree11) == 0
6348 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6349 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6350 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6351 code0 == LSHIFT_EXPR ? tree01 : tree11);
6352 else if (code11 == MINUS_EXPR)
6353 {
6354 tree tree110, tree111;
6355 tree110 = TREE_OPERAND (tree11, 0);
6356 tree111 = TREE_OPERAND (tree11, 1);
6357 STRIP_NOPS (tree110);
6358 STRIP_NOPS (tree111);
6359 if (TREE_CODE (tree110) == INTEGER_CST
6360 && 0 == compare_tree_int (tree110,
6361 TYPE_PRECISION
6362 (TREE_TYPE (TREE_OPERAND
6363 (arg0, 0))))
6364 && operand_equal_p (tree01, tree111, 0))
6365 return build2 ((code0 == LSHIFT_EXPR
6366 ? LROTATE_EXPR
6367 : RROTATE_EXPR),
6368 type, TREE_OPERAND (arg0, 0), tree01);
6369 }
6370 else if (code01 == MINUS_EXPR)
6371 {
6372 tree tree010, tree011;
6373 tree010 = TREE_OPERAND (tree01, 0);
6374 tree011 = TREE_OPERAND (tree01, 1);
6375 STRIP_NOPS (tree010);
6376 STRIP_NOPS (tree011);
6377 if (TREE_CODE (tree010) == INTEGER_CST
6378 && 0 == compare_tree_int (tree010,
6379 TYPE_PRECISION
6380 (TREE_TYPE (TREE_OPERAND
6381 (arg0, 0))))
6382 && operand_equal_p (tree11, tree011, 0))
6383 return build2 ((code0 != LSHIFT_EXPR
6384 ? LROTATE_EXPR
6385 : RROTATE_EXPR),
6386 type, TREE_OPERAND (arg0, 0), tree11);
6387 }
6388 }
6389 }
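/* For example, for a 32-bit unsigned X, (X << 3) + (X >> 29)
   becomes X rotated left by 3 (an LROTATE_EXPR), and
   (X >> N) + (X << (32 - N)) becomes a right rotate by N.  */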
6390
6391 associate:
6392       /* In most languages, we can't associate operations on floats through
6393 parentheses. Rather than remember where the parentheses were, we
6394 don't associate floats at all, unless the user has specified
6395 -funsafe-math-optimizations. */
6396
6397 if (! wins
6398 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6399 {
6400 tree var0, con0, lit0, minus_lit0;
6401 tree var1, con1, lit1, minus_lit1;
6402
6403 /* Split both trees into variables, constants, and literals. Then
6404 associate each group together, the constants with literals,
6405 then the result with variables. This increases the chances of
6406 literals being recombined later and of generating relocatable
6407 expressions for the sum of a constant and literal. */
6408 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6409 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6410 code == MINUS_EXPR);
6411
6412 /* Only do something if we found more than two objects. Otherwise,
6413 nothing has changed and we risk infinite recursion. */
6414 if (2 < ((var0 != 0) + (var1 != 0)
6415 + (con0 != 0) + (con1 != 0)
6416 + (lit0 != 0) + (lit1 != 0)
6417 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6418 {
6419 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6420 if (code == MINUS_EXPR)
6421 code = PLUS_EXPR;
6422
6423 var0 = associate_trees (var0, var1, code, type);
6424 con0 = associate_trees (con0, con1, code, type);
6425 lit0 = associate_trees (lit0, lit1, code, type);
6426 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6427
6428 /* Preserve the MINUS_EXPR if the negative part of the literal is
6429 greater than the positive part. Otherwise, the multiplicative
6430 	     folding code (i.e. extract_muldiv) may be fooled when
6431 	     unsigned constants are subtracted, as in the following
6432 	     example: ((X*2 + 4) - 8U)/2.  */
6433 if (minus_lit0 && lit0)
6434 {
6435 if (TREE_CODE (lit0) == INTEGER_CST
6436 && TREE_CODE (minus_lit0) == INTEGER_CST
6437 && tree_int_cst_lt (lit0, minus_lit0))
6438 {
6439 minus_lit0 = associate_trees (minus_lit0, lit0,
6440 MINUS_EXPR, type);
6441 lit0 = 0;
6442 }
6443 else
6444 {
6445 lit0 = associate_trees (lit0, minus_lit0,
6446 MINUS_EXPR, type);
6447 minus_lit0 = 0;
6448 }
6449 }
6450 if (minus_lit0)
6451 {
6452 if (con0 == 0)
6453 return fold_convert (type,
6454 associate_trees (var0, minus_lit0,
6455 MINUS_EXPR, type));
6456 else
6457 {
6458 con0 = associate_trees (con0, minus_lit0,
6459 MINUS_EXPR, type);
6460 return fold_convert (type,
6461 associate_trees (var0, con0,
6462 PLUS_EXPR, type));
6463 }
6464 }
6465
6466 con0 = associate_trees (con0, lit0, code, type);
6467 return fold_convert (type, associate_trees (var0, con0,
6468 code, type));
6469 }
6470 }
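/* For example, (X + 1) + (Y + 2) splits into variables X, Y and
   literals 1, 2 and re-associates to (X + Y) + 3; likewise
   (X + 6) - 2 becomes X + 4 via the MINUS_EXPR handling above.  */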
6471
6472 binary:
6473 if (wins)
6474 t1 = const_binop (code, arg0, arg1, 0);
6475 if (t1 != NULL_TREE)
6476 {
6477 /* The return value should always have
6478 the same type as the original expression. */
6479 if (TREE_TYPE (t1) != type)
6480 t1 = fold_convert (type, t1);
6481
6482 return t1;
6483 }
6484 return t;
6485
6486 case MINUS_EXPR:
6487 /* A - (-B) -> A + B */
6488 if (TREE_CODE (arg1) == NEGATE_EXPR)
6489 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6490 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6491 if (TREE_CODE (arg0) == NEGATE_EXPR
6492 && (FLOAT_TYPE_P (type)
6493 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6494 && negate_expr_p (arg1)
6495 && reorder_operands_p (arg0, arg1))
6496 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6497 TREE_OPERAND (arg0, 0)));
6498
6499 if (! FLOAT_TYPE_P (type))
6500 {
6501 if (! wins && integer_zerop (arg0))
6502 return negate_expr (fold_convert (type, arg1));
6503 if (integer_zerop (arg1))
6504 return non_lvalue (fold_convert (type, arg0));
6505
6506 /* Fold A - (A & B) into ~B & A. */
6507 if (!TREE_SIDE_EFFECTS (arg0)
6508 && TREE_CODE (arg1) == BIT_AND_EXPR)
6509 {
6510 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6511 return fold (build2 (BIT_AND_EXPR, type,
6512 fold (build1 (BIT_NOT_EXPR, type,
6513 TREE_OPERAND (arg1, 0))),
6514 arg0));
6515 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6516 return fold (build2 (BIT_AND_EXPR, type,
6517 fold (build1 (BIT_NOT_EXPR, type,
6518 TREE_OPERAND (arg1, 1))),
6519 arg0));
6520 }
6521
6522 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6523 any power of 2 minus 1. */
6524 if (TREE_CODE (arg0) == BIT_AND_EXPR
6525 && TREE_CODE (arg1) == BIT_AND_EXPR
6526 && operand_equal_p (TREE_OPERAND (arg0, 0),
6527 TREE_OPERAND (arg1, 0), 0))
6528 {
6529 tree mask0 = TREE_OPERAND (arg0, 1);
6530 tree mask1 = TREE_OPERAND (arg1, 1);
6531 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6532
6533 if (operand_equal_p (tem, mask1, 0))
6534 {
6535 tem = fold (build2 (BIT_XOR_EXPR, type,
6536 TREE_OPERAND (arg0, 0), mask1));
6537 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6538 }
6539 }
6540 }
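/* E.g. A - (A & B) becomes ~B & A per the fold above, and with
   B = 7 (a power of 2 minus 1), (A & ~7) - (A & 7) becomes
   (A ^ 7) - 7.  */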
6541
6542 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6543 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6544 return non_lvalue (fold_convert (type, arg0));
6545
6546 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6547 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6548 (-ARG1 + ARG0) reduces to -ARG1. */
6549 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6550 return negate_expr (fold_convert (type, arg1));
6551
6552 /* Fold &x - &x. This can happen from &x.foo - &x.
6553 This is unsafe for certain floats even in non-IEEE formats.
6554 	 In IEEE, it is unsafe because it gives the wrong result for NaNs.
6555 Also note that operand_equal_p is always false if an operand
6556 is volatile. */
6557
6558 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6559 && operand_equal_p (arg0, arg1, 0))
6560 return fold_convert (type, integer_zero_node);
6561
6562 /* A - B -> A + (-B) if B is easily negatable. */
6563 if (!wins && negate_expr_p (arg1)
6564 && (FLOAT_TYPE_P (type)
6565 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6566 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6567
6568 if (TREE_CODE (arg0) == MULT_EXPR
6569 && TREE_CODE (arg1) == MULT_EXPR
6570 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6571 {
6572 /* (A * C) - (B * C) -> (A-B) * C. */
6573 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6574 TREE_OPERAND (arg1, 1), 0))
6575 return fold (build2 (MULT_EXPR, type,
6576 fold (build2 (MINUS_EXPR, type,
6577 TREE_OPERAND (arg0, 0),
6578 TREE_OPERAND (arg1, 0))),
6579 TREE_OPERAND (arg0, 1)));
6580 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6581 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6582 TREE_OPERAND (arg1, 0), 0))
6583 return fold (build2 (MULT_EXPR, type,
6584 TREE_OPERAND (arg0, 0),
6585 fold (build2 (MINUS_EXPR, type,
6586 TREE_OPERAND (arg0, 1),
6587 TREE_OPERAND (arg1, 1)))));
6588 }
6589
6590 goto associate;
6591
6592 case MULT_EXPR:
6593 /* (-A) * (-B) -> A * B */
6594 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6595 return fold (build2 (MULT_EXPR, type,
6596 TREE_OPERAND (arg0, 0),
6597 negate_expr (arg1)));
6598 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6599 return fold (build2 (MULT_EXPR, type,
6600 negate_expr (arg0),
6601 TREE_OPERAND (arg1, 0)));
6602
6603 if (! FLOAT_TYPE_P (type))
6604 {
6605 if (integer_zerop (arg1))
6606 return omit_one_operand (type, arg1, arg0);
6607 if (integer_onep (arg1))
6608 return non_lvalue (fold_convert (type, arg0));
6609
6610 /* (a * (1 << b)) is (a << b) */
6611 if (TREE_CODE (arg1) == LSHIFT_EXPR
6612 && integer_onep (TREE_OPERAND (arg1, 0)))
6613 return fold (build2 (LSHIFT_EXPR, type, arg0,
6614 TREE_OPERAND (arg1, 1)));
6615 if (TREE_CODE (arg0) == LSHIFT_EXPR
6616 && integer_onep (TREE_OPERAND (arg0, 0)))
6617 return fold (build2 (LSHIFT_EXPR, type, arg1,
6618 TREE_OPERAND (arg0, 1)));
6619
6620 if (TREE_CODE (arg1) == INTEGER_CST
6621 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6622 fold_convert (type, arg1),
6623 code, NULL_TREE)))
6624 return fold_convert (type, tem);
6625
6626 }
6627 else
6628 {
6629 /* Maybe fold x * 0 to 0. The expressions aren't the same
6630 when x is NaN, since x * 0 is also NaN. Nor are they the
6631 same in modes with signed zeros, since multiplying a
6632 negative value by 0 gives -0, not +0. */
6633 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6634 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6635 && real_zerop (arg1))
6636 return omit_one_operand (type, arg1, arg0);
6637 	  /* In IEEE floating point, x*1 is not equivalent to x for sNaNs.  */
6638 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6639 && real_onep (arg1))
6640 return non_lvalue (fold_convert (type, arg0));
6641
6642 /* Transform x * -1.0 into -x. */
6643 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6644 && real_minus_onep (arg1))
6645 return fold_convert (type, negate_expr (arg0));
6646
6647 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6648 if (flag_unsafe_math_optimizations
6649 && TREE_CODE (arg0) == RDIV_EXPR
6650 && TREE_CODE (arg1) == REAL_CST
6651 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6652 {
6653 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6654 arg1, 0);
6655 if (tem)
6656 return fold (build2 (RDIV_EXPR, type, tem,
6657 TREE_OPERAND (arg0, 1)));
6658 }
6659
6660 if (flag_unsafe_math_optimizations)
6661 {
6662 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6663 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6664
6665 /* Optimizations of root(...)*root(...). */
6666 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6667 {
6668 tree rootfn, arg, arglist;
6669 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6670 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6671
6672 /* Optimize sqrt(x)*sqrt(x) as x. */
6673 if (BUILTIN_SQRT_P (fcode0)
6674 && operand_equal_p (arg00, arg10, 0)
6675 && ! HONOR_SNANS (TYPE_MODE (type)))
6676 return arg00;
6677
6678 /* Optimize root(x)*root(y) as root(x*y). */
6679 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6680 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
6681 arglist = build_tree_list (NULL_TREE, arg);
6682 return build_function_call_expr (rootfn, arglist);
6683 }
6684
6685 /* Optimize expN(x)*expN(y) as expN(x+y). */
6686 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6687 {
6688 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6689 tree arg = build2 (PLUS_EXPR, type,
6690 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6691 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6692 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6693 return build_function_call_expr (expfn, arglist);
6694 }
6695
6696 /* Optimizations of pow(...)*pow(...). */
6697 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6698 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6699 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6700 {
6701 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6702 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6703 1)));
6704 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6705 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6706 1)));
6707
6708 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6709 if (operand_equal_p (arg01, arg11, 0))
6710 {
6711 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6712 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
6713 tree arglist = tree_cons (NULL_TREE, fold (arg),
6714 build_tree_list (NULL_TREE,
6715 arg01));
6716 return build_function_call_expr (powfn, arglist);
6717 }
6718
6719 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6720 if (operand_equal_p (arg00, arg10, 0))
6721 {
6722 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6723 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
6724 tree arglist = tree_cons (NULL_TREE, arg00,
6725 build_tree_list (NULL_TREE,
6726 arg));
6727 return build_function_call_expr (powfn, arglist);
6728 }
6729 }
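/* E.g. pow (x, 3.0) * pow (y, 3.0) becomes pow (x*y, 3.0), and
   pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0), both only
   under flag_unsafe_math_optimizations.  */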
6730
6731 /* Optimize tan(x)*cos(x) as sin(x). */
6732 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6733 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6734 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6735 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6736 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6737 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6738 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6739 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6740 {
6741 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6742
6743 if (sinfn != NULL_TREE)
6744 return build_function_call_expr (sinfn,
6745 TREE_OPERAND (arg0, 1));
6746 }
6747
6748 /* Optimize x*pow(x,c) as pow(x,c+1). */
6749 if (fcode1 == BUILT_IN_POW
6750 || fcode1 == BUILT_IN_POWF
6751 || fcode1 == BUILT_IN_POWL)
6752 {
6753 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6754 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6755 1)));
6756 if (TREE_CODE (arg11) == REAL_CST
6757 && ! TREE_CONSTANT_OVERFLOW (arg11)
6758 && operand_equal_p (arg0, arg10, 0))
6759 {
6760 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6761 REAL_VALUE_TYPE c;
6762 tree arg, arglist;
6763
6764 c = TREE_REAL_CST (arg11);
6765 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6766 arg = build_real (type, c);
6767 arglist = build_tree_list (NULL_TREE, arg);
6768 arglist = tree_cons (NULL_TREE, arg0, arglist);
6769 return build_function_call_expr (powfn, arglist);
6770 }
6771 }
6772
6773 /* Optimize pow(x,c)*x as pow(x,c+1). */
6774 if (fcode0 == BUILT_IN_POW
6775 || fcode0 == BUILT_IN_POWF
6776 || fcode0 == BUILT_IN_POWL)
6777 {
6778 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6779 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6780 1)));
6781 if (TREE_CODE (arg01) == REAL_CST
6782 && ! TREE_CONSTANT_OVERFLOW (arg01)
6783 && operand_equal_p (arg1, arg00, 0))
6784 {
6785 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6786 REAL_VALUE_TYPE c;
6787 tree arg, arglist;
6788
6789 c = TREE_REAL_CST (arg01);
6790 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6791 arg = build_real (type, c);
6792 arglist = build_tree_list (NULL_TREE, arg);
6793 arglist = tree_cons (NULL_TREE, arg1, arglist);
6794 return build_function_call_expr (powfn, arglist);
6795 }
6796 }
6797
6798 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6799 if (! optimize_size
6800 && operand_equal_p (arg0, arg1, 0))
6801 {
6802 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6803
6804 if (powfn)
6805 {
6806 tree arg = build_real (type, dconst2);
6807 tree arglist = build_tree_list (NULL_TREE, arg);
6808 arglist = tree_cons (NULL_TREE, arg0, arglist);
6809 return build_function_call_expr (powfn, arglist);
6810 }
6811 }
6812 }
6813 }
6814 goto associate;
6815
6816 case BIT_IOR_EXPR:
6817 bit_ior:
6818 if (integer_all_onesp (arg1))
6819 return omit_one_operand (type, arg1, arg0);
6820 if (integer_zerop (arg1))
6821 return non_lvalue (fold_convert (type, arg0));
6822 if (operand_equal_p (arg0, arg1, 0))
6823 return non_lvalue (fold_convert (type, arg0));
6824 t1 = distribute_bit_expr (code, type, arg0, arg1);
6825 if (t1 != NULL_TREE)
6826 return t1;
6827
6828 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6829
6830 This results in more efficient code for machines without a NAND
6831 instruction. Combine will canonicalize to the first form
6832 which will allow use of NAND instructions provided by the
6833 backend if they exist. */
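/* E.g. ~a | ~b becomes ~(a & b), one De Morgan step that drops
   a BIT_NOT_EXPR from the tree.  */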
6834 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6835 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6836 {
6837 return fold (build1 (BIT_NOT_EXPR, type,
6838 build2 (BIT_AND_EXPR, type,
6839 TREE_OPERAND (arg0, 0),
6840 TREE_OPERAND (arg1, 0))));
6841 }
6842
6843 /* See if this can be simplified into a rotate first. If that
6844 is unsuccessful continue in the association code. */
6845 goto bit_rotate;
6846
6847 case BIT_XOR_EXPR:
6848 if (integer_zerop (arg1))
6849 return non_lvalue (fold_convert (type, arg0));
6850 if (integer_all_onesp (arg1))
6851 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6852 if (operand_equal_p (arg0, arg1, 0))
6853 return omit_one_operand (type, integer_zero_node, arg0);
6854
6855 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6856 with a constant, and the two constants have no bits in common,
6857 we should treat this as a BIT_IOR_EXPR since this may produce more
6858 simplifications. */
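/* For example, (a & 4) ^ (b & 3) masks disjoint bits, so it is
   equivalent to (a & 4) | (b & 3) and is re-folded as an IOR.  */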
6859 if (TREE_CODE (arg0) == BIT_AND_EXPR
6860 && TREE_CODE (arg1) == BIT_AND_EXPR
6861 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6862 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6863 && integer_zerop (const_binop (BIT_AND_EXPR,
6864 TREE_OPERAND (arg0, 1),
6865 TREE_OPERAND (arg1, 1), 0)))
6866 {
6867 code = BIT_IOR_EXPR;
6868 goto bit_ior;
6869 }
6870
6871 /* See if this can be simplified into a rotate first. If that
6872 is unsuccessful continue in the association code. */
6873 goto bit_rotate;
6874
6875 case BIT_AND_EXPR:
6876 if (integer_all_onesp (arg1))
6877 return non_lvalue (fold_convert (type, arg0));
6878 if (integer_zerop (arg1))
6879 return omit_one_operand (type, arg1, arg0);
6880 if (operand_equal_p (arg0, arg1, 0))
6881 return non_lvalue (fold_convert (type, arg0));
6882 t1 = distribute_bit_expr (code, type, arg0, arg1);
6883 if (t1 != NULL_TREE)
6884 return t1;
6885 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
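/* E.g. with unsigned char c, every bit of c survives the mask,
   so ((int) c & 0377) reduces to the conversion (int) c alone.  */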
6886 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6887 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6888 {
6889 unsigned int prec
6890 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6891
6892 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6893 && (~TREE_INT_CST_LOW (arg1)
6894 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6895 return fold_convert (type, TREE_OPERAND (arg0, 0));
6896 }
6897
6898 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6899
6900 This results in more efficient code for machines without a NOR
6901 instruction. Combine will canonicalize to the first form
6902 which will allow use of NOR instructions provided by the
6903 backend if they exist. */
6904 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6905 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6906 {
6907 return fold (build1 (BIT_NOT_EXPR, type,
6908 build2 (BIT_IOR_EXPR, type,
6909 TREE_OPERAND (arg0, 0),
6910 TREE_OPERAND (arg1, 0))));
6911 }
6912
6913 goto associate;
6914
6915 case RDIV_EXPR:
6916 /* Don't touch a floating-point divide by zero unless the mode
6917 of the constant can represent infinity. */
6918 if (TREE_CODE (arg1) == REAL_CST
6919 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6920 && real_zerop (arg1))
6921 return t;
6922
6923 /* (-A) / (-B) -> A / B */
6924 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6925 return fold (build2 (RDIV_EXPR, type,
6926 TREE_OPERAND (arg0, 0),
6927 negate_expr (arg1)));
6928 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6929 return fold (build2 (RDIV_EXPR, type,
6930 negate_expr (arg0),
6931 TREE_OPERAND (arg1, 0)));
6932
6933 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6934 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6935 && real_onep (arg1))
6936 return non_lvalue (fold_convert (type, arg0));
6937
6938 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6939 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6940 && real_minus_onep (arg1))
6941 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6942
6943 /* If ARG1 is a constant, we can convert this to a multiply by the
6944 reciprocal. This does not have the same rounding properties,
6945 so only do this if -funsafe-math-optimizations. We can actually
6946 always safely do it if ARG1 is a power of two, but it's hard to
6947 tell if it is or not in a portable manner. */
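/* Concretely: x / 3.0 becomes x * (1.0/3.0) only under
   -funsafe-math-optimizations, whereas x / 4.0 becomes x * 0.25
   whenever we are optimizing, since the reciprocal of a power of
   two is exact.  */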
6948 if (TREE_CODE (arg1) == REAL_CST)
6949 {
6950 if (flag_unsafe_math_optimizations
6951 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6952 arg1, 0)))
6953 return fold (build2 (MULT_EXPR, type, arg0, tem));
6954 /* Find the reciprocal if optimizing and the result is exact. */
6955 if (optimize)
6956 {
6957 REAL_VALUE_TYPE r;
6958 r = TREE_REAL_CST (arg1);
6959 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6960 {
6961 tem = build_real (type, r);
6962 return fold (build2 (MULT_EXPR, type, arg0, tem));
6963 }
6964 }
6965 }
6966 /* Convert A/B/C to A/(B*C). */
6967 if (flag_unsafe_math_optimizations
6968 && TREE_CODE (arg0) == RDIV_EXPR)
6969 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6970 fold (build2 (MULT_EXPR, type,
6971 TREE_OPERAND (arg0, 1), arg1))));
6972
6973 /* Convert A/(B/C) to (A/B)*C. */
6974 if (flag_unsafe_math_optimizations
6975 && TREE_CODE (arg1) == RDIV_EXPR)
6976 return fold (build2 (MULT_EXPR, type,
6977 fold (build2 (RDIV_EXPR, type, arg0,
6978 TREE_OPERAND (arg1, 0))),
6979 TREE_OPERAND (arg1, 1)));
6980
6981 /* Convert C1/(X*C2) into (C1/C2)/X. */
6982 if (flag_unsafe_math_optimizations
6983 && TREE_CODE (arg1) == MULT_EXPR
6984 && TREE_CODE (arg0) == REAL_CST
6985 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6986 {
6987 tree tem = const_binop (RDIV_EXPR, arg0,
6988 TREE_OPERAND (arg1, 1), 0);
6989 if (tem)
6990 return fold (build2 (RDIV_EXPR, type, tem,
6991 TREE_OPERAND (arg1, 0)));
6992 }
6993
6994 if (flag_unsafe_math_optimizations)
6995 {
6996 enum built_in_function fcode = builtin_mathfn_code (arg1);
6997 /* Optimize x/expN(y) into x*expN(-y). */
6998 if (BUILTIN_EXPONENT_P (fcode))
6999 {
7000 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7001 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7002 tree arglist = build_tree_list (NULL_TREE,
7003 fold_convert (type, arg));
7004 arg1 = build_function_call_expr (expfn, arglist);
7005 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7006 }
7007
7008 /* Optimize x/pow(y,z) into x*pow(y,-z). */
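/* As with expN above: e.g. x / pow (y, 3.0) becomes
   x * pow (y, -3.0), trading the division for a multiplication.  */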
7009 if (fcode == BUILT_IN_POW
7010 || fcode == BUILT_IN_POWF
7011 || fcode == BUILT_IN_POWL)
7012 {
7013 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7014 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7015 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7016 tree neg11 = fold_convert (type, negate_expr (arg11));
7017 tree arglist = tree_cons (NULL_TREE, arg10,
7018 build_tree_list (NULL_TREE, neg11));
7019 arg1 = build_function_call_expr (powfn, arglist);
7020 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7021 }
7022 }
7023
7024 if (flag_unsafe_math_optimizations)
7025 {
7026 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7027 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7028
7029 /* Optimize sin(x)/cos(x) as tan(x). */
7030 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7031 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7032 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7033 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7034 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7035 {
7036 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7037
7038 if (tanfn != NULL_TREE)
7039 return build_function_call_expr (tanfn,
7040 TREE_OPERAND (arg0, 1));
7041 }
7042
7043 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7044 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7045 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7046 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7047 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7048 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7049 {
7050 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7051
7052 if (tanfn != NULL_TREE)
7053 {
7054 tree tmp = TREE_OPERAND (arg0, 1);
7055 tmp = build_function_call_expr (tanfn, tmp);
7056 return fold (build2 (RDIV_EXPR, type,
7057 build_real (type, dconst1), tmp));
7058 }
7059 }
7060
7061 /* Optimize pow(x,c)/x as pow(x,c-1). */
7062 if (fcode0 == BUILT_IN_POW
7063 || fcode0 == BUILT_IN_POWF
7064 || fcode0 == BUILT_IN_POWL)
7065 {
7066 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7067 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7068 if (TREE_CODE (arg01) == REAL_CST
7069 && ! TREE_CONSTANT_OVERFLOW (arg01)
7070 && operand_equal_p (arg1, arg00, 0))
7071 {
7072 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7073 REAL_VALUE_TYPE c;
7074 tree arg, arglist;
7075
7076 c = TREE_REAL_CST (arg01);
7077 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7078 arg = build_real (type, c);
7079 arglist = build_tree_list (NULL_TREE, arg);
7080 arglist = tree_cons (NULL_TREE, arg1, arglist);
7081 return build_function_call_expr (powfn, arglist);
7082 }
7083 }
7084 }
7085 goto binary;
7086
7087 case TRUNC_DIV_EXPR:
7088 case ROUND_DIV_EXPR:
7089 case FLOOR_DIV_EXPR:
7090 case CEIL_DIV_EXPR:
7091 case EXACT_DIV_EXPR:
7092 if (integer_onep (arg1))
7093 return non_lvalue (fold_convert (type, arg0));
7094 if (integer_zerop (arg1))
7095 return t;
7096 /* X / -1 is -X. */
7097 if (!TYPE_UNSIGNED (type)
7098 && TREE_CODE (arg1) == INTEGER_CST
7099 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7100 && TREE_INT_CST_HIGH (arg1) == -1)
7101 return fold_convert (type, negate_expr (arg0));
7102
7103 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7104 operation, EXACT_DIV_EXPR.
7105
7106 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7107 At one time others generated faster code, but it's not clear whether they
7108 still do after the last round of changes to the DIV code in expmed.c. */
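/* For instance, when multiple_of_p can prove arg0 is a multiple
   of 4, arg0 CEIL_DIV 4 is rewritten as arg0 EXACT_DIV 4 below.  */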
7109 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7110 && multiple_of_p (type, arg0, arg1))
7111 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7112
7113 if (TREE_CODE (arg1) == INTEGER_CST
7114 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7115 code, NULL_TREE)))
7116 return fold_convert (type, tem);
7117
7118 goto binary;
7119
7120 case CEIL_MOD_EXPR:
7121 case FLOOR_MOD_EXPR:
7122 case ROUND_MOD_EXPR:
7123 case TRUNC_MOD_EXPR:
7124 if (integer_onep (arg1))
7125 return omit_one_operand (type, integer_zero_node, arg0);
7126 if (integer_zerop (arg1))
7127 return t;
7128 /* X % -1 is zero. */
7129 if (!TYPE_UNSIGNED (type)
7130 && TREE_CODE (arg1) == INTEGER_CST
7131 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7132 && TREE_INT_CST_HIGH (arg1) == -1)
7133 return omit_one_operand (type, integer_zero_node, arg0);
7134
7135 if (TREE_CODE (arg1) == INTEGER_CST
7136 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7137 code, NULL_TREE)))
7138 return fold_convert (type, tem);
7139
7140 goto binary;
7141
7142 case LROTATE_EXPR:
7143 case RROTATE_EXPR:
7144 if (integer_all_onesp (arg0))
7145 return omit_one_operand (type, arg0, arg1);
7146 goto shift;
7147
7148 case RSHIFT_EXPR:
7149 /* Optimize -1 >> x for arithmetic right shifts. */
7150 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7151 return omit_one_operand (type, arg0, arg1);
7152 /* ... fall through ... */
7153
7154 case LSHIFT_EXPR:
7155 shift:
7156 if (integer_zerop (arg1))
7157 return non_lvalue (fold_convert (type, arg0));
7158 if (integer_zerop (arg0))
7159 return omit_one_operand (type, arg0, arg1);
7160
7161 /* Since a negative shift count is not well-defined,
7162 don't try to compute it in the compiler. */
7163 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7164 return t;
7165 /* Rewrite an LROTATE_EXPR by a constant into an
7166 RROTATE_EXPR by a new constant. */
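/* E.g. rotating a 32-bit value left by 5 becomes rotating it
   right by 27, so later code only has to recognize RROTATE_EXPR.  */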
7167 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7168 {
7169 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7170 tem = fold_convert (TREE_TYPE (arg1), tem);
7171 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7172 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7173 }
7174
7175 /* If we have a rotate of a bit operation with the rotate count and
7176 the second operand of the bit operation both constant,
7177 permute the two operations. */
7178 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7179 && (TREE_CODE (arg0) == BIT_AND_EXPR
7180 || TREE_CODE (arg0) == BIT_IOR_EXPR
7181 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7182 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7183 return fold (build2 (TREE_CODE (arg0), type,
7184 fold (build2 (code, type,
7185 TREE_OPERAND (arg0, 0), arg1)),
7186 fold (build2 (code, type,
7187 TREE_OPERAND (arg0, 1), arg1))));
7188
7189 /* Two consecutive rotates adding up to the width of the mode can
7190 be ignored. */
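/* E.g. ((x RROTATE 10) RROTATE 22) in a 32-bit mode is just x,
   because the two counts sum to the mode width.  */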
7191 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7192 && TREE_CODE (arg0) == RROTATE_EXPR
7193 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7194 && TREE_INT_CST_HIGH (arg1) == 0
7195 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7196 && ((TREE_INT_CST_LOW (arg1)
7197 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7198 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7199 return TREE_OPERAND (arg0, 0);
7200
7201 goto binary;
7202
7203 case MIN_EXPR:
7204 if (operand_equal_p (arg0, arg1, 0))
7205 return omit_one_operand (type, arg0, arg1);
7206 if (INTEGRAL_TYPE_P (type)
7207 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7208 return omit_one_operand (type, arg1, arg0);
7209 goto associate;
7210
7211 case MAX_EXPR:
7212 if (operand_equal_p (arg0, arg1, 0))
7213 return omit_one_operand (type, arg0, arg1);
7214 if (INTEGRAL_TYPE_P (type)
7215 && TYPE_MAX_VALUE (type)
7216 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7217 return omit_one_operand (type, arg1, arg0);
7218 goto associate;
7219
7220 case TRUTH_NOT_EXPR:
7221 /* The argument to invert_truthvalue must have Boolean type. */
7222 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7223 arg0 = fold_convert (boolean_type_node, arg0);
7224
7225 /* Note that the operand of this must be an int
7226 and its value must be 0 or 1.
7227 ("true" is a fixed value perhaps depending on the language,
7228 but we don't handle values other than 1 correctly yet.) */
7229 tem = invert_truthvalue (arg0);
7230 /* Avoid infinite recursion. */
7231 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7232 {
7233 tem = fold_single_bit_test (code, arg0, arg1, type);
7234 if (tem)
7235 return tem;
7236 return t;
7237 }
7238 return fold_convert (type, tem);
7239
7240 case TRUTH_ANDIF_EXPR:
7241 /* Note that the operands of this must be ints
7242 and their values must be 0 or 1.
7243 ("true" is a fixed value perhaps depending on the language.) */
7244 /* If first arg is constant zero, return it. */
7245 if (integer_zerop (arg0))
7246 return fold_convert (type, arg0);
7247 case TRUTH_AND_EXPR:
7248 /* If either arg is constant true, drop it. */
7249 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7250 return non_lvalue (fold_convert (type, arg1));
7251 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7252 /* Preserve sequence points. */
7253 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7254 return non_lvalue (fold_convert (type, arg0));
7255 /* If second arg is constant zero, result is zero, but first arg
7256 must be evaluated. */
7257 if (integer_zerop (arg1))
7258 return omit_one_operand (type, arg1, arg0);
7259 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7260 case will be handled here. */
7261 if (integer_zerop (arg0))
7262 return omit_one_operand (type, arg0, arg1);
7263
7264 truth_andor:
7265 /* We only do these simplifications if we are optimizing. */
7266 if (!optimize)
7267 return t;
7268
7269 /* Check for things like (A || B) && (A || C). We can convert this
7270 to A || (B && C). Note that either operator can be any of the four
7271 truth and/or operations and the transformation will still be
7272 valid. Also note that we only care about order for the
7273 ANDIF and ORIF operators. If B contains side effects, this
7274 might change the truth-value of A. */
7275 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7276 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7277 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7278 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7279 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7280 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7281 {
7282 tree a00 = TREE_OPERAND (arg0, 0);
7283 tree a01 = TREE_OPERAND (arg0, 1);
7284 tree a10 = TREE_OPERAND (arg1, 0);
7285 tree a11 = TREE_OPERAND (arg1, 1);
7286 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7287 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7288 && (code == TRUTH_AND_EXPR
7289 || code == TRUTH_OR_EXPR));
7290
7291 if (operand_equal_p (a00, a10, 0))
7292 return fold (build2 (TREE_CODE (arg0), type, a00,
7293 fold (build2 (code, type, a01, a11))));
7294 else if (commutative && operand_equal_p (a00, a11, 0))
7295 return fold (build2 (TREE_CODE (arg0), type, a00,
7296 fold (build2 (code, type, a01, a10))));
7297 else if (commutative && operand_equal_p (a01, a10, 0))
7298 return fold (build2 (TREE_CODE (arg0), type, a01,
7299 fold (build2 (code, type, a00, a11))));
7300
7301 /* This case is tricky because we must either have commutative
7302 operators or else A10 must not have side-effects. */
7303
7304 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7305 && operand_equal_p (a01, a11, 0))
7306 return fold (build2 (TREE_CODE (arg0), type,
7307 fold (build2 (code, type, a00, a10)),
7308 a01));
7309 }
7310
7311 /* See if we can build a range comparison. */
7312 if (0 != (tem = fold_range_test (t)))
7313 return tem;
7314
7315 /* Check for the possibility of merging component references. If our
7316 lhs is another similar operation, try to merge its rhs with our
7317 rhs. Then try to merge our lhs and rhs. */
7318 if (TREE_CODE (arg0) == code
7319 && 0 != (tem = fold_truthop (code, type,
7320 TREE_OPERAND (arg0, 1), arg1)))
7321 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7322
7323 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7324 return tem;
7325
7326 return t;
7327
7328 case TRUTH_ORIF_EXPR:
7329 /* Note that the operands of this must be ints
7330 and their values must be 0 or true.
7331 ("true" is a fixed value perhaps depending on the language.) */
7332 /* If first arg is constant true, return it. */
7333 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7334 return fold_convert (type, arg0);
7335 case TRUTH_OR_EXPR:
7336 /* If either arg is constant zero, drop it. */
7337 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7338 return non_lvalue (fold_convert (type, arg1));
7339 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7340 /* Preserve sequence points. */
7341 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7342 return non_lvalue (fold_convert (type, arg0));
7343 /* If second arg is constant true, result is true, but we must
7344 evaluate first arg. */
7345 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7346 return omit_one_operand (type, arg1, arg0);
7347 /* Likewise for first arg, but note this only occurs here for
7348 TRUTH_OR_EXPR. */
7349 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7350 return omit_one_operand (type, arg0, arg1);
7351 goto truth_andor;
7352
7353 case TRUTH_XOR_EXPR:
7354 /* If either arg is constant zero, drop it. */
7355 if (integer_zerop (arg0))
7356 return non_lvalue (fold_convert (type, arg1));
7357 if (integer_zerop (arg1))
7358 return non_lvalue (fold_convert (type, arg0));
7359 /* If either arg is constant true, this is a logical inversion. */
7360 if (integer_onep (arg0))
7361 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7362 if (integer_onep (arg1))
7363 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7364 /* Identical arguments cancel to zero. */
7365 if (operand_equal_p (arg0, arg1, 0))
7366 return omit_one_operand (type, integer_zero_node, arg0);
7367 return t;
7368
7369 case EQ_EXPR:
7370 case NE_EXPR:
7371 case LT_EXPR:
7372 case GT_EXPR:
7373 case LE_EXPR:
7374 case GE_EXPR:
7375 /* If one arg is a real or integer constant, put it last. */
7376 if (tree_swap_operands_p (arg0, arg1, true))
7377 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7378
7379 /* If this is an equality comparison of the address of a non-weak
7380 object against zero, then we know the result. */
7381 if ((code == EQ_EXPR || code == NE_EXPR)
7382 && TREE_CODE (arg0) == ADDR_EXPR
7383 && DECL_P (TREE_OPERAND (arg0, 0))
7384 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7385 && integer_zerop (arg1))
7386 return constant_boolean_node (code != EQ_EXPR, type);
7387
7388 /* If this is an equality comparison of the address of two non-weak,
7389 unaliased symbols neither of which are extern (since we do not
7390 have access to attributes for externs), then we know the result. */
7391 if ((code == EQ_EXPR || code == NE_EXPR)
7392 && TREE_CODE (arg0) == ADDR_EXPR
7393 && DECL_P (TREE_OPERAND (arg0, 0))
7394 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7395 && ! lookup_attribute ("alias",
7396 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7397 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7398 && TREE_CODE (arg1) == ADDR_EXPR
7399 && DECL_P (TREE_OPERAND (arg1, 0))
7400 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7401 && ! lookup_attribute ("alias",
7402 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7403 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7404 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7405 ? code == EQ_EXPR : code != EQ_EXPR,
7406 type);
7407
7408 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7409 {
7410 tree targ0 = strip_float_extensions (arg0);
7411 tree targ1 = strip_float_extensions (arg1);
7412 tree newtype = TREE_TYPE (targ0);
7413
7414 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7415 newtype = TREE_TYPE (targ1);
7416
7417 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7418 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7419 return fold (build2 (code, type, fold_convert (newtype, targ0),
7420 fold_convert (newtype, targ1)));
7421
7422 /* (-a) CMP (-b) -> b CMP a */
7423 if (TREE_CODE (arg0) == NEGATE_EXPR
7424 && TREE_CODE (arg1) == NEGATE_EXPR)
7425 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7426 TREE_OPERAND (arg0, 0)));
7427
7428 if (TREE_CODE (arg1) == REAL_CST)
7429 {
7430 REAL_VALUE_TYPE cst;
7431 cst = TREE_REAL_CST (arg1);
7432
7433 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7434 if (TREE_CODE (arg0) == NEGATE_EXPR)
7435 return
7436 fold (build2 (swap_tree_comparison (code), type,
7437 TREE_OPERAND (arg0, 0),
7438 build_real (TREE_TYPE (arg1),
7439 REAL_VALUE_NEGATE (cst))));
7440
7441 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7442 /* a CMP (-0) -> a CMP 0 */
7443 if (REAL_VALUE_MINUS_ZERO (cst))
7444 return fold (build2 (code, type, arg0,
7445 build_real (TREE_TYPE (arg1), dconst0)));
7446
7447 /* x != NaN is always true, other ops are always false. */
7448 if (REAL_VALUE_ISNAN (cst)
7449 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7450 {
7451 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7452 return omit_one_operand (type, tem, arg0);
7453 }
7454
7455 /* Fold comparisons against infinity. */
7456 if (REAL_VALUE_ISINF (cst))
7457 {
7458 tem = fold_inf_compare (code, type, arg0, arg1);
7459 if (tem != NULL_TREE)
7460 return tem;
7461 }
7462 }
7463
7464 /* If this is a comparison of a real constant with a PLUS_EXPR
7465 or a MINUS_EXPR of a real constant, we can convert it into a
7466 comparison with a revised real constant, provided that unsafe math
7467 optimizations are enabled and no overflow occurs. */
7468 if (flag_unsafe_math_optimizations
7469 && TREE_CODE (arg1) == REAL_CST
7470 && (TREE_CODE (arg0) == PLUS_EXPR
7471 || TREE_CODE (arg0) == MINUS_EXPR)
7472 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7473 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7474 ? MINUS_EXPR : PLUS_EXPR,
7475 arg1, TREE_OPERAND (arg0, 1), 0))
7476 && ! TREE_CONSTANT_OVERFLOW (tem))
7477 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7478
7479 /* Likewise, we can simplify a comparison of a real constant with
7480 a MINUS_EXPR whose first operand is also a real constant, i.e.
7481 (c1 - x) < c2 becomes x > c1-c2. */
7482 if (flag_unsafe_math_optimizations
7483 && TREE_CODE (arg1) == REAL_CST
7484 && TREE_CODE (arg0) == MINUS_EXPR
7485 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7486 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7487 arg1, 0))
7488 && ! TREE_CONSTANT_OVERFLOW (tem))
7489 return fold (build2 (swap_tree_comparison (code), type,
7490 TREE_OPERAND (arg0, 1), tem));
7491
7492 /* Fold comparisons against built-in math functions. */
7493 if (TREE_CODE (arg1) == REAL_CST
7494 && flag_unsafe_math_optimizations
7495 && ! flag_errno_math)
7496 {
7497 enum built_in_function fcode = builtin_mathfn_code (arg0);
7498
7499 if (fcode != END_BUILTINS)
7500 {
7501 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7502 if (tem != NULL_TREE)
7503 return tem;
7504 }
7505 }
7506 }
7507
7508 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7509 if (TREE_CONSTANT (arg1)
7510 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7511 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7512 /* This optimization is invalid for ordered comparisons
7513 if CONST+INCR overflows or if foo+incr might overflow.
7514 This optimization is invalid for floating point due to rounding.
7515 For pointer types we assume overflow doesn't happen. */
7516 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7517 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7518 && (code == EQ_EXPR || code == NE_EXPR))))
7519 {
7520 tree varop, newconst;
7521
7522 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7523 {
7524 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7525 arg1, TREE_OPERAND (arg0, 1)));
7526 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7527 TREE_OPERAND (arg0, 0),
7528 TREE_OPERAND (arg0, 1));
7529 }
7530 else
7531 {
7532 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7533 arg1, TREE_OPERAND (arg0, 1)));
7534 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7535 TREE_OPERAND (arg0, 0),
7536 TREE_OPERAND (arg0, 1));
7537 }
7538
7539
7540 /* If VAROP is a reference to a bitfield, we must mask
7541 the constant by the width of the field. */
7542 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7543 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7544 {
7545 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7546 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7547 tree folded_compare, shift;
7548
7549 /* First check whether the comparison would come out
7550 always the same. If we don't do that, we would
7551 change the meaning with the masking. */
7552 folded_compare = fold (build2 (code, type,
7553 TREE_OPERAND (varop, 0),
7554 arg1));
7555 if (integer_zerop (folded_compare)
7556 || integer_onep (folded_compare))
7557 return omit_one_operand (type, folded_compare, varop);
7558
7559 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7560 0);
7561 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7562 newconst, shift));
7563 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7564 newconst, shift));
7565 }
7566
7567 return fold (build2 (code, type, varop, newconst));
7568 }
7569
7570 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7571 This transformation affects the cases which are handled in later
7572 optimizations involving comparisons with non-negative constants. */
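/* E.g. x >= 3 becomes x > 2 and x < 3 becomes x <= 2, which is
   why the GE/LT cases rarely survive to the code below.  */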
7573 if (TREE_CODE (arg1) == INTEGER_CST
7574 && TREE_CODE (arg0) != INTEGER_CST
7575 && tree_int_cst_sgn (arg1) > 0)
7576 {
7577 switch (code)
7578 {
7579 case GE_EXPR:
7580 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7581 return fold (build2 (GT_EXPR, type, arg0, arg1));
7582
7583 case LT_EXPR:
7584 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7585 return fold (build2 (LE_EXPR, type, arg0, arg1));
7586
7587 default:
7588 break;
7589 }
7590 }
7591
7592 /* Comparisons with the highest or lowest possible integer of
7593 the specified size will have known values.
7594
7595 This is quite similar to fold_relational_hi_lo; however, my
7596 attempts to share the code have been nothing but trouble.
7597 I give up for now. */
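/* E.g. for unsigned char: x > 255 folds to 0, x <= 255 folds to
   1, and x > 254 becomes x == 255; the signed minimum and maximum
   are handled analogously.  */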
7598 {
7599 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7600
7601 if (TREE_CODE (arg1) == INTEGER_CST
7602 && ! TREE_CONSTANT_OVERFLOW (arg1)
7603 && width <= HOST_BITS_PER_WIDE_INT
7604 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7605 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7606 {
7607 unsigned HOST_WIDE_INT signed_max;
7608 unsigned HOST_WIDE_INT max, min;
7609
7610 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7611
7612 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7613 {
7614 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7615 min = 0;
7616 }
7617 else
7618 {
7619 max = signed_max;
7620 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7621 }
7622
7623 if (TREE_INT_CST_HIGH (arg1) == 0
7624 && TREE_INT_CST_LOW (arg1) == max)
7625 switch (code)
7626 {
7627 case GT_EXPR:
7628 return omit_one_operand (type, integer_zero_node, arg0);
7629
7630 case GE_EXPR:
7631 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7632
7633 case LE_EXPR:
7634 return omit_one_operand (type, integer_one_node, arg0);
7635
7636 case LT_EXPR:
7637 return fold (build2 (NE_EXPR, type, arg0, arg1));
7638
7639 /* The GE_EXPR and LT_EXPR cases above are not normally
7640 reached because of previous transformations. */
7641
7642 default:
7643 break;
7644 }
7645 else if (TREE_INT_CST_HIGH (arg1) == 0
7646 && TREE_INT_CST_LOW (arg1) == max - 1)
7647 switch (code)
7648 {
7649 case GT_EXPR:
7650 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7651 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7652 case LE_EXPR:
7653 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7654 return fold (build2 (NE_EXPR, type, arg0, arg1));
7655 default:
7656 break;
7657 }
7658 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7659 && TREE_INT_CST_LOW (arg1) == min)
7660 switch (code)
7661 {
7662 case LT_EXPR:
7663 return omit_one_operand (type, integer_zero_node, arg0);
7664
7665 case LE_EXPR:
7666 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7667
7668 case GE_EXPR:
7669 return omit_one_operand (type, integer_one_node, arg0);
7670
7671 case GT_EXPR:
7672 return fold (build2 (NE_EXPR, type, arg0, arg1));
7673
7674 default:
7675 break;
7676 }
7677 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7678 && TREE_INT_CST_LOW (arg1) == min + 1)
7679 switch (code)
7680 {
7681 case GE_EXPR:
7682 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7683 return fold (build2 (NE_EXPR, type, arg0, arg1));
7684 case LT_EXPR:
7685 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7686 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7687 default:
7688 break;
7689 }
7690
7691 else if (!in_gimple_form
7692 && TREE_INT_CST_HIGH (arg1) == 0
7693 && TREE_INT_CST_LOW (arg1) == signed_max
7694 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7695 /* signed_type does not work on pointer types. */
7696 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7697 {
7698 /* The following case also applies to X < signed_max+1
7699 and X >= signed_max+1 because of previous transformations. */
7700 if (code == LE_EXPR || code == GT_EXPR)
7701 {
7702 tree st0, st1;
7703 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7704 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7705 return fold
7706 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7707 type, fold_convert (st0, arg0),
7708 fold_convert (st1, integer_zero_node)));
7709 }
7710 }
7711 }
7712 }
7713
7714 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7715 a MINUS_EXPR of a constant, we can convert it into a comparison with
7716 a revised constant as long as no overflow occurs. */
7717 if ((code == EQ_EXPR || code == NE_EXPR)
7718 && TREE_CODE (arg1) == INTEGER_CST
7719 && (TREE_CODE (arg0) == PLUS_EXPR
7720 || TREE_CODE (arg0) == MINUS_EXPR)
7721 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7722 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7723 ? MINUS_EXPR : PLUS_EXPR,
7724 arg1, TREE_OPERAND (arg0, 1), 0))
7725 && ! TREE_CONSTANT_OVERFLOW (tem))
7726 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7727
7728 /* Similarly for a NEGATE_EXPR. */
7729 else if ((code == EQ_EXPR || code == NE_EXPR)
7730 && TREE_CODE (arg0) == NEGATE_EXPR
7731 && TREE_CODE (arg1) == INTEGER_CST
7732 && 0 != (tem = negate_expr (arg1))
7733 && TREE_CODE (tem) == INTEGER_CST
7734 && ! TREE_CONSTANT_OVERFLOW (tem))
7735 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7736
7737 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7738 for !=. Don't do this for ordered comparisons due to overflow. */
7739 else if ((code == NE_EXPR || code == EQ_EXPR)
7740 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7741 return fold (build2 (code, type,
7742 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7743
7744 /* If we are widening one operand of an integer comparison,
7745 see if the other operand is similarly being widened. Perhaps we
7746 can do the comparison in the narrower type. */
7747 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7748 && TREE_CODE (arg0) == NOP_EXPR
7749 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7750 && (code == EQ_EXPR || code == NE_EXPR
7751 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7752 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7753 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7754 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7755 || (TREE_CODE (t1) == INTEGER_CST
7756 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7757 return fold (build2 (code, type, tem,
7758 fold_convert (TREE_TYPE (tem), t1)));
7759
7760 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7761 constant, we can simplify it. */
7762 else if (TREE_CODE (arg1) == INTEGER_CST
7763 && (TREE_CODE (arg0) == MIN_EXPR
7764 || TREE_CODE (arg0) == MAX_EXPR)
7765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7766 return optimize_minmax_comparison (t);
7767
7768 /* If we are comparing an ABS_EXPR with a constant, we can
7769 convert all the cases into explicit comparisons, but they may
7770 well not be faster than doing the ABS and one comparison.
7771 But ABS (X) <= C is a range comparison, which becomes a subtraction
7772 and a comparison, and is probably faster. */
7773 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7774 && TREE_CODE (arg0) == ABS_EXPR
7775 && ! TREE_SIDE_EFFECTS (arg0)
7776 && (0 != (tem = negate_expr (arg1)))
7777 && TREE_CODE (tem) == INTEGER_CST
7778 && ! TREE_CONSTANT_OVERFLOW (tem))
7779 return fold (build2 (TRUTH_ANDIF_EXPR, type,
7780 build2 (GE_EXPR, type,
7781 TREE_OPERAND (arg0, 0), tem),
7782 build2 (LE_EXPR, type,
7783 TREE_OPERAND (arg0, 0), arg1)));
7784
7785 /* If this is an EQ or NE comparison with zero and ARG0 is
7786 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7787 two operations, but the latter can be done in one less insn
7788 on machines that have only two-operand insns or on which a
7789 constant cannot be the first operand. */
7790 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7791 && TREE_CODE (arg0) == BIT_AND_EXPR)
7792 {
7793 tree arg00 = TREE_OPERAND (arg0, 0);
7794 tree arg01 = TREE_OPERAND (arg0, 1);
7795 if (TREE_CODE (arg00) == LSHIFT_EXPR
7796 && integer_onep (TREE_OPERAND (arg00, 0)))
7797 return
7798 fold (build2 (code, type,
7799 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7800 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
7801 arg01, TREE_OPERAND (arg00, 1)),
7802 fold_convert (TREE_TYPE (arg0),
7803 integer_one_node)),
7804 arg1));
7805 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7806 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7807 return
7808 fold (build2 (code, type,
7809 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7810 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
7811 arg00, TREE_OPERAND (arg01, 1)),
7812 fold_convert (TREE_TYPE (arg0),
7813 integer_one_node)),
7814 arg1));
7815 }
7816
7817 /* If this is an NE or EQ comparison of zero against the result of a
7818 signed MOD operation whose second operand is a power of 2, make
7819 the MOD operation unsigned since it is simpler and equivalent. */
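/* E.g. with int x, x % 4 == 0 becomes (unsigned) x % 4 == 0;
   for a power-of-two modulus the two tests agree, and the unsigned
   form can be done with a simple mask.  */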
7820 if ((code == NE_EXPR || code == EQ_EXPR)
7821 && integer_zerop (arg1)
7822 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7823 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7824 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7825 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7826 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7827 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7828 {
7829 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7830 tree newmod = build2 (TREE_CODE (arg0), newtype,
7831 fold_convert (newtype,
7832 TREE_OPERAND (arg0, 0)),
7833 fold_convert (newtype,
7834 TREE_OPERAND (arg0, 1)));
7835
7836 return build2 (code, type, newmod, fold_convert (newtype, arg1));
7837 }
7838
7839 /* If this is an NE comparison of zero with an AND of one, remove the
7840 comparison since the AND will give the correct value. */
7841 if (code == NE_EXPR && integer_zerop (arg1)
7842 && TREE_CODE (arg0) == BIT_AND_EXPR
7843 && integer_onep (TREE_OPERAND (arg0, 1)))
7844 return fold_convert (type, arg0);
7845
7846 /* If we have (A & C) == C where C is a power of 2, convert this into
7847 (A & C) != 0. Similarly for NE_EXPR. */
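/* E.g. (a & 8) == 8 becomes (a & 8) != 0, a form the single-bit
   test folder just below knows how to expand into shifts.  */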
7848 if ((code == EQ_EXPR || code == NE_EXPR)
7849 && TREE_CODE (arg0) == BIT_AND_EXPR
7850 && integer_pow2p (TREE_OPERAND (arg0, 1))
7851 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7852 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7853 arg0, integer_zero_node));
7854
7855 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7856 2, then fold the expression into shifts and logical operations. */
7857 tem = fold_single_bit_test (code, arg0, arg1, type);
7858 if (tem)
7859 return tem;
7860
7861 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7862 Similarly for NE_EXPR. */
7863 if ((code == EQ_EXPR || code == NE_EXPR)
7864 && TREE_CODE (arg0) == BIT_AND_EXPR
7865 && TREE_CODE (arg1) == INTEGER_CST
7866 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7867 {
7868 tree dandnotc
7869 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7870 arg1, build1 (BIT_NOT_EXPR,
7871 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7872 TREE_OPERAND (arg0, 1))));
7873 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7874 if (integer_nonzerop (dandnotc))
7875 return omit_one_operand (type, rslt, arg0);
7876 }
7877
7878 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7879 Similarly for NE_EXPR. */
7880 if ((code == EQ_EXPR || code == NE_EXPR)
7881 && TREE_CODE (arg0) == BIT_IOR_EXPR
7882 && TREE_CODE (arg1) == INTEGER_CST
7883 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7884 {
7885 tree candnotd
7886 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7887 TREE_OPERAND (arg0, 1),
7888 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7889 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7890 if (integer_nonzerop (candnotd))
7891 return omit_one_operand (type, rslt, arg0);
7892 }
7893
7894 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7895 and similarly for >= into !=. */
7896 if ((code == LT_EXPR || code == GE_EXPR)
7897 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7898 && TREE_CODE (arg1) == LSHIFT_EXPR
7899 && integer_onep (TREE_OPERAND (arg1, 0)))
7900 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7901 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7902 TREE_OPERAND (arg1, 1)),
7903 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7904
7905 else if ((code == LT_EXPR || code == GE_EXPR)
7906 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7907 && (TREE_CODE (arg1) == NOP_EXPR
7908 || TREE_CODE (arg1) == CONVERT_EXPR)
7909 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7910 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7911 return
7912 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7913 fold_convert (TREE_TYPE (arg0),
7914 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7915 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7916 1))),
7917 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7918
7919 /* Simplify comparison of something with itself. (For IEEE
7920 floating-point, we can only do some of these simplifications.) */
7921 if (operand_equal_p (arg0, arg1, 0))
7922 {
7923 switch (code)
7924 {
7925 case EQ_EXPR:
7926 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7927 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7928 return constant_boolean_node (1, type);
7929 break;
7930
7931 case GE_EXPR:
7932 case LE_EXPR:
7933 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7934 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7935 return constant_boolean_node (1, type);
7936 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7937
7938 case NE_EXPR:
7939 /* For NE, we can only do this simplification if the type is integer
7940 or we don't honor IEEE floating-point NaNs. */
7941 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7942 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7943 break;
7944 /* ... fall through ... */
7945 case GT_EXPR:
7946 case LT_EXPR:
7947 return constant_boolean_node (0, type);
7948 default:
7949 abort ();
7950 }
7951 }
7952
7953 /* If we are comparing an expression that just has comparisons
7954 of two integer values, arithmetic expressions of those comparisons,
7955 and constants, we can simplify it. There are only three cases
7956 to check: the two values can either be equal, the first can be
7957 greater, or the second can be greater. Fold the expression for
7958 those three values. Since each value must be 0 or 1, we have
7959 eight possibilities, each of which corresponds to the constant 0
7960 or 1 or one of the six possible comparisons.
7961
7962 This handles common cases like (a > b) == 0 but also handles
7963 expressions like ((x > y) - (y > x)) > 0, which supposedly
7964 occur in macroized code. */
7965
7966 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7967 {
7968 tree cval1 = 0, cval2 = 0;
7969 int save_p = 0;
7970
7971 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7972 /* Don't handle degenerate cases here; they should already
7973 have been handled anyway. */
7974 && cval1 != 0 && cval2 != 0
7975 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7976 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7977 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7978 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7979 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7980 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7981 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7982 {
7983 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7984 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7985
7986 /* We can't just pass T to eval_subst in case cval1 or cval2
7987 was the same as ARG1. */
7988
7989 tree high_result
7990 = fold (build2 (code, type,
7991 eval_subst (arg0, cval1, maxval,
7992 cval2, minval),
7993 arg1));
7994 tree equal_result
7995 = fold (build2 (code, type,
7996 eval_subst (arg0, cval1, maxval,
7997 cval2, maxval),
7998 arg1));
7999 tree low_result
8000 = fold (build2 (code, type,
8001 eval_subst (arg0, cval1, minval,
8002 cval2, maxval),
8003 arg1));
8004
8005 /* All three of these results should be 0 or 1. Confirm they
8006 are. Then use those values to select the proper code
8007 to use. */
8008
8009 if ((integer_zerop (high_result)
8010 || integer_onep (high_result))
8011 && (integer_zerop (equal_result)
8012 || integer_onep (equal_result))
8013 && (integer_zerop (low_result)
8014 || integer_onep (low_result)))
8015 {
8016 /* Make a 3-bit mask with the high-order bit being the
8017 value for `>', the next for `=', and the low for `<'. */
8018 switch ((integer_onep (high_result) * 4)
8019 + (integer_onep (equal_result) * 2)
8020 + integer_onep (low_result))
8021 {
8022 case 0:
8023 /* Always false. */
8024 return omit_one_operand (type, integer_zero_node, arg0);
8025 case 1:
8026 code = LT_EXPR;
8027 break;
8028 case 2:
8029 code = EQ_EXPR;
8030 break;
8031 case 3:
8032 code = LE_EXPR;
8033 break;
8034 case 4:
8035 code = GT_EXPR;
8036 break;
8037 case 5:
8038 code = NE_EXPR;
8039 break;
8040 case 6:
8041 code = GE_EXPR;
8042 break;
8043 case 7:
8044 /* Always true. */
8045 return omit_one_operand (type, integer_one_node, arg0);
8046 }
8047
8048 tem = build2 (code, type, cval1, cval2);
8049 if (save_p)
8050 return save_expr (tem);
8051 else
8052 return fold (tem);
8053 }
8054 }
8055 }
8056
8057 /* If this is a comparison of a field, we may be able to simplify it. */
8058 if (((TREE_CODE (arg0) == COMPONENT_REF
8059 && lang_hooks.can_use_bit_fields_p ())
8060 || TREE_CODE (arg0) == BIT_FIELD_REF)
8061 && (code == EQ_EXPR || code == NE_EXPR)
8062 /* Handle the constant case even without -O
8063 to make sure the warnings are given. */
8064 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8065 {
8066 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8067 if (t1)
8068 return t1;
8069 }
8070
8071 /* If this is a comparison of complex values and either or both sides
8072 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8073 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8074 This may prevent needless evaluations. */
8075 if ((code == EQ_EXPR || code == NE_EXPR)
8076 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8077 && (TREE_CODE (arg0) == COMPLEX_EXPR
8078 || TREE_CODE (arg1) == COMPLEX_EXPR
8079 || TREE_CODE (arg0) == COMPLEX_CST
8080 || TREE_CODE (arg1) == COMPLEX_CST))
8081 {
8082 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8083 tree real0, imag0, real1, imag1;
8084
8085 arg0 = save_expr (arg0);
8086 arg1 = save_expr (arg1);
8087 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8088 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8089 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8090 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8091
8092 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8093 : TRUTH_ORIF_EXPR),
8094 type,
8095 fold (build2 (code, type, real0, real1)),
8096 fold (build2 (code, type, imag0, imag1))));
8097 }
8098
8099 /* Optimize comparisons of strlen vs zero to a compare of the
8100 first character of the string vs zero. To wit,
8101 strlen(ptr) == 0 => *ptr == 0
8102 strlen(ptr) != 0 => *ptr != 0
8103 Other cases should reduce to one of these two (or a constant)
8104 due to the return value of strlen being unsigned. */
8105 if ((code == EQ_EXPR || code == NE_EXPR)
8106 && integer_zerop (arg1)
8107 && TREE_CODE (arg0) == CALL_EXPR)
8108 {
8109 tree fndecl = get_callee_fndecl (arg0);
8110 tree arglist;
8111
8112 if (fndecl
8113 && DECL_BUILT_IN (fndecl)
8114 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8115 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8116 && (arglist = TREE_OPERAND (arg0, 1))
8117 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8118 && ! TREE_CHAIN (arglist))
8119 return fold (build2 (code, type,
8120 build1 (INDIRECT_REF, char_type_node,
8121 TREE_VALUE (arglist)),
8122 integer_zero_node));
8123 }
8124
8125 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8126 into a single range test. */
8127 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8128 && TREE_CODE (arg1) == INTEGER_CST
8129 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8130 && !integer_zerop (TREE_OPERAND (arg0, 1))
8131 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8132 && !TREE_OVERFLOW (arg1))
8133 {
8134 t1 = fold_div_compare (code, type, arg0, arg1);
8135 if (t1 != NULL_TREE)
8136 return t1;
8137 }
8138
8139 /* Both ARG0 and ARG1 are known to be constants at this point. */
8140 t1 = fold_relational_const (code, type, arg0, arg1);
8141 return (t1 == NULL_TREE ? t : t1);
8142
8143 case COND_EXPR:
8144 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8145 so all simple results must be passed through pedantic_non_lvalue. */
8146 if (TREE_CODE (arg0) == INTEGER_CST)
8147 {
8148 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8149 /* Only optimize constant conditions when the selected branch
8150 has the same type as the COND_EXPR. This avoids optimizing
8151 away "c ? x : throw", where the throw has a void type. */
8152 if (! VOID_TYPE_P (TREE_TYPE (tem))
8153 || VOID_TYPE_P (type))
8154 return pedantic_non_lvalue (tem);
8155 return t;
8156 }
8157 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8158 return pedantic_omit_one_operand (type, arg1, arg0);
8159
8160 /* If we have A op B ? A : C, we may be able to convert this to a
8161 simpler expression, depending on the operation and the values
8162 of B and C. Signed zeros prevent all of these transformations,
8163 for reasons given above each one. */
8164
8165 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8166 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8167 arg1, TREE_OPERAND (arg0, 1))
8168 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8169 {
8170 tree arg2 = TREE_OPERAND (t, 2);
8171 enum tree_code comp_code = TREE_CODE (arg0);
8172
8173 STRIP_NOPS (arg2);
8174
8175 /* If we have A op 0 ? A : -A, consider applying the following
8176 transformations:
8177
8178 A == 0? A : -A same as -A
8179 A != 0? A : -A same as A
8180 A >= 0? A : -A same as abs (A)
8181 A > 0? A : -A same as abs (A)
8182 A <= 0? A : -A same as -abs (A)
8183 A < 0? A : -A same as -abs (A)
8184
8185 None of these transformations work for modes with signed
8186 zeros. If A is +/-0, the first two transformations will
8187 change the sign of the result (from +0 to -0, or vice
8188 versa). The last four will fix the sign of the result,
8189 even though the original expressions could be positive or
8190 negative, depending on the sign of A.
8191
8192 Note that all these transformations are correct if A is
8193 NaN, since the two alternatives (A and -A) are also NaNs. */
8194 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8195 ? real_zerop (TREE_OPERAND (arg0, 1))
8196 : integer_zerop (TREE_OPERAND (arg0, 1)))
8197 && TREE_CODE (arg2) == NEGATE_EXPR
8198 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8199 switch (comp_code)
8200 {
8201 case EQ_EXPR:
8202 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8203 tem = fold_convert (type, negate_expr (tem));
8204 return pedantic_non_lvalue (tem);
8205 case NE_EXPR:
8206 return pedantic_non_lvalue (fold_convert (type, arg1));
8207 case GE_EXPR:
8208 case GT_EXPR:
8209 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8210 arg1 = fold_convert (lang_hooks.types.signed_type
8211 (TREE_TYPE (arg1)), arg1);
8212 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8213 return pedantic_non_lvalue (fold_convert (type, arg1));
8214 case LE_EXPR:
8215 case LT_EXPR:
8216 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8217 arg1 = fold_convert (lang_hooks.types.signed_type
8218 (TREE_TYPE (arg1)), arg1);
8219 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8220 arg1 = negate_expr (fold_convert (type, arg1));
8221 return pedantic_non_lvalue (arg1);
8222 default:
8223 abort ();
8224 }
8225
8226 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8227 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8228 both transformations are correct when A is NaN: A != 0
8229 is then true, and A == 0 is false. */
8230
8231 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8232 {
8233 if (comp_code == NE_EXPR)
8234 return pedantic_non_lvalue (fold_convert (type, arg1));
8235 else if (comp_code == EQ_EXPR)
8236 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8237 }
8238
8239 /* Try some transformations of A op B ? A : B.
8240
8241 A == B? A : B same as B
8242 A != B? A : B same as A
8243 A >= B? A : B same as max (A, B)
8244 A > B? A : B same as max (B, A)
8245 A <= B? A : B same as min (A, B)
8246 A < B? A : B same as min (B, A)
8247
8248 As above, these transformations don't work in the presence
8249 of signed zeros. For example, if A and B are zeros of
8250 opposite sign, the first two transformations will change
8251 the sign of the result. In the last four, the original
8252 expressions give different results for (A=+0, B=-0) and
8253 (A=-0, B=+0), but the transformed expressions do not.
8254
8255 The first two transformations are correct if either A or B
8256 is a NaN. In the first transformation, the condition will
8257 be false, and B will indeed be chosen. In the case of the
8258 second transformation, the condition A != B will be true,
8259 and A will be chosen.
8260
8261 The conversions to max() and min() are not correct if B is
8262 a number and A is not. The conditions in the original
8263 expressions will be false, so all four give B. The min()
8264 and max() versions would give a NaN instead. */
8265 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8266 arg2, TREE_OPERAND (arg0, 0)))
8267 {
8268 tree comp_op0 = TREE_OPERAND (arg0, 0);
8269 tree comp_op1 = TREE_OPERAND (arg0, 1);
8270 tree comp_type = TREE_TYPE (comp_op0);
8271
8272 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8273 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8274 {
8275 comp_type = type;
8276 comp_op0 = arg1;
8277 comp_op1 = arg2;
8278 }
8279
8280 switch (comp_code)
8281 {
8282 case EQ_EXPR:
8283 return pedantic_non_lvalue (fold_convert (type, arg2));
8284 case NE_EXPR:
8285 return pedantic_non_lvalue (fold_convert (type, arg1));
8286 case LE_EXPR:
8287 case LT_EXPR:
8288 /* In C++ a ?: expression can be an lvalue, so put the
8289 operand which will be used if they are equal first
8290 so that we can convert this back to the
8291 corresponding COND_EXPR. */
8292 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8293 return pedantic_non_lvalue (fold_convert
8294 (type, fold (build2 (MIN_EXPR, comp_type,
8295 (comp_code == LE_EXPR
8296 ? comp_op0 : comp_op1),
8297 (comp_code == LE_EXPR
8298 ? comp_op1 : comp_op0)))));
8299 break;
8300 case GE_EXPR:
8301 case GT_EXPR:
8302 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8303 return pedantic_non_lvalue (fold_convert
8304 (type, fold (build2 (MAX_EXPR, comp_type,
8305 (comp_code == GE_EXPR
8306 ? comp_op0 : comp_op1),
8307 (comp_code == GE_EXPR
8308 ? comp_op1 : comp_op0)))));
8309 break;
8310 default:
8311 abort ();
8312 }
8313 }
8314
8315 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8316 we might still be able to simplify this. For example,
8317 if C1 is one less or one more than C2, this might have started
8318 out as a MIN or MAX and been transformed by this function.
8319 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8320
8321 if (INTEGRAL_TYPE_P (type)
8322 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8323 && TREE_CODE (arg2) == INTEGER_CST)
8324 switch (comp_code)
8325 {
8326 case EQ_EXPR:
8327 /* We can replace A with C1 in this case. */
8328 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8329 return fold (build3 (code, type, TREE_OPERAND (t, 0), arg1,
8330 TREE_OPERAND (t, 2)));
8331
8332 case LT_EXPR:
8333 /* If C1 is C2 + 1, this is min(A, C2). */
8334 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8335 OEP_ONLY_CONST)
8336 && operand_equal_p (TREE_OPERAND (arg0, 1),
8337 const_binop (PLUS_EXPR, arg2,
8338 integer_one_node, 0),
8339 OEP_ONLY_CONST))
8340 return pedantic_non_lvalue
8341 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8342 break;
8343
8344 case LE_EXPR:
8345 /* If C1 is C2 - 1, this is min(A, C2). */
8346 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8347 OEP_ONLY_CONST)
8348 && operand_equal_p (TREE_OPERAND (arg0, 1),
8349 const_binop (MINUS_EXPR, arg2,
8350 integer_one_node, 0),
8351 OEP_ONLY_CONST))
8352 return pedantic_non_lvalue
8353 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8354 break;
8355
8356 case GT_EXPR:
8357 /* If C1 is C2 - 1, this is max(A, C2). */
8358 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8359 OEP_ONLY_CONST)
8360 && operand_equal_p (TREE_OPERAND (arg0, 1),
8361 const_binop (MINUS_EXPR, arg2,
8362 integer_one_node, 0),
8363 OEP_ONLY_CONST))
8364 return pedantic_non_lvalue
8365 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8366 break;
8367
8368 case GE_EXPR:
8369 /* If C1 is C2 + 1, this is max(A, C2). */
8370 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8371 OEP_ONLY_CONST)
8372 && operand_equal_p (TREE_OPERAND (arg0, 1),
8373 const_binop (PLUS_EXPR, arg2,
8374 integer_one_node, 0),
8375 OEP_ONLY_CONST))
8376 return pedantic_non_lvalue
8377 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8378 break;
8379 case NE_EXPR:
8380 break;
8381 default:
8382 abort ();
8383 }
8384 }
8385
8386 /* If the second operand is simpler than the third, swap them
8387 since that produces better jump optimization results. */
8388 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8389 TREE_OPERAND (t, 2), false))
8390 {
8391 /* See if this can be inverted. If it can't, possibly because
8392 it was a floating-point inequality comparison, don't do
8393 anything. */
8394 tem = invert_truthvalue (arg0);
8395
8396 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8397 return fold (build3 (code, type, tem,
8398 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8399 }
8400
8401 /* Convert A ? 1 : 0 to simply A. */
8402 if (integer_onep (TREE_OPERAND (t, 1))
8403 && integer_zerop (TREE_OPERAND (t, 2))
8404 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8405 call to fold will try to move the conversion inside
8406 a COND, which will recurse. In that case, the COND_EXPR
8407 is probably the best choice, so leave it alone. */
8408 && type == TREE_TYPE (arg0))
8409 return pedantic_non_lvalue (arg0);
8410
8411 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8412 over COND_EXPR in cases such as floating point comparisons. */
8413 if (integer_zerop (TREE_OPERAND (t, 1))
8414 && integer_onep (TREE_OPERAND (t, 2))
8415 && truth_value_p (TREE_CODE (arg0)))
8416 return pedantic_non_lvalue (fold_convert (type,
8417 invert_truthvalue (arg0)));
8418
8419 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8420 operation is simply A & 2. */
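/* Rationale (illustration): (A & 2) can only be 0 or 2, so
   "(A & 2) != 0 ? 2 : 0" always equals A & 2 itself; the same holds
   for any power of two in place of 2, which is what the
   integer_pow2p test below checks.  */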
8421
8422 if (integer_zerop (TREE_OPERAND (t, 2))
8423 && TREE_CODE (arg0) == NE_EXPR
8424 && integer_zerop (TREE_OPERAND (arg0, 1))
8425 && integer_pow2p (arg1)
8426 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8427 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8428 arg1, OEP_ONLY_CONST))
8429 return pedantic_non_lvalue (fold_convert (type,
8430 TREE_OPERAND (arg0, 0)));
8431
8432 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8433 if (integer_zerop (TREE_OPERAND (t, 2))
8434 && truth_value_p (TREE_CODE (arg0))
8435 && truth_value_p (TREE_CODE (arg1)))
8436 return pedantic_non_lvalue (fold (build2 (TRUTH_ANDIF_EXPR, type,
8437 arg0, arg1)));
8438
8439 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8440 if (integer_onep (TREE_OPERAND (t, 2))
8441 && truth_value_p (TREE_CODE (arg0))
8442 && truth_value_p (TREE_CODE (arg1)))
8443 {
8444 /* Only perform transformation if ARG0 is easily inverted. */
8445 tem = invert_truthvalue (arg0);
8446 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8447 return pedantic_non_lvalue (fold (build2 (TRUTH_ORIF_EXPR, type,
8448 tem, arg1)));
8449 }
8450
8451 return t;
8452
8453 case COMPOUND_EXPR:
8454 /* When pedantic, a compound expression can be neither an lvalue
8455 nor an integer constant expression. */
8456 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8457 return t;
8458 /* Don't let (0, 0) be a null pointer constant. */
8459 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8460 : fold_convert (type, arg1);
8461 return pedantic_non_lvalue (tem);
8462
8463 case COMPLEX_EXPR:
8464 if (wins)
8465 return build_complex (type, arg0, arg1);
8466 return t;
8467
8468 case REALPART_EXPR:
8469 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8470 return t;
8471 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8472 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8473 TREE_OPERAND (arg0, 1));
8474 else if (TREE_CODE (arg0) == COMPLEX_CST)
8475 return TREE_REALPART (arg0);
8476 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8477 return fold (build2 (TREE_CODE (arg0), type,
8478 fold (build1 (REALPART_EXPR, type,
8479 TREE_OPERAND (arg0, 0))),
8480 fold (build1 (REALPART_EXPR, type,
8481 TREE_OPERAND (arg0, 1)))));
8482 return t;
8483
8484 case IMAGPART_EXPR:
8485 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8486 return fold_convert (type, integer_zero_node);
8487 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8488 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8489 TREE_OPERAND (arg0, 0));
8490 else if (TREE_CODE (arg0) == COMPLEX_CST)
8491 return TREE_IMAGPART (arg0);
8492 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8493 return fold (build2 (TREE_CODE (arg0), type,
8494 fold (build1 (IMAGPART_EXPR, type,
8495 TREE_OPERAND (arg0, 0))),
8496 fold (build1 (IMAGPART_EXPR, type,
8497 TREE_OPERAND (arg0, 1)))));
8498 return t;
8499
8500 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8501 appropriate. */
8502 case CLEANUP_POINT_EXPR:
8503 if (! has_cleanups (arg0))
8504 return TREE_OPERAND (t, 0);
8505
8506 {
8507 enum tree_code code0 = TREE_CODE (arg0);
8508 int kind0 = TREE_CODE_CLASS (code0);
8509 tree arg00 = TREE_OPERAND (arg0, 0);
8510 tree arg01;
8511
8512 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8513 return fold (build1 (code0, type,
8514 fold (build1 (CLEANUP_POINT_EXPR,
8515 TREE_TYPE (arg00), arg00))));
8516
8517 if (kind0 == '<' || kind0 == '2'
8518 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8519 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8520 || code0 == TRUTH_XOR_EXPR)
8521 {
8522 arg01 = TREE_OPERAND (arg0, 1);
8523
8524 if (TREE_CONSTANT (arg00)
8525 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8526 && ! has_cleanups (arg00)))
8527 return fold (build2 (code0, type, arg00,
8528 fold (build1 (CLEANUP_POINT_EXPR,
8529 TREE_TYPE (arg01), arg01))));
8530
8531 if (TREE_CONSTANT (arg01))
8532 return fold (build2 (code0, type,
8533 fold (build1 (CLEANUP_POINT_EXPR,
8534 TREE_TYPE (arg00), arg00)),
8535 arg01));
8536 }
8537
8538 return t;
8539 }
8540
8541 case CALL_EXPR:
8542 /* Check for a built-in function. */
8543 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8544 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8545 == FUNCTION_DECL)
8546 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8547 {
8548 tree tmp = fold_builtin (t);
8549 if (tmp)
8550 return tmp;
8551 }
8552 return t;
8553
8554 default:
8555 return t;
8556 } /* switch (code) */
8557 }
8558
8559 #ifdef ENABLE_FOLD_CHECKING
8560 #undef fold
8561
8562 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8563 static void fold_check_failed (tree, tree);
8564 void print_fold_checksum (tree);
8565
8566 /* When configured with --enable-checking=fold, compute a digest of EXPR
8567 before and after the actual fold call, to verify that fold did not
8568 accidentally change the original EXPR. */
8569
8570 tree
8571 fold (tree expr)
8572 {
8573 tree ret;
8574 struct md5_ctx ctx;
8575 unsigned char checksum_before[16], checksum_after[16];
8576 htab_t ht;
8577
8578 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8579 md5_init_ctx (&ctx);
8580 fold_checksum_tree (expr, &ctx, ht);
8581 md5_finish_ctx (&ctx, checksum_before);
8582 htab_empty (ht);
8583
8584 ret = fold_1 (expr);
8585
8586 md5_init_ctx (&ctx);
8587 fold_checksum_tree (expr, &ctx, ht);
8588 md5_finish_ctx (&ctx, checksum_after);
8589 htab_delete (ht);
8590
8591 if (memcmp (checksum_before, checksum_after, 16))
8592 fold_check_failed (expr, ret);
8593
8594 return ret;
8595 }
8596
8597 void
8598 print_fold_checksum (tree expr)
8599 {
8600 struct md5_ctx ctx;
8601 unsigned char checksum[16], cnt;
8602 htab_t ht;
8603
8604 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8605 md5_init_ctx (&ctx);
8606 fold_checksum_tree (expr, &ctx, ht);
8607 md5_finish_ctx (&ctx, checksum);
8608 htab_delete (ht);
8609 for (cnt = 0; cnt < 16; ++cnt)
8610 fprintf (stderr, "%02x", checksum[cnt]);
8611 putc ('\n', stderr);
8612 }
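/* A suggested workflow (not required by anything in this file): when
   fold_check_failed fires, print_fold_checksum can be called from a
   debugger on sub-trees of the expression before and after the
   offending transformation to narrow down which node changed.  */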
8613
8614 static void
8615 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8616 {
8617 internal_error ("fold check: original tree changed by fold");
8618 }
8619
8620 static void
8621 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8622 {
8623 void **slot;
8624 enum tree_code code;
8625 char buf[sizeof (struct tree_decl)];
8626 int i, len;
8627
8628 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8629 > sizeof (struct tree_decl)
8630 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8631 abort ();
8632 if (expr == NULL)
8633 return;
8634 slot = htab_find_slot (ht, expr, INSERT);
8635 if (*slot != NULL)
8636 return;
8637 *slot = expr;
8638 code = TREE_CODE (expr);
8639 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8640 {
8641 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8642 memcpy (buf, expr, tree_size (expr));
8643 expr = (tree) buf;
8644 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8645 }
8646 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8647 {
8648 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8649 memcpy (buf, expr, tree_size (expr));
8650 expr = (tree) buf;
8651 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8652 }
8653 else if (TREE_CODE_CLASS (code) == 't'
8654 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8655 {
8656 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8657 memcpy (buf, expr, tree_size (expr));
8658 expr = (tree) buf;
8659 TYPE_POINTER_TO (expr) = NULL;
8660 TYPE_REFERENCE_TO (expr) = NULL;
8661 }
8662 md5_process_bytes (expr, tree_size (expr), ctx);
8663 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8664 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8665 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8666 len = TREE_CODE_LENGTH (code);
8667 switch (TREE_CODE_CLASS (code))
8668 {
8669 case 'c':
8670 switch (code)
8671 {
8672 case STRING_CST:
8673 md5_process_bytes (TREE_STRING_POINTER (expr),
8674 TREE_STRING_LENGTH (expr), ctx);
8675 break;
8676 case COMPLEX_CST:
8677 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8678 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8679 break;
8680 case VECTOR_CST:
8681 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8682 break;
8683 default:
8684 break;
8685 }
8686 break;
8687 case 'x':
8688 switch (code)
8689 {
8690 case TREE_LIST:
8691 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8692 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8693 break;
8694 case TREE_VEC:
8695 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8696 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8697 break;
8698 default:
8699 break;
8700 }
8701 break;
8702 case 'e':
8703 switch (code)
8704 {
8705 case SAVE_EXPR: len = 2; break;
8706 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8707 case RTL_EXPR: len = 0; break;
8708 case WITH_CLEANUP_EXPR: len = 2; break;
8709 default: break;
8710 }
8711 /* Fall through. */
8712 case 'r':
8713 case '<':
8714 case '1':
8715 case '2':
8716 case 's':
8717 for (i = 0; i < len; ++i)
8718 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8719 break;
8720 case 'd':
8721 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8722 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8723 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8724 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8725 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8726 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8727 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8728 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8729 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8730 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8731 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8732 break;
8733 case 't':
8734 if (TREE_CODE (expr) == ENUMERAL_TYPE)
8735 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8736 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8737 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8738 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8739 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8740 if (INTEGRAL_TYPE_P (expr)
8741 || SCALAR_FLOAT_TYPE_P (expr))
8742 {
8743 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8744 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8745 }
8746 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8747 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8748 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8749 break;
8750 default:
8751 break;
8752 }
8753 }
8754
8755 #endif
8756
8757 /* Perform constant folding and related simplification of initializer
8758 expression EXPR. This behaves identically to "fold" but ignores
8759 potential run-time traps and exceptions that fold must preserve. */
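/* For example (an illustration only): with -ftrapping-math, fold must
   leave "0.0 / 0.0" alone because the division may trap at run time;
   a static initializer is evaluated at compile time only, so this
   routine may safely fold it.  */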
8760
8761 tree
8762 fold_initializer (tree expr)
8763 {
8764 int saved_signaling_nans = flag_signaling_nans;
8765 int saved_trapping_math = flag_trapping_math;
8766 int saved_trapv = flag_trapv;
8767 tree result;
8768
8769 flag_signaling_nans = 0;
8770 flag_trapping_math = 0;
8771 flag_trapv = 0;
8772
8773 result = fold (expr);
8774
8775 flag_signaling_nans = saved_signaling_nans;
8776 flag_trapping_math = saved_trapping_math;
8777 flag_trapv = saved_trapv;
8778
8779 return result;
8780 }
8781
8782 /* Determine if the first argument is a multiple of the second argument.
8783 Return 0 if it is not, or if we cannot easily determine that it is.
8784
8785 An example of the sort of thing we care about (at this point; this routine
8786 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8787 fold cases do now) is discovering that
8788
8789 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8790
8791 is a multiple of
8792
8793 SAVE_EXPR (J * 8)
8794
8795 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8796
8797 This code also handles discovering that
8798
8799 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8800
8801 is a multiple of 8 so we don't have to worry about dealing with a
8802 possible remainder.
8803
8804 Note that we *look* inside a SAVE_EXPR only to determine how it was
8805 calculated; it is not safe for fold to do much of anything else with the
8806 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8807 at run time. For example, the latter example above *cannot* be implemented
8808 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8809 evaluation time of the original SAVE_EXPR is not necessarily the same at
8810 the time the new expression is evaluated. The only optimization of this
8811 sort that would be valid is changing
8812
8813 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8814
8815 divided by 8 to
8816
8817 SAVE_EXPR (I) * SAVE_EXPR (J)
8818
8819 (where the same SAVE_EXPR (J) is used in the original and the
8820 transformed version). */
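/* A smaller, hypothetical illustration: for TOP = "I * 16" and
   BOTTOM = 4, the MULT_EXPR case below succeeds because the
   INTEGER_CST case finds 16 % 4 == 0; for TOP = "I + 16" the
   PLUS_EXPR case fails, since "I" alone cannot be shown to be a
   multiple of 4.  */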
8821
8822 static int
8823 multiple_of_p (tree type, tree top, tree bottom)
8824 {
8825 if (operand_equal_p (top, bottom, 0))
8826 return 1;
8827
8828 if (TREE_CODE (type) != INTEGER_TYPE)
8829 return 0;
8830
8831 switch (TREE_CODE (top))
8832 {
8833 case MULT_EXPR:
8834 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8835 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8836
8837 case PLUS_EXPR:
8838 case MINUS_EXPR:
8839 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8840 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8841
8842 case LSHIFT_EXPR:
8843 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8844 {
8845 tree op1, t1;
8846
8847 op1 = TREE_OPERAND (top, 1);
8848 /* const_binop may not detect overflow correctly,
8849 so check for it explicitly here. */
8850 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8851 > TREE_INT_CST_LOW (op1)
8852 && TREE_INT_CST_HIGH (op1) == 0
8853 && 0 != (t1 = fold_convert (type,
8854 const_binop (LSHIFT_EXPR,
8855 size_one_node,
8856 op1, 0)))
8857 && ! TREE_OVERFLOW (t1))
8858 return multiple_of_p (type, t1, bottom);
8859 }
8860 return 0;
8861
8862 case NOP_EXPR:
8863 /* Can't handle conversions from a non-integral or wider integral type. */
8864 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8865 || (TYPE_PRECISION (type)
8866 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8867 return 0;
8868
8869 /* ... fall through ... */
8870
8871 case SAVE_EXPR:
8872 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8873
8874 case INTEGER_CST:
8875 if (TREE_CODE (bottom) != INTEGER_CST
8876 || (TYPE_UNSIGNED (type)
8877 && (tree_int_cst_sgn (top) < 0
8878 || tree_int_cst_sgn (bottom) < 0)))
8879 return 0;
8880 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8881 top, bottom, 0));
8882
8883 default:
8884 return 0;
8885 }
8886 }
8887
8888 /* Return true if `t' is known to be non-negative. */
8889
8890 int
8891 tree_expr_nonnegative_p (tree t)
8892 {
8893 switch (TREE_CODE (t))
8894 {
8895 case ABS_EXPR:
8896 return 1;
8897
8898 case INTEGER_CST:
8899 return tree_int_cst_sgn (t) >= 0;
8900
8901 case REAL_CST:
8902 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8903
8904 case PLUS_EXPR:
8905 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8906 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8907 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8908
8909 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8910 both unsigned and at least 2 bits shorter than the result. */
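/* E.g. (illustration): two zero-extended 8-bit values summed in a
   32-bit signed type lie in [0, 510], which needs only 9 bits, so
   the sign bit of the result can never be set.  */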
8911 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8912 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8913 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8914 {
8915 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8916 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8917 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8918 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8919 {
8920 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8921 TYPE_PRECISION (inner2)) + 1;
8922 return prec < TYPE_PRECISION (TREE_TYPE (t));
8923 }
8924 }
8925 break;
8926
8927 case MULT_EXPR:
8928 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8929 {
8930 /* x * x for floating point x is always non-negative. */
8931 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8932 return 1;
8933 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8934 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8935 }
8936
8937 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
8938 unsigned and the sum of their precisions is less than that of the result. */
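/* E.g. (illustration): the product of two zero-extended 8-bit values
   is at most 255 * 255 = 65025, which fits in 16 bits, so in a
   32-bit signed type the sign bit stays clear.  */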
8939 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8940 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8941 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8942 {
8943 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8944 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8945 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8946 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8947 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8948 < TYPE_PRECISION (TREE_TYPE (t));
8949 }
8950 return 0;
8951
8952 case TRUNC_DIV_EXPR:
8953 case CEIL_DIV_EXPR:
8954 case FLOOR_DIV_EXPR:
8955 case ROUND_DIV_EXPR:
8956 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8957 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8958
8959 case TRUNC_MOD_EXPR:
8960 case CEIL_MOD_EXPR:
8961 case FLOOR_MOD_EXPR:
8962 case ROUND_MOD_EXPR:
8963 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8964
8965 case RDIV_EXPR:
8966 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8967 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8968
8969 case BIT_AND_EXPR:
8970 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8971 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8972 case BIT_IOR_EXPR:
8973 case BIT_XOR_EXPR:
8974 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8975 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8976
8977 case NOP_EXPR:
8978 {
8979 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8980 tree outer_type = TREE_TYPE (t);
8981
8982 if (TREE_CODE (outer_type) == REAL_TYPE)
8983 {
8984 if (TREE_CODE (inner_type) == REAL_TYPE)
8985 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8986 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8987 {
8988 if (TYPE_UNSIGNED (inner_type))
8989 return 1;
8990 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8991 }
8992 }
8993 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8994 {
8995 if (TREE_CODE (inner_type) == REAL_TYPE)
8996 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8997 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8998 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8999 && TYPE_UNSIGNED (inner_type);
9000 }
9001 }
9002 break;
9003
9004 case COND_EXPR:
9005 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9006 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9007 case COMPOUND_EXPR:
9008 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9009 case MIN_EXPR:
9010 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9011 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9012 case MAX_EXPR:
9013 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9014 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9015 case MODIFY_EXPR:
9016 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9017 case BIND_EXPR:
9018 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9019 case SAVE_EXPR:
9020 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9021 case NON_LVALUE_EXPR:
9022 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9023 case FLOAT_EXPR:
9024 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9025 case RTL_EXPR:
9026 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
9027
9028 case CALL_EXPR:
9029 {
9030 tree fndecl = get_callee_fndecl (t);
9031 tree arglist = TREE_OPERAND (t, 1);
9032 if (fndecl
9033 && DECL_BUILT_IN (fndecl)
9034 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9035 switch (DECL_FUNCTION_CODE (fndecl))
9036 {
9037 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9038 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9039 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9040 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9041
9042 CASE_BUILTIN_F (BUILT_IN_ACOS)
9043 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9044 CASE_BUILTIN_F (BUILT_IN_CABS)
9045 CASE_BUILTIN_F (BUILT_IN_COSH)
9046 CASE_BUILTIN_F (BUILT_IN_ERFC)
9047 CASE_BUILTIN_F (BUILT_IN_EXP)
9048 CASE_BUILTIN_F (BUILT_IN_EXP10)
9049 CASE_BUILTIN_F (BUILT_IN_EXP2)
9050 CASE_BUILTIN_F (BUILT_IN_FABS)
9051 CASE_BUILTIN_F (BUILT_IN_FDIM)
9052 CASE_BUILTIN_F (BUILT_IN_FREXP)
9053 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9054 CASE_BUILTIN_F (BUILT_IN_POW10)
9055 CASE_BUILTIN_I (BUILT_IN_FFS)
9056 CASE_BUILTIN_I (BUILT_IN_PARITY)
9057 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9058 /* Always true. */
9059 return 1;
9060
9061 CASE_BUILTIN_F (BUILT_IN_SQRT)
9062 /* sqrt(-0.0) is -0.0. */
9063 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9064 return 1;
9065 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9066
9067 CASE_BUILTIN_F (BUILT_IN_ASINH)
9068 CASE_BUILTIN_F (BUILT_IN_ATAN)
9069 CASE_BUILTIN_F (BUILT_IN_ATANH)
9070 CASE_BUILTIN_F (BUILT_IN_CBRT)
9071 CASE_BUILTIN_F (BUILT_IN_CEIL)
9072 CASE_BUILTIN_F (BUILT_IN_ERF)
9073 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9074 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9075 CASE_BUILTIN_F (BUILT_IN_FMOD)
9076 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9077 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9078 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9079 CASE_BUILTIN_F (BUILT_IN_LRINT)
9080 CASE_BUILTIN_F (BUILT_IN_LROUND)
9081 CASE_BUILTIN_F (BUILT_IN_MODF)
9082 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9083 CASE_BUILTIN_F (BUILT_IN_POW)
9084 CASE_BUILTIN_F (BUILT_IN_RINT)
9085 CASE_BUILTIN_F (BUILT_IN_ROUND)
9086 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9087 CASE_BUILTIN_F (BUILT_IN_SINH)
9088 CASE_BUILTIN_F (BUILT_IN_TANH)
9089 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9090 /* True if the 1st argument is nonnegative. */
9091 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9092
9093 CASE_BUILTIN_F (BUILT_IN_FMAX)
9094 /* True if the 1st OR 2nd arguments are nonnegative. */
9095 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9096 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9097
9098 CASE_BUILTIN_F (BUILT_IN_FMIN)
9099 /* True if the 1st AND 2nd arguments are nonnegative. */
9100 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9101 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9102
9103 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9104 /* True if the 2nd argument is nonnegative. */
9105 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9106
9107 default:
9108 break;
9109 #undef CASE_BUILTIN_F
9110 #undef CASE_BUILTIN_I
9111 }
9112 }
9113
9114 /* ... fall through ... */
9115
9116 default:
9117 if (truth_value_p (TREE_CODE (t)))
9118 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9119 return 1;
9120 }
9121
9122 /* We don't know the sign of `t', so be conservative and return false. */
9123 return 0;
9124 }
9125
9126 /* Return true when T is an address and is known to be nonzero.
9127 Only integral and pointer types are handled here. Similar logic
9128 is present in nonzero_address in rtlanal.c. */
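/* For instance (illustrative): "&x" for an ordinary declaration is
   known nonzero, while the address of a weak declaration is not,
   since a weak symbol may resolve to NULL at link time; see the
   ADDR_EXPR case below.  */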
9129
9130 static bool
9131 tree_expr_nonzero_p (tree t)
9132 {
9133 tree type = TREE_TYPE (t);
9134
9135 /* Doing something useful for floating point would need more work. */
9136 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9137 return false;
9138
9139 switch (TREE_CODE (t))
9140 {
9141 case ABS_EXPR:
9142 return (!TYPE_UNSIGNED (type) && !flag_wrapv
9143 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9144
9145 case INTEGER_CST:
9146 return !integer_zerop (t);
9147
9148 case PLUS_EXPR:
9149 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9150 {
9151 /* In the presence of negative values it is hard
9152 to say anything definite. */
9153 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9154 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9155 return false;
9156 /* One of the operands must be positive and the other non-negative. */
9157 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9158 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9159 }
9160 break;
9161
9162 case MULT_EXPR:
9163 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9164 {
9165 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9166 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9167 }
9168 break;
9169
9170 case NOP_EXPR:
9171 {
9172 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9173 tree outer_type = TREE_TYPE (t);
9174
9175 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9176 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9177 }
9178 break;
9179
9180 case ADDR_EXPR:
9181 /* Weak declarations may link to NULL. */
9182 if (DECL_P (TREE_OPERAND (t, 0)))
9183 return !DECL_WEAK (TREE_OPERAND (t, 0));
9184 /* Constants and all other cases are never weak. */
9185 return true;
9186
9187 case COND_EXPR:
9188 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9189 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9190
9191 case MIN_EXPR:
9192 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9193 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9194
9195 case MAX_EXPR:
9196 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9197 {
9198 /* When both operands are nonzero, then MAX must be too. */
9199 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9200 return true;
9201
9202 /* MAX where operand 0 is positive is positive. */
9203 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9204 }
9205 /* MAX where operand 1 is positive is positive. */
9206 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9207 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9208 return true;
9209 break;
9210
9211 case COMPOUND_EXPR:
9212 case MODIFY_EXPR:
9213 case BIND_EXPR:
9214 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9215
9216 case SAVE_EXPR:
9217 case NON_LVALUE_EXPR:
9218 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9219
9220 case BIT_IOR_EXPR:
9221 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9222 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9223
9224 default:
9225 break;
9226 }
9227 return false;
9228 }
9229
9230 /* Return true if `r' is known to be non-negative.
9231 Only handles constants at the moment. */
9232
9233 int
9234 rtl_expr_nonnegative_p (rtx r)
9235 {
9236 switch (GET_CODE (r))
9237 {
9238 case CONST_INT:
9239 return INTVAL (r) >= 0;
9240
9241 case CONST_DOUBLE:
9242 if (GET_MODE (r) == VOIDmode)
9243 return CONST_DOUBLE_HIGH (r) >= 0;
9244 return 0;
9245
9246 case CONST_VECTOR:
9247 {
9248 int units, i;
9249 rtx elt;
9250
9251 units = CONST_VECTOR_NUNITS (r);
9252
9253 for (i = 0; i < units; ++i)
9254 {
9255 elt = CONST_VECTOR_ELT (r, i);
9256 if (!rtl_expr_nonnegative_p (elt))
9257 return 0;
9258 }
9259
9260 return 1;
9261 }
9262
9263 case SYMBOL_REF:
9264 case LABEL_REF:
9265 /* These are always nonnegative. */
9266 return 1;
9267
9268 default:
9269 return 0;
9270 }
9271 }
9272
9273
9274 /* See if we are applying CODE, a relational operator, to the highest or
9275 lowest possible integer of TYPE. If so, then the result is a
9276 compile-time constant. */
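/* E.g. (illustration): for an 8-bit unsigned OP0, "OP0 > 255" folds
   to 0 and "OP0 <= 255" folds to 1, while "OP0 >= 255" becomes
   "OP0 == 255" and "OP0 < 255" becomes "OP0 != 255".  */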
9277
9278 static tree
9279 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9280 tree *op1_p)
9281 {
9282 tree op0 = *op0_p;
9283 tree op1 = *op1_p;
9284 enum tree_code code = *code_p;
9285 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9286
9287 if (TREE_CODE (op1) == INTEGER_CST
9288 && ! TREE_CONSTANT_OVERFLOW (op1)
9289 && width <= HOST_BITS_PER_WIDE_INT
9290 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9291 || POINTER_TYPE_P (TREE_TYPE (op1))))
9292 {
9293 unsigned HOST_WIDE_INT signed_max;
9294 unsigned HOST_WIDE_INT max, min;
9295
9296 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9297
9298 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9299 {
9300 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9301 min = 0;
9302 }
9303 else
9304 {
9305 max = signed_max;
9306 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9307 }
9308
9309 if (TREE_INT_CST_HIGH (op1) == 0
9310 && TREE_INT_CST_LOW (op1) == max)
9311 switch (code)
9312 {
9313 case GT_EXPR:
9314 return omit_one_operand (type, integer_zero_node, op0);
9315
9316 case GE_EXPR:
9317 *code_p = EQ_EXPR;
9318 break;
9319 case LE_EXPR:
9320 return omit_one_operand (type, integer_one_node, op0);
9321
9322 case LT_EXPR:
9323 *code_p = NE_EXPR;
9324 break;
9325
9326 /* The GE_EXPR and LT_EXPR cases above are not normally
9327 reached because of previous transformations. */
9328
9329 default:
9330 break;
9331 }
9332 else if (TREE_INT_CST_HIGH (op1) == 0
9333 && TREE_INT_CST_LOW (op1) == max - 1)
9334 switch (code)
9335 {
9336 case GT_EXPR:
9337 *code_p = EQ_EXPR;
9338 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9339 break;
9340 case LE_EXPR:
9341 *code_p = NE_EXPR;
9342 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9343 break;
9344 default:
9345 break;
9346 }
9347 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9348 && TREE_INT_CST_LOW (op1) == min)
9349 switch (code)
9350 {
9351 case LT_EXPR:
9352 return omit_one_operand (type, integer_zero_node, op0);
9353
9354 case LE_EXPR:
9355 *code_p = EQ_EXPR;
9356 break;
9357
9358 case GE_EXPR:
9359 return omit_one_operand (type, integer_one_node, op0);
9360
9361 case GT_EXPR:
9362 *code_p = NE_EXPR;
9363 break;
9364
9365 default:
9366 break;
9367 }
9368 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9369 && TREE_INT_CST_LOW (op1) == min + 1)
9370 switch (code)
9371 {
9372 case GE_EXPR:
9373 *code_p = NE_EXPR;
9374 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9375 break;
9376 case LT_EXPR:
9377 *code_p = EQ_EXPR;
9378 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9379 break;
9380 default:
9381 break;
9382 }
9383
9384 else if (TREE_INT_CST_HIGH (op1) == 0
9385 && TREE_INT_CST_LOW (op1) == signed_max
9386 && TYPE_UNSIGNED (TREE_TYPE (op1))
9387 /* signed_type does not work on pointer types. */
9388 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9389 {
9390 /* The following case also applies to X < signed_max+1
9391 and X >= signed_max+1 because of previous transformations. */
9392 if (code == LE_EXPR || code == GT_EXPR)
9393 {
9394 tree st0, st1, exp, retval;
9395 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9396 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9397
9398 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9399 type,
9400 fold_convert (st0, op0),
9401 fold_convert (st1, integer_zero_node));
9402
9403 retval
9404 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9405 TREE_TYPE (exp),
9406 TREE_OPERAND (exp, 0),
9407 TREE_OPERAND (exp, 1));
9408
9409 /* If we are in gimple form, then returning EXP would create
9410 non-gimple expressions. Clearing it is safe and ensures
9411 we do not allow a non-gimple expression to escape. */
9412 if (in_gimple_form)
9413 exp = NULL;
9414
9415 return (retval ? retval : exp);
9416 }
9417 }
9418 }
9419
9420 return NULL_TREE;
9421 }
9422
9423
9424 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9425 attempt to fold the expression to a constant without modifying TYPE,
9426 OP0 or OP1.
9427
9428 If the expression could be simplified to a constant, then return
9429 the constant. If the expression would not be simplified to a
9430 constant, then return NULL_TREE.
9431
9432 Note this is primarily designed to be called after gimplification
9433 of the tree structures and when at least one operand is a constant.
9434 As a result of those simplifying assumptions this routine is far
9435 simpler than the generic fold routine. */
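/* Example (hypothetical invocation): CODE = PLUS_EXPR, TYPE = int,
   OP0 = 2 and OP1 = 3 returns the INTEGER_CST 5; with a non-constant
   OP0 the routine returns NULL_TREE and leaves simplification to the
   generic fold.  */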
9436
9437 tree
9438 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9439 tree op0, tree op1)
9440 {
9441 int wins = 1;
9442 tree subop0;
9443 tree subop1;
9444 tree tem;
9445
9446 /* If this is a commutative operation, and ARG0 is a constant, move it
9447 to ARG1 to reduce the number of tests below. */
9448 if (commutative_tree_code (code)
9449 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9450 {
9451 tem = op0;
9452 op0 = op1;
9453 op1 = tem;
9454 }
9455
9456 /* If either operand is a complex type, extract its real component. */
9457 if (TREE_CODE (op0) == COMPLEX_CST)
9458 subop0 = TREE_REALPART (op0);
9459 else
9460 subop0 = op0;
9461
9462 if (TREE_CODE (op1) == COMPLEX_CST)
9463 subop1 = TREE_REALPART (op1);
9464 else
9465 subop1 = op1;
9466
9467 /* Note if either argument is not a real or integer constant.
9468 With a few exceptions, simplification is limited to cases
9469 where both arguments are constants. */
9470 if ((TREE_CODE (subop0) != INTEGER_CST
9471 && TREE_CODE (subop0) != REAL_CST)
9472 || (TREE_CODE (subop1) != INTEGER_CST
9473 && TREE_CODE (subop1) != REAL_CST))
9474 wins = 0;
9475
9476 switch (code)
9477 {
9478 case PLUS_EXPR:
9479 /* (plus (address) (const_int)) is a constant. */
9480 if (TREE_CODE (op0) == PLUS_EXPR
9481 && TREE_CODE (op1) == INTEGER_CST
9482 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9483 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9484 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9485 == ADDR_EXPR)))
9486 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9487 {
9488 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9489 const_binop (PLUS_EXPR, op1,
9490 TREE_OPERAND (op0, 1), 0));
9491 }
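/* Fall through.  */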
9492 case BIT_XOR_EXPR:
9493
9494 binary:
9495 if (!wins)
9496 return NULL_TREE;
9497
9498 /* Both arguments are constants. Simplify. */
9499 tem = const_binop (code, op0, op1, 0);
9500 if (tem != NULL_TREE)
9501 {
9502 /* The return value should always have the same type as
9503 the original expression. */
9504 if (TREE_TYPE (tem) != type)
9505 tem = fold_convert (type, tem);
9506
9507 return tem;
9508 }
9509 return NULL_TREE;
9510
9511 case MINUS_EXPR:
9512 /* Fold &x - &x. This can happen from &x.foo - &x.
9513 This is unsafe for certain floats even in non-IEEE formats.
9514 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9515 Also note that operand_equal_p is always false if an
9516 operand is volatile. */
9517 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9518 return fold_convert (type, integer_zero_node);
9519
9520 goto binary;
9521
9522 case MULT_EXPR:
9523 case BIT_AND_EXPR:
9524 /* Special case multiplication or bitwise AND where one argument
9525 is zero. */
9526 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9527 return omit_one_operand (type, op1, op0);
9528 else
9529 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9530 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9531 && real_zerop (op1))
9532 return omit_one_operand (type, op1, op0);
9533
9534 goto binary;
9535
9536 case BIT_IOR_EXPR:
9537 /* Special case when we know the result will be all ones. */
9538 if (integer_all_onesp (op1))
9539 return omit_one_operand (type, op1, op0);
9540
9541 goto binary;
9542
9543 case TRUNC_DIV_EXPR:
9544 case ROUND_DIV_EXPR:
9545 case FLOOR_DIV_EXPR:
9546 case CEIL_DIV_EXPR:
9547 case EXACT_DIV_EXPR:
9548 case TRUNC_MOD_EXPR:
9549 case ROUND_MOD_EXPR:
9550 case FLOOR_MOD_EXPR:
9551 case CEIL_MOD_EXPR:
9552 case RDIV_EXPR:
9553 /* Division by zero is undefined. */
9554 if (integer_zerop (op1))
9555 return NULL_TREE;
9556
9557 if (TREE_CODE (op1) == REAL_CST
9558 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9559 && real_zerop (op1))
9560 return NULL_TREE;
9561
9562 goto binary;
9563
9564 case MIN_EXPR:
9565 if (INTEGRAL_TYPE_P (type)
9566 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9567 return omit_one_operand (type, op1, op0);
9568
9569 goto binary;
9570
9571 case MAX_EXPR:
9572 if (INTEGRAL_TYPE_P (type)
9573 && TYPE_MAX_VALUE (type)
9574 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9575 return omit_one_operand (type, op1, op0);
9576
9577 goto binary;
9578
9579 case RSHIFT_EXPR:
9580 /* Optimize -1 >> x for arithmetic right shifts. */
9581 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9582 return omit_one_operand (type, op0, op1);
9583 /* ... fall through ... */
9584
9585 case LSHIFT_EXPR:
9586 if (integer_zerop (op0))
9587 return omit_one_operand (type, op0, op1);
9588
9589 /* Since a negative shift count is not well-defined, don't
9590 try to compute it in the compiler. */
9591 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9592 return NULL_TREE;
9593
9594 goto binary;
9595
9596 case LROTATE_EXPR:
9597 case RROTATE_EXPR:
9598 /* -1 rotated either direction by any amount is still -1. */
9599 if (integer_all_onesp (op0))
9600 return omit_one_operand (type, op0, op1);
9601
9602 /* 0 rotated either direction by any amount is still zero. */
9603 if (integer_zerop (op0))
9604 return omit_one_operand (type, op0, op1);
9605
9606 goto binary;
9607
9608 case COMPLEX_EXPR:
9609 if (wins)
9610 return build_complex (type, op0, op1);
9611 return NULL_TREE;
9612
9613 case LT_EXPR:
9614 case LE_EXPR:
9615 case GT_EXPR:
9616 case GE_EXPR:
9617 case EQ_EXPR:
9618 case NE_EXPR:
9619 /* If one arg is a real or integer constant, put it last. */
9620 if ((TREE_CODE (op0) == INTEGER_CST
9621 && TREE_CODE (op1) != INTEGER_CST)
9622 || (TREE_CODE (op0) == REAL_CST
9623 && TREE_CODE (op1) != REAL_CST))
9624 {
9625 tree temp;
9626
9627 temp = op0;
9628 op0 = op1;
9629 op1 = temp;
9630 code = swap_tree_comparison (code);
9631 }
9632
9633 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9634 This transformation affects the cases which are handled in later
9635 optimizations involving comparisons with non-negative constants. */
9636 if (TREE_CODE (op1) == INTEGER_CST
9637 && TREE_CODE (op0) != INTEGER_CST
9638 && tree_int_cst_sgn (op1) > 0)
9639 {
9640 switch (code)
9641 {
9642 case GE_EXPR:
9643 code = GT_EXPR;
9644 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9645 break;
9646
9647 case LT_EXPR:
9648 code = LE_EXPR;
9649 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9650 break;
9651
9652 default:
9653 break;
9654 }
9655 }
9656
9657 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
9658 if (tem)
9659 return tem;
9660
9661 if (!wins)
9662 return NULL_TREE;
9663
9664 return fold_relational_const (code, type, op0, op1);
9665
9666 case RANGE_EXPR:
9667 /* This could probably be handled. */
9668 return NULL_TREE;
9669
9670 case TRUTH_AND_EXPR:
9671 /* If second arg is constant zero, result is zero, but first arg
9672 must be evaluated. */
9673 if (integer_zerop (op1))
9674 return omit_one_operand (type, op1, op0);
9675 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9676 case will be handled here. */
9677 if (integer_zerop (op0))
9678 return omit_one_operand (type, op0, op1);
9679 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9680 return constant_boolean_node (true, type);
9681 return NULL_TREE;
9682
9683 case TRUTH_OR_EXPR:
9684 /* If second arg is constant true, result is true, but we must
9685 evaluate first arg. */
9686 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
9687 return omit_one_operand (type, op1, op0);
9688 /* Likewise for first arg, but note this only occurs here for
9689 TRUTH_OR_EXPR. */
9690 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
9691 return omit_one_operand (type, op0, op1);
9692 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9693 return constant_boolean_node (false, type);
9694 return NULL_TREE;
9695
9696 case TRUTH_XOR_EXPR:
9697 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9698 {
9699 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
9700 return constant_boolean_node (x, type);
9701 }
9702 return NULL_TREE;
9703
9704 default:
9705 return NULL_TREE;
9706 }
9707 }
9708
9709 /* Given the components of a unary expression CODE, TYPE and OP0,
9710 attempt to fold the expression to a constant without modifying
9711 TYPE or OP0.
9712
9713 If the expression could be simplified to a constant, then return
9714 the constant. If the expression would not be simplified to a
9715 constant, then return NULL_TREE.
9716
9717 Note this is primarily designed to be called after gimplification
9718 of the tree structures and when op0 is a constant. As a result
9719 of those simplifying assumptions this routine is far simpler than
9720 the generic fold routine. */
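/* Example (hypothetical invocation): CODE = NEGATE_EXPR with
   OP0 = 7 yields the INTEGER_CST -7; CODE = BIT_NOT_EXPR with a
   non-constant OP0 yields NULL_TREE.  */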
9721
9722 tree
9723 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
9724 tree op0)
9725 {
9726 /* Make sure we have a suitable constant argument. */
9727 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
9728 {
9729 tree subop;
9730
9731 if (TREE_CODE (op0) == COMPLEX_CST)
9732 subop = TREE_REALPART (op0);
9733 else
9734 subop = op0;
9735
9736 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
9737 return NULL_TREE;
9738 }
9739
9740 switch (code)
9741 {
9742 case NOP_EXPR:
9743 case FLOAT_EXPR:
9744 case CONVERT_EXPR:
9745 case FIX_TRUNC_EXPR:
9746 case FIX_FLOOR_EXPR:
9747 case FIX_CEIL_EXPR:
9748 return fold_convert_const (code, type, op0);
9749
9750 case NEGATE_EXPR:
9751 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9752 return fold_negate_const (op0, type);
9753 else
9754 return NULL_TREE;
9755
9756 case ABS_EXPR:
9757 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9758 return fold_abs_const (op0, type);
9759 else
9760 return NULL_TREE;
9761
9762 case BIT_NOT_EXPR:
9763 if (TREE_CODE (op0) == INTEGER_CST)
9764 return fold_not_const (op0, type);
9765 else
9766 return NULL_TREE;
9767
9768 case REALPART_EXPR:
9769 if (TREE_CODE (op0) == COMPLEX_CST)
9770 return TREE_REALPART (op0);
9771 else
9772 return NULL_TREE;
9773
9774 case IMAGPART_EXPR:
9775 if (TREE_CODE (op0) == COMPLEX_CST)
9776 return TREE_IMAGPART (op0);
9777 else
9778 return NULL_TREE;
9779
9780 case CONJ_EXPR:
9781 if (TREE_CODE (op0) == COMPLEX_CST
9782 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
9783 return build_complex (type, TREE_REALPART (op0),
9784 negate_expr (TREE_IMAGPART (op0)));
9785 return NULL_TREE;
9786
9787 default:
9788 return NULL_TREE;
9789 }
9790 }
9791
9792 /* If EXP represents referencing an element in a constant string
9793 (either via pointer arithmetic or array indexing), return the
9794 tree representing the value accessed, otherwise return NULL. */
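/* E.g. (illustration): both "abc"[1] and *("abc" + 2) can be folded
   here, to the character constants 'b' and 'c' respectively, provided
   the index is a compile-time constant within the string bounds.  */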
9795
9796 tree
9797 fold_read_from_constant_string (tree exp)
9798 {
9799 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
9800 {
9801 tree exp1 = TREE_OPERAND (exp, 0);
9802 tree index;
9803 tree string;
9804
9805 if (TREE_CODE (exp) == INDIRECT_REF)
9806 {
9807 string = string_constant (exp1, &index);
9808 }
9809 else
9810 {
9811 tree domain = TYPE_DOMAIN (TREE_TYPE (exp1));
9812 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
9813 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
9814
9815 /* Optimize the special-case of a zero lower bound.
9816
9817 We convert the low_bound to sizetype to avoid some problems
9818 with constant folding. (E.g. suppose the lower bound is 1,
9819 and its mode is QI. Without the conversion, (ARRAY
9820 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9821 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9822 if (! integer_zerop (low_bound))
9823 index = size_diffop (index, fold_convert (sizetype, low_bound));
9824
9825 string = exp1;
9826 }
9827
9828 if (string
9829 && TREE_CODE (string) == STRING_CST
9830 && TREE_CODE (index) == INTEGER_CST
9831 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
9832 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
9833 == MODE_INT)
9834 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
9835 return fold_convert (TREE_TYPE (exp),
9836 build_int_2 ((TREE_STRING_POINTER (string)
9837 [TREE_INT_CST_LOW (index)]), 0));
9838 }
9839 return NULL;
9840 }
9841
9842 /* Return the tree for neg (ARG0) when ARG0 is known to be either
9843 an integer constant or real constant.
9844
9845 TYPE is the type of the result. */
9846
9847 static tree
9848 fold_negate_const (tree arg0, tree type)
9849 {
9850 tree t = NULL_TREE;
9851
9852 if (TREE_CODE (arg0) == INTEGER_CST)
9853 {
9854 unsigned HOST_WIDE_INT low;
9855 HOST_WIDE_INT high;
9856 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9857 TREE_INT_CST_HIGH (arg0),
9858 &low, &high);
9859 t = build_int_2 (low, high);
9860 TREE_TYPE (t) = type;
9861 TREE_OVERFLOW (t)
9862 = (TREE_OVERFLOW (arg0)
9863 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
9864 TREE_CONSTANT_OVERFLOW (t)
9865 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9866 }
9867 else if (TREE_CODE (arg0) == REAL_CST)
9868 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9869 #ifdef ENABLE_CHECKING
9870 else
9871 abort ();
9872 #endif
9873
9874 return t;
9875 }
9876
9877 /* Return the tree for abs (ARG0) when ARG0 is known to be either
9878 an integer constant or real constant.
9879
9880 TYPE is the type of the result. */
9881
9882 tree
9883 fold_abs_const (tree arg0, tree type)
9884 {
9885 tree t = NULL_TREE;
9886
9887 if (TREE_CODE (arg0) == INTEGER_CST)
9888 {
9889 /* If the value is unsigned, then the absolute value is
9890 the same as the ordinary value. */
9891 if (TYPE_UNSIGNED (type))
9892 return arg0;
9893 /* Similarly, if the value is non-negative. */
9894 else if (INT_CST_LT (integer_minus_one_node, arg0))
9895 return arg0;
9896 /* If the value is negative, then the absolute value is
9897 its negation. */
9898 else
9899 {
9900 unsigned HOST_WIDE_INT low;
9901 HOST_WIDE_INT high;
9902 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9903 TREE_INT_CST_HIGH (arg0),
9904 &low, &high);
9905 t = build_int_2 (low, high);
9906 TREE_TYPE (t) = type;
9907 TREE_OVERFLOW (t)
9908 = (TREE_OVERFLOW (arg0)
9909 | force_fit_type (t, overflow));
9910 TREE_CONSTANT_OVERFLOW (t)
9911 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9912 return t;
9913 }
9914 }
9915 else if (TREE_CODE (arg0) == REAL_CST)
9916 {
9917 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
9918 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9919 else
9920 return arg0;
9921 }
9922 #ifdef ENABLE_CHECKING
9923 else
9924 abort ();
9925 #endif
9926
9927 return t;
9928 }
9929
9930 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
9931 constant. TYPE is the type of the result. */
9932
9933 static tree
9934 fold_not_const (tree arg0, tree type)
9935 {
9936 tree t = NULL_TREE;
9937
9938 if (TREE_CODE (arg0) == INTEGER_CST)
9939 {
9940 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
9941 ~ TREE_INT_CST_HIGH (arg0));
9942 TREE_TYPE (t) = type;
9943 force_fit_type (t, 0);
9944 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
9945 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
9946 }
9947 #ifdef ENABLE_CHECKING
9948 else
9949 abort ();
9950 #endif
9951
9952 return t;
9953 }
9954
9955 /* Given CODE, a relational operator, the target type TYPE, and two
9956 constant operands OP0 and OP1, return the result of the
9957 relational operation. If the result is not a compile time
9958 constant, then return NULL_TREE. */
9959
9960 static tree
9961 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
9962 {
9963 tree tem;
9964 int invert;
9965
9966 /* From here on, the only cases we handle are when the result is
9967 known to be a constant.
9968
9969 To compute GT, swap the arguments and do LT.
9970 To compute GE, do LT and invert the result.
9971 To compute LE, swap the arguments, do LT and invert the result.
9972 To compute NE, do EQ and invert the result.
9973
9974 Therefore, the code below must handle only EQ and LT. */
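/* E.g. (illustration): "5 >= 3" is computed as !(5 < 3): the LT
   comparison below yields 0, and the inversion at the end flips
   it to 1.  */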
9975
9976 if (code == LE_EXPR || code == GT_EXPR)
9977 {
9978 tem = op0, op0 = op1, op1 = tem;
9979 code = swap_tree_comparison (code);
9980 }
9981
9982 /* Note that it is safe to invert for real values here because we
9983 will check below in the one case that it matters. */
9984
9985 tem = NULL_TREE;
9986 invert = 0;
9987 if (code == NE_EXPR || code == GE_EXPR)
9988 {
9989 invert = 1;
9990 code = invert_tree_comparison (code, false);
9991 }
9992
9993 /* Compute a result for LT or EQ if args permit;
9994 otherwise return NULL_TREE. */
9995 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9996 {
9997 if (code == EQ_EXPR)
9998 tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
9999 else
10000 tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
10001 ? INT_CST_LT_UNSIGNED (op0, op1)
10002 : INT_CST_LT (op0, op1)),
10003 0);
10004 }
10005
10006 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
10007 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
10008 tem = build_int_2 (0, 0);
10009
10010 /* Two real constants can be compared explicitly. */
10011 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10012 {
10013 /* If either operand is a NaN, the result is false with two
10014 exceptions: First, an NE_EXPR is true on NaNs, but that case
10015 is already handled correctly since we will be inverting the
10016 result for NE_EXPR. Second, if we had inverted a LE_EXPR
10017 or a GE_EXPR into a LT_EXPR, we must return true so that it
10018 will be inverted into false. */
10019
10020 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
10021 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
10022 tem = build_int_2 (invert && code == LT_EXPR, 0);
10023
10024 else if (code == EQ_EXPR)
10025 tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
10026 TREE_REAL_CST (op1)),
10027 0);
10028 else
10029 tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
10030 TREE_REAL_CST (op1)),
10031 0);
10032 }
10033
10034 if (tem == NULL_TREE)
10035 return NULL_TREE;
10036
10037 if (invert)
10038 TREE_INT_CST_LOW (tem) ^= 1;
10039
10040 TREE_TYPE (tem) = type;
10041 if (TREE_CODE (type) == BOOLEAN_TYPE)
10042 return lang_hooks.truthvalue_conversion (tem);
10043 return tem;
10044 }
10045
10046 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10047 avoid confusing the gimplify process. */
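/* E.g. (illustration): taking the address of "*p" yields "p" itself
   (wrapped in a NOP_EXPR if the desired pointer type differs), rather
   than an ADDR_EXPR around an INDIRECT_REF.  */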
10048
10049 tree
10050 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10051 {
10052 if (TREE_CODE (t) == INDIRECT_REF)
10053 {
10054 t = TREE_OPERAND (t, 0);
10055 if (TREE_TYPE (t) != ptrtype)
10056 t = build1 (NOP_EXPR, ptrtype, t);
10057 }
10058 else
10059 {
10060 tree base = t;
10061 while (TREE_CODE (base) == COMPONENT_REF
10062 || TREE_CODE (base) == ARRAY_REF)
10063 base = TREE_OPERAND (base, 0);
10064 if (DECL_P (base))
10065 TREE_ADDRESSABLE (base) = 1;
10066
10067 t = build1 (ADDR_EXPR, ptrtype, t);
10068 }
10069
10070 return t;
10071 }
10072
10073 tree
10074 build_fold_addr_expr (tree t)
10075 {
10076 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10077 }
10078
10079 /* Builds an expression for an indirection through T, simplifying some
10080 cases. */
10081
10082 tree
10083 build_fold_indirect_ref (tree t)
10084 {
10085 tree type = TREE_TYPE (TREE_TYPE (t));
10086 tree sub = t;
10087 tree subtype;
10088
10089 STRIP_NOPS (sub);
10090 if (TREE_CODE (sub) == ADDR_EXPR)
10091 {
10092 tree op = TREE_OPERAND (sub, 0);
10093 tree optype = TREE_TYPE (op);
10094 /* *&p => p */
10095 if (lang_hooks.types_compatible_p (type, optype))
10096 return op;
10097 /* *(foo *)&fooarray => fooarray[0] */
10098 else if (TREE_CODE (optype) == ARRAY_TYPE
10099 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10100 return build2 (ARRAY_REF, type, op, size_zero_node);
10101 }
10102
10103 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10104 subtype = TREE_TYPE (sub);
10105 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10106 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10107 {
10108 sub = build_fold_indirect_ref (sub);
10109 return build2 (ARRAY_REF, type, sub, size_zero_node);
10110 }
10111
10112 return build1 (INDIRECT_REF, type, t);
10113 }
10114
10115 #include "gt-fold-const.h"