/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
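
/* To illustrate the test above: a standalone sketch on plain `int'
   (illustrative only; guarded out so it is not compiled as part of
   this file).  */
#if 0
#include <limits.h>

int
overflow_sum_sign_demo (void)
{
  int a = INT_MAX, b = 1;
  int sum = (int) ((unsigned) a + (unsigned) b);  /* wraps to INT_MIN */

  /* a and b have the same sign bit (0) but sum's differs (1), so
     OVERFLOW_SUM_SIGN (a, b, sum) is nonzero here.  */
  return OVERFLOW_SUM_SIGN (a, b, sum);  /* yields 1 */
}
#endif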
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  Each `HOST_WIDE_INT' piece is recovered from its pair of words
   as LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
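
/* A standalone sketch of the same split-into-halfwords representation,
   using fixed 64-bit pieces instead of HOST_WIDE_INT (illustrative only;
   the sk_* names below are not part of this file).  */
#if 0
#include <stdint.h>

#define SK_BASE ((uint64_t) 1 << 32)
#define SK_LOWPART(x) ((uint64_t) (x) & (SK_BASE - 1))
#define SK_HIGHPART(x) ((uint64_t) (x) >> 32)

/* Unpack a 128-bit value (two 64-bit pieces) into four 32-bit digits.  */
static void
sk_encode (uint64_t words[4], uint64_t low, uint64_t hi)
{
  words[0] = SK_LOWPART (low);
  words[1] = SK_HIGHPART (low);
  words[2] = SK_LOWPART (hi);
  words[3] = SK_HIGHPART (hi);
}

/* Pack four 32-bit digits back; sk_decode after sk_encode recovers
   the original (low, hi) pair exactly.  */
static void
sk_decode (const uint64_t words[4], uint64_t *low, uint64_t *hi)
{
  *low = words[0] + words[1] * SK_BASE;
  *hi = words[2] + words[3] * SK_BASE;
}
#endif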
\f
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
     CONST_OVERFLOWED is nonzero,
     or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
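
/* A minimal sketch of the truncate-then-sign-extend step above, for a
   single 64-bit word and a precision that fits in it (illustrative
   only; not part of this file's interfaces).  */
#if 0
#include <stdint.h>

/* Force VAL into a PREC-bit signed two's complement range,
   1 <= prec <= 63: mask off the high bits, then copy the new sign
   bit upward, mirroring the `low' handling in force_fit_type.  */
static int64_t
sk_fit_signed (uint64_t val, unsigned prec)
{
  val &= ~((uint64_t) -1 << prec);        /* clear bits beyond PREC */
  if (val & ((uint64_t) 1 << (prec - 1))) /* sign bit set?  */
    val |= (uint64_t) -1 << prec;         /* sign extend */
  return (int64_t) val;
}

/* sk_fit_signed (0xFF, 8) == -1; sk_fit_signed (0x7F, 8) == 127.  */
#endif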
\f
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
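
/* A standalone sketch of the carry computation used above: the low-word
   sum wraps modulo 2^N, so `l < l1' detects the carry into the high
   word (illustrative only; fixed 64-bit pieces instead of
   HOST_WIDE_INT).  */
#if 0
#include <stdint.h>

/* Add two 128-bit values held as (low, high) pairs of uint64_t.  */
static void
sk_add_128 (uint64_t l1, uint64_t h1, uint64_t l2, uint64_t h2,
            uint64_t *lv, uint64_t *hv)
{
  uint64_t l = l1 + l2;          /* wraps on overflow */
  *lv = l;
  *hv = h1 + h2 + (l < l1);      /* carry iff the low sum wrapped */
}
#endif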

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);  /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
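
/* A standalone sketch of the halfword "schoolbook" multiplication used
   above, on fixed 32-bit digits (illustrative only; the sk_* names are
   not part of this file).  */
#if 0
#include <stdint.h>

/* Multiply two 64-bit values into a 128-bit product held as four
   32-bit digits, using the same digit-by-digit loop with carry
   propagation as mul_double.  */
static void
sk_mul_128 (uint64_t a, uint64_t b, uint32_t prod[4])
{
  uint32_t x[2] = { (uint32_t) a, (uint32_t) (a >> 32) };
  uint32_t y[2] = { (uint32_t) b, (uint32_t) (b >> 32) };
  uint64_t carry;

  prod[0] = prod[1] = prod[2] = prod[3] = 0;
  for (int i = 0; i < 2; i++)
    {
      carry = 0;
      for (int j = 0; j < 2; j++)
        {
          /* Digit product plus previous digit plus carry never
             overflows 64 bits: (2^32-1)^2 + 2*(2^32-1) = 2^64 - 1.  */
          carry += (uint64_t) x[i] * y[j] + prod[i + j];
          prod[i + j] = (uint32_t) carry;
          carry >>= 32;
        }
      prod[i + 2] = (uint32_t) carry;
    }
}
#endif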
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
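
/* The `>> (bits - count - 1) >> 1' idiom above sidesteps the undefined
   full-width shift when COUNT == 0.  A standalone sketch on uint64_t
   (illustrative only):  */
#if 0
#include <stdint.h>

/* Shift a 128-bit value (LO, HI) left by COUNT, 0 <= count < 64.
   A plain `lo >> (64 - count)' would be undefined for count == 0,
   so shift by (63 - count) first and then by 1 more.  */
static void
sk_shl_128 (uint64_t lo, uint64_t hi, unsigned count,
            uint64_t *lv, uint64_t *hv)
{
  *hv = (hi << count) | (lo >> (64 - count - 1) >> 1);
  *lv = lo << count;
}
#endif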

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  Rotate left if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
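
/* Both rotates above are built from a pair of logical shifts OR'd
   together.  A standalone sketch of the same construction on a single
   uint64_t word (illustrative only):  */
#if 0
#include <stdint.h>

/* Rotate VAL left by COUNT within a PREC-bit field, 0 < count < prec:
   the bits shifted out at the top re-enter at the bottom.  */
static uint64_t
sk_rotl (uint64_t val, unsigned count, unsigned prec)
{
  uint64_t mask = (prec == 64 ? (uint64_t) -1 : ((uint64_t) 1 << prec) - 1);
  val &= mask;
  return ((val << count) | (val >> (prec - count))) & mask;
}

/* sk_rotl (0x80, 1, 8) == 0x01.  */
#endif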
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
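
/* The rounding adjustments above, shown on single-word signed division
   (a standalone sketch, illustrative only): C's `/' truncates toward
   zero, and the other modes are derived from quotient and remainder.  */
#if 0
/* Floor division: step the truncated quotient down when the operand
   signs differ and there is a nonzero remainder.  */
static long
sk_floor_div (long num, long den)
{
  long quo = num / den, rem = num % den;
  if (rem != 0 && ((num < 0) != (den < 0)))  /* quotient was negative */
    quo--;
  return quo;
}

/* Ceiling division: step the truncated quotient up when the operand
   signs agree and there is a nonzero remainder.  */
static long
sk_ceil_div (long num, long den)
{
  long quo = num / den, rem = num % den;
  if (rem != 0 && ((num < 0) == (den < 0)))  /* quotient was positive */
    quo++;
  return quo;
}

/* sk_floor_div (-7, 2) == -4; sk_ceil_div (7, 2) == 4.  */
#endif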
\f
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
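
/* The only signed value whose negation overflows is the type minimum,
   i.e. 1 << (prec - 1) in two's complement.  A standalone sketch for a
   fixed 32-bit type (illustrative only):  */
#if 0
#include <stdint.h>
#include <stdbool.h>

/* -INT32_MIN does not fit in int32_t; everything else negates safely.  */
static bool
sk_may_negate_i32 (int32_t v)
{
  return (uint32_t) v != (uint32_t) 1 << 31;
}
#endif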

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.
   For example, with CODE == PLUS_EXPR, IN == A + 5 yields *LITP == 5,
   *CONP == null and variable part A.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
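
/* The complex constant cases above implement the textbook formulas
   (a+bi)(c+di) = (ac-bd) + (ad+bc)i and
   (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c^2+d^2).
   A standalone sketch of the division on doubles (illustrative only;
   the folder itself works on REAL_VALUE_TYPE constants, not the host
   FPU):  */
#if 0
static void
sk_complex_div (double a, double b, double c, double d,
                double *re, double *im)
{
  double magsquared = c * c + d * d;
  *re = (a * c + b * d) / magsquared;
  *im = (b * c - a * d) / magsquared;
}
#endif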

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
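
/* A standalone sketch of the saturating semantics described above, for
   double -> int32_t (illustrative only; the folder works on
   REAL_VALUE_TYPE constants, not host doubles):  */
#if 0
#include <stdint.h>
#include <math.h>

/* NaN maps to zero; out-of-range values clamp to the type's bounds.  */
static int32_t
sk_sat_convert (double x)
{
  if (isnan (x))
    return 0;
  if (x <= (double) INT32_MIN)
    return INT32_MIN;
  if (x >= (double) INT32_MAX)
    return INT32_MAX;
  return (int32_t) x;
}
#endif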

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
1861
1862 /* Construct a vector of zero elements of vector type TYPE. */
1863
1864 static tree
1865 build_zero_vector (tree type)
1866 {
1867 tree elem, list;
1868 int i, units;
1869
1870 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1871 units = TYPE_VECTOR_SUBPARTS (type);
1872
1873 list = NULL_TREE;
1874 for (i = 0; i < units; i++)
1875 list = tree_cons (NULL_TREE, elem, list);
1876 return build_vector (type, list);
1877 }
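/* Example (editor's note, not part of the original source): for a
   vector type with four int elements this returns the VECTOR_CST
   {0, 0, 0, 0}, i.e. four copies of the converted zero chained into
   a list. */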
1878
1879 /* Convert expression ARG to type TYPE. Used by the middle-end for
1880 simple conversions in preference to calling the front-end's convert. */
1881
1882 tree
1883 fold_convert (tree type, tree arg)
1884 {
1885 tree orig = TREE_TYPE (arg);
1886 tree tem;
1887
1888 if (type == orig)
1889 return arg;
1890
1891 if (TREE_CODE (arg) == ERROR_MARK
1892 || TREE_CODE (type) == ERROR_MARK
1893 || TREE_CODE (orig) == ERROR_MARK)
1894 return error_mark_node;
1895
1896 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1897 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1898 TYPE_MAIN_VARIANT (orig)))
1899 return fold (build1 (NOP_EXPR, type, arg));
1900
1901 switch (TREE_CODE (type))
1902 {
1903 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1904 case POINTER_TYPE: case REFERENCE_TYPE:
1905 case OFFSET_TYPE:
1906 if (TREE_CODE (arg) == INTEGER_CST)
1907 {
1908 tem = fold_convert_const (NOP_EXPR, type, arg);
1909 if (tem != NULL_TREE)
1910 return tem;
1911 }
1912 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1913 || TREE_CODE (orig) == OFFSET_TYPE)
1914 return fold (build1 (NOP_EXPR, type, arg));
1915 if (TREE_CODE (orig) == COMPLEX_TYPE)
1916 {
1917 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1918 return fold_convert (type, tem);
1919 }
1920 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1921 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1922 return fold (build1 (NOP_EXPR, type, arg));
1923
1924 case REAL_TYPE:
1925 if (TREE_CODE (arg) == INTEGER_CST)
1926 {
1927 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1929 return tem;
1930 }
1931 else if (TREE_CODE (arg) == REAL_CST)
1932 {
1933 tem = fold_convert_const (NOP_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1935 return tem;
1936 }
1937
1938 switch (TREE_CODE (orig))
1939 {
1940 case INTEGER_TYPE: case CHAR_TYPE:
1941 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1942 case POINTER_TYPE: case REFERENCE_TYPE:
1943 return fold (build1 (FLOAT_EXPR, type, arg));
1944
1945 case REAL_TYPE:
1946 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1947 type, arg));
1948
1949 case COMPLEX_TYPE:
1950 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1951 return fold_convert (type, tem);
1952
1953 default:
1954 gcc_unreachable ();
1955 }
1956
1957 case COMPLEX_TYPE:
1958 switch (TREE_CODE (orig))
1959 {
1960 case INTEGER_TYPE: case CHAR_TYPE:
1961 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1962 case POINTER_TYPE: case REFERENCE_TYPE:
1963 case REAL_TYPE:
1964 return build2 (COMPLEX_EXPR, type,
1965 fold_convert (TREE_TYPE (type), arg),
1966 fold_convert (TREE_TYPE (type), integer_zero_node));
1967 case COMPLEX_TYPE:
1968 {
1969 tree rpart, ipart;
1970
1971 if (TREE_CODE (arg) == COMPLEX_EXPR)
1972 {
1973 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1974 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1975 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1976 }
1977
1978 arg = save_expr (arg);
1979 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1980 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1981 rpart = fold_convert (TREE_TYPE (type), rpart);
1982 ipart = fold_convert (TREE_TYPE (type), ipart);
1983 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1984 }
1985
1986 default:
1987 gcc_unreachable ();
1988 }
1989
1990 case VECTOR_TYPE:
1991 if (integer_zerop (arg))
1992 return build_zero_vector (type);
1993 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1994 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1995 || TREE_CODE (orig) == VECTOR_TYPE);
1996 return fold (build1 (NOP_EXPR, type, arg));
1997
1998 case VOID_TYPE:
1999 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2000
2001 default:
2002 gcc_unreachable ();
2003 }
2004 }
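/* Examples (editor's note, not part of the original source): per the
   cases above, converting a double X to complex double builds
   COMPLEX_EXPR <X, 0.0>, while converting a complex value to a
   scalar type keeps only its REALPART_EXPR before converting. */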
2005 \f
2006 /* Return an expr equal to X but certainly not valid as an lvalue. */
2007
2008 tree
2009 non_lvalue (tree x)
2010 {
2011 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2012 us. */
2013 if (in_gimple_form)
2014 return x;
2015
2016 /* We only need to wrap lvalue tree codes. */
2017 switch (TREE_CODE (x))
2018 {
2019 case VAR_DECL:
2020 case PARM_DECL:
2021 case RESULT_DECL:
2022 case LABEL_DECL:
2023 case FUNCTION_DECL:
2024 case SSA_NAME:
2025
2026 case COMPONENT_REF:
2027 case INDIRECT_REF:
2028 case ALIGN_INDIRECT_REF:
2029 case MISALIGNED_INDIRECT_REF:
2030 case ARRAY_REF:
2031 case ARRAY_RANGE_REF:
2032 case BIT_FIELD_REF:
2033 case OBJ_TYPE_REF:
2034
2035 case REALPART_EXPR:
2036 case IMAGPART_EXPR:
2037 case PREINCREMENT_EXPR:
2038 case PREDECREMENT_EXPR:
2039 case SAVE_EXPR:
2040 case TRY_CATCH_EXPR:
2041 case WITH_CLEANUP_EXPR:
2042 case COMPOUND_EXPR:
2043 case MODIFY_EXPR:
2044 case TARGET_EXPR:
2045 case COND_EXPR:
2046 case BIND_EXPR:
2047 case MIN_EXPR:
2048 case MAX_EXPR:
2049 break;
2050
2051 default:
2052 /* Assume the worst for front-end tree codes. */
2053 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2054 break;
2055 return x;
2056 }
2057 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2058 }
2059
2060 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2061 Zero means allow extended lvalues. */
2062
2063 int pedantic_lvalues;
2064
2065 /* When pedantic, return an expr equal to X but certainly not valid as a
2066 pedantic lvalue. Otherwise, return X. */
2067
2068 static tree
2069 pedantic_non_lvalue (tree x)
2070 {
2071 if (pedantic_lvalues)
2072 return non_lvalue (x);
2073 else
2074 return x;
2075 }
2076 \f
2077 /* Given a tree comparison code, return the code that is the logical inverse
2078 of the given code. It is not safe to do this for floating-point
2079    comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a flag
2080    saying whether NaNs must be honored: if reversing is unsafe, return ERROR_MARK.  */
2081
2082 static enum tree_code
2083 invert_tree_comparison (enum tree_code code, bool honor_nans)
2084 {
2085 if (honor_nans && flag_trapping_math)
2086 return ERROR_MARK;
2087
2088 switch (code)
2089 {
2090 case EQ_EXPR:
2091 return NE_EXPR;
2092 case NE_EXPR:
2093 return EQ_EXPR;
2094 case GT_EXPR:
2095 return honor_nans ? UNLE_EXPR : LE_EXPR;
2096 case GE_EXPR:
2097 return honor_nans ? UNLT_EXPR : LT_EXPR;
2098 case LT_EXPR:
2099 return honor_nans ? UNGE_EXPR : GE_EXPR;
2100 case LE_EXPR:
2101 return honor_nans ? UNGT_EXPR : GT_EXPR;
2102 case LTGT_EXPR:
2103 return UNEQ_EXPR;
2104 case UNEQ_EXPR:
2105 return LTGT_EXPR;
2106 case UNGT_EXPR:
2107 return LE_EXPR;
2108 case UNGE_EXPR:
2109 return LT_EXPR;
2110 case UNLT_EXPR:
2111 return GE_EXPR;
2112 case UNLE_EXPR:
2113 return GT_EXPR;
2114 case ORDERED_EXPR:
2115 return UNORDERED_EXPR;
2116 case UNORDERED_EXPR:
2117 return ORDERED_EXPR;
2118 default:
2119 gcc_unreachable ();
2120 }
2121 }
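/* Example (editor's note, not part of the original source): the
   inverse of x < y is x >= y only when NaNs cannot occur; when they
   can, it is UNGE_EXPR, since both x < y and x >= y are false on a
   NaN operand.  When NaNs must be honored and flag_trapping_math is
   set, ERROR_MARK is returned instead, because replacing an ordered
   comparison by an unordered one would lose a trap on NaN operands. */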
2122
2123 /* Similar, but return the comparison that results if the operands are
2124 swapped. This is safe for floating-point. */
2125
2126 enum tree_code
2127 swap_tree_comparison (enum tree_code code)
2128 {
2129 switch (code)
2130 {
2131 case EQ_EXPR:
2132 case NE_EXPR:
2133 return code;
2134 case GT_EXPR:
2135 return LT_EXPR;
2136 case GE_EXPR:
2137 return LE_EXPR;
2138 case LT_EXPR:
2139 return GT_EXPR;
2140 case LE_EXPR:
2141 return GE_EXPR;
2142 default:
2143 gcc_unreachable ();
2144 }
2145 }
2146
2147
2148 /* Convert a comparison tree code from an enum tree_code representation
2149 into a compcode bit-based encoding. This function is the inverse of
2150 compcode_to_comparison. */
2151
2152 static enum comparison_code
2153 comparison_to_compcode (enum tree_code code)
2154 {
2155 switch (code)
2156 {
2157 case LT_EXPR:
2158 return COMPCODE_LT;
2159 case EQ_EXPR:
2160 return COMPCODE_EQ;
2161 case LE_EXPR:
2162 return COMPCODE_LE;
2163 case GT_EXPR:
2164 return COMPCODE_GT;
2165 case NE_EXPR:
2166 return COMPCODE_NE;
2167 case GE_EXPR:
2168 return COMPCODE_GE;
2169 case ORDERED_EXPR:
2170 return COMPCODE_ORD;
2171 case UNORDERED_EXPR:
2172 return COMPCODE_UNORD;
2173 case UNLT_EXPR:
2174 return COMPCODE_UNLT;
2175 case UNEQ_EXPR:
2176 return COMPCODE_UNEQ;
2177 case UNLE_EXPR:
2178 return COMPCODE_UNLE;
2179 case UNGT_EXPR:
2180 return COMPCODE_UNGT;
2181 case LTGT_EXPR:
2182 return COMPCODE_LTGT;
2183 case UNGE_EXPR:
2184 return COMPCODE_UNGE;
2185 default:
2186 gcc_unreachable ();
2187 }
2188 }
2189
2190 /* Convert a compcode bit-based encoding of a comparison operator back
2191 to GCC's enum tree_code representation. This function is the
2192 inverse of comparison_to_compcode. */
2193
2194 static enum tree_code
2195 compcode_to_comparison (enum comparison_code code)
2196 {
2197 switch (code)
2198 {
2199 case COMPCODE_LT:
2200 return LT_EXPR;
2201 case COMPCODE_EQ:
2202 return EQ_EXPR;
2203 case COMPCODE_LE:
2204 return LE_EXPR;
2205 case COMPCODE_GT:
2206 return GT_EXPR;
2207 case COMPCODE_NE:
2208 return NE_EXPR;
2209 case COMPCODE_GE:
2210 return GE_EXPR;
2211 case COMPCODE_ORD:
2212 return ORDERED_EXPR;
2213 case COMPCODE_UNORD:
2214 return UNORDERED_EXPR;
2215 case COMPCODE_UNLT:
2216 return UNLT_EXPR;
2217 case COMPCODE_UNEQ:
2218 return UNEQ_EXPR;
2219 case COMPCODE_UNLE:
2220 return UNLE_EXPR;
2221 case COMPCODE_UNGT:
2222 return UNGT_EXPR;
2223 case COMPCODE_LTGT:
2224 return LTGT_EXPR;
2225 case COMPCODE_UNGE:
2226 return UNGE_EXPR;
2227 default:
2228 gcc_unreachable ();
2229 }
2230 }
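/* Editor's sketch (not part of the original source): the compcode
   encoding dedicates one bit to each elementary outcome (less,
   equal, greater, unordered), so combining two comparisons of the
   same operands is plain bitwise arithmetic, which is what
   combine_comparisons below relies on.  A standalone check in
   plain C: */
#if 0
#include <assert.h>

int
main (void)
{
  enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };

  assert (((LT | EQ) & (EQ | GT)) == EQ); /* (a<=b) && (a>=b) => a==b */
  assert ((LT & GT) == 0);                /* (a<b) && (a>b) => false */
  return 0;
}
#endif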
2231
2232 /* Return a tree for the comparison which is the combination of
2233 doing the AND or OR (depending on CODE) of the two operations LCODE
2234 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2235 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2236 if this makes the transformation invalid. */
2237
2238 tree
2239 combine_comparisons (enum tree_code code, enum tree_code lcode,
2240 enum tree_code rcode, tree truth_type,
2241 tree ll_arg, tree lr_arg)
2242 {
2243 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2244 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2245 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2246 enum comparison_code compcode;
2247
2248 switch (code)
2249 {
2250 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2251 compcode = lcompcode & rcompcode;
2252 break;
2253
2254 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2255 compcode = lcompcode | rcompcode;
2256 break;
2257
2258 default:
2259 return NULL_TREE;
2260 }
2261
2262 if (!honor_nans)
2263 {
2264 /* Eliminate unordered comparisons, as well as LTGT and ORD
2265 which are not used unless the mode has NaNs. */
2266 compcode &= ~COMPCODE_UNORD;
2267 if (compcode == COMPCODE_LTGT)
2268 compcode = COMPCODE_NE;
2269 else if (compcode == COMPCODE_ORD)
2270 compcode = COMPCODE_TRUE;
2271 }
2272 else if (flag_trapping_math)
2273 {
2274 /* Check that the original operation and the optimized ones will trap
2275 under the same condition. */
2276 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2277 && (lcompcode != COMPCODE_EQ)
2278 && (lcompcode != COMPCODE_ORD);
2279 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2280 && (rcompcode != COMPCODE_EQ)
2281 && (rcompcode != COMPCODE_ORD);
2282 bool trap = (compcode & COMPCODE_UNORD) == 0
2283 && (compcode != COMPCODE_EQ)
2284 && (compcode != COMPCODE_ORD);
2285
2286 /* In a short-circuited boolean expression the LHS might be
2287 such that the RHS, if evaluated, will never trap. For
2288 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2289 if neither x nor y is NaN. (This is a mixed blessing: for
2290 example, the expression above will never trap, hence
2291 optimizing it to x < y would be invalid). */
2292 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2293 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2294 rtrap = false;
2295
2296 /* If the comparison was short-circuited, and only the RHS
2297 trapped, we may now generate a spurious trap. */
2298 if (rtrap && !ltrap
2299 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2300 return NULL_TREE;
2301
2302 /* If we changed the conditions that cause a trap, we lose. */
2303 if ((ltrap || rtrap) != trap)
2304 return NULL_TREE;
2305 }
2306
2307 if (compcode == COMPCODE_TRUE)
2308 return constant_boolean_node (true, truth_type);
2309 else if (compcode == COMPCODE_FALSE)
2310 return constant_boolean_node (false, truth_type);
2311 else
2312 return fold (build2 (compcode_to_comparison (compcode),
2313 truth_type, ll_arg, lr_arg));
2314 }
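/* Examples (editor's note, not part of the original source): for
   (a < b) && (a > b) on the same operands the combined compcode is
   empty, so the whole expression folds to constant false; for
   (a < b) || (a == b) it folds to a <= b, in both cases only if the
   NaN and trapping checks above permit it. */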
2315
2316 /* Return nonzero if CODE is a tree code that represents a truth value. */
2317
2318 static int
2319 truth_value_p (enum tree_code code)
2320 {
2321 return (TREE_CODE_CLASS (code) == tcc_comparison
2322 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2323 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2324 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2325 }
2326 \f
2327 /* Return nonzero if two operands (typically of the same tree node)
2328 are necessarily equal. If either argument has side-effects this
2329 function returns zero. FLAGS modifies behavior as follows:
2330
2331 If OEP_ONLY_CONST is set, only return nonzero for constants.
2332 This function tests whether the operands are indistinguishable;
2333 it does not test whether they are equal using C's == operation.
2334 The distinction is important for IEEE floating point, because
2335 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2336 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2337
2338 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2339 even though it may hold multiple values during a function.
2340 This is because a GCC tree node guarantees that nothing else is
2341 executed between the evaluation of its "operands" (which may often
2342 be evaluated in arbitrary order). Hence if the operands themselves
2343 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2344 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2345 unset means assuming isochronic (or instantaneous) tree equivalence.
2346 Unless comparing arbitrary expression trees, such as from different
2347 statements, this flag can usually be left unset.
2348
2349 If OEP_PURE_SAME is set, then pure functions with identical arguments
2350 are considered the same. It is used when the caller has other ways
2351 to ensure that global memory is unchanged in between. */
2352
2353 int
2354 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2355 {
2356 /* If either is ERROR_MARK, they aren't equal. */
2357 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2358 return 0;
2359
2360   /* If the two types differ in signedness, we can't consider them
2361      equal.  We must check this before the STRIP_NOPS calls
2362 because they may change the signedness of the arguments. */
2363 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2364 return 0;
2365
2366 STRIP_NOPS (arg0);
2367 STRIP_NOPS (arg1);
2368
2369 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2370 /* This is needed for conversions and for COMPONENT_REF.
2371 Might as well play it safe and always test this. */
2372 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2373 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2374 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2375 return 0;
2376
2377 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2378 We don't care about side effects in that case because the SAVE_EXPR
2379 takes care of that for us. In all other cases, two expressions are
2380 equal if they have no side effects. If we have two identical
2381 expressions with side effects that should be treated the same due
2382 to the only side effects being identical SAVE_EXPR's, that will
2383 be detected in the recursive calls below. */
2384 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2385 && (TREE_CODE (arg0) == SAVE_EXPR
2386 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2387 return 1;
2388
2389 /* Next handle constant cases, those for which we can return 1 even
2390 if ONLY_CONST is set. */
2391 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2392 switch (TREE_CODE (arg0))
2393 {
2394 case INTEGER_CST:
2395 return (! TREE_CONSTANT_OVERFLOW (arg0)
2396 && ! TREE_CONSTANT_OVERFLOW (arg1)
2397 && tree_int_cst_equal (arg0, arg1));
2398
2399 case REAL_CST:
2400 return (! TREE_CONSTANT_OVERFLOW (arg0)
2401 && ! TREE_CONSTANT_OVERFLOW (arg1)
2402 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2403 TREE_REAL_CST (arg1)));
2404
2405 case VECTOR_CST:
2406 {
2407 tree v1, v2;
2408
2409 if (TREE_CONSTANT_OVERFLOW (arg0)
2410 || TREE_CONSTANT_OVERFLOW (arg1))
2411 return 0;
2412
2413 v1 = TREE_VECTOR_CST_ELTS (arg0);
2414 v2 = TREE_VECTOR_CST_ELTS (arg1);
2415 while (v1 && v2)
2416 {
2417 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2418 flags))
2419 return 0;
2420 v1 = TREE_CHAIN (v1);
2421 v2 = TREE_CHAIN (v2);
2422 }
2423
2424 return 1;
2425 }
2426
2427 case COMPLEX_CST:
2428 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2429 flags)
2430 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2431 flags));
2432
2433 case STRING_CST:
2434 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2435 && ! memcmp (TREE_STRING_POINTER (arg0),
2436 TREE_STRING_POINTER (arg1),
2437 TREE_STRING_LENGTH (arg0)));
2438
2439 case ADDR_EXPR:
2440 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2441 0);
2442 default:
2443 break;
2444 }
2445
2446 if (flags & OEP_ONLY_CONST)
2447 return 0;
2448
2449 /* Define macros to test an operand from arg0 and arg1 for equality and a
2450 variant that allows null and views null as being different from any
2451      non-null value.  In the latter case, if either is null, then both
2452      must be; otherwise, do the normal comparison.  */
2453 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2454 TREE_OPERAND (arg1, N), flags)
2455
2456 #define OP_SAME_WITH_NULL(N) \
2457 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2458 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2459
2460 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2461 {
2462 case tcc_unary:
2463 /* Two conversions are equal only if signedness and modes match. */
2464 switch (TREE_CODE (arg0))
2465 {
2466 case NOP_EXPR:
2467 case CONVERT_EXPR:
2468 case FIX_CEIL_EXPR:
2469 case FIX_TRUNC_EXPR:
2470 case FIX_FLOOR_EXPR:
2471 case FIX_ROUND_EXPR:
2472 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2473 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2474 return 0;
2475 break;
2476 default:
2477 break;
2478 }
2479
2480 return OP_SAME (0);
2481
2482
2483 case tcc_comparison:
2484 case tcc_binary:
2485 if (OP_SAME (0) && OP_SAME (1))
2486 return 1;
2487
2488 /* For commutative ops, allow the other order. */
2489 return (commutative_tree_code (TREE_CODE (arg0))
2490 && operand_equal_p (TREE_OPERAND (arg0, 0),
2491 TREE_OPERAND (arg1, 1), flags)
2492 && operand_equal_p (TREE_OPERAND (arg0, 1),
2493 TREE_OPERAND (arg1, 0), flags));
2494
2495 case tcc_reference:
2496 /* If either of the pointer (or reference) expressions we are
2497 dereferencing contain a side effect, these cannot be equal. */
2498 if (TREE_SIDE_EFFECTS (arg0)
2499 || TREE_SIDE_EFFECTS (arg1))
2500 return 0;
2501
2502 switch (TREE_CODE (arg0))
2503 {
2504 case INDIRECT_REF:
2505 case ALIGN_INDIRECT_REF:
2506 case MISALIGNED_INDIRECT_REF:
2507 case REALPART_EXPR:
2508 case IMAGPART_EXPR:
2509 return OP_SAME (0);
2510
2511 case ARRAY_REF:
2512 case ARRAY_RANGE_REF:
2513 /* Operands 2 and 3 may be null. */
2514 return (OP_SAME (0)
2515 && OP_SAME (1)
2516 && OP_SAME_WITH_NULL (2)
2517 && OP_SAME_WITH_NULL (3));
2518
2519 case COMPONENT_REF:
2520 /* Handle operand 2 the same as for ARRAY_REF. */
2521 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2522
2523 case BIT_FIELD_REF:
2524 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2525
2526 default:
2527 return 0;
2528 }
2529
2530 case tcc_expression:
2531 switch (TREE_CODE (arg0))
2532 {
2533 case ADDR_EXPR:
2534 case TRUTH_NOT_EXPR:
2535 return OP_SAME (0);
2536
2537 case TRUTH_ANDIF_EXPR:
2538 case TRUTH_ORIF_EXPR:
2539 return OP_SAME (0) && OP_SAME (1);
2540
2541 case TRUTH_AND_EXPR:
2542 case TRUTH_OR_EXPR:
2543 case TRUTH_XOR_EXPR:
2544 if (OP_SAME (0) && OP_SAME (1))
2545 return 1;
2546
2547 /* Otherwise take into account this is a commutative operation. */
2548 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2549 TREE_OPERAND (arg1, 1), flags)
2550 && operand_equal_p (TREE_OPERAND (arg0, 1),
2551 TREE_OPERAND (arg1, 0), flags));
2552
2553 case CALL_EXPR:
2554 /* If the CALL_EXPRs call different functions, then they
2555 	     clearly cannot be equal.  */
2556 if (!OP_SAME (0))
2557 return 0;
2558
2559 {
2560 unsigned int cef = call_expr_flags (arg0);
2561 if (flags & OEP_PURE_SAME)
2562 cef &= ECF_CONST | ECF_PURE;
2563 else
2564 cef &= ECF_CONST;
2565 if (!cef)
2566 return 0;
2567 }
2568
2569 /* Now see if all the arguments are the same. operand_equal_p
2570 does not handle TREE_LIST, so we walk the operands here
2571 feeding them to operand_equal_p. */
2572 arg0 = TREE_OPERAND (arg0, 1);
2573 arg1 = TREE_OPERAND (arg1, 1);
2574 while (arg0 && arg1)
2575 {
2576 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2577 flags))
2578 return 0;
2579
2580 arg0 = TREE_CHAIN (arg0);
2581 arg1 = TREE_CHAIN (arg1);
2582 }
2583
2584 /* If we get here and both argument lists are exhausted
2585 then the CALL_EXPRs are equal. */
2586 return ! (arg0 || arg1);
2587
2588 default:
2589 return 0;
2590 }
2591
2592 case tcc_declaration:
2593 /* Consider __builtin_sqrt equal to sqrt. */
2594 return (TREE_CODE (arg0) == FUNCTION_DECL
2595 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2596 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2597 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2598
2599 default:
2600 return 0;
2601 }
2602
2603 #undef OP_SAME
2604 #undef OP_SAME_WITH_NULL
2605 }
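/* Example (editor's note, not part of the original source): two
   REAL_CST nodes for 0.0 and -0.0 are not operand_equal_p, because
   REAL_VALUES_IDENTICAL distinguishes the sign of zero even though
   0.0 == -0.0 is true in C; this is the "indistinguishable rather
   than ==" distinction documented above. */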
2606 \f
2607 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2608 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2609
2610 When in doubt, return 0. */
2611
2612 static int
2613 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2614 {
2615 int unsignedp1, unsignedpo;
2616 tree primarg0, primarg1, primother;
2617 unsigned int correct_width;
2618
2619 if (operand_equal_p (arg0, arg1, 0))
2620 return 1;
2621
2622 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2623 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2624 return 0;
2625
2626 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2627 and see if the inner values are the same. This removes any
2628 signedness comparison, which doesn't matter here. */
2629 primarg0 = arg0, primarg1 = arg1;
2630 STRIP_NOPS (primarg0);
2631 STRIP_NOPS (primarg1);
2632 if (operand_equal_p (primarg0, primarg1, 0))
2633 return 1;
2634
2635 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2636 actual comparison operand, ARG0.
2637
2638 First throw away any conversions to wider types
2639 already present in the operands. */
2640
2641 primarg1 = get_narrower (arg1, &unsignedp1);
2642 primother = get_narrower (other, &unsignedpo);
2643
2644 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2645 if (unsignedp1 == unsignedpo
2646 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2647 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2648 {
2649 tree type = TREE_TYPE (arg0);
2650
2651       /* Make sure the shorter operand is extended the right way
2652 to match the longer operand. */
2653 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2654 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2655
2656 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2657 return 1;
2658 }
2659
2660 return 0;
2661 }
2662 \f
2663 /* See if ARG is an expression that is either a comparison or is performing
2664 arithmetic on comparisons. The comparisons must only be comparing
2665 two different values, which will be stored in *CVAL1 and *CVAL2; if
2666 they are nonzero it means that some operands have already been found.
2667 No variables may be used anywhere else in the expression except in the
2668 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2669 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2670
2671 If this is true, return 1. Otherwise, return zero. */
2672
2673 static int
2674 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2675 {
2676 enum tree_code code = TREE_CODE (arg);
2677 enum tree_code_class class = TREE_CODE_CLASS (code);
2678
2679 /* We can handle some of the tcc_expression cases here. */
2680 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2681 class = tcc_unary;
2682 else if (class == tcc_expression
2683 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2684 || code == COMPOUND_EXPR))
2685 class = tcc_binary;
2686
2687 else if (class == tcc_expression && code == SAVE_EXPR
2688 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2689 {
2690 /* If we've already found a CVAL1 or CVAL2, this expression is
2691 	 too complex to handle.  */
2692 if (*cval1 || *cval2)
2693 return 0;
2694
2695 class = tcc_unary;
2696 *save_p = 1;
2697 }
2698
2699 switch (class)
2700 {
2701 case tcc_unary:
2702 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2703
2704 case tcc_binary:
2705 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2706 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2707 cval1, cval2, save_p));
2708
2709 case tcc_constant:
2710 return 1;
2711
2712 case tcc_expression:
2713 if (code == COND_EXPR)
2714 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2715 cval1, cval2, save_p)
2716 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2717 cval1, cval2, save_p)
2718 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2719 cval1, cval2, save_p));
2720 return 0;
2721
2722 case tcc_comparison:
2723 /* First see if we can handle the first operand, then the second. For
2724 the second operand, we know *CVAL1 can't be zero. It must be that
2725 one side of the comparison is each of the values; test for the
2726 case where this isn't true by failing if the two operands
2727 are the same. */
2728
2729 if (operand_equal_p (TREE_OPERAND (arg, 0),
2730 TREE_OPERAND (arg, 1), 0))
2731 return 0;
2732
2733 if (*cval1 == 0)
2734 *cval1 = TREE_OPERAND (arg, 0);
2735 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2736 ;
2737 else if (*cval2 == 0)
2738 *cval2 = TREE_OPERAND (arg, 0);
2739 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2740 ;
2741 else
2742 return 0;
2743
2744 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2745 ;
2746 else if (*cval2 == 0)
2747 *cval2 = TREE_OPERAND (arg, 1);
2748 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2749 ;
2750 else
2751 return 0;
2752
2753 return 1;
2754
2755 default:
2756 return 0;
2757 }
2758 }
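/* Example (editor's note, not part of the original source): for
   ARG = (x < y) | (x == y) this records *CVAL1 = x, *CVAL2 = y and
   returns 1; for (x < y) | (x == z) it returns 0, since a third
   comparison operand appears. */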
2759 \f
2760 /* ARG is a tree that is known to contain just arithmetic operations and
2761 comparisons. Evaluate the operations in the tree substituting NEW0 for
2762 any occurrence of OLD0 as an operand of a comparison and likewise for
2763 NEW1 and OLD1. */
2764
2765 static tree
2766 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2767 {
2768 tree type = TREE_TYPE (arg);
2769 enum tree_code code = TREE_CODE (arg);
2770 enum tree_code_class class = TREE_CODE_CLASS (code);
2771
2772 /* We can handle some of the tcc_expression cases here. */
2773 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2774 class = tcc_unary;
2775 else if (class == tcc_expression
2776 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2777 class = tcc_binary;
2778
2779 switch (class)
2780 {
2781 case tcc_unary:
2782 return fold (build1 (code, type,
2783 eval_subst (TREE_OPERAND (arg, 0),
2784 old0, new0, old1, new1)));
2785
2786 case tcc_binary:
2787 return fold (build2 (code, type,
2788 eval_subst (TREE_OPERAND (arg, 0),
2789 old0, new0, old1, new1),
2790 eval_subst (TREE_OPERAND (arg, 1),
2791 old0, new0, old1, new1)));
2792
2793 case tcc_expression:
2794 switch (code)
2795 {
2796 case SAVE_EXPR:
2797 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2798
2799 case COMPOUND_EXPR:
2800 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2801
2802 case COND_EXPR:
2803 return fold (build3 (code, type,
2804 eval_subst (TREE_OPERAND (arg, 0),
2805 old0, new0, old1, new1),
2806 eval_subst (TREE_OPERAND (arg, 1),
2807 old0, new0, old1, new1),
2808 eval_subst (TREE_OPERAND (arg, 2),
2809 old0, new0, old1, new1)));
2810 default:
2811 break;
2812 }
2813 /* Fall through - ??? */
2814
2815 case tcc_comparison:
2816 {
2817 tree arg0 = TREE_OPERAND (arg, 0);
2818 tree arg1 = TREE_OPERAND (arg, 1);
2819
2820 /* We need to check both for exact equality and tree equality. The
2821 former will be true if the operand has a side-effect. In that
2822 case, we know the operand occurred exactly once. */
2823
2824 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2825 arg0 = new0;
2826 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2827 arg0 = new1;
2828
2829 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2830 arg1 = new0;
2831 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2832 arg1 = new1;
2833
2834 return fold (build2 (code, type, arg0, arg1));
2835 }
2836
2837 default:
2838 return arg;
2839 }
2840 }
2841 \f
2842 /* Return a tree for the case when the result of an expression is RESULT
2843 converted to TYPE and OMITTED was previously an operand of the expression
2844 but is now not needed (e.g., we folded OMITTED * 0).
2845
2846 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2847 the conversion of RESULT to TYPE. */
2848
2849 tree
2850 omit_one_operand (tree type, tree result, tree omitted)
2851 {
2852 tree t = fold_convert (type, result);
2853
2854 if (TREE_SIDE_EFFECTS (omitted))
2855 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2856
2857 return non_lvalue (t);
2858 }
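/* Example (editor's note, not part of the original source): when
   f () * 0 is folded to zero, the call still has to be evaluated,
   so omit_one_operand (type, zero, call) yields the COMPOUND_EXPR
   (f (), 0); if OMITTED has no side effects, the converted RESULT
   is simply returned as a non-lvalue. */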
2859
2860 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2861
2862 static tree
2863 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2864 {
2865 tree t = fold_convert (type, result);
2866
2867 if (TREE_SIDE_EFFECTS (omitted))
2868 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2869
2870 return pedantic_non_lvalue (t);
2871 }
2872
2873 /* Return a tree for the case when the result of an expression is RESULT
2874 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2875 of the expression but are now not needed.
2876
2877 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2878 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2879 evaluated before OMITTED2. Otherwise, if neither has side effects,
2880 just do the conversion of RESULT to TYPE. */
2881
2882 tree
2883 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2884 {
2885 tree t = fold_convert (type, result);
2886
2887 if (TREE_SIDE_EFFECTS (omitted2))
2888 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2889 if (TREE_SIDE_EFFECTS (omitted1))
2890 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2891
2892 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2893 }
2894
2895 \f
2896 /* Return a simplified tree node for the truth-negation of ARG. This
2897 never alters ARG itself. We assume that ARG is an operation that
2898 returns a truth value (0 or 1).
2899
2900 FIXME: one would think we would fold the result, but it causes
2901 problems with the dominator optimizer. */
2902 tree
2903 invert_truthvalue (tree arg)
2904 {
2905 tree type = TREE_TYPE (arg);
2906 enum tree_code code = TREE_CODE (arg);
2907
2908 if (code == ERROR_MARK)
2909 return arg;
2910
2911 /* If this is a comparison, we can simply invert it, except for
2912 floating-point non-equality comparisons, in which case we just
2913 enclose a TRUTH_NOT_EXPR around what we have. */
2914
2915 if (TREE_CODE_CLASS (code) == tcc_comparison)
2916 {
2917 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2918 if (FLOAT_TYPE_P (op_type)
2919 && flag_trapping_math
2920 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2921 && code != NE_EXPR && code != EQ_EXPR)
2922 return build1 (TRUTH_NOT_EXPR, type, arg);
2923 else
2924 {
2925 code = invert_tree_comparison (code,
2926 HONOR_NANS (TYPE_MODE (op_type)));
2927 if (code == ERROR_MARK)
2928 return build1 (TRUTH_NOT_EXPR, type, arg);
2929 else
2930 return build2 (code, type,
2931 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2932 }
2933 }
2934
2935 switch (code)
2936 {
2937 case INTEGER_CST:
2938 return constant_boolean_node (integer_zerop (arg), type);
2939
2940 case TRUTH_AND_EXPR:
2941 return build2 (TRUTH_OR_EXPR, type,
2942 invert_truthvalue (TREE_OPERAND (arg, 0)),
2943 invert_truthvalue (TREE_OPERAND (arg, 1)));
2944
2945 case TRUTH_OR_EXPR:
2946 return build2 (TRUTH_AND_EXPR, type,
2947 invert_truthvalue (TREE_OPERAND (arg, 0)),
2948 invert_truthvalue (TREE_OPERAND (arg, 1)));
2949
2950 case TRUTH_XOR_EXPR:
2951 /* Here we can invert either operand. We invert the first operand
2952 unless the second operand is a TRUTH_NOT_EXPR in which case our
2953 result is the XOR of the first operand with the inside of the
2954 negation of the second operand. */
2955
2956 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2957 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2958 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2959 else
2960 return build2 (TRUTH_XOR_EXPR, type,
2961 invert_truthvalue (TREE_OPERAND (arg, 0)),
2962 TREE_OPERAND (arg, 1));
2963
2964 case TRUTH_ANDIF_EXPR:
2965 return build2 (TRUTH_ORIF_EXPR, type,
2966 invert_truthvalue (TREE_OPERAND (arg, 0)),
2967 invert_truthvalue (TREE_OPERAND (arg, 1)));
2968
2969 case TRUTH_ORIF_EXPR:
2970 return build2 (TRUTH_ANDIF_EXPR, type,
2971 invert_truthvalue (TREE_OPERAND (arg, 0)),
2972 invert_truthvalue (TREE_OPERAND (arg, 1)));
2973
2974 case TRUTH_NOT_EXPR:
2975 return TREE_OPERAND (arg, 0);
2976
2977 case COND_EXPR:
2978 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2979 invert_truthvalue (TREE_OPERAND (arg, 1)),
2980 invert_truthvalue (TREE_OPERAND (arg, 2)));
2981
2982 case COMPOUND_EXPR:
2983 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2984 invert_truthvalue (TREE_OPERAND (arg, 1)));
2985
2986 case NON_LVALUE_EXPR:
2987 return invert_truthvalue (TREE_OPERAND (arg, 0));
2988
2989 case NOP_EXPR:
2990 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2991 break;
2992
2993 case CONVERT_EXPR:
2994 case FLOAT_EXPR:
2995 return build1 (TREE_CODE (arg), type,
2996 invert_truthvalue (TREE_OPERAND (arg, 0)));
2997
2998 case BIT_AND_EXPR:
2999 if (!integer_onep (TREE_OPERAND (arg, 1)))
3000 break;
3001 return build2 (EQ_EXPR, type, arg,
3002 fold_convert (type, integer_zero_node));
3003
3004 case SAVE_EXPR:
3005 return build1 (TRUTH_NOT_EXPR, type, arg);
3006
3007 case CLEANUP_POINT_EXPR:
3008 return build1 (CLEANUP_POINT_EXPR, type,
3009 invert_truthvalue (TREE_OPERAND (arg, 0)));
3010
3011 default:
3012 break;
3013 }
3014 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3015 return build1 (TRUTH_NOT_EXPR, type, arg);
3016 }
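/* Examples (editor's note, not part of the original source):
   inverting a && b yields !a || !b and inverting a || b yields
   !a && !b (De Morgan, the TRUTH_*_EXPR cases above), while the
   BIT_AND_EXPR case turns ! (x & 1) into (x & 1) == 0. */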
3017
3018 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3019 operands are another bit-wise operation with a common input. If so,
3020 distribute the bit operations to save an operation and possibly two if
3021 constants are involved. For example, convert
3022 (A | B) & (A | C) into A | (B & C)
3023 Further simplification will occur if B and C are constants.
3024
3025 If this optimization cannot be done, 0 will be returned. */
3026
3027 static tree
3028 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3029 {
3030 tree common;
3031 tree left, right;
3032
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 || TREE_CODE (arg0) == code
3035 || (TREE_CODE (arg0) != BIT_AND_EXPR
3036 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3037 return 0;
3038
3039 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3040 {
3041 common = TREE_OPERAND (arg0, 0);
3042 left = TREE_OPERAND (arg0, 1);
3043 right = TREE_OPERAND (arg1, 1);
3044 }
3045 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3046 {
3047 common = TREE_OPERAND (arg0, 0);
3048 left = TREE_OPERAND (arg0, 1);
3049 right = TREE_OPERAND (arg1, 0);
3050 }
3051 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3052 {
3053 common = TREE_OPERAND (arg0, 1);
3054 left = TREE_OPERAND (arg0, 0);
3055 right = TREE_OPERAND (arg1, 1);
3056 }
3057 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3058 {
3059 common = TREE_OPERAND (arg0, 1);
3060 left = TREE_OPERAND (arg0, 0);
3061 right = TREE_OPERAND (arg1, 0);
3062 }
3063 else
3064 return 0;
3065
3066 return fold (build2 (TREE_CODE (arg0), type, common,
3067 fold (build2 (code, type, left, right))));
3068 }
3069 \f
3070 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3071 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3072
3073 static tree
3074 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3075 int unsignedp)
3076 {
3077 tree result;
3078
3079 if (bitpos == 0)
3080 {
3081 tree size = TYPE_SIZE (TREE_TYPE (inner));
3082 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3083 || POINTER_TYPE_P (TREE_TYPE (inner)))
3084 && host_integerp (size, 0)
3085 && tree_low_cst (size, 0) == bitsize)
3086 return fold_convert (type, inner);
3087 }
3088
3089 result = build3 (BIT_FIELD_REF, type, inner,
3090 size_int (bitsize), bitsize_int (bitpos));
3091
3092 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3093
3094 return result;
3095 }
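/* Example (editor's note, not part of the original source): asking
   for 32 bits at bit position 0 of a 32-bit integer INNER needs no
   BIT_FIELD_REF at all; the early return above reduces it to a
   plain conversion of INNER to TYPE. */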
3096
3097 /* Optimize a bit-field compare.
3098
3099 There are two cases: First is a compare against a constant and the
3100 second is a comparison of two items where the fields are at the same
3101 bit position relative to the start of a chunk (byte, halfword, word)
3102 large enough to contain it. In these cases we can avoid the shift
3103 implicit in bitfield extractions.
3104
3105 For constants, we emit a compare of the shifted constant with the
3106 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3107 compared. For two fields at the same position, we do the ANDs with the
3108 similar mask and compare the result of the ANDs.
3109
3110 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3111 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3112 are the left and right operands of the comparison, respectively.
3113
3114 If the optimization described above can be done, we return the resulting
3115 tree. Otherwise we return zero. */
3116
3117 static tree
3118 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3119 tree lhs, tree rhs)
3120 {
3121 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3122 tree type = TREE_TYPE (lhs);
3123 tree signed_type, unsigned_type;
3124 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3125 enum machine_mode lmode, rmode, nmode;
3126 int lunsignedp, runsignedp;
3127 int lvolatilep = 0, rvolatilep = 0;
3128 tree linner, rinner = NULL_TREE;
3129 tree mask;
3130 tree offset;
3131
3132 /* Get all the information about the extractions being done. If the bit size
3133      is the same as the size of the underlying object, we aren't doing an
3134 extraction at all and so can do nothing. We also don't want to
3135 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3136 then will no longer be able to replace it. */
3137 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3138 &lunsignedp, &lvolatilep, false);
3139 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3140 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3141 return 0;
3142
3143 if (!const_p)
3144 {
3145 /* If this is not a constant, we can only do something if bit positions,
3146 sizes, and signedness are the same. */
3147 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3148 &runsignedp, &rvolatilep, false);
3149
3150 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3151 || lunsignedp != runsignedp || offset != 0
3152 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3153 return 0;
3154 }
3155
3156 /* See if we can find a mode to refer to this field. We should be able to,
3157 but fail if we can't. */
3158 nmode = get_best_mode (lbitsize, lbitpos,
3159 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3160 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3161 TYPE_ALIGN (TREE_TYPE (rinner))),
3162 word_mode, lvolatilep || rvolatilep);
3163 if (nmode == VOIDmode)
3164 return 0;
3165
3166 /* Set signed and unsigned types of the precision of this mode for the
3167 shifts below. */
3168 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3169 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3170
3171 /* Compute the bit position and size for the new reference and our offset
3172 within it. If the new reference is the same size as the original, we
3173 won't optimize anything, so return zero. */
3174 nbitsize = GET_MODE_BITSIZE (nmode);
3175 nbitpos = lbitpos & ~ (nbitsize - 1);
3176 lbitpos -= nbitpos;
3177 if (nbitsize == lbitsize)
3178 return 0;
3179
3180 if (BYTES_BIG_ENDIAN)
3181 lbitpos = nbitsize - lbitsize - lbitpos;
3182
3183 /* Make the mask to be used against the extracted field. */
3184 mask = build_int_cst (unsigned_type, -1);
3185 mask = force_fit_type (mask, 0, false, false);
3186 mask = fold_convert (unsigned_type, mask);
3187 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3188 mask = const_binop (RSHIFT_EXPR, mask,
3189 size_int (nbitsize - lbitsize - lbitpos), 0);
3190
3191 if (! const_p)
3192 /* If not comparing with constant, just rework the comparison
3193 and return. */
3194 return build2 (code, compare_type,
3195 build2 (BIT_AND_EXPR, unsigned_type,
3196 make_bit_field_ref (linner, unsigned_type,
3197 nbitsize, nbitpos, 1),
3198 mask),
3199 build2 (BIT_AND_EXPR, unsigned_type,
3200 make_bit_field_ref (rinner, unsigned_type,
3201 nbitsize, nbitpos, 1),
3202 mask));
3203
3204 /* Otherwise, we are handling the constant case. See if the constant is too
3205      big for the field.  Warn and return a tree for 0 (false) if so.  We do
3206 this not only for its own sake, but to avoid having to test for this
3207 error case below. If we didn't, we might generate wrong code.
3208
3209 For unsigned fields, the constant shifted right by the field length should
3210 be all zero. For signed fields, the high-order bits should agree with
3211 the sign bit. */
3212
3213 if (lunsignedp)
3214 {
3215 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3216 fold_convert (unsigned_type, rhs),
3217 size_int (lbitsize), 0)))
3218 {
3219 warning ("comparison is always %d due to width of bit-field",
3220 code == NE_EXPR);
3221 return constant_boolean_node (code == NE_EXPR, compare_type);
3222 }
3223 }
3224 else
3225 {
3226 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3227 size_int (lbitsize - 1), 0);
3228 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3229 {
3230 warning ("comparison is always %d due to width of bit-field",
3231 code == NE_EXPR);
3232 return constant_boolean_node (code == NE_EXPR, compare_type);
3233 }
3234 }
3235
3236 /* Single-bit compares should always be against zero. */
3237 if (lbitsize == 1 && ! integer_zerop (rhs))
3238 {
3239 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3240 rhs = fold_convert (type, integer_zero_node);
3241 }
3242
3243 /* Make a new bitfield reference, shift the constant over the
3244 appropriate number of bits and mask it with the computed mask
3245      (in case this was a signed field), building a new constant if it changed.  */
3246 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3247 if (lvolatilep)
3248 {
3249 TREE_SIDE_EFFECTS (lhs) = 1;
3250 TREE_THIS_VOLATILE (lhs) = 1;
3251 }
3252
3253 rhs = fold (const_binop (BIT_AND_EXPR,
3254 const_binop (LSHIFT_EXPR,
3255 fold_convert (unsigned_type, rhs),
3256 size_int (lbitpos), 0),
3257 mask, 0));
3258
3259 return build2 (code, compare_type,
3260 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3261 rhs);
3262 }
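/* Example (editor's sketch, not part of the original source; the
   struct and its fields are hypothetical): given

     struct s { unsigned a : 3; unsigned b : 4; } x;

   the comparison x.b == 3 can be folded into a single load, mask
   and compare, conceptually (word & mask) == ((3 << 3) & mask),
   avoiding the shift a plain bit-field extraction would need. */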
3263 \f
3264 /* Subroutine for fold_truthop: decode a field reference.
3265
3266 If EXP is a comparison reference, we return the innermost reference.
3267
3268 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3269 set to the starting bit number.
3270
3271 If the innermost field can be completely contained in a mode-sized
3272 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3273
3274    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3275 otherwise it is not changed.
3276
3277 *PUNSIGNEDP is set to the signedness of the field.
3278
3279 *PMASK is set to the mask used. This is either contained in a
3280 BIT_AND_EXPR or derived from the width of the field.
3281
3282 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3283
3284 Return 0 if this is not a component reference or is one that we can't
3285 do anything with. */
3286
3287 static tree
3288 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3289 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3290 int *punsignedp, int *pvolatilep,
3291 tree *pmask, tree *pand_mask)
3292 {
3293 tree outer_type = 0;
3294 tree and_mask = 0;
3295 tree mask, inner, offset;
3296 tree unsigned_type;
3297 unsigned int precision;
3298
3299 /* All the optimizations using this function assume integer fields.
3300 There are problems with FP fields since the type_for_size call
3301 below can fail for, e.g., XFmode. */
3302 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3303 return 0;
3304
3305 /* We are interested in the bare arrangement of bits, so strip everything
3306 that doesn't affect the machine mode. However, record the type of the
3307 outermost expression if it may matter below. */
3308 if (TREE_CODE (exp) == NOP_EXPR
3309 || TREE_CODE (exp) == CONVERT_EXPR
3310 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3311 outer_type = TREE_TYPE (exp);
3312 STRIP_NOPS (exp);
3313
3314 if (TREE_CODE (exp) == BIT_AND_EXPR)
3315 {
3316 and_mask = TREE_OPERAND (exp, 1);
3317 exp = TREE_OPERAND (exp, 0);
3318 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3319 if (TREE_CODE (and_mask) != INTEGER_CST)
3320 return 0;
3321 }
3322
3323 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3324 punsignedp, pvolatilep, false);
3325 if ((inner == exp && and_mask == 0)
3326 || *pbitsize < 0 || offset != 0
3327 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3328 return 0;
3329
3330 /* If the number of bits in the reference is the same as the bitsize of
3331 the outer type, then the outer type gives the signedness. Otherwise
3332 (in case of a small bitfield) the signedness is unchanged. */
3333 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3334 *punsignedp = TYPE_UNSIGNED (outer_type);
3335
3336 /* Compute the mask to access the bitfield. */
3337 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3338 precision = TYPE_PRECISION (unsigned_type);
3339
3340 mask = build_int_cst (unsigned_type, -1);
3341 mask = force_fit_type (mask, 0, false, false);
3342
3343 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3344 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3345
3346 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3347 if (and_mask != 0)
3348 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3349 fold_convert (unsigned_type, and_mask), mask));
3350
3351 *pmask = mask;
3352 *pand_mask = and_mask;
3353 return inner;
3354 }
3355
3356 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3357 bit positions. */
3358
3359 static int
3360 all_ones_mask_p (tree mask, int size)
3361 {
3362 tree type = TREE_TYPE (mask);
3363 unsigned int precision = TYPE_PRECISION (type);
3364 tree tmask;
3365
3366 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3367 tmask = force_fit_type (tmask, 0, false, false);
3368
3369 return
3370 tree_int_cst_equal (mask,
3371 const_binop (RSHIFT_EXPR,
3372 const_binop (LSHIFT_EXPR, tmask,
3373 size_int (precision - size),
3374 0),
3375 size_int (precision - size), 0));
3376 }
3377
3378 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3379 represents the sign bit of EXP's type. If EXP represents a sign
3380 or zero extension, also test VAL against the unextended type.
3381 The return value is the (sub)expression whose sign bit is VAL,
3382 or NULL_TREE otherwise. */
3383
3384 static tree
3385 sign_bit_p (tree exp, tree val)
3386 {
3387 unsigned HOST_WIDE_INT mask_lo, lo;
3388 HOST_WIDE_INT mask_hi, hi;
3389 int width;
3390 tree t;
3391
3392 /* Tree EXP must have an integral type. */
3393 t = TREE_TYPE (exp);
3394 if (! INTEGRAL_TYPE_P (t))
3395 return NULL_TREE;
3396
3397 /* Tree VAL must be an integer constant. */
3398 if (TREE_CODE (val) != INTEGER_CST
3399 || TREE_CONSTANT_OVERFLOW (val))
3400 return NULL_TREE;
3401
3402 width = TYPE_PRECISION (t);
3403 if (width > HOST_BITS_PER_WIDE_INT)
3404 {
3405 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3406 lo = 0;
3407
3408 mask_hi = ((unsigned HOST_WIDE_INT) -1
3409 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3410 mask_lo = -1;
3411 }
3412 else
3413 {
3414 hi = 0;
3415 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3416
3417 mask_hi = 0;
3418 mask_lo = ((unsigned HOST_WIDE_INT) -1
3419 >> (HOST_BITS_PER_WIDE_INT - width));
3420 }
3421
3422 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3423 treat VAL as if it were unsigned. */
3424 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3425 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3426 return exp;
3427
3428 /* Handle extension from a narrower type. */
3429 if (TREE_CODE (exp) == NOP_EXPR
3430 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3431 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3432
3433 return NULL_TREE;
3434 }
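/* Example (editor's note, not part of the original source): for a
   32-bit int EXP, VAL must be 0x80000000 (exactly the sign bit) for
   EXP itself to be returned; if EXP is (int) (short) X, the
   NOP_EXPR recursion also tries VAL against 0x8000, the sign bit of
   the narrower type. */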
3435
3436 /* Subroutine for fold_truthop: determine if an operand is simple enough
3437 to be evaluated unconditionally. */
3438
3439 static int
3440 simple_operand_p (tree exp)
3441 {
3442 /* Strip any conversions that don't change the machine mode. */
3443 STRIP_NOPS (exp);
3444
3445 return (CONSTANT_CLASS_P (exp)
3446 || TREE_CODE (exp) == SSA_NAME
3447 || (DECL_P (exp)
3448 && ! TREE_ADDRESSABLE (exp)
3449 && ! TREE_THIS_VOLATILE (exp)
3450 && ! DECL_NONLOCAL (exp)
3451 /* Don't regard global variables as simple. They may be
3452 allocated in ways unknown to the compiler (shared memory,
3453 #pragma weak, etc). */
3454 && ! TREE_PUBLIC (exp)
3455 && ! DECL_EXTERNAL (exp)
3456 /* Loading a static variable is unduly expensive, but global
3457 registers aren't expensive. */
3458 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3459 }
3460 \f
3461 /* The following functions are subroutines to fold_range_test and allow it to
3462 try to change a logical combination of comparisons into a range test.
3463
3464 For example, both
3465 X == 2 || X == 3 || X == 4 || X == 5
3466 and
3467 X >= 2 && X <= 5
3468 are converted to
3469 (unsigned) (X - 2) <= 3
3470
3471 We describe each set of comparisons as being either inside or outside
3472 a range, using a variable named like IN_P, and then describe the
3473 range with a lower and upper bound. If one of the bounds is omitted,
3474 it represents either the highest or lowest value of the type.
3475
3476 In the comments below, we represent a range by two numbers in brackets
3477 preceded by a "+" to designate being inside that range, or a "-" to
3478 designate being outside that range, so the condition can be inverted by
3479 flipping the prefix. An omitted bound is represented by a "-". For
3480 example, "- [-, 10]" means being outside the range starting at the lowest
3481 possible value and ending at 10, in other words, being greater than 10.
3482 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3483 always false.
3484
3485 We set up things so that the missing bounds are handled in a consistent
3486 manner so neither a missing bound nor "true" and "false" need to be
3487 handled using a special case. */
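/* Editor's sketch (not part of the original source): the example
   transformation above can be checked in plain C.  The subtraction
   is done in the unsigned type, so values below 2 wrap to large
   ones and fall outside the range: */
#if 0
static int
by_comparisons (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
by_range_test (int x)
{
  return (unsigned) x - 2u <= 3u;
}
#endif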
3488
3489 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3490 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3491 and UPPER1_P are nonzero if the respective argument is an upper bound
3492 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3493 must be specified for a comparison. ARG1 will be converted to ARG0's
3494 type if both are specified. */
3495
3496 static tree
3497 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3498 tree arg1, int upper1_p)
3499 {
3500 tree tem;
3501 int result;
3502 int sgn0, sgn1;
3503
3504 /* If neither arg represents infinity, do the normal operation.
3505 Else, if not a comparison, return infinity. Else handle the special
3506 comparison rules. Note that most of the cases below won't occur, but
3507 are handled for consistency. */
3508
3509 if (arg0 != 0 && arg1 != 0)
3510 {
3511 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3512 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3513 STRIP_NOPS (tem);
3514 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3515 }
3516
3517 if (TREE_CODE_CLASS (code) != tcc_comparison)
3518 return 0;
3519
3520 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3521      for neither.  In real mathematics, we cannot assume open-ended ranges
3522      are the same.  But this is computer arithmetic, where numbers are
3523      finite.  We can therefore represent any unbounded bound by a value Z
3524      greater than any representable number.  This permits us to treat
3525      unbounded ranges as equal.  */
3526 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3527 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3528 switch (code)
3529 {
3530 case EQ_EXPR:
3531 result = sgn0 == sgn1;
3532 break;
3533 case NE_EXPR:
3534 result = sgn0 != sgn1;
3535 break;
3536 case LT_EXPR:
3537 result = sgn0 < sgn1;
3538 break;
3539 case LE_EXPR:
3540 result = sgn0 <= sgn1;
3541 break;
3542 case GT_EXPR:
3543 result = sgn0 > sgn1;
3544 break;
3545 case GE_EXPR:
3546 result = sgn0 >= sgn1;
3547 break;
3548 default:
3549 gcc_unreachable ();
3550 }
3551
3552 return constant_boolean_node (result, type);
3553 }
3554 \f
3555 /* Given EXP, a logical expression, set the range it is testing into
3556 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3557 actually being tested. *PLOW and *PHIGH will be made of the same type
3558 as the returned expression. If EXP is not a comparison, we will most
3559 likely not be returning a useful value and range. */
3560
3561 static tree
3562 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3563 {
3564 enum tree_code code;
3565 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3566 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3567 int in_p, n_in_p;
3568 tree low, high, n_low, n_high;
3569
3570 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3571 and see if we can refine the range. Some of the cases below may not
3572 happen, but it doesn't seem worth worrying about this. We "continue"
3573 the outer loop when we've changed something; otherwise we "break"
3574 the switch, which will "break" the while. */
3575
3576 in_p = 0;
3577 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3578
3579 while (1)
3580 {
3581 code = TREE_CODE (exp);
3582 exp_type = TREE_TYPE (exp);
3583
3584 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3585 {
3586 if (TREE_CODE_LENGTH (code) > 0)
3587 arg0 = TREE_OPERAND (exp, 0);
3588 if (TREE_CODE_CLASS (code) == tcc_comparison
3589 || TREE_CODE_CLASS (code) == tcc_unary
3590 || TREE_CODE_CLASS (code) == tcc_binary)
3591 arg0_type = TREE_TYPE (arg0);
3592 if (TREE_CODE_CLASS (code) == tcc_binary
3593 || TREE_CODE_CLASS (code) == tcc_comparison
3594 || (TREE_CODE_CLASS (code) == tcc_expression
3595 && TREE_CODE_LENGTH (code) > 1))
3596 arg1 = TREE_OPERAND (exp, 1);
3597 }
3598
3599 switch (code)
3600 {
3601 case TRUTH_NOT_EXPR:
3602 in_p = ! in_p, exp = arg0;
3603 continue;
3604
3605 case EQ_EXPR: case NE_EXPR:
3606 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3607 /* We can only do something if the range is testing for zero
3608 and if the second operand is an integer constant. Note that
3609 saying something is "in" the range we make is done by
3610 complementing IN_P, since IN_P is set for the initial case of
3611 being not equal to zero; "out" is leaving it alone. */
3612 if (low == 0 || high == 0
3613 || ! integer_zerop (low) || ! integer_zerop (high)
3614 || TREE_CODE (arg1) != INTEGER_CST)
3615 break;
3616
3617 switch (code)
3618 {
3619 case NE_EXPR: /* - [c, c] */
3620 low = high = arg1;
3621 break;
3622 case EQ_EXPR: /* + [c, c] */
3623 in_p = ! in_p, low = high = arg1;
3624 break;
3625 case GT_EXPR: /* - [-, c] */
3626 low = 0, high = arg1;
3627 break;
3628 case GE_EXPR: /* + [c, -] */
3629 in_p = ! in_p, low = arg1, high = 0;
3630 break;
3631 case LT_EXPR: /* - [c, -] */
3632 low = arg1, high = 0;
3633 break;
3634 case LE_EXPR: /* + [-, c] */
3635 in_p = ! in_p, low = 0, high = arg1;
3636 break;
3637 default:
3638 gcc_unreachable ();
3639 }
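/* As a worked example (hypothetical input, not from a real caller):
   for EXP of the form "x > 5" the GT_EXPR case produces the excluded
   range - [-, 5], i.e. "x > 5" holds exactly when x is outside
   [-infinity, 5]; likewise "x == 5" gives + [5, 5].  */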
3640
3641 /* If this is an unsigned comparison, we also know that EXP is
3642 greater than or equal to zero. We base the range tests we make
3643 on that fact, and we record it here so we can parse existing
3644 range tests. We test arg0_type since the return type of,
3645 e.g., EQ_EXPR, is often boolean. */
3646 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3647 {
3648 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3649 in_p, low, high, 1,
3650 fold_convert (arg0_type, integer_zero_node),
3651 NULL_TREE))
3652 break;
3653
3654 in_p = n_in_p, low = n_low, high = n_high;
3655
3656 /* If the high bound is missing, but we have a nonzero low
3657 bound, reverse the range so it goes from zero to the low bound
3658 minus 1. */
3659 if (high == 0 && low && ! integer_zerop (low))
3660 {
3661 in_p = ! in_p;
3662 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3663 integer_one_node, 0);
3664 low = fold_convert (arg0_type, integer_zero_node);
3665 }
3666 }
3667
3668 exp = arg0;
3669 continue;
3670
3671 case NEGATE_EXPR:
3672 /* (-x) IN [a,b] -> x in [-b, -a] */
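/* E.g., (-x) IN [2, 5] becomes x IN [-5, -2]: each bound is negated
   by subtracting it from zero, and the two bounds swap roles.  */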
3673 n_low = range_binop (MINUS_EXPR, exp_type,
3674 fold_convert (exp_type, integer_zero_node),
3675 0, high, 1);
3676 n_high = range_binop (MINUS_EXPR, exp_type,
3677 fold_convert (exp_type, integer_zero_node),
3678 0, low, 0);
3679 low = n_low, high = n_high;
3680 exp = arg0;
3681 continue;
3682
3683 case BIT_NOT_EXPR:
3684 /* ~ X -> -X - 1 */
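/* E.g., "~x IN [a, b]" is rewritten as "-x - 1 IN [a, b]", using the
   two's complement identity ~x == -x - 1, and then re-dispatched.  */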
3685 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3686 fold_convert (exp_type, integer_one_node));
3687 continue;
3688
3689 case PLUS_EXPR: case MINUS_EXPR:
3690 if (TREE_CODE (arg1) != INTEGER_CST)
3691 break;
3692
3693 /* If EXP is signed, any overflow in the computation is undefined,
3694 so we don't worry about it so long as our computations on
3695 the bounds don't overflow. For unsigned, overflow is defined
3696 and this is exactly the right thing. */
3697 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3698 arg0_type, low, 0, arg1, 0);
3699 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3700 arg0_type, high, 1, arg1, 0);
3701 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3702 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3703 break;
3704
3705 /* Check for an unsigned range which has wrapped around the maximum
3706 value thus making n_high < n_low, and normalize it. */
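/* A concrete 8-bit unsigned illustration: "x + 10 IN [0, 250]" gives
   n_low == -10 == 246 and n_high == 240 after wrapping, so n_high <
   n_low; we normalize to the complement "x NOT IN [241, 245]" by
   flipping IN_P and using [n_high + 1, n_low - 1].  */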
3707 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3708 {
3709 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3710 integer_one_node, 0);
3711 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3712 integer_one_node, 0);
3713
3714 /* If the range is of the form +/- [ x+1, x ], we won't
3715 be able to normalize it. But then, it represents the
3716 whole range or the empty set, so make it
3717 +/- [ -, - ]. */
3718 if (tree_int_cst_equal (n_low, low)
3719 && tree_int_cst_equal (n_high, high))
3720 low = high = 0;
3721 else
3722 in_p = ! in_p;
3723 }
3724 else
3725 low = n_low, high = n_high;
3726
3727 exp = arg0;
3728 continue;
3729
3730 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3731 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3732 break;
3733
3734 if (! INTEGRAL_TYPE_P (arg0_type)
3735 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3736 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3737 break;
3738
3739 n_low = low, n_high = high;
3740
3741 if (n_low != 0)
3742 n_low = fold_convert (arg0_type, n_low);
3743
3744 if (n_high != 0)
3745 n_high = fold_convert (arg0_type, n_high);
3746
3747
3748 /* If we're converting arg0 from an unsigned type to exp's
3749 signed type, we will be doing the comparison as unsigned.
3750 The tests above have already verified that LOW and HIGH
3751 are both positive.
3752
3753 So we have to ensure that we will handle large unsigned
3754 values the same way that the current signed bounds treat
3755 negative values. */
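/* For example (sketching with 8-bit types): if EXP is signed char,
   ARG0 is unsigned char, and the low bound is set, the range is
   "and"ed with [0, 127] so that unsigned values with the sign bit
   set are handled like the negative signed values they convert to.  */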
3756
3757 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3758 {
3759 tree high_positive;
3760 tree equiv_type = lang_hooks.types.type_for_mode
3761 (TYPE_MODE (arg0_type), 1);
3762
3763 /* A range without an upper bound is, naturally, unbounded.
3764 Since convert would have cropped a very large value, use
3765 the max value for the destination type. */
3766 high_positive
3767 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3768 : TYPE_MAX_VALUE (arg0_type);
3769
3770 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3771 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3772 fold_convert (arg0_type,
3773 high_positive),
3774 fold_convert (arg0_type,
3775 integer_one_node)));
3776
3777 /* If the low bound is specified, "and" the range with the
3778 range for which the original unsigned value will be
3779 positive. */
3780 if (low != 0)
3781 {
3782 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3783 1, n_low, n_high, 1,
3784 fold_convert (arg0_type,
3785 integer_zero_node),
3786 high_positive))
3787 break;
3788
3789 in_p = (n_in_p == in_p);
3790 }
3791 else
3792 {
3793 /* Otherwise, "or" the range with the range of the input
3794 that will be interpreted as negative. */
3795 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3796 0, n_low, n_high, 1,
3797 fold_convert (arg0_type,
3798 integer_zero_node),
3799 high_positive))
3800 break;
3801
3802 in_p = (in_p != n_in_p);
3803 }
3804 }
3805
3806 exp = arg0;
3807 low = n_low, high = n_high;
3808 continue;
3809
3810 default:
3811 break;
3812 }
3813
3814 break;
3815 }
3816
3817 /* If EXP is a constant, we can evaluate whether this is true or false. */
3818 if (TREE_CODE (exp) == INTEGER_CST)
3819 {
3820 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3821 exp, 0, low, 0))
3822 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3823 exp, 1, high, 1)));
3824 low = high = 0;
3825 exp = 0;
3826 }
3827
3828 *pin_p = in_p, *plow = low, *phigh = high;
3829 return exp;
3830 }
3831 \f
3832 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3833 type, TYPE, return an expression to test if EXP is in (or out of, depending
3834 on IN_P) the range. Return 0 if the test couldn't be created. */
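/* For instance, the check "EXP IN [3, 7]" on an integer EXP is
   ultimately built (by the subtraction logic below) as the single
   unsigned comparison (unsigned) (EXP - 3) <= 4, rather than as two
   separate comparisons.  */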
3835
3836 static tree
3837 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3838 {
3839 tree etype = TREE_TYPE (exp);
3840 tree value;
3841
3842 if (! in_p)
3843 {
3844 value = build_range_check (type, exp, 1, low, high);
3845 if (value != 0)
3846 return invert_truthvalue (value);
3847
3848 return 0;
3849 }
3850
3851 if (low == 0 && high == 0)
3852 return fold_convert (type, integer_one_node);
3853
3854 if (low == 0)
3855 return fold (build2 (LE_EXPR, type, exp, high));
3856
3857 if (high == 0)
3858 return fold (build2 (GE_EXPR, type, exp, low));
3859
3860 if (operand_equal_p (low, high, 0))
3861 return fold (build2 (EQ_EXPR, type, exp, low));
3862
3863 if (integer_zerop (low))
3864 {
3865 if (! TYPE_UNSIGNED (etype))
3866 {
3867 etype = lang_hooks.types.unsigned_type (etype);
3868 high = fold_convert (etype, high);
3869 exp = fold_convert (etype, exp);
3870 }
3871 return build_range_check (type, exp, 1, 0, high);
3872 }
3873
3874 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3875 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3876 {
3877 unsigned HOST_WIDE_INT lo;
3878 HOST_WIDE_INT hi;
3879 int prec;
3880
3881 prec = TYPE_PRECISION (etype);
3882 if (prec <= HOST_BITS_PER_WIDE_INT)
3883 {
3884 hi = 0;
3885 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3886 }
3887 else
3888 {
3889 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3890 lo = (unsigned HOST_WIDE_INT) -1;
3891 }
3892
3893 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3894 {
3895 if (TYPE_UNSIGNED (etype))
3896 {
3897 etype = lang_hooks.types.signed_type (etype);
3898 exp = fold_convert (etype, exp);
3899 }
3900 return fold (build2 (GT_EXPR, type, exp,
3901 fold_convert (etype, integer_zero_node)));
3902 }
3903 }
3904
3905 value = const_binop (MINUS_EXPR, high, low, 0);
3906 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3907 {
3908 tree utype, minv, maxv;
3909
3910 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3911 for the type in question, as we rely on this here. */
3912 switch (TREE_CODE (etype))
3913 {
3914 case INTEGER_TYPE:
3915 case ENUMERAL_TYPE:
3916 case CHAR_TYPE:
3917 utype = lang_hooks.types.unsigned_type (etype);
3918 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3919 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3920 integer_one_node, 1);
3921 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3922 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3923 minv, 1, maxv, 1)))
3924 {
3925 etype = utype;
3926 high = fold_convert (etype, high);
3927 low = fold_convert (etype, low);
3928 exp = fold_convert (etype, exp);
3929 value = const_binop (MINUS_EXPR, high, low, 0);
3930 }
3931 break;
3932 default:
3933 break;
3934 }
3935 }
3936
3937 if (value != 0 && ! TREE_OVERFLOW (value))
3938 return build_range_check (type,
3939 fold (build2 (MINUS_EXPR, etype, exp, low)),
3940 1, fold_convert (etype, integer_zero_node),
3941 value);
3942
3943 return 0;
3944 }
3945 \f
3946 /* Given two ranges, see if we can merge them into one. Return 1 if we
3947 can, 0 if we can't. Set the output range into the specified parameters. */
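/* For example, merging + [2, -] (x >= 2) with + [-, 9] (x <= 9) under
   "and" yields + [2, 9]. Excluded point ranges merge only when
   adjacent: - [0, 0] with - [1, 1] gives - [0, 1], while - [0, 0]
   with - [5, 5] cannot be expressed as one range for a signed type.  */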
3948
3949 static int
3950 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3951 tree high0, int in1_p, tree low1, tree high1)
3952 {
3953 int no_overlap;
3954 int subset;
3955 int temp;
3956 tree tem;
3957 int in_p;
3958 tree low, high;
3959 int lowequal = ((low0 == 0 && low1 == 0)
3960 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3961 low0, 0, low1, 0)));
3962 int highequal = ((high0 == 0 && high1 == 0)
3963 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3964 high0, 1, high1, 1)));
3965
3966 /* Make range 0 be the range that starts first, or ends last if they
3967 start at the same value. Swap them if that is not already the case. */
3968 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3969 low0, 0, low1, 0))
3970 || (lowequal
3971 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3972 high1, 1, high0, 1))))
3973 {
3974 temp = in0_p, in0_p = in1_p, in1_p = temp;
3975 tem = low0, low0 = low1, low1 = tem;
3976 tem = high0, high0 = high1, high1 = tem;
3977 }
3978
3979 /* Now flag two cases, whether the ranges are disjoint or whether the
3980 second range is totally subsumed in the first. Note that the tests
3981 below are simplified by the ones above. */
3982 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3983 high0, 1, low1, 0));
3984 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3985 high1, 1, high0, 1));
3986
3987 /* We now have four cases, depending on whether we are including or
3988 excluding the two ranges. */
3989 if (in0_p && in1_p)
3990 {
3991 /* If they don't overlap, the result is false. If the second range
3992 is a subset it is the result. Otherwise, the range is from the start
3993 of the second to the end of the first. */
3994 if (no_overlap)
3995 in_p = 0, low = high = 0;
3996 else if (subset)
3997 in_p = 1, low = low1, high = high1;
3998 else
3999 in_p = 1, low = low1, high = high0;
4000 }
4001
4002 else if (in0_p && ! in1_p)
4003 {
4004 /* If they don't overlap, the result is the first range. If they are
4005 equal, the result is false. If the second range is a subset of the
4006 first, and the ranges begin at the same place, we go from just after
4007 the end of the first range to the end of the second. If the second
4008 range is not a subset of the first, or if it is a subset and both
4009 ranges end at the same place, the range starts at the start of the
4010 first range and ends just before the second range.
4011 Otherwise, we can't describe this as a single range. */
4012 if (no_overlap)
4013 in_p = 1, low = low0, high = high0;
4014 else if (lowequal && highequal)
4015 in_p = 0, low = high = 0;
4016 else if (subset && lowequal)
4017 {
4018 in_p = 1, high = high0;
4019 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4020 integer_one_node, 0);
4021 }
4022 else if (! subset || highequal)
4023 {
4024 in_p = 1, low = low0;
4025 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4026 integer_one_node, 0);
4027 }
4028 else
4029 return 0;
4030 }
4031
4032 else if (! in0_p && in1_p)
4033 {
4034 /* If they don't overlap, the result is the second range. If the second
4035 is a subset of the first, the result is false. Otherwise,
4036 the range starts just after the first range and ends at the
4037 end of the second. */
4038 if (no_overlap)
4039 in_p = 1, low = low1, high = high1;
4040 else if (subset || highequal)
4041 in_p = 0, low = high = 0;
4042 else
4043 {
4044 in_p = 1, high = high1;
4045 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4046 integer_one_node, 0);
4047 }
4048 }
4049
4050 else
4051 {
4052 /* The case where we are excluding both ranges. Here the complex case
4053 is if they don't overlap. In that case, the only time we have a
4054 range is if they are adjacent. If the second is a subset of the
4055 first, the result is the first. Otherwise, the range to exclude
4056 starts at the beginning of the first range and ends at the end of the
4057 second. */
4058 if (no_overlap)
4059 {
4060 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4061 range_binop (PLUS_EXPR, NULL_TREE,
4062 high0, 1,
4063 integer_one_node, 1),
4064 1, low1, 0)))
4065 in_p = 0, low = low0, high = high1;
4066 else
4067 {
4068 /* Canonicalize - [min, x] into - [-, x]. */
4069 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4070 switch (TREE_CODE (TREE_TYPE (low0)))
4071 {
4072 case ENUMERAL_TYPE:
4073 if (TYPE_PRECISION (TREE_TYPE (low0))
4074 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4075 break;
4076 /* FALLTHROUGH */
4077 case INTEGER_TYPE:
4078 case CHAR_TYPE:
4079 if (tree_int_cst_equal (low0,
4080 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4081 low0 = 0;
4082 break;
4083 case POINTER_TYPE:
4084 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4085 && integer_zerop (low0))
4086 low0 = 0;
4087 break;
4088 default:
4089 break;
4090 }
4091
4092 /* Canonicalize - [x, max] into - [x, -]. */
4093 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4094 switch (TREE_CODE (TREE_TYPE (high1)))
4095 {
4096 case ENUMERAL_TYPE:
4097 if (TYPE_PRECISION (TREE_TYPE (high1))
4098 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4099 break;
4100 /* FALLTHROUGH */
4101 case INTEGER_TYPE:
4102 case CHAR_TYPE:
4103 if (tree_int_cst_equal (high1,
4104 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4105 high1 = 0;
4106 break;
4107 case POINTER_TYPE:
4108 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4109 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4110 high1, 1,
4111 integer_one_node, 1)))
4112 high1 = 0;
4113 break;
4114 default:
4115 break;
4116 }
4117
4118 /* The ranges might also be adjacent across the maximum and
4119 minimum values of the given type. For
4120 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4121 return + [x + 1, y - 1]. */
4122 if (low0 == 0 && high1 == 0)
4123 {
4124 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4125 integer_one_node, 1);
4126 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4127 integer_one_node, 0);
4128 if (low == 0 || high == 0)
4129 return 0;
4130
4131 in_p = 1;
4132 }
4133 else
4134 return 0;
4135 }
4136 }
4137 else if (subset)
4138 in_p = 0, low = low0, high = high0;
4139 else
4140 in_p = 0, low = low0, high = high1;
4141 }
4142
4143 *pin_p = in_p, *plow = low, *phigh = high;
4144 return 1;
4145 }
4146 \f
4147
4148 /* Subroutine of fold, looking inside expressions of the form
4149 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4150 of the COND_EXPR. This function is being used also to optimize
4151 A op B ? C : A, by reversing the comparison first.
4152
4153 Return a folded expression whose code is not a COND_EXPR
4154 anymore, or NULL_TREE if no folding opportunity is found. */
4155
4156 static tree
4157 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4158 {
4159 enum tree_code comp_code = TREE_CODE (arg0);
4160 tree arg00 = TREE_OPERAND (arg0, 0);
4161 tree arg01 = TREE_OPERAND (arg0, 1);
4162 tree arg1_type = TREE_TYPE (arg1);
4163 tree tem;
4164
4165 STRIP_NOPS (arg1);
4166 STRIP_NOPS (arg2);
4167
4168 /* If we have A op 0 ? A : -A, consider applying the following
4169 transformations:
4170
4171 A == 0? A : -A same as -A
4172 A != 0? A : -A same as A
4173 A >= 0? A : -A same as abs (A)
4174 A > 0? A : -A same as abs (A)
4175 A <= 0? A : -A same as -abs (A)
4176 A < 0? A : -A same as -abs (A)
4177
4178 None of these transformations work for modes with signed
4179 zeros. If A is +/-0, the first two transformations will
4180 change the sign of the result (from +0 to -0, or vice
4181 versa). The last four will fix the sign of the result,
4182 even though the original expressions could be positive or
4183 negative, depending on the sign of A.
4184
4185 Note that all these transformations are correct if A is
4186 NaN, since the two alternatives (A and -A) are also NaNs. */
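/* To see the signed-zero hazard concretely: with A == -0.0, the
   original "A == 0 ? A : -A" evaluates to -0.0, while the suggested
   replacement -A evaluates to +0.0; so the first transformation is
   only safe when the sign of zero does not matter.  */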
4187 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4188 ? real_zerop (arg01)
4189 : integer_zerop (arg01))
4190 && TREE_CODE (arg2) == NEGATE_EXPR
4191 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4192 switch (comp_code)
4193 {
4194 case EQ_EXPR:
4195 case UNEQ_EXPR:
4196 tem = fold_convert (arg1_type, arg1);
4197 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4198 case NE_EXPR:
4199 case LTGT_EXPR:
4200 return pedantic_non_lvalue (fold_convert (type, arg1));
4201 case UNGE_EXPR:
4202 case UNGT_EXPR:
4203 if (flag_trapping_math)
4204 break;
4205 /* Fall through. */
4206 case GE_EXPR:
4207 case GT_EXPR:
4208 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4209 arg1 = fold_convert (lang_hooks.types.signed_type
4210 (TREE_TYPE (arg1)), arg1);
4211 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4212 return pedantic_non_lvalue (fold_convert (type, tem));
4213 case UNLE_EXPR:
4214 case UNLT_EXPR:
4215 if (flag_trapping_math)
4216 break;
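/* Fall through. */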
4217 case LE_EXPR:
4218 case LT_EXPR:
4219 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4220 arg1 = fold_convert (lang_hooks.types.signed_type
4221 (TREE_TYPE (arg1)), arg1);
4222 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4223 return negate_expr (fold_convert (type, tem));
4224 default:
4225 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4226 break;
4227 }
4228
4229 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4230 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4231 both transformations are correct when A is NaN: A != 0
4232 is then true, and A == 0 is false. */
4233
4234 if (integer_zerop (arg01) && integer_zerop (arg2))
4235 {
4236 if (comp_code == NE_EXPR)
4237 return pedantic_non_lvalue (fold_convert (type, arg1));
4238 else if (comp_code == EQ_EXPR)
4239 return fold_convert (type, integer_zero_node);
4240 }
4241
4242 /* Try some transformations of A op B ? A : B.
4243
4244 A == B? A : B same as B
4245 A != B? A : B same as A
4246 A >= B? A : B same as max (A, B)
4247 A > B? A : B same as max (B, A)
4248 A <= B? A : B same as min (A, B)
4249 A < B? A : B same as min (B, A)
4250
4251 As above, these transformations don't work in the presence
4252 of signed zeros. For example, if A and B are zeros of
4253 opposite sign, the first two transformations will change
4254 the sign of the result. In the last four, the original
4255 expressions give different results for (A=+0, B=-0) and
4256 (A=-0, B=+0), but the transformed expressions do not.
4257
4258 The first two transformations are correct if either A or B
4259 is a NaN. In the first transformation, the condition will
4260 be false, and B will indeed be chosen. In the case of the
4261 second transformation, the condition A != B will be true,
4262 and A will be chosen.
4263
4264 The conversions to max() and min() are not correct if B is
4265 a number and A is not. The conditions in the original
4266 expressions will be false, so all four give B. The min()
4267 and max() versions would give a NaN instead. */
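/* Concretely: if A is NaN and B == 1.0, then "A < B ? A : B"
   evaluates to B (the comparison is false), but MIN_EXPR (B, A)
   may yield NaN; this is why the min/max rewrites below are
   guarded by !HONOR_NANS.  */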
4268 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4269 {
4270 tree comp_op0 = arg00;
4271 tree comp_op1 = arg01;
4272 tree comp_type = TREE_TYPE (comp_op0);
4273
4274 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4275 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4276 {
4277 comp_type = type;
4278 comp_op0 = arg1;
4279 comp_op1 = arg2;
4280 }
4281
4282 switch (comp_code)
4283 {
4284 case EQ_EXPR:
4285 return pedantic_non_lvalue (fold_convert (type, arg2));
4286 case NE_EXPR:
4287 return pedantic_non_lvalue (fold_convert (type, arg1));
4288 case LE_EXPR:
4289 case LT_EXPR:
4290 case UNLE_EXPR:
4291 case UNLT_EXPR:
4292 /* In C++ a ?: expression can be an lvalue, so put the
4293 operand which will be used if they are equal first
4294 so that we can convert this back to the
4295 corresponding COND_EXPR. */
4296 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4297 {
4298 comp_op0 = fold_convert (comp_type, comp_op0);
4299 comp_op1 = fold_convert (comp_type, comp_op1);
4300 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4301 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4302 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4303 return pedantic_non_lvalue (fold_convert (type, tem));
4304 }
4305 break;
4306 case GE_EXPR:
4307 case GT_EXPR:
4308 case UNGE_EXPR:
4309 case UNGT_EXPR:
4310 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4311 {
4312 comp_op0 = fold_convert (comp_type, comp_op0);
4313 comp_op1 = fold_convert (comp_type, comp_op1);
4314 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4315 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4316 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4317 return pedantic_non_lvalue (fold_convert (type, tem));
4318 }
4319 break;
4320 case UNEQ_EXPR:
4321 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4322 return pedantic_non_lvalue (fold_convert (type, arg2));
4323 break;
4324 case LTGT_EXPR:
4325 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4326 return pedantic_non_lvalue (fold_convert (type, arg1));
4327 break;
4328 default:
4329 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4330 break;
4331 }
4332 }
4333
4334 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4335 we might still be able to simplify this. For example,
4336 if C1 is one less or one more than C2, this might have started
4337 out as a MIN or MAX and been transformed by this function.
4338 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
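/* E.g., "x < 6 ? x : 5" has C1 == 6 == C2 + 1 and is recognized
   below as MIN_EXPR (x, 5) (hypothetical integer operands, chosen
   for illustration).  */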
4339
4340 if (INTEGRAL_TYPE_P (type)
4341 && TREE_CODE (arg01) == INTEGER_CST
4342 && TREE_CODE (arg2) == INTEGER_CST)
4343 switch (comp_code)
4344 {
4345 case EQ_EXPR:
4346 /* We can replace A with C1 in this case. */
4347 arg1 = fold_convert (type, arg01);
4348 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4349
4350 case LT_EXPR:
4351 /* If C1 is C2 + 1, this is min(A, C2). */
4352 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4353 OEP_ONLY_CONST)
4354 && operand_equal_p (arg01,
4355 const_binop (PLUS_EXPR, arg2,
4356 integer_one_node, 0),
4357 OEP_ONLY_CONST))
4358 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4359 type, arg1, arg2)));
4360 break;
4361
4362 case LE_EXPR:
4363 /* If C1 is C2 - 1, this is min(A, C2). */
4364 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4365 OEP_ONLY_CONST)
4366 && operand_equal_p (arg01,
4367 const_binop (MINUS_EXPR, arg2,
4368 integer_one_node, 0),
4369 OEP_ONLY_CONST))
4370 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4371 type, arg1, arg2)));
4372 break;
4373
4374 case GT_EXPR:
4375 /* If C1 is C2 - 1, this is max(A, C2). */
4376 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4377 OEP_ONLY_CONST)
4378 && operand_equal_p (arg01,
4379 const_binop (MINUS_EXPR, arg2,
4380 integer_one_node, 0),
4381 OEP_ONLY_CONST))
4382 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4383 type, arg1, arg2)));
4384 break;
4385
4386 case GE_EXPR:
4387 /* If C1 is C2 + 1, this is max(A, C2). */
4388 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4389 OEP_ONLY_CONST)
4390 && operand_equal_p (arg01,
4391 const_binop (PLUS_EXPR, arg2,
4392 integer_one_node, 0),
4393 OEP_ONLY_CONST))
4394 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4395 type, arg1, arg2)));
4396 break;
4397 case NE_EXPR:
4398 break;
4399 default:
4400 gcc_unreachable ();
4401 }
4402
4403 return NULL_TREE;
4404 }
4405
4406
4407 \f
4408 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4409 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4410 #endif
4411
4412 /* EXP is some logical combination of boolean tests. See if we can
4413 merge it into some range test. Return the new tree if so. */
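/* E.g., "ch >= '0' && ch <= '9'" yields the ranges + ['0', -] and
   + [-, '9'], which merge to + ['0', '9'] and can be built as a
   single test of the form (unsigned) (ch - '0') <= 9 (a sketch;
   the exact form depends on the types involved).  */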
4414
4415 static tree
4416 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4417 {
4418 int or_op = (code == TRUTH_ORIF_EXPR
4419 || code == TRUTH_OR_EXPR);
4420 int in0_p, in1_p, in_p;
4421 tree low0, low1, low, high0, high1, high;
4422 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4423 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4424 tree tem;
4425
4426 /* If this is an OR operation, invert both sides; we will invert
4427 again at the end. */
4428 if (or_op)
4429 in0_p = ! in0_p, in1_p = ! in1_p;
4430
4431 /* If both expressions are the same, and we can both merge the ranges
4432 and build the range test, return it or its inversion. If one of the
4433 ranges is always true or always false, consider it to be the same
4434 expression as the other. */
4435 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4436 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4437 in1_p, low1, high1)
4438 && 0 != (tem = (build_range_check (type,
4439 lhs != 0 ? lhs
4440 : rhs != 0 ? rhs : integer_zero_node,
4441 in_p, low, high))))
4442 return or_op ? invert_truthvalue (tem) : tem;
4443
4444 /* On machines where the branch cost is expensive, if this is a
4445 short-circuited branch and the underlying object on both sides
4446 is the same, make a non-short-circuit operation. */
4447 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4448 && lhs != 0 && rhs != 0
4449 && (code == TRUTH_ANDIF_EXPR
4450 || code == TRUTH_ORIF_EXPR)
4451 && operand_equal_p (lhs, rhs, 0))
4452 {
4453 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4454 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4455 which case we can't do this. */
4456 if (simple_operand_p (lhs))
4457 return build2 (code == TRUTH_ANDIF_EXPR
4458 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4459 type, op0, op1);
4460
4461 else if (lang_hooks.decls.global_bindings_p () == 0
4462 && ! CONTAINS_PLACEHOLDER_P (lhs))
4463 {
4464 tree common = save_expr (lhs);
4465
4466 if (0 != (lhs = build_range_check (type, common,
4467 or_op ? ! in0_p : in0_p,
4468 low0, high0))
4469 && (0 != (rhs = build_range_check (type, common,
4470 or_op ? ! in1_p : in1_p,
4471 low1, high1))))
4472 return build2 (code == TRUTH_ANDIF_EXPR
4473 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4474 type, lhs, rhs);
4475 }
4476 }
4477
4478 return 0;
4479 }
4480 \f
4481 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4482 bit value. Arrange things so the extra bits will be set to zero if and
4483 only if C is sign-extended to its full width. If MASK is nonzero,
4484 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4485
4486 static tree
4487 unextend (tree c, int p, int unsignedp, tree mask)
4488 {
4489 tree type = TREE_TYPE (c);
4490 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4491 tree temp;
4492
4493 if (p == modesize || unsignedp)
4494 return c;
4495
4496 /* We work by getting just the sign bit into the low-order bit, then
4497 into the high-order bit, then sign-extend. We then XOR that value
4498 with C. */
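/* Illustrative trace (8-bit mode, P == 3, C == 5 == 0b101, MASK == 0):
   TEMP becomes 1 (the sign bit of the 3-bit field), then 0b10000000
   after the left shift, then 0b11111000 after the arithmetic right
   shift; C ^ TEMP == 0b11111101, i.e. the 3-bit value -3 sign-extended
   to the full mode width.  */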
4499 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4500 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4501
4502 /* We must use a signed type in order to get an arithmetic right shift.
4503 However, we must also avoid introducing accidental overflows, so that
4504 a subsequent call to integer_zerop will work. Hence we must
4505 do the type conversion here. At this point, the constant is either
4506 zero or one, and the conversion to a signed type can never overflow.
4507 We could get an overflow if this conversion is done anywhere else. */
4508 if (TYPE_UNSIGNED (type))
4509 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4510
4511 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4512 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4513 if (mask != 0)
4514 temp = const_binop (BIT_AND_EXPR, temp,
4515 fold_convert (TREE_TYPE (c), mask), 0);
4516 /* If necessary, convert the type back to match the type of C. */
4517 if (TYPE_UNSIGNED (type))
4518 temp = fold_convert (type, temp);
4519
4520 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4521 }
4522 \f
4523 /* Find ways of folding logical expressions of LHS and RHS:
4524 Try to merge two comparisons to the same innermost item.
4525 Look for range tests like "ch >= '0' && ch <= '9'".
4526 Look for combinations of simple terms on machines with expensive branches
4527 and evaluate the RHS unconditionally.
4528
4529 For example, if we have p->a == 2 && p->b == 4 and we can make an
4530 object large enough to span both A and B, we can do this with a comparison
4531 against the object ANDed with the a mask.
4532
4533 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4534 operations to do this with one comparison.
4535
4536 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4537 function and the one above.
4538
4539 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4540 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4541
4542 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4543 two operands.
4544
4545 We return the simplified tree or 0 if no optimization is possible. */
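/* As a bit-level sketch of the field-merging case: for adjacent
   one-byte fields a and b loaded from the same word, "p->a == 2 &&
   p->b == 4" can become a single two-byte load compared against the
   merged constant (4 << 8) | 2, suitably masked (byte order and
   padding permitting).  */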
4546
4547 static tree
4548 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4549 {
4550 /* If this is the "or" of two comparisons, we can do something if
4551 the comparisons are NE_EXPR. If this is the "and", we can do something
4552 if the comparisons are EQ_EXPR. I.e.,
4553 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4554
4555 WANTED_CODE is this required comparison code. For single-bit fields,
4556 we can convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4557 comparison for one-bit fields.
4558
4559 enum tree_code wanted_code;
4560 enum tree_code lcode, rcode;
4561 tree ll_arg, lr_arg, rl_arg, rr_arg;
4562 tree ll_inner, lr_inner, rl_inner, rr_inner;
4563 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4564 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4565 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4566 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4567 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4568 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4569 enum machine_mode lnmode, rnmode;
4570 tree ll_mask, lr_mask, rl_mask, rr_mask;
4571 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4572 tree l_const, r_const;
4573 tree lntype, rntype, result;
4574 int first_bit, end_bit;
4575 int volatilep;
4576
4577 /* Start by getting the comparison codes. Fail if anything is volatile.
4578 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4579 it were surrounded by a NE_EXPR comparing it against zero. */
4580
4581 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4582 return 0;
4583
4584 lcode = TREE_CODE (lhs);
4585 rcode = TREE_CODE (rhs);
4586
4587 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4588 {
4589 lhs = build2 (NE_EXPR, truth_type, lhs,
4590 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4591 lcode = NE_EXPR;
4592 }
4593
4594 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4595 {
4596 rhs = build2 (NE_EXPR, truth_type, rhs,
4597 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4598 rcode = NE_EXPR;
4599 }
4600
4601 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4602 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4603 return 0;
4604
4605 ll_arg = TREE_OPERAND (lhs, 0);
4606 lr_arg = TREE_OPERAND (lhs, 1);
4607 rl_arg = TREE_OPERAND (rhs, 0);
4608 rr_arg = TREE_OPERAND (rhs, 1);
4609
4610 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4611 if (simple_operand_p (ll_arg)
4612 && simple_operand_p (lr_arg))
4613 {
4614 tree result;
4615 if (operand_equal_p (ll_arg, rl_arg, 0)
4616 && operand_equal_p (lr_arg, rr_arg, 0))
4617 {
4618 result = combine_comparisons (code, lcode, rcode,
4619 truth_type, ll_arg, lr_arg);
4620 if (result)
4621 return result;
4622 }
4623 else if (operand_equal_p (ll_arg, rr_arg, 0)
4624 && operand_equal_p (lr_arg, rl_arg, 0))
4625 {
4626 result = combine_comparisons (code, lcode,
4627 swap_tree_comparison (rcode),
4628 truth_type, ll_arg, lr_arg);
4629 if (result)
4630 return result;
4631 }
4632 }
4633
4634 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4635 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4636
4637 /* If the RHS can be evaluated unconditionally and its operands are
4638 simple, it wins to evaluate the RHS unconditionally on machines
4639 with expensive branches. In this case, this isn't a comparison
4640 that can be merged. Avoid doing this if the RHS is a floating-point
4641 comparison since those can trap. */
4642
4643 if (BRANCH_COST >= 2
4644 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4645 && simple_operand_p (rl_arg)
4646 && simple_operand_p (rr_arg))
4647 {
4648 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4649 if (code == TRUTH_OR_EXPR
4650 && lcode == NE_EXPR && integer_zerop (lr_arg)
4651 && rcode == NE_EXPR && integer_zerop (rr_arg)
4652 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4653 return build2 (NE_EXPR, truth_type,
4654 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4655 ll_arg, rl_arg),
4656 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4657
4658 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4659 if (code == TRUTH_AND_EXPR
4660 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4661 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4662 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4663 return build2 (EQ_EXPR, truth_type,
4664 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4665 ll_arg, rl_arg),
4666 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4667
4668 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4669 return build2 (code, truth_type, lhs, rhs);
4670 }
4671
4672 /* See if the comparisons can be merged. Then get all the parameters for
4673 each side. */
4674
4675 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4676 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4677 return 0;
4678
4679 volatilep = 0;
4680 ll_inner = decode_field_reference (ll_arg,
4681 &ll_bitsize, &ll_bitpos, &ll_mode,
4682 &ll_unsignedp, &volatilep, &ll_mask,
4683 &ll_and_mask);
4684 lr_inner = decode_field_reference (lr_arg,
4685 &lr_bitsize, &lr_bitpos, &lr_mode,
4686 &lr_unsignedp, &volatilep, &lr_mask,
4687 &lr_and_mask);
4688 rl_inner = decode_field_reference (rl_arg,
4689 &rl_bitsize, &rl_bitpos, &rl_mode,
4690 &rl_unsignedp, &volatilep, &rl_mask,
4691 &rl_and_mask);
4692 rr_inner = decode_field_reference (rr_arg,
4693 &rr_bitsize, &rr_bitpos, &rr_mode,
4694 &rr_unsignedp, &volatilep, &rr_mask,
4695 &rr_and_mask);
4696
4697 /* The inner operation on the lhs of each comparison must be the
4698 same if we are to be able to do anything.
4699 Then see if we have constants. If not, the same must be true for
4700 the rhs's. */
4701 if (volatilep || ll_inner == 0 || rl_inner == 0
4702 || ! operand_equal_p (ll_inner, rl_inner, 0))
4703 return 0;
4704
4705 if (TREE_CODE (lr_arg) == INTEGER_CST
4706 && TREE_CODE (rr_arg) == INTEGER_CST)
4707 l_const = lr_arg, r_const = rr_arg;
4708 else if (lr_inner == 0 || rr_inner == 0
4709 || ! operand_equal_p (lr_inner, rr_inner, 0))
4710 return 0;
4711 else
4712 l_const = r_const = 0;
4713
4714 /* If either comparison code is not correct for our logical operation,
4715 fail. However, we can convert a one-bit comparison against zero into
4716 the opposite comparison against that bit being set in the field. */
4717
4718 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4719 if (lcode != wanted_code)
4720 {
4721 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4722 {
4723 /* Make the left operand unsigned, since we are only interested
4724 in the value of one bit. Otherwise we are doing the wrong
4725 thing below. */
4726 ll_unsignedp = 1;
4727 l_const = ll_mask;
4728 }
4729 else
4730 return 0;
4731 }
4732
4733 /* This is analogous to the code for l_const above. */
4734 if (rcode != wanted_code)
4735 {
4736 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4737 {
4738 rl_unsignedp = 1;
4739 r_const = rl_mask;
4740 }
4741 else
4742 return 0;
4743 }
4744
4745 /* After this point all optimizations will generate bit-field
4746 references, which we might not want. */
4747 if (! lang_hooks.can_use_bit_fields_p ())
4748 return 0;
4749
4750 /* See if we can find a mode that contains both fields being compared on
4751 the left. If we can't, fail. Otherwise, update all constants and masks
4752 to be relative to a field of that size. */
4753 first_bit = MIN (ll_bitpos, rl_bitpos);
4754 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4755 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4756 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4757 volatilep);
4758 if (lnmode == VOIDmode)
4759 return 0;
4760
4761 lnbitsize = GET_MODE_BITSIZE (lnmode);
4762 lnbitpos = first_bit & ~ (lnbitsize - 1);
4763 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4764 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4765
4766 if (BYTES_BIG_ENDIAN)
4767 {
4768 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4769 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4770 }
4771
4772 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4773 size_int (xll_bitpos), 0);
4774 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4775 size_int (xrl_bitpos), 0);
4776
4777 if (l_const)
4778 {
4779 l_const = fold_convert (lntype, l_const);
4780 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4781 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4782 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4783 fold (build1 (BIT_NOT_EXPR,
4784 lntype, ll_mask)),
4785 0)))
4786 {
4787 warning ("comparison is always %d", wanted_code == NE_EXPR);
4788
4789 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4790 }
4791 }
4792 if (r_const)
4793 {
4794 r_const = fold_convert (lntype, r_const);
4795 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4796 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4797 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4798 fold (build1 (BIT_NOT_EXPR,
4799 lntype, rl_mask)),
4800 0)))
4801 {
4802 warning ("comparison is always %d", wanted_code == NE_EXPR);
4803
4804 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4805 }
4806 }
4807
4808 /* If the right sides are not constant, do the same for them. Also,
4809 disallow this optimization if a size or signedness mismatch occurs
4810 between the left and right sides. */
4811 if (l_const == 0)
4812 {
4813 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4814 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4815 /* Make sure the two fields on the right
4816 correspond to the left without being swapped. */
4817 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4818 return 0;
4819
4820 first_bit = MIN (lr_bitpos, rr_bitpos);
4821 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4822 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4823 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4824 volatilep);
4825 if (rnmode == VOIDmode)
4826 return 0;
4827
4828 rnbitsize = GET_MODE_BITSIZE (rnmode);
4829 rnbitpos = first_bit & ~ (rnbitsize - 1);
4830 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4831 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4832
4833 if (BYTES_BIG_ENDIAN)
4834 {
4835 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4836 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4837 }
4838
4839 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4840 size_int (xlr_bitpos), 0);
4841 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4842 size_int (xrr_bitpos), 0);
4843
4844 /* Make a mask that corresponds to both fields being compared.
4845 Do this for both items being compared. If the operands are the
4846 same size and the bits being compared are in the same position
4847 then we can do this by masking both and comparing the masked
4848 results. */
4849 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4850 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4851 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4852 {
4853 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4854 ll_unsignedp || rl_unsignedp);
4855 if (! all_ones_mask_p (ll_mask, lnbitsize))
4856 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4857
4858 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4859 lr_unsignedp || rr_unsignedp);
4860 if (! all_ones_mask_p (lr_mask, rnbitsize))
4861 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4862
4863 return build2 (wanted_code, truth_type, lhs, rhs);
4864 }
4865
4866 /* There is still another way we can do something: If both pairs of
4867 fields being compared are adjacent, we may be able to make a wider
4868 field containing them both.
4869
4870 Note that we still must mask the lhs/rhs expressions. Furthermore,
4871 the mask must be shifted to account for the shift done by
4872 make_bit_field_ref. */
4873 if ((ll_bitsize + ll_bitpos == rl_bitpos
4874 && lr_bitsize + lr_bitpos == rr_bitpos)
4875 || (ll_bitpos == rl_bitpos + rl_bitsize
4876 && lr_bitpos == rr_bitpos + rr_bitsize))
4877 {
4878 tree type;
4879
4880 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4881 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4882 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4883 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4884
4885 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4886 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4887 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4888 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4889
4890 /* Convert to the smaller type before masking out unwanted bits. */
4891 type = lntype;
4892 if (lntype != rntype)
4893 {
4894 if (lnbitsize > rnbitsize)
4895 {
4896 lhs = fold_convert (rntype, lhs);
4897 ll_mask = fold_convert (rntype, ll_mask);
4898 type = rntype;
4899 }
4900 else if (lnbitsize < rnbitsize)
4901 {
4902 rhs = fold_convert (lntype, rhs);
4903 lr_mask = fold_convert (lntype, lr_mask);
4904 type = lntype;
4905 }
4906 }
4907
4908 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4909 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4910
4911 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4912 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4913
4914 return build2 (wanted_code, truth_type, lhs, rhs);
4915 }
4916
4917 return 0;
4918 }
4919
4920 /* Handle the case of comparisons with constants. If there is something in
4921 common between the masks, those bits of the constants must be the same.
4922 If not, the condition is always false. Test for this to avoid generating
4923 incorrect code below. */
4924 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4925 if (! integer_zerop (result)
4926 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4927 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4928 {
4929 if (wanted_code == NE_EXPR)
4930 {
4931 warning ("%<or%> of unmatched not-equal tests is always 1");
4932 return constant_boolean_node (true, truth_type);
4933 }
4934 else
4935 {
4936 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4937 return constant_boolean_node (false, truth_type);
4938 }
4939 }
4940
4941 /* Construct the expression we will return. First get the component
4942 reference we will make. Unless the mask is all ones for the full
4943 width of that field, perform the mask operation. Then compare with
4944 the merged constant. */
4945 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4946 ll_unsignedp || rl_unsignedp);
4947
4948 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4949 if (! all_ones_mask_p (ll_mask, lnbitsize))
4950 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4951
4952 return build2 (wanted_code, truth_type, result,
4953 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4954 }
4955 \f
4956 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4957 constant. */
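/* E.g., MAX_EXPR (x, 3) > 5 becomes x > 5 (since 3 <= 5 the MAX is
   irrelevant), while MAX_EXPR (x, 3) > 2 is always true; the cases
   below handle EQ_EXPR and GT_EXPR directly and derive the rest by
   inversion and combination (illustrative values).  */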
4958
4959 static tree
4960 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
4961 {
4962 tree arg0 = op0;
4963 enum tree_code op_code;
4964 tree comp_const = op1;
4965 tree minmax_const;
4966 int consts_equal, consts_lt;
4967 tree inner;
4968
4969 STRIP_SIGN_NOPS (arg0);
4970
4971 op_code = TREE_CODE (arg0);
4972 minmax_const = TREE_OPERAND (arg0, 1);
4973 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4974 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4975 inner = TREE_OPERAND (arg0, 0);
4976
4977 /* If something does not permit us to optimize, return the original tree. */
4978 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4979 || TREE_CODE (comp_const) != INTEGER_CST
4980 || TREE_CONSTANT_OVERFLOW (comp_const)
4981 || TREE_CODE (minmax_const) != INTEGER_CST
4982 || TREE_CONSTANT_OVERFLOW (minmax_const))
4983 return NULL_TREE;
4984
4985 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4986 and GT_EXPR, doing the rest with recursive calls using logical
4987 simplifications. */
4988 switch (code)
4989 {
4990 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4991 {
4992 /* FIXME: We should be able to invert code without building a
4993 scratch tree node, but doing so would require us to
4994 duplicate a part of invert_truthvalue here. */
4995 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
4996 tem = optimize_minmax_comparison (TREE_CODE (tem),
4997 TREE_TYPE (tem),
4998 TREE_OPERAND (tem, 0),
4999 TREE_OPERAND (tem, 1));
5000 return invert_truthvalue (tem);
5001 }
5002
5003 case GE_EXPR:
5004 return
5005 fold (build2 (TRUTH_ORIF_EXPR, type,
5006 optimize_minmax_comparison
5007 (EQ_EXPR, type, arg0, comp_const),
5008 optimize_minmax_comparison
5009 (GT_EXPR, type, arg0, comp_const)));
5010
5011 case EQ_EXPR:
5012 if (op_code == MAX_EXPR && consts_equal)
5013 /* MAX (X, 0) == 0 -> X <= 0 */
5014 return fold (build2 (LE_EXPR, type, inner, comp_const));
5015
5016 else if (op_code == MAX_EXPR && consts_lt)
5017 /* MAX (X, 0) == 5 -> X == 5 */
5018 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5019
5020 else if (op_code == MAX_EXPR)
5021 /* MAX (X, 0) == -1 -> false */
5022 return omit_one_operand (type, integer_zero_node, inner);
5023
5024 else if (consts_equal)
5025 /* MIN (X, 0) == 0 -> X >= 0 */
5026 return fold (build2 (GE_EXPR, type, inner, comp_const));
5027
5028 else if (consts_lt)
5029 /* MIN (X, 0) == 5 -> false */
5030 return omit_one_operand (type, integer_zero_node, inner);
5031
5032 else
5033 /* MIN (X, 0) == -1 -> X == -1 */
5034 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5035
5036 case GT_EXPR:
5037 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5038 /* MAX (X, 0) > 0 -> X > 0
5039 MAX (X, 0) > 5 -> X > 5 */
5040 return fold (build2 (GT_EXPR, type, inner, comp_const));
5041
5042 else if (op_code == MAX_EXPR)
5043 /* MAX (X, 0) > -1 -> true */
5044 return omit_one_operand (type, integer_one_node, inner);
5045
5046 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5047 /* MIN (X, 0) > 0 -> false
5048 MIN (X, 0) > 5 -> false */
5049 return omit_one_operand (type, integer_zero_node, inner);
5050
5051 else
5052 /* MIN (X, 0) > -1 -> X > -1 */
5053 return fold (build2 (GT_EXPR, type, inner, comp_const));
5054
5055 default:
5056 return NULL_TREE;
5057 }
5058 }
5059 \f
5060 /* T is an integer expression that is being multiplied by, divided by,
5061 or taken modulo a constant C (CODE says which operation and what kind
5062 of divide or modulus). See if we can eliminate that operation by folding it with
5063 other operations already in T. WIDE_TYPE, if non-null, is a type that
5064 should be used for the computation if wider than our type.
5065
5066 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5067 (X * 2) + (Y * 4). We must, however, be assured that either the original
5068 expression would not overflow or that overflow is undefined for the type
5069 in the language in question.
5070
5071 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5072 the machine has a multiply-accumulate insn or that this is part of an
5073 addressing calculation.
5074
5075 If we return a non-null expression, it is an equivalent form of the
5076 original computation, but need not be in the original type. */
5077
5078 static tree
5079 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5080 {
5081 /* To avoid exponential search depth, refuse to allow recursion past
5082 three levels. Beyond that (1) it's highly unlikely that we'll find
5083 something interesting and (2) we've probably processed it before
5084 when we built the inner expression. */
5085
5086 static int depth;
5087 tree ret;
5088
5089 if (depth > 3)
5090 return NULL;
5091
5092 depth++;
5093 ret = extract_muldiv_1 (t, c, code, wide_type);
5094 depth--;
5095
5096 return ret;
5097 }
5098
5099 static tree
5100 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5101 {
5102 tree type = TREE_TYPE (t);
5103 enum tree_code tcode = TREE_CODE (t);
5104 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5105 > GET_MODE_SIZE (TYPE_MODE (type)))
5106 ? wide_type : type);
5107 tree t1, t2;
5108 int same_p = tcode == code;
5109 tree op0 = NULL_TREE, op1 = NULL_TREE;
5110
5111 /* Don't deal with constants of zero here; they confuse the code below. */
5112 if (integer_zerop (c))
5113 return NULL_TREE;
5114
5115 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5116 op0 = TREE_OPERAND (t, 0);
5117
5118 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5119 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5120
5121 /* Note that we need not handle conditional operations here since fold
5122 already handles those cases. So just do arithmetic here. */
5123 switch (tcode)
5124 {
5125 case INTEGER_CST:
5126 /* For a constant, we can always simplify if we are a multiply
5127 or (for divide and modulus) if it is a multiple of our constant. */
5128 if (code == MULT_EXPR
5129 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5130 return const_binop (code, fold_convert (ctype, t),
5131 fold_convert (ctype, c), 0);
5132 break;
5133
5134 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5135 /* If op0 is an expression ... */
5136 if ((COMPARISON_CLASS_P (op0)
5137 || UNARY_CLASS_P (op0)
5138 || BINARY_CLASS_P (op0)
5139 || EXPRESSION_CLASS_P (op0))
5140 /* ... and is unsigned, and its type is smaller than ctype,
5141 then we cannot pass through as widening. */
5142 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5143 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5144 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5145 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5146 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5147 /* ... or this is a truncation (t is narrower than op0),
5148 then we cannot pass through this narrowing. */
5149 || (GET_MODE_SIZE (TYPE_MODE (type))
5150 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5151 /* ... or signedness changes for division or modulus,
5152 then we cannot pass through this conversion. */
5153 || (code != MULT_EXPR
5154 && (TYPE_UNSIGNED (ctype)
5155 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5156 break;
5157
5158 /* Pass the constant down and see if we can make a simplification. If
5159 we can, replace this expression with the inner simplification for
5160 possible later conversion to our or some other type. */
5161 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5162 && TREE_CODE (t2) == INTEGER_CST
5163 && ! TREE_CONSTANT_OVERFLOW (t2)
5164 && (0 != (t1 = extract_muldiv (op0, t2, code,
5165 code == MULT_EXPR
5166 ? ctype : NULL_TREE))))
5167 return t1;
5168 break;
5169
5170 case ABS_EXPR:
5171 /* If widening the type changes it from signed to unsigned, then we
5172 must avoid building ABS_EXPR itself as unsigned. */
5173 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5174 {
5175 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5176 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5177 {
5178 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5179 return fold_convert (ctype, t1);
5180 }
5181 break;
5182 }
5183 /* FALLTHROUGH */
5184 case NEGATE_EXPR:
5185 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5186 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5187 break;
5188
5189 case MIN_EXPR: case MAX_EXPR:
5190 /* If widening the type changes the signedness, then we can't perform
5191 this optimization as that changes the result. */
5192 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5193 break;
5194
5195 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5196 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5197 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5198 {
5199 if (tree_int_cst_sgn (c) < 0)
5200 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5201
5202 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5203 fold_convert (ctype, t2)));
5204 }
5205 break;
5206
5207 case LSHIFT_EXPR: case RSHIFT_EXPR:
5208 /* If the second operand is constant, this is a multiplication
5209 or floor division by a power of two, so we can treat it that
5210 way unless the multiplier or divisor overflows. Signed
5211 left-shift overflow is implementation-defined rather than
5212 undefined in C90, so do not convert signed left shift into
5213 multiplication. */
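/* E.g., for unsigned x, dividing "x << 3" by 2 first rewrites the
   shift as "x * 8" here, giving the multiply/divide cases a chance
   to combine it with the division (illustrative values).  */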
5214 if (TREE_CODE (op1) == INTEGER_CST
5215 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5216 /* const_binop may not detect overflow correctly,
5217 so check for it explicitly here. */
5218 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5219 && TREE_INT_CST_HIGH (op1) == 0
5220 && 0 != (t1 = fold_convert (ctype,
5221 const_binop (LSHIFT_EXPR,
5222 size_one_node,
5223 op1, 0)))
5224 && ! TREE_OVERFLOW (t1))
5225 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5226 ? MULT_EXPR : FLOOR_DIV_EXPR,
5227 ctype, fold_convert (ctype, op0), t1),
5228 c, code, wide_type);
5229 break;
5230
5231 case PLUS_EXPR: case MINUS_EXPR:
5232 /* See if we can eliminate the operation on both sides. If we can, we
5233 can return a new PLUS or MINUS. If we can't, the only remaining
5234 cases where we can do anything are if the second operand is a
5235 constant. */
5236 t1 = extract_muldiv (op0, c, code, wide_type);
5237 t2 = extract_muldiv (op1, c, code, wide_type);
5238 if (t1 != 0 && t2 != 0
5239 && (code == MULT_EXPR
5240 /* If not multiplication, we can only do this if both operands
5241 are divisible by c. */
5242 || (multiple_of_p (ctype, op0, c)
5243 && multiple_of_p (ctype, op1, c))))
5244 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5245 fold_convert (ctype, t2)));
5246
5247 /* If this was a subtraction, negate OP1 and set it to be an addition.
5248 This simplifies the logic below. */
5249 if (tcode == MINUS_EXPR)
5250 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5251
5252 if (TREE_CODE (op1) != INTEGER_CST)
5253 break;
5254
5255 /* If either OP1 or C are negative, this optimization is not safe for
5256 	 some of the division and remainder types, while for others we need
5257 to change the code. */
5258 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5259 {
5260 if (code == CEIL_DIV_EXPR)
5261 code = FLOOR_DIV_EXPR;
5262 else if (code == FLOOR_DIV_EXPR)
5263 code = CEIL_DIV_EXPR;
5264 else if (code != MULT_EXPR
5265 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5266 break;
5267 }
5268
5269 /* If it's a multiply or a division/modulus operation of a multiple
5270 of our constant, do the operation and verify it doesn't overflow. */
5271 if (code == MULT_EXPR
5272 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5273 {
5274 op1 = const_binop (code, fold_convert (ctype, op1),
5275 fold_convert (ctype, c), 0);
5276 /* We allow the constant to overflow with wrapping semantics. */
5277 if (op1 == 0
5278 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5279 break;
5280 }
5281 else
5282 break;
5283
5284 	 /* If we have an unsigned type that is not a sizetype, we cannot widen
5285 the operation since it will change the result if the original
5286 computation overflowed. */
5287 if (TYPE_UNSIGNED (ctype)
5288 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5289 && ctype != type)
5290 break;
5291
5292 /* If we were able to eliminate our operation from the first side,
5293 apply our operation to the second side and reform the PLUS. */
5294 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5295 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5296
5297 	 /* The last case is when this is a multiplication.  In that case, we can
5298 apply the distributive law to commute the multiply and addition
5299 if the multiplication of the constants doesn't overflow. */
5300 if (code == MULT_EXPR)
5301 return fold (build2 (tcode, ctype,
5302 fold (build2 (code, ctype,
5303 fold_convert (ctype, op0),
5304 fold_convert (ctype, c))),
5305 op1));
5306
5307 break;
5308
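/* Illustrative sketch, not part of GCC: the distributive step above,
   (X + C1) * C2 -> X * C2 + C1 * C2, shown with unsigned arithmetic so
   that any wraparound is well defined.  Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int x;
  for (x = 0; x < 4096; x++)
    assert ((x + 3u) * 4u == x * 4u + 12u);
  return 0;
}
#endif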
5309 case MULT_EXPR:
5310 /* We have a special case here if we are doing something like
5311 (C * 8) % 4 since we know that's zero. */
5312 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5313 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5314 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5315 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5316 return omit_one_operand (type, integer_zero_node, op0);
5317
5318 /* ... fall through ... */
5319
5320 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5321 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5322 /* If we can extract our operation from the LHS, do so and return a
5323 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5324 do something only if the second operand is a constant. */
5325 if (same_p
5326 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5327 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5328 fold_convert (ctype, op1)));
5329 else if (tcode == MULT_EXPR && code == MULT_EXPR
5330 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5331 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5332 fold_convert (ctype, t1)));
5333 else if (TREE_CODE (op1) != INTEGER_CST)
5334 return 0;
5335
5336 /* If these are the same operation types, we can associate them
5337 assuming no overflow. */
5338 if (tcode == code
5339 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5340 fold_convert (ctype, c), 0))
5341 && ! TREE_OVERFLOW (t1))
5342 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5343
5344 /* If these operations "cancel" each other, we have the main
5345 optimizations of this pass, which occur when either constant is a
5346 multiple of the other, in which case we replace this with either an
5347 	 operation of CODE or an operation of TCODE.
5348
5349 If we have an unsigned type that is not a sizetype, we cannot do
5350 this since it will change the result if the original computation
5351 overflowed. */
5352 if ((! TYPE_UNSIGNED (ctype)
5353 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5354 && ! flag_wrapv
5355 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5356 || (tcode == MULT_EXPR
5357 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5358 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5359 {
5360 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5361 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5362 fold_convert (ctype,
5363 const_binop (TRUNC_DIV_EXPR,
5364 op1, c, 0))));
5365 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5366 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5367 fold_convert (ctype,
5368 const_binop (TRUNC_DIV_EXPR,
5369 c, op1, 0))));
5370 }
5371 break;
5372
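/* Illustrative sketch, not part of GCC: the "cancelling" folds above.
   When one constant is a multiple of the other, a multiplication and a
   division collapse into a single operation, provided the intermediate
   product cannot overflow.  Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    {
      assert ((x * 6) / 3 == x * 2);   /* OP1 is a multiple of C.  */
      assert ((x * 2) / 6 == x / 3);   /* C is a multiple of OP1.  */
    }
  return 0;
}
#endif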
5373 default:
5374 break;
5375 }
5376
5377 return 0;
5378 }
5379 \f
5380 /* Return a node which has the indicated constant VALUE (either 0 or
5381 1), and is of the indicated TYPE. */
5382
5383 tree
5384 constant_boolean_node (int value, tree type)
5385 {
5386 if (type == integer_type_node)
5387 return value ? integer_one_node : integer_zero_node;
5388 else if (type == boolean_type_node)
5389 return value ? boolean_true_node : boolean_false_node;
5390 else
5391 return build_int_cst (type, value);
5392 }
5393
5394
5395 /* Return true if EXPR looks like an ARRAY_REF and set BASE and
5396    OFFSET to the appropriate trees.  If there is no offset,
5397    OFFSET is set to NULL_TREE. */
5398
5399 static bool
5400 extract_array_ref (tree expr, tree *base, tree *offset)
5401 {
5402 	 /* We have to be careful when stripping nops: with a different
5403 	    base type, the meaning of the offset can change. */
5404 tree inner_expr = expr;
5405 STRIP_NOPS (inner_expr);
5406 /* One canonical form is a PLUS_EXPR with the first
5407 argument being an ADDR_EXPR with a possible NOP_EXPR
5408 attached. */
5409 if (TREE_CODE (expr) == PLUS_EXPR)
5410 {
5411 tree op0 = TREE_OPERAND (expr, 0);
5412 STRIP_NOPS (op0);
5413 if (TREE_CODE (op0) == ADDR_EXPR)
5414 {
5415 *base = TREE_OPERAND (expr, 0);
5416 *offset = TREE_OPERAND (expr, 1);
5417 return true;
5418 }
5419 }
5420 	 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5421 	    which we transform into an ADDR_EXPR with the appropriate
5422 	    offset.  For any other argument to the ADDR_EXPR we assume a
5423 	    zero offset, and as such do not care about the ADDR_EXPR
5424 	    type and strip possible nops from it. */
5425 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5426 {
5427 tree op0 = TREE_OPERAND (inner_expr, 0);
5428 if (TREE_CODE (op0) == ARRAY_REF)
5429 {
5430 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5431 *offset = TREE_OPERAND (op0, 1);
5432 }
5433 else
5434 {
5435 *base = inner_expr;
5436 *offset = NULL_TREE;
5437 }
5438 return true;
5439 }
5440
5441 return false;
5442 }
5443
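/* Illustrative sketch, not part of GCC: the two canonical address
   forms recognized above denote the same location in C source terms,
   since &a[i] and a + i are interchangeable.  Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[10];
  int i;
  for (i = 0; i < 10; i++)
    assert (&a[i] == a + i);   /* ADDR_EXPR of ARRAY_REF vs. PLUS_EXPR.  */
  return 0;
}
#endif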
5444
5445 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5446    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5447 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5448 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5449 COND is the first argument to CODE; otherwise (as in the example
5450 given here), it is the second argument. TYPE is the type of the
5451 original expression. Return NULL_TREE if no simplification is
5452 possible. */
5453
5454 static tree
5455 fold_binary_op_with_conditional_arg (enum tree_code code,
5456 tree type, tree op0, tree op1,
5457 tree cond, tree arg, int cond_first_p)
5458 {
5459 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5460 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5461 tree test, true_value, false_value;
5462 tree lhs = NULL_TREE;
5463 tree rhs = NULL_TREE;
5464
5465 /* This transformation is only worthwhile if we don't have to wrap
5466 arg in a SAVE_EXPR, and the operation can be simplified on at least
5467 	 one of the branches once it's pushed inside the COND_EXPR. */
5468 if (!TREE_CONSTANT (arg))
5469 return NULL_TREE;
5470
5471 if (TREE_CODE (cond) == COND_EXPR)
5472 {
5473 test = TREE_OPERAND (cond, 0);
5474 true_value = TREE_OPERAND (cond, 1);
5475 false_value = TREE_OPERAND (cond, 2);
5476 	 /* If this operand is an expression that throws (and hence has
5477 	    void type), it does not make sense to try to perform a
5478 	    logical or arithmetic operation involving it. */
5479 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5480 lhs = true_value;
5481 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5482 rhs = false_value;
5483 }
5484 else
5485 {
5486 tree testtype = TREE_TYPE (cond);
5487 test = cond;
5488 true_value = constant_boolean_node (true, testtype);
5489 false_value = constant_boolean_node (false, testtype);
5490 }
5491
5492 arg = fold_convert (arg_type, arg);
5493 if (lhs == 0)
5494 {
5495 true_value = fold_convert (cond_type, true_value);
5496 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5497 : build2 (code, type, arg, true_value));
5498 }
5499 if (rhs == 0)
5500 {
5501 false_value = fold_convert (cond_type, false_value);
5502 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5503 : build2 (code, type, arg, false_value));
5504 }
5505
5506 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5507 return fold_convert (type, test);
5508 }
5509
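/* Illustrative sketch, not part of GCC: the transformation above at
   the C level, with a constant ARG so no SAVE_EXPR is needed.
   Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x = 3, y = 7, b;
  for (b = 0; b <= 1; b++)
    /* a + (b ? x : y) == b ? (a + x) : (a + y), with a == 10.  */
    assert (10 + (b ? x : y) == (b ? 10 + x : 10 + y));
  /* a + (x < y) == (x < y) ? (a + 1) : (a + 0).  */
  assert (10 + (x < y) == ((x < y) ? 10 + 1 : 10 + 0));
  return 0;
}
#endif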
5510 \f
5511 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5512
5513 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5514 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5515 ADDEND is the same as X.
5516
5517 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5518 and finite. The problematic cases are when X is zero, and its mode
5519 has signed zeros. In the case of rounding towards -infinity,
5520 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5521    modes, X + 0 is not the same as X because -0 + 0 is +0. */
5522
5523 static bool
5524 fold_real_zero_addition_p (tree type, tree addend, int negate)
5525 {
5526 if (!real_zerop (addend))
5527 return false;
5528
5529 /* Don't allow the fold with -fsignaling-nans. */
5530 if (HONOR_SNANS (TYPE_MODE (type)))
5531 return false;
5532
5533 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5534 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5535 return true;
5536
5537 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5538 if (TREE_CODE (addend) == REAL_CST
5539 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5540 negate = !negate;
5541
5542 /* The mode has signed zeros, and we have to honor their sign.
5543 In this situation, there is only one case we can return true for.
5544 X - 0 is the same as X unless rounding towards -infinity is
5545 supported. */
5546 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5547 }
5548
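/* Illustrative sketch, not part of GCC: why the predicate above must
   honor signed zeros.  Under round-to-nearest, -0.0 + 0.0 is +0.0, so
   X + 0.0 is not a no-op for X == -0.0, whereas X - 0.0 is.  Uses the
   C99 signbit macro; guarded by #if 0.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  volatile double x = -0.0;        /* volatile defeats compile-time folding.  */
  assert (signbit (x));
  assert (!signbit (x + 0.0));     /* -0.0 + 0.0 == +0.0: the sign is lost.  */
  assert (signbit (x - 0.0));      /* -0.0 - 0.0 == -0.0: X survives.  */
  return 0;
}
#endif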
5549 /* Subroutine of fold() that checks comparisons of built-in math
5550 functions against real constants.
5551
5552 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5553 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5554 is the type of the result and ARG0 and ARG1 are the operands of the
5555 comparison. ARG1 must be a TREE_REAL_CST.
5556
5557 The function returns the constant folded tree if a simplification
5558 can be made, and NULL_TREE otherwise. */
5559
5560 static tree
5561 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5562 tree type, tree arg0, tree arg1)
5563 {
5564 REAL_VALUE_TYPE c;
5565
5566 if (BUILTIN_SQRT_P (fcode))
5567 {
5568 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5569 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5570
5571 c = TREE_REAL_CST (arg1);
5572 if (REAL_VALUE_NEGATIVE (c))
5573 {
5574 /* sqrt(x) < y is always false, if y is negative. */
5575 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5576 return omit_one_operand (type, integer_zero_node, arg);
5577
5578 /* sqrt(x) > y is always true, if y is negative and we
5579 don't care about NaNs, i.e. negative values of x. */
5580 if (code == NE_EXPR || !HONOR_NANS (mode))
5581 return omit_one_operand (type, integer_one_node, arg);
5582
5583 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5584 return fold (build2 (GE_EXPR, type, arg,
5585 build_real (TREE_TYPE (arg), dconst0)));
5586 }
5587 else if (code == GT_EXPR || code == GE_EXPR)
5588 {
5589 REAL_VALUE_TYPE c2;
5590
5591 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5592 real_convert (&c2, mode, &c2);
5593
5594 if (REAL_VALUE_ISINF (c2))
5595 {
5596 /* sqrt(x) > y is x == +Inf, when y is very large. */
5597 if (HONOR_INFINITIES (mode))
5598 return fold (build2 (EQ_EXPR, type, arg,
5599 build_real (TREE_TYPE (arg), c2)));
5600
5601 /* sqrt(x) > y is always false, when y is very large
5602 and we don't care about infinities. */
5603 return omit_one_operand (type, integer_zero_node, arg);
5604 }
5605
5606 /* sqrt(x) > c is the same as x > c*c. */
5607 return fold (build2 (code, type, arg,
5608 build_real (TREE_TYPE (arg), c2)));
5609 }
5610 else if (code == LT_EXPR || code == LE_EXPR)
5611 {
5612 REAL_VALUE_TYPE c2;
5613
5614 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5615 real_convert (&c2, mode, &c2);
5616
5617 if (REAL_VALUE_ISINF (c2))
5618 {
5619 /* sqrt(x) < y is always true, when y is a very large
5620 value and we don't care about NaNs or Infinities. */
5621 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5622 return omit_one_operand (type, integer_one_node, arg);
5623
5624 /* sqrt(x) < y is x != +Inf when y is very large and we
5625 don't care about NaNs. */
5626 if (! HONOR_NANS (mode))
5627 return fold (build2 (NE_EXPR, type, arg,
5628 build_real (TREE_TYPE (arg), c2)));
5629
5630 /* sqrt(x) < y is x >= 0 when y is very large and we
5631 don't care about Infinities. */
5632 if (! HONOR_INFINITIES (mode))
5633 return fold (build2 (GE_EXPR, type, arg,
5634 build_real (TREE_TYPE (arg), dconst0)));
5635
5636 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5637 if (lang_hooks.decls.global_bindings_p () != 0
5638 || CONTAINS_PLACEHOLDER_P (arg))
5639 return NULL_TREE;
5640
5641 arg = save_expr (arg);
5642 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5643 fold (build2 (GE_EXPR, type, arg,
5644 build_real (TREE_TYPE (arg),
5645 dconst0))),
5646 fold (build2 (NE_EXPR, type, arg,
5647 build_real (TREE_TYPE (arg),
5648 c2)))));
5649 }
5650
5651 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5652 if (! HONOR_NANS (mode))
5653 return fold (build2 (code, type, arg,
5654 build_real (TREE_TYPE (arg), c2)));
5655
5656 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5657 if (lang_hooks.decls.global_bindings_p () == 0
5658 && ! CONTAINS_PLACEHOLDER_P (arg))
5659 {
5660 arg = save_expr (arg);
5661 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5662 fold (build2 (GE_EXPR, type, arg,
5663 build_real (TREE_TYPE (arg),
5664 dconst0))),
5665 fold (build2 (code, type, arg,
5666 build_real (TREE_TYPE (arg),
5667 c2)))));
5668 }
5669 }
5670 }
5671
5672 return NULL_TREE;
5673 }
5674
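/* Illustrative sketch, not part of GCC: the sqrt comparison folds at
   the source level.  With c == 3.0, c*c == 9.0 is exact, and since
   sqrt is monotone and correctly rounded, sqrt (x) > c is equivalent
   to x > c*c for nonnegative x.  Guarded by #if 0.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x;
  const double c = 3.0;
  for (x = 0.0; x <= 20.0; x += 0.25)
    assert ((sqrt (x) > c) == (x > c * c));
  return 0;
}
#endif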
5675 /* Subroutine of fold() that optimizes comparisons against Infinities,
5676 either +Inf or -Inf.
5677
5678 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5679 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5680 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5681
5682 The function returns the constant folded tree if a simplification
5683 can be made, and NULL_TREE otherwise. */
5684
5685 static tree
5686 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5687 {
5688 enum machine_mode mode;
5689 REAL_VALUE_TYPE max;
5690 tree temp;
5691 bool neg;
5692
5693 mode = TYPE_MODE (TREE_TYPE (arg0));
5694
5695 /* For negative infinity swap the sense of the comparison. */
5696 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5697 if (neg)
5698 code = swap_tree_comparison (code);
5699
5700 switch (code)
5701 {
5702 case GT_EXPR:
5703 	 /* x > +Inf is always false, if we ignore sNaNs. */
5704 if (HONOR_SNANS (mode))
5705 return NULL_TREE;
5706 return omit_one_operand (type, integer_zero_node, arg0);
5707
5708 case LE_EXPR:
5709 	 /* x <= +Inf is always true, if we don't care about NaNs. */
5710 if (! HONOR_NANS (mode))
5711 return omit_one_operand (type, integer_one_node, arg0);
5712
5713 	 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5714 if (lang_hooks.decls.global_bindings_p () == 0
5715 && ! CONTAINS_PLACEHOLDER_P (arg0))
5716 {
5717 arg0 = save_expr (arg0);
5718 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5719 }
5720 break;
5721
5722 case EQ_EXPR:
5723 case GE_EXPR:
5724 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5725 real_maxval (&max, neg, mode);
5726 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5727 arg0, build_real (TREE_TYPE (arg0), max)));
5728
5729 case LT_EXPR:
5730 /* x < +Inf is always equal to x <= DBL_MAX. */
5731 real_maxval (&max, neg, mode);
5732 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5733 arg0, build_real (TREE_TYPE (arg0), max)));
5734
5735 case NE_EXPR:
5736 /* x != +Inf is always equal to !(x > DBL_MAX). */
5737 real_maxval (&max, neg, mode);
5738 if (! HONOR_NANS (mode))
5739 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5740 arg0, build_real (TREE_TYPE (arg0), max)));
5741
5742 /* The transformation below creates non-gimple code and thus is
5743 not appropriate if we are in gimple form. */
5744 if (in_gimple_form)
5745 return NULL_TREE;
5746
5747 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5748 arg0, build_real (TREE_TYPE (arg0), max)));
5749 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5750
5751 default:
5752 break;
5753 }
5754
5755 return NULL_TREE;
5756 }
5757
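/* Illustrative sketch, not part of GCC: comparisons against +Inf for
   non-NaN operands reduce to comparisons against DBL_MAX, as the cases
   above do via real_maxval.  Uses the C99 INFINITY macro; guarded by
   #if 0.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double vals[] = { -DBL_MAX, -1.0, 0.0, 1.0, DBL_MAX, INFINITY };
  int i;
  for (i = 0; i < 6; i++)
    {
      double x = vals[i];
      assert ((x < INFINITY) == (x <= DBL_MAX));    /* LT_EXPR case.  */
      assert ((x >= INFINITY) == (x > DBL_MAX));    /* GE_EXPR case.  */
    }
  return 0;
}
#endif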
5758 /* Subroutine of fold() that optimizes comparisons of a division by
5759 a nonzero integer constant against an integer constant, i.e.
5760 X/C1 op C2.
5761
5762 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5763 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5764    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5765
5766 The function returns the constant folded tree if a simplification
5767 can be made, and NULL_TREE otherwise. */
5768
5769 static tree
5770 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5771 {
5772 tree prod, tmp, hi, lo;
5773 tree arg00 = TREE_OPERAND (arg0, 0);
5774 tree arg01 = TREE_OPERAND (arg0, 1);
5775 unsigned HOST_WIDE_INT lpart;
5776 HOST_WIDE_INT hpart;
5777 int overflow;
5778
5779 /* We have to do this the hard way to detect unsigned overflow.
5780 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5781 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5782 TREE_INT_CST_HIGH (arg01),
5783 TREE_INT_CST_LOW (arg1),
5784 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5785 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5786 prod = force_fit_type (prod, -1, overflow, false);
5787
5788 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5789 {
5790 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5791 lo = prod;
5792
5793 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5794 overflow = add_double (TREE_INT_CST_LOW (prod),
5795 TREE_INT_CST_HIGH (prod),
5796 TREE_INT_CST_LOW (tmp),
5797 TREE_INT_CST_HIGH (tmp),
5798 &lpart, &hpart);
5799 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5800 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5801 TREE_CONSTANT_OVERFLOW (prod));
5802 }
5803 else if (tree_int_cst_sgn (arg01) >= 0)
5804 {
5805 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5806 switch (tree_int_cst_sgn (arg1))
5807 {
5808 case -1:
5809 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5810 hi = prod;
5811 break;
5812
5813 case 0:
5814 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5815 hi = tmp;
5816 break;
5817
5818 case 1:
5819 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5820 lo = prod;
5821 break;
5822
5823 default:
5824 gcc_unreachable ();
5825 }
5826 }
5827 else
5828 {
5829 /* A negative divisor reverses the relational operators. */
5830 code = swap_tree_comparison (code);
5831
5832 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5833 switch (tree_int_cst_sgn (arg1))
5834 {
5835 case -1:
5836 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5837 lo = prod;
5838 break;
5839
5840 case 0:
5841 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5842 lo = tmp;
5843 break;
5844
5845 case 1:
5846 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5847 hi = prod;
5848 break;
5849
5850 default:
5851 gcc_unreachable ();
5852 }
5853 }
5854
5855 switch (code)
5856 {
5857 case EQ_EXPR:
5858 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5859 return omit_one_operand (type, integer_zero_node, arg00);
5860 if (TREE_OVERFLOW (hi))
5861 return fold (build2 (GE_EXPR, type, arg00, lo));
5862 if (TREE_OVERFLOW (lo))
5863 return fold (build2 (LE_EXPR, type, arg00, hi));
5864 return build_range_check (type, arg00, 1, lo, hi);
5865
5866 case NE_EXPR:
5867 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5868 return omit_one_operand (type, integer_one_node, arg00);
5869 if (TREE_OVERFLOW (hi))
5870 return fold (build2 (LT_EXPR, type, arg00, lo));
5871 if (TREE_OVERFLOW (lo))
5872 return fold (build2 (GT_EXPR, type, arg00, hi));
5873 return build_range_check (type, arg00, 0, lo, hi);
5874
5875 case LT_EXPR:
5876 if (TREE_OVERFLOW (lo))
5877 return omit_one_operand (type, integer_zero_node, arg00);
5878 return fold (build2 (LT_EXPR, type, arg00, lo));
5879
5880 case LE_EXPR:
5881 if (TREE_OVERFLOW (hi))
5882 return omit_one_operand (type, integer_one_node, arg00);
5883 return fold (build2 (LE_EXPR, type, arg00, hi));
5884
5885 case GT_EXPR:
5886 if (TREE_OVERFLOW (hi))
5887 return omit_one_operand (type, integer_zero_node, arg00);
5888 return fold (build2 (GT_EXPR, type, arg00, hi));
5889
5890 case GE_EXPR:
5891 if (TREE_OVERFLOW (lo))
5892 return omit_one_operand (type, integer_one_node, arg00);
5893 return fold (build2 (GE_EXPR, type, arg00, lo));
5894
5895 default:
5896 break;
5897 }
5898
5899 return NULL_TREE;
5900 }
5901
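/* Illustrative sketch, not part of GCC: comparing a division against a
   constant is a range check on the dividend, which is what the switch
   above builds via build_range_check.  Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int x;
  for (x = 0; x < 100; x++)
    {
      assert ((x / 3 == 2) == (x >= 6 && x <= 8));   /* EQ_EXPR case.  */
      assert ((x / 3 < 2) == (x < 6));               /* LT_EXPR case.  */
      assert ((x / 3 > 2) == (x > 8));               /* GT_EXPR case.  */
    }
  return 0;
}
#endif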
5902
5903 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5904 equality/inequality test, then return a simplified form of
5905    the test using shifts and logical operations.  Otherwise return
5906    NULL_TREE.  RESULT_TYPE is the desired result type. */
5907
5908 tree
5909 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5910 tree result_type)
5911 {
5912 /* If this is testing a single bit, we can optimize the test. */
5913 if ((code == NE_EXPR || code == EQ_EXPR)
5914 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5915 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5916 {
5917 tree inner = TREE_OPERAND (arg0, 0);
5918 tree type = TREE_TYPE (arg0);
5919 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5920 enum machine_mode operand_mode = TYPE_MODE (type);
5921 int ops_unsigned;
5922 tree signed_type, unsigned_type, intermediate_type;
5923 tree arg00;
5924
5925 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5926 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5927 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5928 if (arg00 != NULL_TREE
5929 /* This is only a win if casting to a signed type is cheap,
5930 i.e. when arg00's type is not a partial mode. */
5931 && TYPE_PRECISION (TREE_TYPE (arg00))
5932 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5933 {
5934 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5935 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5936 result_type, fold_convert (stype, arg00),
5937 fold_convert (stype, integer_zero_node)));
5938 }
5939
5940 /* Otherwise we have (A & C) != 0 where C is a single bit,
5941 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
5942 Similarly for (A & C) == 0. */
5943
5944       /* If INNER is a right shift by a constant, and the shift count
5945 	 plus BITNUM does not overflow, adjust BITNUM and INNER. */
5946 if (TREE_CODE (inner) == RSHIFT_EXPR
5947 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5948 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5949 && bitnum < TYPE_PRECISION (type)
5950 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5951 bitnum - TYPE_PRECISION (type)))
5952 {
5953 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5954 inner = TREE_OPERAND (inner, 0);
5955 }
5956
5957 /* If we are going to be able to omit the AND below, we must do our
5958 operations as unsigned. If we must use the AND, we have a choice.
5959 Normally unsigned is faster, but for some machines signed is. */
5960 #ifdef LOAD_EXTEND_OP
5961 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5962 && !flag_syntax_only) ? 0 : 1;
5963 #else
5964 ops_unsigned = 1;
5965 #endif
5966
5967 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5968 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5969 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5970 inner = fold_convert (intermediate_type, inner);
5971
5972 if (bitnum != 0)
5973 inner = build2 (RSHIFT_EXPR, intermediate_type,
5974 inner, size_int (bitnum));
5975
5976 if (code == EQ_EXPR)
5977 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5978 inner, integer_one_node));
5979
5980 /* Put the AND last so it can combine with more things. */
5981 inner = build2 (BIT_AND_EXPR, intermediate_type,
5982 inner, integer_one_node);
5983
5984 /* Make sure to return the proper type. */
5985 inner = fold_convert (result_type, inner);
5986
5987 return inner;
5988 }
5989 return NULL_TREE;
5990 }
5991
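/* Illustrative sketch, not part of GCC: the single-bit rewrite above
   at the C level, testing bit 3 (C == 8, C2 == 3).  Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int x;
  for (x = 0; x < 256; x++)
    {
      assert (((x & 8) != 0) == ((x >> 3) & 1));          /* NE_EXPR.  */
      assert (((x & 8) == 0) == (((x >> 3) ^ 1) & 1));    /* EQ_EXPR adds the XOR.  */
    }
  return 0;
}
#endif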
5992 /* Check whether we are allowed to reorder operands arg0 and arg1,
5993 such that the evaluation of arg1 occurs before arg0. */
5994
5995 static bool
5996 reorder_operands_p (tree arg0, tree arg1)
5997 {
5998 if (! flag_evaluation_order)
5999 return true;
6000 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6001 return true;
6002 return ! TREE_SIDE_EFFECTS (arg0)
6003 && ! TREE_SIDE_EFFECTS (arg1);
6004 }
6005
6006 /* Test whether it is preferable to swap two operands, ARG0 and
6007 ARG1, for example because ARG0 is an integer constant and ARG1
6008 isn't. If REORDER is true, only recommend swapping if we can
6009 evaluate the operands in reverse order. */
6010
6011 bool
6012 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6013 {
6014 STRIP_SIGN_NOPS (arg0);
6015 STRIP_SIGN_NOPS (arg1);
6016
6017 if (TREE_CODE (arg1) == INTEGER_CST)
6018 return 0;
6019 if (TREE_CODE (arg0) == INTEGER_CST)
6020 return 1;
6021
6022 if (TREE_CODE (arg1) == REAL_CST)
6023 return 0;
6024 if (TREE_CODE (arg0) == REAL_CST)
6025 return 1;
6026
6027 if (TREE_CODE (arg1) == COMPLEX_CST)
6028 return 0;
6029 if (TREE_CODE (arg0) == COMPLEX_CST)
6030 return 1;
6031
6032 if (TREE_CONSTANT (arg1))
6033 return 0;
6034 if (TREE_CONSTANT (arg0))
6035 return 1;
6036
6037 if (optimize_size)
6038 return 0;
6039
6040 if (reorder && flag_evaluation_order
6041 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6042 return 0;
6043
6044 if (DECL_P (arg1))
6045 return 0;
6046 if (DECL_P (arg0))
6047 return 1;
6048
6049   /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6050 for commutative and comparison operators. Ensuring a canonical
6051 form allows the optimizers to find additional redundancies without
6052 having to explicitly check for both orderings. */
6053 if (TREE_CODE (arg0) == SSA_NAME
6054 && TREE_CODE (arg1) == SSA_NAME
6055 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6056 return 1;
6057
6058 return 0;
6059 }
6060
6061 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6062 ARG0 is extended to a wider type. */
6063
6064 static tree
6065 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6066 {
6067 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6068 tree arg1_unw;
6069 tree shorter_type, outer_type;
6070 tree min, max;
6071 bool above, below;
6072
6073 if (arg0_unw == arg0)
6074 return NULL_TREE;
6075 shorter_type = TREE_TYPE (arg0_unw);
6076
6077 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6078 return NULL_TREE;
6079
6080 arg1_unw = get_unwidened (arg1, shorter_type);
6081 if (!arg1_unw)
6082 return NULL_TREE;
6083
6084 /* If possible, express the comparison in the shorter mode. */
6085 if ((code == EQ_EXPR || code == NE_EXPR
6086 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6087 && (TREE_TYPE (arg1_unw) == shorter_type
6088 || (TREE_CODE (arg1_unw) == INTEGER_CST
6089 && TREE_CODE (shorter_type) == INTEGER_TYPE
6090 && int_fits_type_p (arg1_unw, shorter_type))))
6091 return fold (build (code, type, arg0_unw,
6092 fold_convert (shorter_type, arg1_unw)));
6093
6094 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6095 return NULL_TREE;
6096
6097   /* If we are comparing with an integer that does not fit into the range
6098 of the shorter type, the result is known. */
6099 outer_type = TREE_TYPE (arg1_unw);
6100 min = lower_bound_in_type (outer_type, shorter_type);
6101 max = upper_bound_in_type (outer_type, shorter_type);
6102
6103 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6104 max, arg1_unw));
6105 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6106 arg1_unw, min));
6107
6108 switch (code)
6109 {
6110 case EQ_EXPR:
6111 if (above || below)
6112 return omit_one_operand (type, integer_zero_node, arg0);
6113 break;
6114
6115 case NE_EXPR:
6116 if (above || below)
6117 return omit_one_operand (type, integer_one_node, arg0);
6118 break;
6119
6120 case LT_EXPR:
6121 case LE_EXPR:
6122 if (above)
6123 return omit_one_operand (type, integer_one_node, arg0);
6124 else if (below)
6125 return omit_one_operand (type, integer_zero_node, arg0);
6126       break;
6127 case GT_EXPR:
6128 case GE_EXPR:
6129 if (above)
6130 return omit_one_operand (type, integer_zero_node, arg0);
6131 else if (below)
6132 return omit_one_operand (type, integer_one_node, arg0);
6133       break;
6134 default:
6135 break;
6136 }
6137
6138 return NULL_TREE;
6139 }
6140
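/* Illustrative sketch, not part of GCC: when the constant lies outside
   the range of the narrower type, the widened comparison above has a
   known result.  Assumes an 8-bit signed char; guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  signed char ch = -128;
  for (;;)
    {
      assert ((int) ch < 1000);      /* Always true: 1000 is above the range.  */
      assert ((int) ch != -1000);    /* Always true: -1000 is below the range.  */
      if (ch == 127)
	break;
      ch++;
    }
  return 0;
}
#endif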
6141 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6142 ARG0 just the signedness is changed. */
6143
6144 static tree
6145 fold_sign_changed_comparison (enum tree_code code, tree type,
6146 tree arg0, tree arg1)
6147 {
6148 tree arg0_inner, tmp;
6149 tree inner_type, outer_type;
6150
6151 if (TREE_CODE (arg0) != NOP_EXPR
6152 && TREE_CODE (arg0) != CONVERT_EXPR)
6153 return NULL_TREE;
6154
6155 outer_type = TREE_TYPE (arg0);
6156 arg0_inner = TREE_OPERAND (arg0, 0);
6157 inner_type = TREE_TYPE (arg0_inner);
6158
6159 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6160 return NULL_TREE;
6161
6162 if (TREE_CODE (arg1) != INTEGER_CST
6163 && !((TREE_CODE (arg1) == NOP_EXPR
6164 || TREE_CODE (arg1) == CONVERT_EXPR)
6165 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6166 return NULL_TREE;
6167
6168 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6169 && code != NE_EXPR
6170 && code != EQ_EXPR)
6171 return NULL_TREE;
6172
6173 if (TREE_CODE (arg1) == INTEGER_CST)
6174 {
6175 tmp = build_int_cst_wide (inner_type,
6176 TREE_INT_CST_LOW (arg1),
6177 TREE_INT_CST_HIGH (arg1));
6178 arg1 = force_fit_type (tmp, 0,
6179 TREE_OVERFLOW (arg1),
6180 TREE_CONSTANT_OVERFLOW (arg1));
6181 }
6182 else
6183 arg1 = fold_convert (inner_type, arg1);
6184
6185 return fold (build2 (code, type, arg0_inner, arg1));
6186 }
6187
6188 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6189    the step of the array.  ADDR is the address.  MULT is the multiplicative expression.
6190 If the function succeeds, the new address expression is returned. Otherwise
6191 NULL_TREE is returned. */
6192
6193 static tree
6194 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6195 {
6196 tree s, delta, step;
6197 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6198 tree ref = TREE_OPERAND (addr, 0), pref;
6199 tree ret, pos;
6200 tree itype;
6201
6202 STRIP_NOPS (arg0);
6203 STRIP_NOPS (arg1);
6204
6205 if (TREE_CODE (arg0) == INTEGER_CST)
6206 {
6207 s = arg0;
6208 delta = arg1;
6209 }
6210 else if (TREE_CODE (arg1) == INTEGER_CST)
6211 {
6212 s = arg1;
6213 delta = arg0;
6214 }
6215 else
6216 return NULL_TREE;
6217
6218 for (;; ref = TREE_OPERAND (ref, 0))
6219 {
6220 if (TREE_CODE (ref) == ARRAY_REF)
6221 {
6222 step = array_ref_element_size (ref);
6223
6224 if (TREE_CODE (step) != INTEGER_CST)
6225 continue;
6226
6227 itype = TREE_TYPE (step);
6228
6229 	    /* If the type sizes do not match, we might run into problems
6230 	       if one of the computations overflows. */
6231 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6232 continue;
6233
6234 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6235 continue;
6236
6237 delta = fold_convert (itype, delta);
6238 break;
6239 }
6240
6241 if (!handled_component_p (ref))
6242 return NULL_TREE;
6243 }
6244
6245   /* We found a suitable array reference.  So copy everything up to it,
6246 and replace the index. */
6247
6248 pref = TREE_OPERAND (addr, 0);
6249 ret = copy_node (pref);
6250 pos = ret;
6251
6252 while (pref != ref)
6253 {
6254 pref = TREE_OPERAND (pref, 0);
6255 TREE_OPERAND (pos, 0) = copy_node (pref);
6256 pos = TREE_OPERAND (pos, 0);
6257 }
6258
6259 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6260 TREE_OPERAND (pos, 1),
6261 delta));
6262
6263 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6264 }
6265
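/* Illustrative sketch, not part of GCC: at the C level the rewrite
   corresponds to &a[i] + d == &a[i + d].  On GENERIC the offset is in
   bytes, which is why S must equal the element size of the array.
   Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[16];
  int i = 3, d = 5;
  assert (&a[i] + d == &a[i + d]);
  assert ((char *) &a[i] + sizeof (int) * d == (char *) &a[i + d]);
  return 0;
}
#endif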
6266
6267 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6268 means A >= Y && A != MAX, but in this case we know that
6269 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6270
6271 static tree
6272 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6273 {
6274 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6275
6276 if (TREE_CODE (bound) == LT_EXPR)
6277 a = TREE_OPERAND (bound, 0);
6278 else if (TREE_CODE (bound) == GT_EXPR)
6279 a = TREE_OPERAND (bound, 1);
6280 else
6281 return NULL_TREE;
6282
6283 typea = TREE_TYPE (a);
6284 if (!INTEGRAL_TYPE_P (typea)
6285 && !POINTER_TYPE_P (typea))
6286 return NULL_TREE;
6287
6288 if (TREE_CODE (ineq) == LT_EXPR)
6289 {
6290 a1 = TREE_OPERAND (ineq, 1);
6291 y = TREE_OPERAND (ineq, 0);
6292 }
6293 else if (TREE_CODE (ineq) == GT_EXPR)
6294 {
6295 a1 = TREE_OPERAND (ineq, 0);
6296 y = TREE_OPERAND (ineq, 1);
6297 }
6298 else
6299 return NULL_TREE;
6300
6301 if (TREE_TYPE (a1) != typea)
6302 return NULL_TREE;
6303
6304 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6305 if (!integer_onep (diff))
6306 return NULL_TREE;
6307
6308 return fold (build2 (GE_EXPR, type, a, y));
6309 }
6310
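/* Illustrative sketch, not part of GCC: under the bound a < x, the
   test a + 1 > y is the same as a >= y, and a + 1 cannot wrap because
   a < x <= INT_MAX.  Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  const int x = 50, y = 30;
  int a;
  for (a = -100; a < x; a++)     /* BOUND: a < x holds throughout.  */
    assert ((a + 1 > y) == (a >= y));
  return 0;
}
#endif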
6311 /* Fold complex addition when both operands are accessible by parts.
6312 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6313 or MINUS_EXPR for subtraction. */
6314
6315 static tree
6316 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6317 {
6318 tree ar, ai, br, bi, rr, ri, inner_type;
6319
6320 if (TREE_CODE (ac) == COMPLEX_EXPR)
6321 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6322 else if (TREE_CODE (ac) == COMPLEX_CST)
6323 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6324 else
6325 return NULL;
6326
6327 if (TREE_CODE (bc) == COMPLEX_EXPR)
6328 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6329 else if (TREE_CODE (bc) == COMPLEX_CST)
6330 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6331 else
6332 return NULL;
6333
6334 inner_type = TREE_TYPE (type);
6335
6336 rr = fold (build2 (code, inner_type, ar, br));
6337 ri = fold (build2 (code, inner_type, ai, bi));
6338
6339 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
6340 }
6341
6342 /* Perform some simplifications of complex multiplication when one or more
6343 of the components are constants or zeros. Return non-null if successful. */
6344
6345 tree
6346 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6347 {
6348 tree rr, ri, inner_type, zero;
6349 bool ar0, ai0, br0, bi0, bi1;
6350
6351 inner_type = TREE_TYPE (type);
6352 zero = NULL;
6353
6354 if (SCALAR_FLOAT_TYPE_P (inner_type))
6355 {
6356 ar0 = ai0 = br0 = bi0 = bi1 = false;
6357
6358 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6359
6360 if (TREE_CODE (ar) == REAL_CST
6361 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6362 ar0 = true, zero = ar;
6363
6364 if (TREE_CODE (ai) == REAL_CST
6365 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6366 ai0 = true, zero = ai;
6367
6368 if (TREE_CODE (br) == REAL_CST
6369 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6370 br0 = true, zero = br;
6371
6372 if (TREE_CODE (bi) == REAL_CST)
6373 {
6374 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6375 bi0 = true, zero = bi;
6376 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6377 bi1 = true;
6378 }
6379 }
6380 else
6381 {
6382 ar0 = integer_zerop (ar);
6383 if (ar0)
6384 zero = ar;
6385 ai0 = integer_zerop (ai);
6386 if (ai0)
6387 zero = ai;
6388 br0 = integer_zerop (br);
6389 if (br0)
6390 zero = br;
6391 bi0 = integer_zerop (bi);
6392 if (bi0)
6393 {
6394 zero = bi;
6395 bi1 = false;
6396 }
6397 else
6398 bi1 = integer_onep (bi);
6399 }
6400
6401 /* We won't optimize anything below unless something is zero. */
6402 if (zero == NULL)
6403 return NULL;
6404
6405 if (ai0 && br0 && bi1)
6406 {
6407 rr = zero;
6408 ri = ar;
6409 }
6410 else if (ai0 && bi0)
6411 {
6412 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6413 ri = zero;
6414 }
6415 else if (ai0 && br0)
6416 {
6417 rr = zero;
6418 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6419 }
6420 else if (ar0 && bi0)
6421 {
6422 rr = zero;
6423 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6424 }
6425 else if (ar0 && br0)
6426 {
6427 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6428 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6429 ri = zero;
6430 }
6431 else if (bi0)
6432 {
6433 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6434 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6435 }
6436 else if (ai0)
6437 {
6438 rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
6439 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6440 }
6441 else if (br0)
6442 {
6443 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6444 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6445 ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
6446 }
6447 else if (ar0)
6448 {
6449 rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
6450 rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
6451 ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
6452 }
6453 else
6454 return NULL;
6455
6456 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
6457 }
6458
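/* Wrapper around fold_complex_mult_parts: extract the real and
   imaginary parts of AC and BC when they are accessible (COMPLEX_EXPR
   or COMPLEX_CST) and try to simplify the product.  Return non-null
   if successful.  */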
6459 static tree
6460 fold_complex_mult (tree type, tree ac, tree bc)
6461 {
6462 tree ar, ai, br, bi;
6463
6464 if (TREE_CODE (ac) == COMPLEX_EXPR)
6465 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6466 else if (TREE_CODE (ac) == COMPLEX_CST)
6467 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6468 else
6469 return NULL;
6470
6471 if (TREE_CODE (bc) == COMPLEX_EXPR)
6472 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6473 else if (TREE_CODE (bc) == COMPLEX_CST)
6474 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6475 else
6476 return NULL;
6477
6478 return fold_complex_mult_parts (type, ar, ai, br, bi);
6479 }
6480
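/* Illustrative sketch, not part of GCC: one of the zero-component
   cases simplified above.  When B is purely real (bi == 0), the full
   product (ar*br - ai*bi) + (ar*bi + ai*br)i collapses to
   ar*br + (ai*br)i.  Uses C99 <complex.h>; guarded by #if 0.  */
#if 0
#include <assert.h>
#include <complex.h>

int
main (void)
{
  double complex a = 3.0 + 4.0 * I;
  double br = 2.0;                 /* b == 2.0 + 0.0i.  */
  double complex p = a * br;
  assert (creal (p) == 3.0 * br);
  assert (cimag (p) == 4.0 * br);
  return 0;
}
#endif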
6481 /* Perform some simplifications of complex division when one or more of
6482 the components are constants or zeros. Return non-null if successful. */
6483
6484 tree
6485 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6486 enum tree_code code)
6487 {
6488 tree rr, ri, inner_type, zero;
6489 bool ar0, ai0, br0, bi0, bi1;
6490
6491 inner_type = TREE_TYPE (type);
6492 zero = NULL;
6493
6494 if (SCALAR_FLOAT_TYPE_P (inner_type))
6495 {
6496 ar0 = ai0 = br0 = bi0 = bi1 = false;
6497
6498 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6499
6500 if (TREE_CODE (ar) == REAL_CST
6501 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6502 ar0 = true, zero = ar;
6503
6504 if (TREE_CODE (ai) == REAL_CST
6505 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6506 ai0 = true, zero = ai;
6507
6508 if (TREE_CODE (br) == REAL_CST
6509 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6510 br0 = true, zero = br;
6511
6512 if (TREE_CODE (bi) == REAL_CST)
6513 {
6514 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6515 bi0 = true, zero = bi;
6516 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6517 bi1 = true;
6518 }
6519 }
6520 else
6521 {
6522 ar0 = integer_zerop (ar);
6523 if (ar0)
6524 zero = ar;
6525 ai0 = integer_zerop (ai);
6526 if (ai0)
6527 zero = ai;
6528 br0 = integer_zerop (br);
6529 if (br0)
6530 zero = br;
6531 bi0 = integer_zerop (bi);
6532 if (bi0)
6533 {
6534 zero = bi;
6535 bi1 = false;
6536 }
6537 else
6538 bi1 = integer_onep (bi);
6539 }
6540
6541 /* We won't optimize anything below unless something is zero. */
6542 if (zero == NULL)
6543 return NULL;
6544
6545 if (ai0 && bi0)
6546 {
6547 rr = fold (build2 (code, inner_type, ar, br));
6548 ri = zero;
6549 }
6550 else if (ai0 && br0)
6551 {
6552 rr = zero;
6553 ri = fold (build2 (code, inner_type, ar, bi));
6554 ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
6555 }
6556 else if (ar0 && bi0)
6557 {
6558 rr = zero;
6559 ri = fold (build2 (code, inner_type, ai, br));
6560 }
6561 else if (ar0 && br0)
6562 {
6563 rr = fold (build2 (code, inner_type, ai, bi));
6564 ri = zero;
6565 }
6566 else if (bi0)
6567 {
6568 rr = fold (build2 (code, inner_type, ar, br));
6569 ri = fold (build2 (code, inner_type, ai, br));
6570 }
6571 else if (br0)
6572 {
6573 rr = fold (build2 (code, inner_type, ai, bi));
6574 ri = fold (build2 (code, inner_type, ar, bi));
6575 ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
6576 }
6577 else
6578 return NULL;
6579
6580 return fold (build2 (COMPLEX_EXPR, type, rr, ri));
6581 }
6582
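/* Wrapper around fold_complex_div_parts: extract the real and
   imaginary parts of AC and BC when they are accessible (COMPLEX_EXPR
   or COMPLEX_CST) and try to simplify the division.  Return non-null
   if successful.  */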
6583 static tree
6584 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6585 {
6586 tree ar, ai, br, bi;
6587
6588 if (TREE_CODE (ac) == COMPLEX_EXPR)
6589 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6590 else if (TREE_CODE (ac) == COMPLEX_CST)
6591 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6592 else
6593 return NULL;
6594
6595 if (TREE_CODE (bc) == COMPLEX_EXPR)
6596 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6597 else if (TREE_CODE (bc) == COMPLEX_CST)
6598 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6599 else
6600 return NULL;
6601
6602 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6603 }
6604
6605 /* Fold a unary expression of code CODE and type TYPE with operand
6606    OP0.  Return the folded expression if folding is successful.
6607    Otherwise, return NULL_TREE. */
6608
6609 static tree
6610 fold_unary (enum tree_code code, tree type, tree op0)
6611 {
6612 tree tem;
6613 tree arg0;
6614 enum tree_code_class kind = TREE_CODE_CLASS (code);
6615
6616 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6617 && TREE_CODE_LENGTH (code) == 1);
6618
6619 arg0 = op0;
6620 if (arg0)
6621 {
6622 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6623 {
6624 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6625 STRIP_SIGN_NOPS (arg0);
6626 }
6627 else
6628 {
6629 /* Strip any conversions that don't change the mode. This
6630 is safe for every expression, except for a comparison
6631 expression because its signedness is derived from its
6632 operands.
6633
6634 Note that this is done as an internal manipulation within
6635 the constant folder, in order to find the simplest
6636 representation of the arguments so that their form can be
6637 studied. In any cases, the appropriate type conversions
6638 should be put back in the tree that will get out of the
6639 constant folder. */
6640 STRIP_NOPS (arg0);
6641 }
6642 }
6643
6644 if (TREE_CODE_CLASS (code) == tcc_unary)
6645 {
6646 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6647 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6648 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6649 else if (TREE_CODE (arg0) == COND_EXPR)
6650 {
6651 tree arg01 = TREE_OPERAND (arg0, 1);
6652 tree arg02 = TREE_OPERAND (arg0, 2);
6653 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6654 arg01 = fold (build1 (code, type, arg01));
6655 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6656 arg02 = fold (build1 (code, type, arg02));
6657 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6658 arg01, arg02));
6659
6660 	  /* If this was a conversion, and all we did was to move it
6661 	     inside the COND_EXPR, bring it back out.  But leave it if
6662 it is a conversion from integer to integer and the
6663 result precision is no wider than a word since such a
6664 conversion is cheap and may be optimized away by combine,
6665 while it couldn't if it were outside the COND_EXPR. Then return
6666 so we don't get into an infinite recursion loop taking the
6667 conversion out and then back in. */
6668
6669 if ((code == NOP_EXPR || code == CONVERT_EXPR
6670 || code == NON_LVALUE_EXPR)
6671 && TREE_CODE (tem) == COND_EXPR
6672 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6673 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6674 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6675 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6676 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6677 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6678 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6679 && (INTEGRAL_TYPE_P
6680 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6681 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6682 || flag_syntax_only))
6683 tem = build1 (code, type,
6684 build3 (COND_EXPR,
6685 TREE_TYPE (TREE_OPERAND
6686 (TREE_OPERAND (tem, 1), 0)),
6687 TREE_OPERAND (tem, 0),
6688 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6689 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6690 return tem;
6691 }
6692 else if (COMPARISON_CLASS_P (arg0))
6693 {
6694 if (TREE_CODE (type) == BOOLEAN_TYPE)
6695 {
6696 arg0 = copy_node (arg0);
6697 TREE_TYPE (arg0) = type;
6698 return arg0;
6699 }
6700 else if (TREE_CODE (type) != INTEGER_TYPE)
6701 return fold (build3 (COND_EXPR, type, arg0,
6702 fold (build1 (code, type,
6703 integer_one_node)),
6704 fold (build1 (code, type,
6705 integer_zero_node))));
6706 }
6707 }
6708
6709 switch (code)
6710 {
6711 case NOP_EXPR:
6712 case FLOAT_EXPR:
6713 case CONVERT_EXPR:
6714 case FIX_TRUNC_EXPR:
6715 case FIX_CEIL_EXPR:
6716 case FIX_FLOOR_EXPR:
6717 case FIX_ROUND_EXPR:
6718 if (TREE_TYPE (op0) == type)
6719 return op0;
6720
6721 /* Handle cases of two conversions in a row. */
6722 if (TREE_CODE (op0) == NOP_EXPR
6723 || TREE_CODE (op0) == CONVERT_EXPR)
6724 {
6725 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6726 tree inter_type = TREE_TYPE (op0);
6727 int inside_int = INTEGRAL_TYPE_P (inside_type);
6728 int inside_ptr = POINTER_TYPE_P (inside_type);
6729 int inside_float = FLOAT_TYPE_P (inside_type);
6730 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6731 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6732 int inter_int = INTEGRAL_TYPE_P (inter_type);
6733 int inter_ptr = POINTER_TYPE_P (inter_type);
6734 int inter_float = FLOAT_TYPE_P (inter_type);
6735 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6736 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6737 int final_int = INTEGRAL_TYPE_P (type);
6738 int final_ptr = POINTER_TYPE_P (type);
6739 int final_float = FLOAT_TYPE_P (type);
6740 unsigned int final_prec = TYPE_PRECISION (type);
6741 int final_unsignedp = TYPE_UNSIGNED (type);
6742
6743 /* In addition to the cases of two conversions in a row
6744 handled below, if we are converting something to its own
6745 type via an object of identical or wider precision, neither
6746 conversion is needed. */
6747 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6748 && ((inter_int && final_int) || (inter_float && final_float))
6749 && inter_prec >= final_prec)
6750 return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
6751
6752 /* Likewise, if the intermediate and final types are either both
6753 float or both integer, we don't need the middle conversion if
6754 it is wider than the final type and doesn't change the signedness
6755 (for integers). Avoid this if the final type is a pointer
6756 since then we sometimes need the inner conversion. Likewise if
6757 the outer has a precision not equal to the size of its mode. */
6758 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6759 || (inter_float && inside_float))
6760 && inter_prec >= inside_prec
6761 && (inter_float || inter_unsignedp == inside_unsignedp)
6762 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6763 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6764 && ! final_ptr)
6765 return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
6766
6767 /* If we have a sign-extension of a zero-extended value, we can
6768 replace that by a single zero-extension. */
6769 if (inside_int && inter_int && final_int
6770 && inside_prec < inter_prec && inter_prec < final_prec
6771 && inside_unsignedp && !inter_unsignedp)
6772 return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
6773
6774 /* Two conversions in a row are not needed unless:
6775 - some conversion is floating-point (overstrict for now), or
6776 - the intermediate type is narrower than both initial and
6777 final, or
6778 - the intermediate type and innermost type differ in signedness,
6779 and the outermost type is wider than the intermediate, or
6780 - the initial type is a pointer type and the precisions of the
6781 intermediate and final types differ, or
6782 - the final type is a pointer type and the precisions of the
6783 initial and intermediate types differ. */
6784 if (! inside_float && ! inter_float && ! final_float
6785 && (inter_prec > inside_prec || inter_prec > final_prec)
6786 && ! (inside_int && inter_int
6787 && inter_unsignedp != inside_unsignedp
6788 && inter_prec < final_prec)
6789 && ((inter_unsignedp && inter_prec > inside_prec)
6790 == (final_unsignedp && final_prec > inter_prec))
6791 && ! (inside_ptr && inter_prec != final_prec)
6792 && ! (final_ptr && inside_prec != inter_prec)
6793 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6794 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6795 && ! final_ptr)
6796 return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
6797 }
6798
6799 if (TREE_CODE (op0) == MODIFY_EXPR
6800 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6801 /* Detect assigning a bitfield. */
6802 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6803 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6804 {
6805 /* Don't leave an assignment inside a conversion
6806 unless assigning a bitfield. */
6807 tem = build1 (code, type, TREE_OPERAND (op0, 1));
6808 /* First do the assignment, then return converted constant. */
6809 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
6810 TREE_NO_WARNING (tem) = 1;
6811 TREE_USED (tem) = 1;
6812 return tem;
6813 }
6814
6815 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6816 	 constant (if x has signed type, the sign bit cannot be set
6817 in c). This folds extension into the BIT_AND_EXPR. */
6818 if (INTEGRAL_TYPE_P (type)
6819 && TREE_CODE (type) != BOOLEAN_TYPE
6820 && TREE_CODE (op0) == BIT_AND_EXPR
6821 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6822 {
6823 tree and = op0;
6824 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6825 int change = 0;
6826
6827 if (TYPE_UNSIGNED (TREE_TYPE (and))
6828 || (TYPE_PRECISION (type)
6829 <= TYPE_PRECISION (TREE_TYPE (and))))
6830 change = 1;
6831 else if (TYPE_PRECISION (TREE_TYPE (and1))
6832 <= HOST_BITS_PER_WIDE_INT
6833 && host_integerp (and1, 1))
6834 {
6835 unsigned HOST_WIDE_INT cst;
6836
6837 cst = tree_low_cst (and1, 1);
6838 cst &= (HOST_WIDE_INT) -1
6839 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6840 change = (cst == 0);
6841 #ifdef LOAD_EXTEND_OP
6842 if (change
6843 && !flag_syntax_only
6844 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6845 == ZERO_EXTEND))
6846 {
6847 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6848 and0 = fold_convert (uns, and0);
6849 and1 = fold_convert (uns, and1);
6850 }
6851 #endif
6852 }
6853 if (change)
6854 {
6855 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6856 TREE_INT_CST_HIGH (and1));
6857 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6858 TREE_CONSTANT_OVERFLOW (and1));
6859 return fold (build2 (BIT_AND_EXPR, type,
6860 fold_convert (type, and0), tem));
6861 }
6862 }
6863
6864       /* Convert (T1)((T2)X op Y) into (T1)X op Y, where T1 and T2 are
6865 	 pointer types pointing to types of the same size. */
6866 if (POINTER_TYPE_P (type)
6867 && BINARY_CLASS_P (arg0)
6868 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6869 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6870 {
6871 tree arg00 = TREE_OPERAND (arg0, 0);
6872 tree t0 = type;
6873 tree t1 = TREE_TYPE (arg00);
6874 tree tt0 = TREE_TYPE (t0);
6875 tree tt1 = TREE_TYPE (t1);
6876 tree s0 = TYPE_SIZE (tt0);
6877 tree s1 = TYPE_SIZE (tt1);
6878
6879 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6880 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6881 TREE_OPERAND (arg0, 1));
6882 }
6883
6884 tem = fold_convert_const (code, type, arg0);
6885 return tem ? tem : NULL_TREE;
6886
6887 case VIEW_CONVERT_EXPR:
6888 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6889 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6890 return NULL_TREE;
6891
6892 case NEGATE_EXPR:
6893 if (negate_expr_p (arg0))
6894 return fold_convert (type, negate_expr (arg0));
6895 /* Convert - (~A) to A + 1. */
6896 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6897 return fold (build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6898 build_int_cst (type, 1)));
6899 return NULL_TREE;
6900
6901 case ABS_EXPR:
6902 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6903 return fold_abs_const (arg0, type);
6904 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6905 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6906 /* Convert fabs((double)float) into (double)fabsf(float). */
6907 else if (TREE_CODE (arg0) == NOP_EXPR
6908 && TREE_CODE (type) == REAL_TYPE)
6909 {
6910 tree targ0 = strip_float_extensions (arg0);
6911 if (targ0 != arg0)
6912 return fold_convert (type, fold (build1 (ABS_EXPR,
6913 TREE_TYPE (targ0),
6914 targ0)));
6915 }
6916 else if (tree_expr_nonnegative_p (arg0))
6917 return arg0;
6918
6919 /* Strip sign ops from argument. */
6920 if (TREE_CODE (type) == REAL_TYPE)
6921 {
6922 tem = fold_strip_sign_ops (arg0);
6923 if (tem)
6924 return fold (build1 (ABS_EXPR, type, fold_convert (type, tem)));
6925 }
6926 return NULL_TREE;
6927
6928 case CONJ_EXPR:
6929 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6930 return fold_convert (type, arg0);
6931 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6932 return build2 (COMPLEX_EXPR, type,
6933 TREE_OPERAND (arg0, 0),
6934 negate_expr (TREE_OPERAND (arg0, 1)));
6935 else if (TREE_CODE (arg0) == COMPLEX_CST)
6936 return build_complex (type, TREE_REALPART (arg0),
6937 negate_expr (TREE_IMAGPART (arg0)));
6938 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6939 return fold (build2 (TREE_CODE (arg0), type,
6940 fold (build1 (CONJ_EXPR, type,
6941 TREE_OPERAND (arg0, 0))),
6942 fold (build1 (CONJ_EXPR, type,
6943 TREE_OPERAND (arg0, 1)))));
6944 else if (TREE_CODE (arg0) == CONJ_EXPR)
6945 return TREE_OPERAND (arg0, 0);
6946 return NULL_TREE;
6947
6948 case BIT_NOT_EXPR:
6949 if (TREE_CODE (arg0) == INTEGER_CST)
6950 return fold_not_const (arg0, type);
6951 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6952 return TREE_OPERAND (arg0, 0);
6953 /* Convert ~ (-A) to A - 1. */
6954 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6955 return fold (build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6956 build_int_cst (type, 1)));
6957 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6958 else if (INTEGRAL_TYPE_P (type)
6959 && ((TREE_CODE (arg0) == MINUS_EXPR
6960 && integer_onep (TREE_OPERAND (arg0, 1)))
6961 || (TREE_CODE (arg0) == PLUS_EXPR
6962 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6963 return fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)));
6964 return NULL_TREE;
6965
6966 case TRUTH_NOT_EXPR:
6967 /* The argument to invert_truthvalue must have Boolean type. */
6968 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6969 arg0 = fold_convert (boolean_type_node, arg0);
6970
6971 /* Note that the operand of this must be an int
6972 and its values must be 0 or 1.
6973 ("true" is a fixed value perhaps depending on the language,
6974 but we don't handle values other than 1 correctly yet.) */
6975 tem = invert_truthvalue (arg0);
6976 /* Avoid infinite recursion. */
6977 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6978 return NULL_TREE;
6979 return fold_convert (type, tem);
6980
6981 case REALPART_EXPR:
6982 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6983 return NULL_TREE;
6984 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6985 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6986 TREE_OPERAND (arg0, 1));
6987 else if (TREE_CODE (arg0) == COMPLEX_CST)
6988 return TREE_REALPART (arg0);
6989 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6990 return fold (build2 (TREE_CODE (arg0), type,
6991 fold (build1 (REALPART_EXPR, type,
6992 TREE_OPERAND (arg0, 0))),
6993 fold (build1 (REALPART_EXPR, type,
6994 TREE_OPERAND (arg0, 1)))));
6995 return NULL_TREE;
6996
6997 case IMAGPART_EXPR:
6998 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6999 return fold_convert (type, integer_zero_node);
7000 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7001 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7002 TREE_OPERAND (arg0, 0));
7003 else if (TREE_CODE (arg0) == COMPLEX_CST)
7004 return TREE_IMAGPART (arg0);
7005 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7006 return fold (build2 (TREE_CODE (arg0), type,
7007 fold (build1 (IMAGPART_EXPR, type,
7008 TREE_OPERAND (arg0, 0))),
7009 fold (build1 (IMAGPART_EXPR, type,
7010 TREE_OPERAND (arg0, 1)))));
7011 return NULL_TREE;
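      /* Sketch of the COMPLEX_EXPR arm above; "r" and "i" are assumed
         trees of type double_type_node:

           tree c   = build2 (COMPLEX_EXPR, complex_double_type_node, r, i);
           tree tem = fold (build1 (IMAGPART_EXPR, double_type_node, c));

         tem reduces to "i"; omit_one_operand keeps "r" in the result
         only if it has side effects.  */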
7012
7013 default:
7014 return NULL_TREE;
7015 } /* switch (code) */
7016 }
7017
7018 /* Fold a binary expression of code CODE and type TYPE with
7019 operands OP0 and OP1. Return the folded expression if folding
7020 is successful, otherwise return NULL_TREE. */
7021
7022 static tree
7023 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7024 {
7025 tree t1 = NULL_TREE;
7026 tree tem;
7027 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7028 enum tree_code_class kind = TREE_CODE_CLASS (code);
7029
7030 /* WINS will be nonzero when the switch is done
7031 if all operands are constant. */
7032 int wins = 1;
7033
7034 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7035 && TREE_CODE_LENGTH (code) == 2);
7036
7037 arg0 = op0;
7038 arg1 = op1;
7039
7040 if (arg0)
7041 {
7042 tree subop;
7043
7044 /* Strip any conversions that don't change the mode. This is
7045 safe for every expression, except for a comparison expression
7046 because its signedness is derived from its operands. So, in
7047 the latter case, only strip conversions that don't change the
7048 signedness.
7049
7050 Note that this is done as an internal manipulation within the
7051 constant folder, in order to find the simplest representation
7052 of the arguments so that their form can be studied. In any
7053 case, the appropriate type conversions should be put back in
7054 the tree that comes out of the constant folder. */
7055 if (kind == tcc_comparison)
7056 STRIP_SIGN_NOPS (arg0);
7057 else
7058 STRIP_NOPS (arg0);
7059
7060 if (TREE_CODE (arg0) == COMPLEX_CST)
7061 subop = TREE_REALPART (arg0);
7062 else
7063 subop = arg0;
7064
7065 if (TREE_CODE (subop) != INTEGER_CST
7066 && TREE_CODE (subop) != REAL_CST)
7067 /* Note that TREE_CONSTANT isn't enough:
7068 static var addresses are constant but we can't
7069 do arithmetic on them. */
7070 wins = 0;
7071 }
7072
7073 if (arg1)
7074 {
7075 tree subop;
7076
7077 /* Strip any conversions that don't change the mode. This is
7078 safe for every expression, except for a comparison expression
7079 because its signedness is derived from its operands. So, in
7080 the latter case, only strip conversions that don't change the
7081 signedness.
7082
7083 Note that this is done as an internal manipulation within the
7084 constant folder, in order to find the simplest representation
7085 of the arguments so that their form can be studied. In any
7086 case, the appropriate type conversions should be put back in
7087 the tree that comes out of the constant folder. */
7088 if (kind == tcc_comparison)
7089 STRIP_SIGN_NOPS (arg1);
7090 else
7091 STRIP_NOPS (arg1);
7092
7093 if (TREE_CODE (arg1) == COMPLEX_CST)
7094 subop = TREE_REALPART (arg1);
7095 else
7096 subop = arg1;
7097
7098 if (TREE_CODE (subop) != INTEGER_CST
7099 && TREE_CODE (subop) != REAL_CST)
7100 /* Note that TREE_CONSTANT isn't enough:
7101 static var addresses are constant but we can't
7102 do arithmetic on them. */
7103 wins = 0;
7104 }
7105
7106 /* If this is a commutative operation, and ARG0 is a constant, move it
7107 to ARG1 to reduce the number of tests below. */
7108 if (commutative_tree_code (code)
7109 && tree_swap_operands_p (arg0, arg1, true))
7110 return fold (build2 (code, type, op1, op0));
7111
7112 /* Now WINS is set as described above,
7113 ARG0 is the first operand of the expression being folded,
7114 and ARG1 is the second operand.
7115
7116 First check for cases where an arithmetic operation is applied to a
7117 compound, conditional, or comparison operation. Push the arithmetic
7118 operation inside the compound or conditional to see if any folding
7119 can then be done. Convert comparison to conditional for this purpose.
7120 This also optimizes non-constant cases that used to be done in
7121 expand_expr.
7122
7123 Before we do that, see if this is a BIT_AND_EXPR, a BIT_IOR_EXPR,
7124 an EQ_EXPR or an NE_EXPR where one operand is a truth value and the
7125 other is a truth value or a BIT_AND_EXPR with the constant 1. In
7126 that case, the code below would make the expression more complex.
7127 Change it to a TRUTH_{AND,OR}_EXPR. Likewise, convert a similar
7128 NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to an inverted TRUTH_XOR_EXPR. */
7129
7130 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7131 || code == EQ_EXPR || code == NE_EXPR)
7132 && ((truth_value_p (TREE_CODE (arg0))
7133 && (truth_value_p (TREE_CODE (arg1))
7134 || (TREE_CODE (arg1) == BIT_AND_EXPR
7135 && integer_onep (TREE_OPERAND (arg1, 1)))))
7136 || (truth_value_p (TREE_CODE (arg1))
7137 && (truth_value_p (TREE_CODE (arg0))
7138 || (TREE_CODE (arg0) == BIT_AND_EXPR
7139 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7140 {
7141 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7142 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7143 : TRUTH_XOR_EXPR,
7144 type, fold_convert (boolean_type_node, arg0),
7145 fold_convert (boolean_type_node, arg1)));
7146
7147 if (code == EQ_EXPR)
7148 tem = invert_truthvalue (tem);
7149
7150 return tem;
7151 }
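  /* For instance, assuming "a", "b", "c" and "d" are integer-typed
     trees, folding

       tree cmp0 = build2 (LT_EXPR, boolean_type_node, a, b);
       tree cmp1 = build2 (LT_EXPR, boolean_type_node, c, d);
       tree tem  = fold (build2 (BIT_AND_EXPR, boolean_type_node,
                                 cmp0, cmp1));

     yields a TRUTH_AND_EXPR of the two comparisons (possibly folded
     further), keeping the expression in truth-value form.  */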
7152
7153 if (TREE_CODE_CLASS (code) == tcc_comparison
7154 && TREE_CODE (arg0) == COMPOUND_EXPR)
7155 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7156 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
7157 else if (TREE_CODE_CLASS (code) == tcc_comparison
7158 && TREE_CODE (arg1) == COMPOUND_EXPR)
7159 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7160 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
7161 else if (TREE_CODE_CLASS (code) == tcc_binary
7162 || TREE_CODE_CLASS (code) == tcc_comparison)
7163 {
7164 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7165 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7166 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
7167 arg1)));
7168 if (TREE_CODE (arg1) == COMPOUND_EXPR
7169 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7170 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7171 fold (build2 (code, type,
7172 arg0, TREE_OPERAND (arg1, 1))));
7173
7174 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7175 {
7176 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7177 arg0, arg1,
7178 /*cond_first_p=*/1);
7179 if (tem != NULL_TREE)
7180 return tem;
7181 }
7182
7183 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7184 {
7185 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7186 arg1, arg0,
7187 /*cond_first_p=*/0);
7188 if (tem != NULL_TREE)
7189 return tem;
7190 }
7191 }
7192
7193 switch (code)
7194 {
7195 case PLUS_EXPR:
7196 /* A + (-B) -> A - B */
7197 if (TREE_CODE (arg1) == NEGATE_EXPR)
7198 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7199 /* (-A) + B -> B - A */
7200 if (TREE_CODE (arg0) == NEGATE_EXPR
7201 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7202 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
7203 /* Convert ~A + 1 to -A. */
7204 if (INTEGRAL_TYPE_P (type)
7205 && TREE_CODE (arg0) == BIT_NOT_EXPR
7206 && integer_onep (arg1))
7207 return fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)));
7208
7209 if (TREE_CODE (type) == COMPLEX_TYPE)
7210 {
7211 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7212 if (tem)
7213 return tem;
7214 }
7215
7216 if (! FLOAT_TYPE_P (type))
7217 {
7218 if (integer_zerop (arg1))
7219 return non_lvalue (fold_convert (type, arg0));
7220
7221 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7222 with a constant, and the two constants have no bits in common,
7223 we should treat this as a BIT_IOR_EXPR since this may produce more
7224 simplifications. */
7225 if (TREE_CODE (arg0) == BIT_AND_EXPR
7226 && TREE_CODE (arg1) == BIT_AND_EXPR
7227 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7228 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7229 && integer_zerop (const_binop (BIT_AND_EXPR,
7230 TREE_OPERAND (arg0, 1),
7231 TREE_OPERAND (arg1, 1), 0)))
7232 {
7233 code = BIT_IOR_EXPR;
7234 goto bit_ior;
7235 }
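	  /* Concretely: for assumed integer trees "x" and "y",
	     (x & 4) + (y & 3) has mask constants with no bits in
	     common, so the addition is retried as (x & 4) | (y & 3)
	     by the goto just above.  */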
7236
7237 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7238 (plus (plus (mult) (mult)) (foo)) so that we can
7239 take advantage of the factoring cases below. */
7240 if (((TREE_CODE (arg0) == PLUS_EXPR
7241 || TREE_CODE (arg0) == MINUS_EXPR)
7242 && TREE_CODE (arg1) == MULT_EXPR)
7243 || ((TREE_CODE (arg1) == PLUS_EXPR
7244 || TREE_CODE (arg1) == MINUS_EXPR)
7245 && TREE_CODE (arg0) == MULT_EXPR))
7246 {
7247 tree parg0, parg1, parg, marg;
7248 enum tree_code pcode;
7249
7250 if (TREE_CODE (arg1) == MULT_EXPR)
7251 parg = arg0, marg = arg1;
7252 else
7253 parg = arg1, marg = arg0;
7254 pcode = TREE_CODE (parg);
7255 parg0 = TREE_OPERAND (parg, 0);
7256 parg1 = TREE_OPERAND (parg, 1);
7257 STRIP_NOPS (parg0);
7258 STRIP_NOPS (parg1);
7259
7260 if (TREE_CODE (parg0) == MULT_EXPR
7261 && TREE_CODE (parg1) != MULT_EXPR)
7262 return fold (build2 (pcode, type,
7263 fold (build2 (PLUS_EXPR, type,
7264 fold_convert (type, parg0),
7265 fold_convert (type, marg))),
7266 fold_convert (type, parg1)));
7267 if (TREE_CODE (parg0) != MULT_EXPR
7268 && TREE_CODE (parg1) == MULT_EXPR)
7269 return fold (build2 (PLUS_EXPR, type,
7270 fold_convert (type, parg0),
7271 fold (build2 (pcode, type,
7272 fold_convert (type, marg),
7273 fold_convert (type,
7274 parg1)))));
7275 }
7276
7277 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7278 {
7279 tree arg00, arg01, arg10, arg11;
7280 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7281
7282 /* (A * C) + (B * C) -> (A+B) * C.
7283 We are most concerned about the case where C is a constant,
7284 but other combinations show up during loop reduction. Since
7285 it is not difficult, try all four possibilities. */
7286
7287 arg00 = TREE_OPERAND (arg0, 0);
7288 arg01 = TREE_OPERAND (arg0, 1);
7289 arg10 = TREE_OPERAND (arg1, 0);
7290 arg11 = TREE_OPERAND (arg1, 1);
7291 same = NULL_TREE;
7292
7293 if (operand_equal_p (arg01, arg11, 0))
7294 same = arg01, alt0 = arg00, alt1 = arg10;
7295 else if (operand_equal_p (arg00, arg10, 0))
7296 same = arg00, alt0 = arg01, alt1 = arg11;
7297 else if (operand_equal_p (arg00, arg11, 0))
7298 same = arg00, alt0 = arg01, alt1 = arg10;
7299 else if (operand_equal_p (arg01, arg10, 0))
7300 same = arg01, alt0 = arg00, alt1 = arg11;
7301
7302 /* No identical multiplicands; see if we can find a common
7303 power-of-two factor in non-power-of-two multiplies. This
7304 can help in multi-dimensional array access. */
7305 else if (TREE_CODE (arg01) == INTEGER_CST
7306 && TREE_CODE (arg11) == INTEGER_CST
7307 && TREE_INT_CST_HIGH (arg01) == 0
7308 && TREE_INT_CST_HIGH (arg11) == 0)
7309 {
7310 HOST_WIDE_INT int01, int11, tmp;
7311 int01 = TREE_INT_CST_LOW (arg01);
7312 int11 = TREE_INT_CST_LOW (arg11);
7313
7314 /* Move min of absolute values to int11. */
7315 if ((int01 >= 0 ? int01 : -int01)
7316 < (int11 >= 0 ? int11 : -int11))
7317 {
7318 tmp = int01, int01 = int11, int11 = tmp;
7319 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7320 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7321 }
7322
7323 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7324 {
7325 alt0 = fold (build2 (MULT_EXPR, type, arg00,
7326 build_int_cst (NULL_TREE,
7327 int01 / int11)));
7328 alt1 = arg10;
7329 same = arg11;
7330 }
7331 }
7332
7333 if (same)
7334 return fold (build2 (MULT_EXPR, type,
7335 fold (build2 (PLUS_EXPR, type,
7336 fold_convert (type, alt0),
7337 fold_convert (type, alt1))),
7338 same));
7339 }
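	  /* Sketch of the factoring above, with "a" and "b" assumed
	     trees of type integer_type_node:

	       tree m0  = build2 (MULT_EXPR, integer_type_node, a,
	                          build_int_cst (integer_type_node, 4));
	       tree m1  = build2 (MULT_EXPR, integer_type_node, b,
	                          build_int_cst (integer_type_node, 4));
	       tree tem = fold (build2 (PLUS_EXPR, integer_type_node,
	                                m0, m1));

	     tem becomes MULT_EXPR <a + b, 4>, via the equal-constant
	     ("same") arm above.  */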
7340
7341 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7342 of the array. The loop optimizer sometimes produces this type of
7343 expression. */
7344 if (TREE_CODE (arg0) == ADDR_EXPR
7345 && TREE_CODE (arg1) == MULT_EXPR)
7346 {
7347 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7348 if (tem)
7349 return fold_convert (type, fold (tem));
7350 }
7351 else if (TREE_CODE (arg1) == ADDR_EXPR
7352 && TREE_CODE (arg0) == MULT_EXPR)
7353 {
7354 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7355 if (tem)
7356 return fold_convert (type, fold (tem));
7357 }
7358 }
7359 else
7360 {
7361 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7362 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7363 return non_lvalue (fold_convert (type, arg0));
7364
7365 /* Likewise if the operands are reversed. */
7366 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7367 return non_lvalue (fold_convert (type, arg1));
7368
7369 /* Convert X + -C into X - C. */
7370 if (TREE_CODE (arg1) == REAL_CST
7371 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7372 {
7373 tem = fold_negate_const (arg1, type);
7374 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7375 return fold (build2 (MINUS_EXPR, type,
7376 fold_convert (type, arg0),
7377 fold_convert (type, tem)));
7378 }
7379
7380 /* Convert x+x into x*2.0. */
7381 if (operand_equal_p (arg0, arg1, 0)
7382 && SCALAR_FLOAT_TYPE_P (type))
7383 return fold (build2 (MULT_EXPR, type, arg0,
7384 build_real (type, dconst2)));
7385
7386 /* Convert x*c+x into x*(c+1). */
7387 if (flag_unsafe_math_optimizations
7388 && TREE_CODE (arg0) == MULT_EXPR
7389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7390 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7391 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7392 {
7393 REAL_VALUE_TYPE c;
7394
7395 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7396 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7397 return fold (build2 (MULT_EXPR, type, arg1,
7398 build_real (type, c)));
7399 }
7400
7401 /* Convert x+x*c into x*(c+1). */
7402 if (flag_unsafe_math_optimizations
7403 && TREE_CODE (arg1) == MULT_EXPR
7404 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7405 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7406 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7407 {
7408 REAL_VALUE_TYPE c;
7409
7410 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7411 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7412 return fold (build2 (MULT_EXPR, type, arg0,
7413 build_real (type, c)));
7414 }
7415
7416 /* Convert x*c1+x*c2 into x*(c1+c2). */
7417 if (flag_unsafe_math_optimizations
7418 && TREE_CODE (arg0) == MULT_EXPR
7419 && TREE_CODE (arg1) == MULT_EXPR
7420 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7421 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7422 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7423 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7424 && operand_equal_p (TREE_OPERAND (arg0, 0),
7425 TREE_OPERAND (arg1, 0), 0))
7426 {
7427 REAL_VALUE_TYPE c1, c2;
7428
7429 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7430 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7431 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7432 return fold (build2 (MULT_EXPR, type,
7433 TREE_OPERAND (arg0, 0),
7434 build_real (type, c1)));
7435 }
7436 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7437 if (flag_unsafe_math_optimizations
7438 && TREE_CODE (arg1) == PLUS_EXPR
7439 && TREE_CODE (arg0) != MULT_EXPR)
7440 {
7441 tree tree10 = TREE_OPERAND (arg1, 0);
7442 tree tree11 = TREE_OPERAND (arg1, 1);
7443 if (TREE_CODE (tree11) == MULT_EXPR
7444 && TREE_CODE (tree10) == MULT_EXPR)
7445 {
7446 tree tree0;
7447 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
7448 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
7449 }
7450 }
7451 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7452 if (flag_unsafe_math_optimizations
7453 && TREE_CODE (arg0) == PLUS_EXPR
7454 && TREE_CODE (arg1) != MULT_EXPR)
7455 {
7456 tree tree00 = TREE_OPERAND (arg0, 0);
7457 tree tree01 = TREE_OPERAND (arg0, 1);
7458 if (TREE_CODE (tree01) == MULT_EXPR
7459 && TREE_CODE (tree00) == MULT_EXPR)
7460 {
7461 tree tree0;
7462 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
7463 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
7464 }
7465 }
7466 }
7467
7468 bit_rotate:
7469 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7470 is a rotate of A by C1 bits. */
7471 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7472 is a rotate of A by B bits. */
7473 {
7474 enum tree_code code0, code1;
7475 code0 = TREE_CODE (arg0);
7476 code1 = TREE_CODE (arg1);
7477 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7478 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7479 && operand_equal_p (TREE_OPERAND (arg0, 0),
7480 TREE_OPERAND (arg1, 0), 0)
7481 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7482 {
7483 tree tree01, tree11;
7484 enum tree_code code01, code11;
7485
7486 tree01 = TREE_OPERAND (arg0, 1);
7487 tree11 = TREE_OPERAND (arg1, 1);
7488 STRIP_NOPS (tree01);
7489 STRIP_NOPS (tree11);
7490 code01 = TREE_CODE (tree01);
7491 code11 = TREE_CODE (tree11);
7492 if (code01 == INTEGER_CST
7493 && code11 == INTEGER_CST
7494 && TREE_INT_CST_HIGH (tree01) == 0
7495 && TREE_INT_CST_HIGH (tree11) == 0
7496 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7497 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7498 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7499 code0 == LSHIFT_EXPR ? tree01 : tree11);
7500 else if (code11 == MINUS_EXPR)
7501 {
7502 tree tree110, tree111;
7503 tree110 = TREE_OPERAND (tree11, 0);
7504 tree111 = TREE_OPERAND (tree11, 1);
7505 STRIP_NOPS (tree110);
7506 STRIP_NOPS (tree111);
7507 if (TREE_CODE (tree110) == INTEGER_CST
7508 && 0 == compare_tree_int (tree110,
7509 TYPE_PRECISION
7510 (TREE_TYPE (TREE_OPERAND
7511 (arg0, 0))))
7512 && operand_equal_p (tree01, tree111, 0))
7513 return build2 ((code0 == LSHIFT_EXPR
7514 ? LROTATE_EXPR
7515 : RROTATE_EXPR),
7516 type, TREE_OPERAND (arg0, 0), tree01);
7517 }
7518 else if (code01 == MINUS_EXPR)
7519 {
7520 tree tree010, tree011;
7521 tree010 = TREE_OPERAND (tree01, 0);
7522 tree011 = TREE_OPERAND (tree01, 1);
7523 STRIP_NOPS (tree010);
7524 STRIP_NOPS (tree011);
7525 if (TREE_CODE (tree010) == INTEGER_CST
7526 && 0 == compare_tree_int (tree010,
7527 TYPE_PRECISION
7528 (TREE_TYPE (TREE_OPERAND
7529 (arg0, 0))))
7530 && operand_equal_p (tree11, tree011, 0))
7531 return build2 ((code0 != LSHIFT_EXPR
7532 ? LROTATE_EXPR
7533 : RROTATE_EXPR),
7534 type, TREE_OPERAND (arg0, 0), tree11);
7535 }
7536 }
7537 }
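  /* Sketch of the rotate detection, assuming "x" is a tree of a
     32-bit unsigned type "utype":

       tree sl  = build2 (LSHIFT_EXPR, utype, x,
                          build_int_cst (NULL_TREE, 3));
       tree sr  = build2 (RSHIFT_EXPR, utype, x,
                          build_int_cst (NULL_TREE, 29));
       tree tem = fold (build2 (PLUS_EXPR, utype, sl, sr));

     Since 3 + 29 equals the precision of "utype", tem becomes
     LROTATE_EXPR <x, 3>.  */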
7538
7539 associate:
7540 /* In most languages, we can't associate operations on floats through
7541 parentheses. Rather than remember where the parentheses were, we
7542 don't associate floats at all, unless the user has specified
7543 -funsafe-math-optimizations. */
7544
7545 if (! wins
7546 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7547 {
7548 tree var0, con0, lit0, minus_lit0;
7549 tree var1, con1, lit1, minus_lit1;
7550
7551 /* Split both trees into variables, constants, and literals. Then
7552 associate each group together, the constants with literals,
7553 then the result with variables. This increases the chances of
7554 literals being recombined later and of generating relocatable
7555 expressions for the sum of a constant and literal. */
7556 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7557 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7558 code == MINUS_EXPR);
7559
7560 /* Only do something if we found more than two objects. Otherwise,
7561 nothing has changed and we risk infinite recursion. */
7562 if (2 < ((var0 != 0) + (var1 != 0)
7563 + (con0 != 0) + (con1 != 0)
7564 + (lit0 != 0) + (lit1 != 0)
7565 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7566 {
7567 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7568 if (code == MINUS_EXPR)
7569 code = PLUS_EXPR;
7570
7571 var0 = associate_trees (var0, var1, code, type);
7572 con0 = associate_trees (con0, con1, code, type);
7573 lit0 = associate_trees (lit0, lit1, code, type);
7574 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7575
7576 /* Preserve the MINUS_EXPR if the negative part of the literal is
7577 greater than the positive part. Otherwise, the multiplicative
7578 folding code (i.e. extract_muldiv) may be fooled when
7579 unsigned constants are subtracted, as in the following
7580 example: ((X*2 + 4) - 8U)/2. */
7581 if (minus_lit0 && lit0)
7582 {
7583 if (TREE_CODE (lit0) == INTEGER_CST
7584 && TREE_CODE (minus_lit0) == INTEGER_CST
7585 && tree_int_cst_lt (lit0, minus_lit0))
7586 {
7587 minus_lit0 = associate_trees (minus_lit0, lit0,
7588 MINUS_EXPR, type);
7589 lit0 = 0;
7590 }
7591 else
7592 {
7593 lit0 = associate_trees (lit0, minus_lit0,
7594 MINUS_EXPR, type);
7595 minus_lit0 = 0;
7596 }
7597 }
7598 if (minus_lit0)
7599 {
7600 if (con0 == 0)
7601 return fold_convert (type,
7602 associate_trees (var0, minus_lit0,
7603 MINUS_EXPR, type));
7604 else
7605 {
7606 con0 = associate_trees (con0, minus_lit0,
7607 MINUS_EXPR, type);
7608 return fold_convert (type,
7609 associate_trees (var0, con0,
7610 PLUS_EXPR, type));
7611 }
7612 }
7613
7614 con0 = associate_trees (con0, lit0, code, type);
7615 return fold_convert (type, associate_trees (var0, con0,
7616 code, type));
7617 }
7618 }
7619
7620 binary:
7621 if (wins)
7622 t1 = const_binop (code, arg0, arg1, 0);
7623 if (t1 != NULL_TREE)
7624 {
7625 /* The return value should always have
7626 the same type as the original expression. */
7627 if (TREE_TYPE (t1) != type)
7628 t1 = fold_convert (type, t1);
7629
7630 return t1;
7631 }
7632 return NULL_TREE;
7633
7634 case MINUS_EXPR:
7635 /* A - (-B) -> A + B */
7636 if (TREE_CODE (arg1) == NEGATE_EXPR)
7637 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7638 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7639 if (TREE_CODE (arg0) == NEGATE_EXPR
7640 && (FLOAT_TYPE_P (type)
7641 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7642 && negate_expr_p (arg1)
7643 && reorder_operands_p (arg0, arg1))
7644 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7645 TREE_OPERAND (arg0, 0)));
7646 /* Convert -A - 1 to ~A. */
7647 if (INTEGRAL_TYPE_P (type)
7648 && TREE_CODE (arg0) == NEGATE_EXPR
7649 && integer_onep (arg1))
7650 return fold (build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0)));
7651
7652 /* Convert -1 - A to ~A. */
7653 if (INTEGRAL_TYPE_P (type)
7654 && integer_all_onesp (arg0))
7655 return fold (build1 (BIT_NOT_EXPR, type, arg1));
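      /* For an assumed integer tree "a", folding -1 - a with the rule
         above yields BIT_NOT_EXPR <a>: the two's complement identity
         ~a == -a - 1 read the other way around.  */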
7656
7657 if (TREE_CODE (type) == COMPLEX_TYPE)
7658 {
7659 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7660 if (tem)
7661 return tem;
7662 }
7663
7664 if (! FLOAT_TYPE_P (type))
7665 {
7666 if (! wins && integer_zerop (arg0))
7667 return negate_expr (fold_convert (type, arg1));
7668 if (integer_zerop (arg1))
7669 return non_lvalue (fold_convert (type, arg0));
7670
7671 /* Fold A - (A & B) into ~B & A. */
7672 if (!TREE_SIDE_EFFECTS (arg0)
7673 && TREE_CODE (arg1) == BIT_AND_EXPR)
7674 {
7675 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7676 return fold (build2 (BIT_AND_EXPR, type,
7677 fold (build1 (BIT_NOT_EXPR, type,
7678 TREE_OPERAND (arg1, 0))),
7679 arg0));
7680 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7681 return fold (build2 (BIT_AND_EXPR, type,
7682 fold (build1 (BIT_NOT_EXPR, type,
7683 TREE_OPERAND (arg1, 1))),
7684 arg0));
7685 }
7686
7687 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7688 any power of 2 minus 1. */
7689 if (TREE_CODE (arg0) == BIT_AND_EXPR
7690 && TREE_CODE (arg1) == BIT_AND_EXPR
7691 && operand_equal_p (TREE_OPERAND (arg0, 0),
7692 TREE_OPERAND (arg1, 0), 0))
7693 {
7694 tree mask0 = TREE_OPERAND (arg0, 1);
7695 tree mask1 = TREE_OPERAND (arg1, 1);
7696 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7697
7698 if (operand_equal_p (tem, mask1, 0))
7699 {
7700 tem = fold (build2 (BIT_XOR_EXPR, type,
7701 TREE_OPERAND (arg0, 0), mask1));
7702 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7703 }
7704 }
7705 }
7706
7707 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7708 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7709 return non_lvalue (fold_convert (type, arg0));
7710
7711 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7712 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7713 (-ARG1 + ARG0) reduces to -ARG1. */
7714 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7715 return negate_expr (fold_convert (type, arg1));
7716
7717 /* Fold &x - &x. This can happen from &x.foo - &x.
7718 This is unsafe for certain floats even in non-IEEE formats.
7719 In IEEE, it is unsafe because it does wrong for NaNs.
7720 Also note that operand_equal_p is always false if an operand
7721 is volatile. */
7722
7723 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7724 && operand_equal_p (arg0, arg1, 0))
7725 return fold_convert (type, integer_zero_node);
7726
7727 /* A - B -> A + (-B) if B is easily negatable. */
7728 if (!wins && negate_expr_p (arg1)
7729 && ((FLOAT_TYPE_P (type)
7730 /* Avoid this transformation if B is a positive REAL_CST. */
7731 && (TREE_CODE (arg1) != REAL_CST
7732 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7733 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7734 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7735
7736 /* Try folding difference of addresses. */
7737 {
7738 HOST_WIDE_INT diff;
7739
7740 if ((TREE_CODE (arg0) == ADDR_EXPR
7741 || TREE_CODE (arg1) == ADDR_EXPR)
7742 && ptr_difference_const (arg0, arg1, &diff))
7743 return build_int_cst_type (type, diff);
7744 }
7745
7746 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7747 of the array. The loop optimizer sometimes produces this type of
7748 expression. */
7749 if (TREE_CODE (arg0) == ADDR_EXPR
7750 && TREE_CODE (arg1) == MULT_EXPR)
7751 {
7752 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7753 if (tem)
7754 return fold_convert (type, fold (tem));
7755 }
7756
7757 if (TREE_CODE (arg0) == MULT_EXPR
7758 && TREE_CODE (arg1) == MULT_EXPR
7759 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7760 {
7761 /* (A * C) - (B * C) -> (A-B) * C. */
7762 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7763 TREE_OPERAND (arg1, 1), 0))
7764 return fold (build2 (MULT_EXPR, type,
7765 fold (build2 (MINUS_EXPR, type,
7766 TREE_OPERAND (arg0, 0),
7767 TREE_OPERAND (arg1, 0))),
7768 TREE_OPERAND (arg0, 1)));
7769 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7770 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7771 TREE_OPERAND (arg1, 0), 0))
7772 return fold (build2 (MULT_EXPR, type,
7773 TREE_OPERAND (arg0, 0),
7774 fold (build2 (MINUS_EXPR, type,
7775 TREE_OPERAND (arg0, 1),
7776 TREE_OPERAND (arg1, 1)))));
7777 }
7778
7779 goto associate;
7780
7781 case MULT_EXPR:
7782 /* (-A) * (-B) -> A * B */
7783 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7784 return fold (build2 (MULT_EXPR, type,
7785 TREE_OPERAND (arg0, 0),
7786 negate_expr (arg1)));
7787 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7788 return fold (build2 (MULT_EXPR, type,
7789 negate_expr (arg0),
7790 TREE_OPERAND (arg1, 0)));
7791
7792 if (TREE_CODE (type) == COMPLEX_TYPE)
7793 {
7794 tem = fold_complex_mult (type, arg0, arg1);
7795 if (tem)
7796 return tem;
7797 }
7798
7799 if (! FLOAT_TYPE_P (type))
7800 {
7801 if (integer_zerop (arg1))
7802 return omit_one_operand (type, arg1, arg0);
7803 if (integer_onep (arg1))
7804 return non_lvalue (fold_convert (type, arg0));
7805 /* Transform x * -1 into -x. */
7806 if (integer_all_onesp (arg1))
7807 return fold_convert (type, negate_expr (arg0));
7808
7809 /* (a * (1 << b)) is (a << b) */
7810 if (TREE_CODE (arg1) == LSHIFT_EXPR
7811 && integer_onep (TREE_OPERAND (arg1, 0)))
7812 return fold (build2 (LSHIFT_EXPR, type, arg0,
7813 TREE_OPERAND (arg1, 1)));
7814 if (TREE_CODE (arg0) == LSHIFT_EXPR
7815 && integer_onep (TREE_OPERAND (arg0, 0)))
7816 return fold (build2 (LSHIFT_EXPR, type, arg1,
7817 TREE_OPERAND (arg0, 1)));
7818
7819 if (TREE_CODE (arg1) == INTEGER_CST
7820 && 0 != (tem = extract_muldiv (op0,
7821 fold_convert (type, arg1),
7822 code, NULL_TREE)))
7823 return fold_convert (type, tem);
7824
7825 }
7826 else
7827 {
7828 /* Maybe fold x * 0 to 0. The expressions aren't the same
7829 when x is NaN, since x * 0 is also NaN. Nor are they the
7830 same in modes with signed zeros, since multiplying a
7831 negative value by 0 gives -0, not +0. */
7832 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7833 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7834 && real_zerop (arg1))
7835 return omit_one_operand (type, arg1, arg0);
7836 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7837 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7838 && real_onep (arg1))
7839 return non_lvalue (fold_convert (type, arg0));
7840
7841 /* Transform x * -1.0 into -x. */
7842 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7843 && real_minus_onep (arg1))
7844 return fold_convert (type, negate_expr (arg0));
7845
7846 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7847 if (flag_unsafe_math_optimizations
7848 && TREE_CODE (arg0) == RDIV_EXPR
7849 && TREE_CODE (arg1) == REAL_CST
7850 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7851 {
7852 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7853 arg1, 0);
7854 if (tem)
7855 return fold (build2 (RDIV_EXPR, type, tem,
7856 TREE_OPERAND (arg0, 1)));
7857 }
7858
7859 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7860 if (operand_equal_p (arg0, arg1, 0))
7861 {
7862 tree tem = fold_strip_sign_ops (arg0);
7863 if (tem != NULL_TREE)
7864 {
7865 tem = fold_convert (type, tem);
7866 return fold (build2 (MULT_EXPR, type, tem, tem));
7867 }
7868 }
7869
7870 if (flag_unsafe_math_optimizations)
7871 {
7872 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7873 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7874
7875 /* Optimizations of root(...)*root(...). */
7876 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7877 {
7878 tree rootfn, arg, arglist;
7879 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7880 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7881
7882 /* Optimize sqrt(x)*sqrt(x) as x. */
7883 if (BUILTIN_SQRT_P (fcode0)
7884 && operand_equal_p (arg00, arg10, 0)
7885 && ! HONOR_SNANS (TYPE_MODE (type)))
7886 return arg00;
7887
7888 /* Optimize root(x)*root(y) as root(x*y). */
7889 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7890 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7891 arglist = build_tree_list (NULL_TREE, arg);
7892 return build_function_call_expr (rootfn, arglist);
7893 }
7894
7895 /* Optimize expN(x)*expN(y) as expN(x+y). */
7896 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7897 {
7898 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7899 tree arg = build2 (PLUS_EXPR, type,
7900 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7901 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7902 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7903 return build_function_call_expr (expfn, arglist);
7904 }
7905
7906 /* Optimizations of pow(...)*pow(...). */
7907 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7908 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7909 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7910 {
7911 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7912 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7913 1)));
7914 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7915 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7916 1)));
7917
7918 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7919 if (operand_equal_p (arg01, arg11, 0))
7920 {
7921 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7922 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7923 tree arglist = tree_cons (NULL_TREE, fold (arg),
7924 build_tree_list (NULL_TREE,
7925 arg01));
7926 return build_function_call_expr (powfn, arglist);
7927 }
7928
7929 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7930 if (operand_equal_p (arg00, arg10, 0))
7931 {
7932 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7933 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7934 tree arglist = tree_cons (NULL_TREE, arg00,
7935 build_tree_list (NULL_TREE,
7936 arg));
7937 return build_function_call_expr (powfn, arglist);
7938 }
7939 }
7940
7941 /* Optimize tan(x)*cos(x) as sin(x). */
7942 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7943 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7944 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7945 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7946 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7947 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7948 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7949 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7950 {
7951 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7952
7953 if (sinfn != NULL_TREE)
7954 return build_function_call_expr (sinfn,
7955 TREE_OPERAND (arg0, 1));
7956 }
7957
7958 /* Optimize x*pow(x,c) as pow(x,c+1). */
7959 if (fcode1 == BUILT_IN_POW
7960 || fcode1 == BUILT_IN_POWF
7961 || fcode1 == BUILT_IN_POWL)
7962 {
7963 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7964 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7965 1)));
7966 if (TREE_CODE (arg11) == REAL_CST
7967 && ! TREE_CONSTANT_OVERFLOW (arg11)
7968 && operand_equal_p (arg0, arg10, 0))
7969 {
7970 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7971 REAL_VALUE_TYPE c;
7972 tree arg, arglist;
7973
7974 c = TREE_REAL_CST (arg11);
7975 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7976 arg = build_real (type, c);
7977 arglist = build_tree_list (NULL_TREE, arg);
7978 arglist = tree_cons (NULL_TREE, arg0, arglist);
7979 return build_function_call_expr (powfn, arglist);
7980 }
7981 }
7982
7983 /* Optimize pow(x,c)*x as pow(x,c+1). */
7984 if (fcode0 == BUILT_IN_POW
7985 || fcode0 == BUILT_IN_POWF
7986 || fcode0 == BUILT_IN_POWL)
7987 {
7988 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7989 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7990 1)));
7991 if (TREE_CODE (arg01) == REAL_CST
7992 && ! TREE_CONSTANT_OVERFLOW (arg01)
7993 && operand_equal_p (arg1, arg00, 0))
7994 {
7995 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7996 REAL_VALUE_TYPE c;
7997 tree arg, arglist;
7998
7999 c = TREE_REAL_CST (arg01);
8000 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8001 arg = build_real (type, c);
8002 arglist = build_tree_list (NULL_TREE, arg);
8003 arglist = tree_cons (NULL_TREE, arg1, arglist);
8004 return build_function_call_expr (powfn, arglist);
8005 }
8006 }
8007
8008 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8009 if (! optimize_size
8010 && operand_equal_p (arg0, arg1, 0))
8011 {
8012 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8013
8014 if (powfn)
8015 {
8016 tree arg = build_real (type, dconst2);
8017 tree arglist = build_tree_list (NULL_TREE, arg);
8018 arglist = tree_cons (NULL_TREE, arg0, arglist);
8019 return build_function_call_expr (powfn, arglist);
8020 }
8021 }
8022 }
8023 }
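      /* Under -funsafe-math-optimizations the builtin rules above
         rewrite trees such as (with "x" and "y" assumed double-typed
         trees and the calls built elsewhere):

           sqrt (x) * sqrt (y)   into   sqrt (x * y)
           exp (x) * exp (y)     into   exp (x + y)
           pow (x, 2.0) * x      into   pow (x, 3.0)

         None of these is exact in IEEE arithmetic, hence the
         flag_unsafe_math_optimizations guard.  */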
8024 goto associate;
8025
8026 case BIT_IOR_EXPR:
8027 bit_ior:
8028 if (integer_all_onesp (arg1))
8029 return omit_one_operand (type, arg1, arg0);
8030 if (integer_zerop (arg1))
8031 return non_lvalue (fold_convert (type, arg0));
8032 if (operand_equal_p (arg0, arg1, 0))
8033 return non_lvalue (fold_convert (type, arg0));
8034
8035 /* ~X | X is -1. */
8036 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8037 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8038 {
8039 t1 = build_int_cst (type, -1);
8040 t1 = force_fit_type (t1, 0, false, false);
8041 return omit_one_operand (type, t1, arg1);
8042 }
8043
8044 /* X | ~X is -1. */
8045 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8046 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8047 {
8048 t1 = build_int_cst (type, -1);
8049 t1 = force_fit_type (t1, 0, false, false);
8050 return omit_one_operand (type, t1, arg0);
8051 }
8052
8053 t1 = distribute_bit_expr (code, type, arg0, arg1);
8054 if (t1 != NULL_TREE)
8055 return t1;
8056
8057 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8058
8059 This results in more efficient code for machines without a NAND
8060 instruction. Combine will canonicalize to the first form
8061 which will allow use of NAND instructions provided by the
8062 backend if they exist. */
8063 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8064 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8065 {
8066 return fold (build1 (BIT_NOT_EXPR, type,
8067 build2 (BIT_AND_EXPR, type,
8068 TREE_OPERAND (arg0, 0),
8069 TREE_OPERAND (arg1, 0))));
8070 }
8071
8072 /* See if this can be simplified into a rotate first. If that
8073 is unsuccessful, continue in the association code. */
8074 goto bit_rotate;
8075
8076 case BIT_XOR_EXPR:
8077 if (integer_zerop (arg1))
8078 return non_lvalue (fold_convert (type, arg0));
8079 if (integer_all_onesp (arg1))
8080 return fold (build1 (BIT_NOT_EXPR, type, arg0));
8081 if (operand_equal_p (arg0, arg1, 0))
8082 return omit_one_operand (type, integer_zero_node, arg0);
8083
8084 /* ~X ^ X is -1. */
8085 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8086 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8087 {
8088 t1 = build_int_cst (type, -1);
8089 t1 = force_fit_type (t1, 0, false, false);
8090 return omit_one_operand (type, t1, arg1);
8091 }
8092
8093 /* X ^ ~X is -1. */
8094 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8095 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8096 {
8097 t1 = build_int_cst (type, -1);
8098 t1 = force_fit_type (t1, 0, false, false);
8099 return omit_one_operand (type, t1, arg0);
8100 }
8101
8102 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8103 with a constant, and the two constants have no bits in common,
8104 we should treat this as a BIT_IOR_EXPR since this may produce more
8105 simplifications. */
8106 if (TREE_CODE (arg0) == BIT_AND_EXPR
8107 && TREE_CODE (arg1) == BIT_AND_EXPR
8108 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8109 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8110 && integer_zerop (const_binop (BIT_AND_EXPR,
8111 TREE_OPERAND (arg0, 1),
8112 TREE_OPERAND (arg1, 1), 0)))
8113 {
8114 code = BIT_IOR_EXPR;
8115 goto bit_ior;
8116 }
8117
8118 /* See if this can be simplified into a rotate first. If that
8119 is unsuccessful, continue in the association code. */
8120 goto bit_rotate;
8121
8122 case BIT_AND_EXPR:
8123 if (integer_all_onesp (arg1))
8124 return non_lvalue (fold_convert (type, arg0));
8125 if (integer_zerop (arg1))
8126 return omit_one_operand (type, arg1, arg0);
8127 if (operand_equal_p (arg0, arg1, 0))
8128 return non_lvalue (fold_convert (type, arg0));
8129
8130 /* ~X & X is always zero. */
8131 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8132 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8133 return omit_one_operand (type, integer_zero_node, arg1);
8134
8135 /* X & ~X is always zero. */
8136 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8137 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8138 return omit_one_operand (type, integer_zero_node, arg0);
8139
8140 t1 = distribute_bit_expr (code, type, arg0, arg1);
8141 if (t1 != NULL_TREE)
8142 return t1;
8143 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8144 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8145 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8146 {
8147 unsigned int prec
8148 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8149
8150 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8151 && (~TREE_INT_CST_LOW (arg1)
8152 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8153 return fold_convert (type, TREE_OPERAND (arg0, 0));
8154 }
8155
8156 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8157
8158 This results in more efficient code for machines without a NOR
8159 instruction. Combine will canonicalize to the first form
8160 which will allow use of NOR instructions provided by the
8161 backend if they exist. */
8162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8163 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8164 {
8165 return fold (build1 (BIT_NOT_EXPR, type,
8166 build2 (BIT_IOR_EXPR, type,
8167 TREE_OPERAND (arg0, 0),
8168 TREE_OPERAND (arg1, 0))));
8169 }
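      /* De Morgan in tree form: for assumed integer trees "x" and "y",
         ~x & ~y folds to ~(x | y) here, and the BIT_IOR_EXPR case
         above performs the dual rewrite ~x | ~y -> ~(x & y).  */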
8170
8171 goto associate;
8172
8173 case RDIV_EXPR:
8174 /* Don't touch a floating-point divide by zero unless the mode
8175 of the constant can represent infinity. */
8176 if (TREE_CODE (arg1) == REAL_CST
8177 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8178 && real_zerop (arg1))
8179 return NULL_TREE;
8180
8181 /* (-A) / (-B) -> A / B */
8182 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8183 return fold (build2 (RDIV_EXPR, type,
8184 TREE_OPERAND (arg0, 0),
8185 negate_expr (arg1)));
8186 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8187 return fold (build2 (RDIV_EXPR, type,
8188 negate_expr (arg0),
8189 TREE_OPERAND (arg1, 0)));
8190
8191 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8192 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8193 && real_onep (arg1))
8194 return non_lvalue (fold_convert (type, arg0));
8195
8196 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8197 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8198 && real_minus_onep (arg1))
8199 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8200
8201 /* If ARG1 is a constant, we can convert this to a multiply by the
8202 reciprocal. This does not have the same rounding properties,
8203 so only do this if -funsafe-math-optimizations is set. We can
8204 actually always safely do it if ARG1 is a power of two, but it's
8205 hard to tell if it is or not in a portable manner. */
8206 if (TREE_CODE (arg1) == REAL_CST)
8207 {
8208 if (flag_unsafe_math_optimizations
8209 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8210 arg1, 0)))
8211 return fold (build2 (MULT_EXPR, type, arg0, tem));
8212 /* Find the reciprocal if optimizing and the result is exact. */
8213 if (optimize)
8214 {
8215 REAL_VALUE_TYPE r;
8216 r = TREE_REAL_CST (arg1);
8217 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8218 {
8219 tem = build_real (type, r);
8220 return fold (build2 (MULT_EXPR, type, arg0, tem));
8221 }
8222 }
8223 }
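      /* E.g. x / 2.0 is rewritten here (when optimizing) as x * 0.5
         even without -funsafe-math-optimizations, because 2.0 has an
         exact reciprocal in binary floating-point modes.  */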
8224 /* Convert A/B/C to A/(B*C). */
8225 if (flag_unsafe_math_optimizations
8226 && TREE_CODE (arg0) == RDIV_EXPR)
8227 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8228 fold (build2 (MULT_EXPR, type,
8229 TREE_OPERAND (arg0, 1), arg1))));
8230
8231 /* Convert A/(B/C) to (A/B)*C. */
8232 if (flag_unsafe_math_optimizations
8233 && TREE_CODE (arg1) == RDIV_EXPR)
8234 return fold (build2 (MULT_EXPR, type,
8235 fold (build2 (RDIV_EXPR, type, arg0,
8236 TREE_OPERAND (arg1, 0))),
8237 TREE_OPERAND (arg1, 1)));
8238
8239 /* Convert C1/(X*C2) into (C1/C2)/X. */
8240 if (flag_unsafe_math_optimizations
8241 && TREE_CODE (arg1) == MULT_EXPR
8242 && TREE_CODE (arg0) == REAL_CST
8243 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8244 {
8245 tree tem = const_binop (RDIV_EXPR, arg0,
8246 TREE_OPERAND (arg1, 1), 0);
8247 if (tem)
8248 return fold (build2 (RDIV_EXPR, type, tem,
8249 TREE_OPERAND (arg1, 0)));
8250 }
8251
8252 if (TREE_CODE (type) == COMPLEX_TYPE)
8253 {
8254 tem = fold_complex_div (type, arg0, arg1, code);
8255 if (tem)
8256 return tem;
8257 }
8258
8259 if (flag_unsafe_math_optimizations)
8260 {
8261 enum built_in_function fcode = builtin_mathfn_code (arg1);
8262 /* Optimize x/expN(y) into x*expN(-y). */
8263 if (BUILTIN_EXPONENT_P (fcode))
8264 {
8265 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8266 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8267 tree arglist = build_tree_list (NULL_TREE,
8268 fold_convert (type, arg));
8269 arg1 = build_function_call_expr (expfn, arglist);
8270 return fold (build2 (MULT_EXPR, type, arg0, arg1));
8271 }
8272
8273 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8274 if (fcode == BUILT_IN_POW
8275 || fcode == BUILT_IN_POWF
8276 || fcode == BUILT_IN_POWL)
8277 {
8278 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8279 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8280 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8281 tree neg11 = fold_convert (type, negate_expr (arg11));
8282 tree arglist = tree_cons (NULL_TREE, arg10,
8283 build_tree_list (NULL_TREE, neg11));
8284 arg1 = build_function_call_expr (powfn, arglist);
8285 return fold (build2 (MULT_EXPR, type, arg0, arg1));
8286 }
8287 }
8288
8289 if (flag_unsafe_math_optimizations)
8290 {
8291 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8292 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8293
8294 /* Optimize sin(x)/cos(x) as tan(x). */
8295 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8296 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8297 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8298 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8299 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8300 {
8301 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8302
8303 if (tanfn != NULL_TREE)
8304 return build_function_call_expr (tanfn,
8305 TREE_OPERAND (arg0, 1));
8306 }
8307
8308 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8309 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8310 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8311 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8312 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8313 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8314 {
8315 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8316
8317 if (tanfn != NULL_TREE)
8318 {
8319 tree tmp = TREE_OPERAND (arg0, 1);
8320 tmp = build_function_call_expr (tanfn, tmp);
8321 return fold (build2 (RDIV_EXPR, type,
8322 build_real (type, dconst1), tmp));
8323 }
8324 }
8325
8326 /* Optimize pow(x,c)/x as pow(x,c-1). */
8327 if (fcode0 == BUILT_IN_POW
8328 || fcode0 == BUILT_IN_POWF
8329 || fcode0 == BUILT_IN_POWL)
8330 {
8331 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8332 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8333 if (TREE_CODE (arg01) == REAL_CST
8334 && ! TREE_CONSTANT_OVERFLOW (arg01)
8335 && operand_equal_p (arg1, arg00, 0))
8336 {
8337 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8338 REAL_VALUE_TYPE c;
8339 tree arg, arglist;
8340
8341 c = TREE_REAL_CST (arg01);
8342 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8343 arg = build_real (type, c);
8344 arglist = build_tree_list (NULL_TREE, arg);
8345 arglist = tree_cons (NULL_TREE, arg1, arglist);
8346 return build_function_call_expr (powfn, arglist);
8347 }
8348 }
8349 }
8350 goto binary;
8351
8352 case TRUNC_DIV_EXPR:
8353 case ROUND_DIV_EXPR:
8354 case FLOOR_DIV_EXPR:
8355 case CEIL_DIV_EXPR:
8356 case EXACT_DIV_EXPR:
8357 if (integer_onep (arg1))
8358 return non_lvalue (fold_convert (type, arg0));
8359 if (integer_zerop (arg1))
8360 return NULL_TREE;
8361 /* X / -1 is -X. */
8362 if (!TYPE_UNSIGNED (type)
8363 && TREE_CODE (arg1) == INTEGER_CST
8364 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8365 && TREE_INT_CST_HIGH (arg1) == -1)
8366 return fold_convert (type, negate_expr (arg0));
8367
8368 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8369 operation, EXACT_DIV_EXPR.
8370
8371 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8372 At one time the others generated faster code; it's not clear if they
8373 still do after the last round of changes to the DIV code in expmed.c. */
8374 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8375 && multiple_of_p (type, arg0, arg1))
8376 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
8377
8378 if (TREE_CODE (arg1) == INTEGER_CST
8379 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8380 return fold_convert (type, tem);
8381
8382 if (TREE_CODE (type) == COMPLEX_TYPE)
8383 {
8384 tem = fold_complex_div (type, arg0, arg1, code);
8385 if (tem)
8386 return tem;
8387 }
8388 goto binary;
8389
8390 case CEIL_MOD_EXPR:
8391 case FLOOR_MOD_EXPR:
8392 case ROUND_MOD_EXPR:
8393 case TRUNC_MOD_EXPR:
8394 /* X % 1 is always zero, but be sure to preserve any side
8395 effects in X. */
8396 if (integer_onep (arg1))
8397 return omit_one_operand (type, integer_zero_node, arg0);
8398
8399 /* X % 0, return X % 0 unchanged so that we can get the
8400 proper warnings and errors. */
8401 if (integer_zerop (arg1))
8402 return NULL_TREE;
8403
8404 /* 0 % X is always zero, but be sure to preserve any side
8405 effects in X. Place this after checking for X == 0. */
8406 if (integer_zerop (arg0))
8407 return omit_one_operand (type, integer_zero_node, arg1);
8408
8409 /* X % -1 is zero. */
8410 if (!TYPE_UNSIGNED (type)
8411 && TREE_CODE (arg1) == INTEGER_CST
8412 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8413 && TREE_INT_CST_HIGH (arg1) == -1)
8414 return omit_one_operand (type, integer_zero_node, arg0);
8415
8416 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8417 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8418 if (code == TRUNC_MOD_EXPR
8419 && TYPE_UNSIGNED (type)
8420 && integer_pow2p (arg1))
8421 {
8422 unsigned HOST_WIDE_INT high, low;
8423 tree mask;
8424 int l;
8425
8426 l = tree_log2 (arg1);
8427 if (l >= HOST_BITS_PER_WIDE_INT)
8428 {
8429 high = ((unsigned HOST_WIDE_INT) 1
8430 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8431 low = -1;
8432 }
8433 else
8434 {
8435 high = 0;
8436 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8437 }
8438
8439 mask = build_int_cst_wide (type, low, high);
8440 return fold (build2 (BIT_AND_EXPR, type,
8441 fold_convert (type, arg0), mask));
8442 }
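	  /* Sketch of this rewrite for an assumed tree "x" of a 32-bit
	     unsigned type "utype":

	       tree tem = fold (build2 (TRUNC_MOD_EXPR, utype, x,
	                                build_int_cst (utype, 8)));

	     tem becomes BIT_AND_EXPR <x, 7>, the mask covering the
	     tree_log2 (8) == 3 low bits.  */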
8443
8444 /* X % -C is the same as X % C. */
8445 if (code == TRUNC_MOD_EXPR
8446 && !TYPE_UNSIGNED (type)
8447 && TREE_CODE (arg1) == INTEGER_CST
8448 && TREE_INT_CST_HIGH (arg1) < 0
8449 && !flag_trapv
8450 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8451 && !sign_bit_p (arg1, arg1))
8452 return fold (build2 (code, type, fold_convert (type, arg0),
8453 fold_convert (type, negate_expr (arg1))));
8454
8455 /* X % -Y is the same as X % Y. */
8456 if (code == TRUNC_MOD_EXPR
8457 && !TYPE_UNSIGNED (type)
8458 && TREE_CODE (arg1) == NEGATE_EXPR
8459 && !flag_trapv)
8460 return fold (build2 (code, type, fold_convert (type, arg0),
8461 fold_convert (type, TREE_OPERAND (arg1, 0))));
8462
8463 if (TREE_CODE (arg1) == INTEGER_CST
8464 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8465 return fold_convert (type, tem);
8466
8467 goto binary;
8468
8469 case LROTATE_EXPR:
8470 case RROTATE_EXPR:
8471 if (integer_all_onesp (arg0))
8472 return omit_one_operand (type, arg0, arg1);
8473 goto shift;
8474
8475 case RSHIFT_EXPR:
8476 /* Optimize -1 >> x for arithmetic right shifts. */
8477 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8478 return omit_one_operand (type, arg0, arg1);
8479 /* ... fall through ... */
8480
8481 case LSHIFT_EXPR:
8482 shift:
8483 if (integer_zerop (arg1))
8484 return non_lvalue (fold_convert (type, arg0));
8485 if (integer_zerop (arg0))
8486 return omit_one_operand (type, arg0, arg1);
8487
8488 /* Since negative shift count is not well-defined,
8489 don't try to compute it in the compiler. */
8490 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8491 return NULL_TREE;
8492 /* Rewrite an LROTATE_EXPR by a constant into an
8493 RROTATE_EXPR by a new constant. */
8494 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8495 {
8496 tree tem = build_int_cst (NULL_TREE,
8497 GET_MODE_BITSIZE (TYPE_MODE (type)));
8498 tem = fold_convert (TREE_TYPE (arg1), tem);
8499 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8500 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
8501 }
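      /* Sketch of the LROTATE -> RROTATE canonicalization, with "x"
         an assumed tree of a 32-bit type:

           tree tem = fold (build2 (LROTATE_EXPR, TREE_TYPE (x), x,
                                    build_int_cst (NULL_TREE, 3)));

         tem is rewritten as RROTATE_EXPR <x, 29>: the count becomes
         bitsize - count, so later code only has to handle one rotate
         direction.  */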
8502
8503 /* If we have a rotate of a bit operation with the rotate count and
8504 the second operand of the bit operation both constant,
8505 permute the two operations. */
8506 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8507 && (TREE_CODE (arg0) == BIT_AND_EXPR
8508 || TREE_CODE (arg0) == BIT_IOR_EXPR
8509 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8510 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8511 return fold (build2 (TREE_CODE (arg0), type,
8512 fold (build2 (code, type,
8513 TREE_OPERAND (arg0, 0), arg1)),
8514 fold (build2 (code, type,
8515 TREE_OPERAND (arg0, 1), arg1))));
8516
8517 /* Two consecutive rotates adding up to the width of the mode can
8518 be ignored. */
8519 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8520 && TREE_CODE (arg0) == RROTATE_EXPR
8521 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8522 && TREE_INT_CST_HIGH (arg1) == 0
8523 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8524 && ((TREE_INT_CST_LOW (arg1)
8525 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8526 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8527 return TREE_OPERAND (arg0, 0);
8528
8529 goto binary;
8530
8531 case MIN_EXPR:
8532 if (operand_equal_p (arg0, arg1, 0))
8533 return omit_one_operand (type, arg0, arg1);
8534 if (INTEGRAL_TYPE_P (type)
8535 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8536 return omit_one_operand (type, arg1, arg0);
8537 goto associate;
8538
8539 case MAX_EXPR:
8540 if (operand_equal_p (arg0, arg1, 0))
8541 return omit_one_operand (type, arg0, arg1);
8542 if (INTEGRAL_TYPE_P (type)
8543 && TYPE_MAX_VALUE (type)
8544 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8545 return omit_one_operand (type, arg1, arg0);
8546 goto associate;
8547
8548 case TRUTH_ANDIF_EXPR:
8549 /* Note that the operands of this must be ints
8550 and their values must be 0 or 1.
8551 ("true" is a fixed value perhaps depending on the language.) */
8552 /* If first arg is constant zero, return it. */
8553 if (integer_zerop (arg0))
8554 return fold_convert (type, arg0);
8555 case TRUTH_AND_EXPR:
8556 /* If either arg is constant true, drop it. */
8557 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8558 return non_lvalue (fold_convert (type, arg1));
8559 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8560 /* Preserve sequence points. */
8561 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8562 return non_lvalue (fold_convert (type, arg0));
8563 /* If second arg is constant zero, result is zero, but first arg
8564 must be evaluated. */
8565 if (integer_zerop (arg1))
8566 return omit_one_operand (type, arg1, arg0);
8567 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8568 case will be handled here. */
8569 if (integer_zerop (arg0))
8570 return omit_one_operand (type, arg0, arg1);
8571
8572 /* !X && X is always false. */
8573 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8574 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8575 return omit_one_operand (type, integer_zero_node, arg1);
8576 /* X && !X is always false. */
8577 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8578 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8579 return omit_one_operand (type, integer_zero_node, arg0);
8580
8581 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8582 means A >= Y && A != MAX, but in this case we know that
8583 A < X <= MAX. */
8584
8585 if (!TREE_SIDE_EFFECTS (arg0)
8586 && !TREE_SIDE_EFFECTS (arg1))
8587 {
8588 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8589 if (tem)
8590 return fold (build2 (code, type, tem, arg1));
8591
8592 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8593 if (tem)
8594 return fold (build2 (code, type, arg0, tem));
8595 }
8596
8597 truth_andor:
8598 /* We only do these simplifications if we are optimizing. */
8599 if (!optimize)
8600 return NULL_TREE;
8601
8602 /* Check for things like (A || B) && (A || C). We can convert this
8603 to A || (B && C). Note that either operator can be any of the four
8604 truth and/or operations and the transformation will still be
8605 valid. Also note that we only care about order for the
8606 ANDIF and ORIF operators. If B contains side effects, this
8607 might change the truth-value of A. */
8608 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8609 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8610 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8611 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8612 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8613 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8614 {
8615 tree a00 = TREE_OPERAND (arg0, 0);
8616 tree a01 = TREE_OPERAND (arg0, 1);
8617 tree a10 = TREE_OPERAND (arg1, 0);
8618 tree a11 = TREE_OPERAND (arg1, 1);
8619 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8620 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8621 && (code == TRUTH_AND_EXPR
8622 || code == TRUTH_OR_EXPR));
8623
8624 if (operand_equal_p (a00, a10, 0))
8625 return fold (build2 (TREE_CODE (arg0), type, a00,
8626 fold (build2 (code, type, a01, a11))));
8627 else if (commutative && operand_equal_p (a00, a11, 0))
8628 return fold (build2 (TREE_CODE (arg0), type, a00,
8629 fold (build2 (code, type, a01, a10))));
8630 else if (commutative && operand_equal_p (a01, a10, 0))
8631 return fold (build2 (TREE_CODE (arg0), type, a01,
8632 fold (build2 (code, type, a00, a11))));
8633
8634 /* This case is tricky because we must either have commutative
8635 operators or else A10 must not have side-effects. */
8636
8637 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8638 && operand_equal_p (a01, a11, 0))
8639 return fold (build2 (TREE_CODE (arg0), type,
8640 fold (build2 (code, type, a00, a10)),
8641 a01));
8642 }
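/* A concrete instance of the distribution above: with
side-effect-free operands, (a || b) && (a || c) becomes
a || (b && c), so a is tested only once. For the
non-short-circuiting TRUTH_AND/TRUTH_OR forms the shared
operand may also match in a swapped position. */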
8643
8644 /* See if we can build a range comparison. */
8645 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8646 return tem;
8647
8648 /* Check for the possibility of merging component references. If our
8649 lhs is another similar operation, try to merge its rhs with our
8650 rhs. Then try to merge our lhs and rhs. */
8651 if (TREE_CODE (arg0) == code
8652 && 0 != (tem = fold_truthop (code, type,
8653 TREE_OPERAND (arg0, 1), arg1)))
8654 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8655
8656 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8657 return tem;
8658
8659 return NULL_TREE;
8660
8661 case TRUTH_ORIF_EXPR:
8662 /* Note that the operands of this must be ints
8663 and their values must be 0 or true.
8664 ("true" is a fixed value perhaps depending on the language.) */
8665 /* If first arg is constant true, return it. */
8666 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8667 return fold_convert (type, arg0);
8668 case TRUTH_OR_EXPR:
8669 /* If either arg is constant zero, drop it. */
8670 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8671 return non_lvalue (fold_convert (type, arg1));
8672 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8673 /* Preserve sequence points. */
8674 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8675 return non_lvalue (fold_convert (type, arg0));
8676 /* If second arg is constant true, result is true, but we must
8677 evaluate first arg. */
8678 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8679 return omit_one_operand (type, arg1, arg0);
8680 /* Likewise for first arg, but note this only occurs here for
8681 TRUTH_OR_EXPR. */
8682 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8683 return omit_one_operand (type, arg0, arg1);
8684
8685 /* !X || X is always true. */
8686 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8688 return omit_one_operand (type, integer_one_node, arg1);
8689 /* X || !X is always true. */
8690 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8692 return omit_one_operand (type, integer_one_node, arg0);
8693
8694 goto truth_andor;
8695
8696 case TRUTH_XOR_EXPR:
8697 /* If the second arg is constant zero, drop it. */
8698 if (integer_zerop (arg1))
8699 return non_lvalue (fold_convert (type, arg0));
8700 /* If the second arg is constant true, this is a logical inversion. */
8701 if (integer_onep (arg1))
8702 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8703 /* Identical arguments cancel to zero. */
8704 if (operand_equal_p (arg0, arg1, 0))
8705 return omit_one_operand (type, integer_zero_node, arg0);
8706
8707 /* !X ^ X is always true. */
8708 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8709 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8710 return omit_one_operand (type, integer_one_node, arg1);
8711
8712 /* X ^ !X is always true. */
8713 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8714 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8715 return omit_one_operand (type, integer_one_node, arg0);
8716
8717 return NULL_TREE;
8718
8719 case EQ_EXPR:
8720 case NE_EXPR:
8721 case LT_EXPR:
8722 case GT_EXPR:
8723 case LE_EXPR:
8724 case GE_EXPR:
8725 /* If one arg is a real or integer constant, put it last. */
8726 if (tree_swap_operands_p (arg0, arg1, true))
8727 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8728
8729 /* If this is an equality comparison of the address of a non-weak
8730 object against zero, then we know the result. */
8731 if ((code == EQ_EXPR || code == NE_EXPR)
8732 && TREE_CODE (arg0) == ADDR_EXPR
8733 && DECL_P (TREE_OPERAND (arg0, 0))
8734 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8735 && integer_zerop (arg1))
8736 return constant_boolean_node (code != EQ_EXPR, type);
8737
8738 /* If this is an equality comparison of the address of two non-weak,
8739 unaliased symbols neither of which are extern (since we do not
8740 have access to attributes for externs), then we know the result. */
8741 if ((code == EQ_EXPR || code == NE_EXPR)
8742 && TREE_CODE (arg0) == ADDR_EXPR
8743 && DECL_P (TREE_OPERAND (arg0, 0))
8744 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8745 && ! lookup_attribute ("alias",
8746 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8747 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8748 && TREE_CODE (arg1) == ADDR_EXPR
8749 && DECL_P (TREE_OPERAND (arg1, 0))
8750 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8751 && ! lookup_attribute ("alias",
8752 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8753 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8754 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8755 ? code == EQ_EXPR : code != EQ_EXPR,
8756 type);
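/* For example (a sketch, with "static int x, y;" defined in the
current unit, neither weak nor aliased): "&x == 0" folds to 0,
"&x != 0" folds to 1, and "&x == &y" folds to 0. */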
8757
8758 /* If this is a comparison of two exprs that look like an
8759 ARRAY_REF of the same object, then we can fold this to a
8760 comparison of the two offsets. */
8761 if (TREE_CODE_CLASS (code) == tcc_comparison)
8762 {
8763 tree base0, offset0, base1, offset1;
8764
8765 if (extract_array_ref (arg0, &base0, &offset0)
8766 && extract_array_ref (arg1, &base1, &offset1)
8767 && operand_equal_p (base0, base1, 0))
8768 {
8769 if (offset0 == NULL_TREE
8770 && offset1 == NULL_TREE)
8771 {
8772 offset0 = integer_zero_node;
8773 offset1 = integer_zero_node;
8774 }
8775 else if (offset0 == NULL_TREE)
8776 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8777 else if (offset1 == NULL_TREE)
8778 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8779
8780 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8781 return fold (build2 (code, type, offset0, offset1));
8782 }
8783 }
8784
8785 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8786 {
8787 tree targ0 = strip_float_extensions (arg0);
8788 tree targ1 = strip_float_extensions (arg1);
8789 tree newtype = TREE_TYPE (targ0);
8790
8791 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8792 newtype = TREE_TYPE (targ1);
8793
8794 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8795 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8796 return fold (build2 (code, type, fold_convert (newtype, targ0),
8797 fold_convert (newtype, targ1)));
8798
8799 /* (-a) CMP (-b) -> b CMP a */
8800 if (TREE_CODE (arg0) == NEGATE_EXPR
8801 && TREE_CODE (arg1) == NEGATE_EXPR)
8802 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8803 TREE_OPERAND (arg0, 0)));
8804
8805 if (TREE_CODE (arg1) == REAL_CST)
8806 {
8807 REAL_VALUE_TYPE cst;
8808 cst = TREE_REAL_CST (arg1);
8809
8810 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8811 if (TREE_CODE (arg0) == NEGATE_EXPR)
8812 return
8813 fold (build2 (swap_tree_comparison (code), type,
8814 TREE_OPERAND (arg0, 0),
8815 build_real (TREE_TYPE (arg1),
8816 REAL_VALUE_NEGATE (cst))));
8817
8818 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8819 /* a CMP (-0) -> a CMP 0 */
8820 if (REAL_VALUE_MINUS_ZERO (cst))
8821 return fold (build2 (code, type, arg0,
8822 build_real (TREE_TYPE (arg1), dconst0)));
8823
8824 /* x != NaN is always true, other ops are always false. */
8825 if (REAL_VALUE_ISNAN (cst)
8826 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8827 {
8828 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8829 return omit_one_operand (type, tem, arg0);
8830 }
8831
8832 /* Fold comparisons against infinity. */
8833 if (REAL_VALUE_ISINF (cst))
8834 {
8835 tem = fold_inf_compare (code, type, arg0, arg1);
8836 if (tem != NULL_TREE)
8837 return tem;
8838 }
8839 }
8840
8841 /* If this is a comparison of a real constant with a PLUS_EXPR
8842 or a MINUS_EXPR of a real constant, we can convert it into a
8843 comparison with a revised real constant as long as no overflow
8844 occurs when unsafe_math_optimizations are enabled. */
8845 if (flag_unsafe_math_optimizations
8846 && TREE_CODE (arg1) == REAL_CST
8847 && (TREE_CODE (arg0) == PLUS_EXPR
8848 || TREE_CODE (arg0) == MINUS_EXPR)
8849 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8850 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8851 ? MINUS_EXPR : PLUS_EXPR,
8852 arg1, TREE_OPERAND (arg0, 1), 0))
8853 && ! TREE_CONSTANT_OVERFLOW (tem))
8854 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8855
8856 /* Likewise, we can simplify a comparison of a real constant with
8857 a MINUS_EXPR whose first operand is also a real constant, i.e.
8858 (c1 - x) < c2 becomes x > c1-c2. */
8859 if (flag_unsafe_math_optimizations
8860 && TREE_CODE (arg1) == REAL_CST
8861 && TREE_CODE (arg0) == MINUS_EXPR
8862 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8863 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8864 arg1, 0))
8865 && ! TREE_CONSTANT_OVERFLOW (tem))
8866 return fold (build2 (swap_tree_comparison (code), type,
8867 TREE_OPERAND (arg0, 1), tem));
8868
8869 /* Fold comparisons against built-in math functions. */
8870 if (TREE_CODE (arg1) == REAL_CST
8871 && flag_unsafe_math_optimizations
8872 && ! flag_errno_math)
8873 {
8874 enum built_in_function fcode = builtin_mathfn_code (arg0);
8875
8876 if (fcode != END_BUILTINS)
8877 {
8878 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8879 if (tem != NULL_TREE)
8880 return tem;
8881 }
8882 }
8883 }
8884
8885 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8886 if (TREE_CONSTANT (arg1)
8887 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8888 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8889 /* This optimization is invalid for ordered comparisons
8890 if CONST+INCR overflows or if foo+incr might overflow.
8891 This optimization is invalid for floating point due to rounding.
8892 For pointer types we assume overflow doesn't happen. */
8893 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8894 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8895 && (code == EQ_EXPR || code == NE_EXPR))))
8896 {
8897 tree varop, newconst;
8898
8899 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8900 {
8901 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8902 arg1, TREE_OPERAND (arg0, 1)));
8903 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8904 TREE_OPERAND (arg0, 0),
8905 TREE_OPERAND (arg0, 1));
8906 }
8907 else
8908 {
8909 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8910 arg1, TREE_OPERAND (arg0, 1)));
8911 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8912 TREE_OPERAND (arg0, 0),
8913 TREE_OPERAND (arg0, 1));
8914 }
8915
8916
8917 /* If VAROP is a reference to a bitfield, we must mask
8918 the constant by the width of the field. */
8919 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8920 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8921 && host_integerp (DECL_SIZE (TREE_OPERAND
8922 (TREE_OPERAND (varop, 0), 1)), 1))
8923 {
8924 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8925 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8926 tree folded_compare, shift;
8927
8928 /* First check whether the comparison would come out
8929 always the same. If we don't do that we would
8930 change the meaning with the masking. */
8931 folded_compare = fold (build2 (code, type,
8932 TREE_OPERAND (varop, 0), arg1));
8933 if (integer_zerop (folded_compare)
8934 || integer_onep (folded_compare))
8935 return omit_one_operand (type, folded_compare, varop);
8936
8937 shift = build_int_cst (NULL_TREE,
8938 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8939 shift = fold_convert (TREE_TYPE (varop), shift);
8940 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8941 newconst, shift));
8942 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8943 newconst, shift));
8944 }
8945
8946 return fold (build2 (code, type, varop, newconst));
8947 }
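/* Sketch of the rewrite above for "int i": "i++ == 5" becomes
"++i == 6" and "i-- == 5" becomes "--i == 4"; for integral
operands only EQ/NE are handled, so overflow of CONST + INCR
cannot change the outcome. */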
8948
8949 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8950 This transformation affects the cases which are handled in later
8951 optimizations involving comparisons with non-negative constants. */
8952 if (TREE_CODE (arg1) == INTEGER_CST
8953 && TREE_CODE (arg0) != INTEGER_CST
8954 && tree_int_cst_sgn (arg1) > 0)
8955 {
8956 switch (code)
8957 {
8958 case GE_EXPR:
8959 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8960 return fold (build2 (GT_EXPR, type, arg0, arg1));
8961
8962 case LT_EXPR:
8963 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8964 return fold (build2 (LE_EXPR, type, arg0, arg1));
8965
8966 default:
8967 break;
8968 }
8969 }
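/* E.g. "x >= 5" becomes "x > 4" and "x < 5" becomes "x <= 4";
since C > 0, C - 1 cannot wrap, and the canonical form feeds
the boundary checks below. */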
8970
8971 /* Comparisons with the highest or lowest possible integer of
8972 the specified size will have known values.
8973
8974 This is quite similar to fold_relational_hi_lo; however,
8975 attempts to share the code have been nothing but trouble. */
8976 {
8977 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8978
8979 if (TREE_CODE (arg1) == INTEGER_CST
8980 && ! TREE_CONSTANT_OVERFLOW (arg1)
8981 && width <= 2 * HOST_BITS_PER_WIDE_INT
8982 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8983 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8984 {
8985 HOST_WIDE_INT signed_max_hi;
8986 unsigned HOST_WIDE_INT signed_max_lo;
8987 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
8988
8989 if (width <= HOST_BITS_PER_WIDE_INT)
8990 {
8991 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8992 - 1;
8993 signed_max_hi = 0;
8994 max_hi = 0;
8995
8996 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8997 {
8998 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8999 min_lo = 0;
9000 min_hi = 0;
9001 }
9002 else
9003 {
9004 max_lo = signed_max_lo;
9005 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9006 min_hi = -1;
9007 }
9008 }
9009 else
9010 {
9011 width -= HOST_BITS_PER_WIDE_INT;
9012 signed_max_lo = -1;
9013 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9014 - 1;
9015 max_lo = -1;
9016 min_lo = 0;
9017
9018 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9019 {
9020 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9021 min_hi = 0;
9022 }
9023 else
9024 {
9025 max_hi = signed_max_hi;
9026 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9027 }
9028 }
9029
9030 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9031 && TREE_INT_CST_LOW (arg1) == max_lo)
9032 switch (code)
9033 {
9034 case GT_EXPR:
9035 return omit_one_operand (type, integer_zero_node, arg0);
9036
9037 case GE_EXPR:
9038 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9039
9040 case LE_EXPR:
9041 return omit_one_operand (type, integer_one_node, arg0);
9042
9043 case LT_EXPR:
9044 return fold (build2 (NE_EXPR, type, arg0, arg1));
9045
9046 /* The GE_EXPR and LT_EXPR cases above are not normally
9047 reached because of previous transformations. */
9048
9049 default:
9050 break;
9051 }
9052 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9053 == max_hi
9054 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9055 switch (code)
9056 {
9057 case GT_EXPR:
9058 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9059 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9060 case LE_EXPR:
9061 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9062 return fold (build2 (NE_EXPR, type, arg0, arg1));
9063 default:
9064 break;
9065 }
9066 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9067 == min_hi
9068 && TREE_INT_CST_LOW (arg1) == min_lo)
9069 switch (code)
9070 {
9071 case LT_EXPR:
9072 return omit_one_operand (type, integer_zero_node, arg0);
9073
9074 case LE_EXPR:
9075 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9076
9077 case GE_EXPR:
9078 return omit_one_operand (type, integer_one_node, arg0);
9079
9080 case GT_EXPR:
9081 return fold (build2 (NE_EXPR, type, arg0, arg1));
9082
9083 default:
9084 break;
9085 }
9086 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9087 == min_hi
9088 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9089 switch (code)
9090 {
9091 case GE_EXPR:
9092 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9093 return fold (build2 (NE_EXPR, type, arg0, arg1));
9094 case LT_EXPR:
9095 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9096 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9097 default:
9098 break;
9099 }
9100
9101 else if (!in_gimple_form
9102 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9103 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9104 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9105 /* signed_type does not work on pointer types. */
9106 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9107 {
9108 /* The following case also applies to X < signed_max+1
9109 and X >= signed_max+1 because of previous transformations. */
9110 if (code == LE_EXPR || code == GT_EXPR)
9111 {
9112 tree st0, st1;
9113 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9114 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9115 return fold
9116 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9117 type, fold_convert (st0, arg0),
9118 fold_convert (st1, integer_zero_node)));
9119 }
9120 }
9121 }
9122 }
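/* Source-level sketch (32-bit types): for "unsigned u",
"u > UINT_MAX" folds to 0 and "u <= UINT_MAX" to 1; for
"int x", "x >= INT_MAX" becomes "x == INT_MAX" and
"x < INT_MIN" folds to 0. One step inside the boundary,
"u > UINT_MAX - 1" becomes "u == UINT_MAX". */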
9123
9124 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9125 a MINUS_EXPR of a constant, we can convert it into a comparison with
9126 a revised constant as long as no overflow occurs. */
9127 if ((code == EQ_EXPR || code == NE_EXPR)
9128 && TREE_CODE (arg1) == INTEGER_CST
9129 && (TREE_CODE (arg0) == PLUS_EXPR
9130 || TREE_CODE (arg0) == MINUS_EXPR)
9131 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9132 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9133 ? MINUS_EXPR : PLUS_EXPR,
9134 arg1, TREE_OPERAND (arg0, 1), 0))
9135 && ! TREE_CONSTANT_OVERFLOW (tem))
9136 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
9137
9138 /* Similarly for a NEGATE_EXPR. */
9139 else if ((code == EQ_EXPR || code == NE_EXPR)
9140 && TREE_CODE (arg0) == NEGATE_EXPR
9141 && TREE_CODE (arg1) == INTEGER_CST
9142 && 0 != (tem = negate_expr (arg1))
9143 && TREE_CODE (tem) == INTEGER_CST
9144 && ! TREE_CONSTANT_OVERFLOW (tem))
9145 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
9146
9147 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9148 for !=. Don't do this for ordered comparisons due to overflow. */
9149 else if ((code == NE_EXPR || code == EQ_EXPR)
9150 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9151 return fold (build2 (code, type,
9152 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
9153
9154 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9155 && (TREE_CODE (arg0) == NOP_EXPR
9156 || TREE_CODE (arg0) == CONVERT_EXPR))
9157 {
9158 /* If we are widening one operand of an integer comparison,
9159 see if the other operand is similarly being widened. Perhaps we
9160 can do the comparison in the narrower type. */
9161 tem = fold_widened_comparison (code, type, arg0, arg1);
9162 if (tem)
9163 return tem;
9164
9165 /* Or if we are changing signedness. */
9166 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9167 if (tem)
9168 return tem;
9169 }
9170
9171 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9172 constant, we can simplify it. */
9173 else if (TREE_CODE (arg1) == INTEGER_CST
9174 && (TREE_CODE (arg0) == MIN_EXPR
9175 || TREE_CODE (arg0) == MAX_EXPR)
9176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9177 {
9178 tem = optimize_minmax_comparison (code, type, op0, op1);
9179 if (tem)
9180 return tem;
9181
9182 return NULL_TREE;
9183 }
9184
9185 /* If we are comparing an ABS_EXPR with a constant, we can
9186 convert all the cases into explicit comparisons, but they may
9187 well not be faster than doing the ABS and one comparison.
9188 But ABS (X) <= C is a range comparison, which becomes a subtraction
9189 and a comparison, and is probably faster. */
9190 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9191 && TREE_CODE (arg0) == ABS_EXPR
9192 && ! TREE_SIDE_EFFECTS (arg0)
9193 && (0 != (tem = negate_expr (arg1)))
9194 && TREE_CODE (tem) == INTEGER_CST
9195 && ! TREE_CONSTANT_OVERFLOW (tem))
9196 return fold (build2 (TRUTH_ANDIF_EXPR, type,
9197 build2 (GE_EXPR, type,
9198 TREE_OPERAND (arg0, 0), tem),
9199 build2 (LE_EXPR, type,
9200 TREE_OPERAND (arg0, 0), arg1)));
9201
9202 /* Convert ABS_EXPR<x> >= 0 to true. */
9203 else if (code == GE_EXPR
9204 && tree_expr_nonnegative_p (arg0)
9205 && (integer_zerop (arg1)
9206 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9207 && real_zerop (arg1))))
9208 return omit_one_operand (type, integer_one_node, arg0);
9209
9210 /* Convert ABS_EXPR<x> < 0 to false. */
9211 else if (code == LT_EXPR
9212 && tree_expr_nonnegative_p (arg0)
9213 && (integer_zerop (arg1) || real_zerop (arg1)))
9214 return omit_one_operand (type, integer_zero_node, arg0);
9215
9216 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9217 else if ((code == EQ_EXPR || code == NE_EXPR)
9218 && TREE_CODE (arg0) == ABS_EXPR
9219 && (integer_zerop (arg1) || real_zerop (arg1)))
9220 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), arg1));
9221
9222 /* If this is an EQ or NE comparison with zero and ARG0 is
9223 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9224 two operations, but the latter can be done in one less insn
9225 on machines that have only two-operand insns or on which a
9226 constant cannot be the first operand. */
9227 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9228 && TREE_CODE (arg0) == BIT_AND_EXPR)
9229 {
9230 tree arg00 = TREE_OPERAND (arg0, 0);
9231 tree arg01 = TREE_OPERAND (arg0, 1);
9232 if (TREE_CODE (arg00) == LSHIFT_EXPR
9233 && integer_onep (TREE_OPERAND (arg00, 0)))
9234 return
9235 fold (build2 (code, type,
9236 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9237 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9238 arg01, TREE_OPERAND (arg00, 1)),
9239 fold_convert (TREE_TYPE (arg0),
9240 integer_one_node)),
9241 arg1));
9242 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9243 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9244 return
9245 fold (build2 (code, type,
9246 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9247 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9248 arg00, TREE_OPERAND (arg01, 1)),
9249 fold_convert (TREE_TYPE (arg0),
9250 integer_one_node)),
9251 arg1));
9252 }
9253
9254 /* If this is an NE or EQ comparison of zero against the result of a
9255 signed MOD operation whose second operand is a power of 2, make
9256 the MOD operation unsigned since it is simpler and equivalent. */
9257 if ((code == NE_EXPR || code == EQ_EXPR)
9258 && integer_zerop (arg1)
9259 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9260 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9261 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9262 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9263 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9264 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9265 {
9266 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9267 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
9268 fold_convert (newtype,
9269 TREE_OPERAND (arg0, 0)),
9270 fold_convert (newtype,
9271 TREE_OPERAND (arg0, 1))));
9272
9273 return fold (build2 (code, type, newmod,
9274 fold_convert (newtype, arg1)));
9275 }
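/* E.g. for "int x", "x % 8 == 0" becomes
"(unsigned) x % 8U == 0"; because 8 is a power of two that
divides 2**32, the zero test is unaffected for negative x. */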
9276
9277 /* If this is an NE comparison of zero with an AND of one, remove the
9278 comparison since the AND will give the correct value. */
9279 if (code == NE_EXPR && integer_zerop (arg1)
9280 && TREE_CODE (arg0) == BIT_AND_EXPR
9281 && integer_onep (TREE_OPERAND (arg0, 1)))
9282 return fold_convert (type, arg0);
9283
9284 /* If we have (A & C) == C where C is a power of 2, convert this into
9285 (A & C) != 0. Similarly for NE_EXPR. */
9286 if ((code == EQ_EXPR || code == NE_EXPR)
9287 && TREE_CODE (arg0) == BIT_AND_EXPR
9288 && integer_pow2p (TREE_OPERAND (arg0, 1))
9289 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9290 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9291 arg0, fold_convert (TREE_TYPE (arg0),
9292 integer_zero_node)));
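/* E.g. "(x & 8) == 8" becomes "(x & 8) != 0" (and the NE form
becomes "== 0"), which the single-bit test below can expand
into shifts. */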
9293
9294 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
9295 2, then fold the expression into shifts and logical operations. */
9296 tem = fold_single_bit_test (code, arg0, arg1, type);
9297 if (tem)
9298 return tem;
9299
9300 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9301 Similarly for NE_EXPR. */
9302 if ((code == EQ_EXPR || code == NE_EXPR)
9303 && TREE_CODE (arg0) == BIT_AND_EXPR
9304 && TREE_CODE (arg1) == INTEGER_CST
9305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9306 {
9307 tree notc = fold (build1 (BIT_NOT_EXPR,
9308 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9309 TREE_OPERAND (arg0, 1)));
9310 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9311 arg1, notc));
9312 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9313 if (integer_nonzerop (dandnotc))
9314 return omit_one_operand (type, rslt, arg0);
9315 }
9316
9317 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9318 Similarly for NE_EXPR. */
9319 if ((code == EQ_EXPR || code == NE_EXPR)
9320 && TREE_CODE (arg0) == BIT_IOR_EXPR
9321 && TREE_CODE (arg1) == INTEGER_CST
9322 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9323 {
9324 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
9325 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9326 TREE_OPERAND (arg0, 1), notd));
9327 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9328 if (integer_nonzerop (candnotd))
9329 return omit_one_operand (type, rslt, arg0);
9330 }
9331
9332 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9333 and similarly for >= into !=. */
9334 if ((code == LT_EXPR || code == GE_EXPR)
9335 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9336 && TREE_CODE (arg1) == LSHIFT_EXPR
9337 && integer_onep (TREE_OPERAND (arg1, 0)))
9338 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9339 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9340 TREE_OPERAND (arg1, 1)),
9341 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9342
9343 else if ((code == LT_EXPR || code == GE_EXPR)
9344 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9345 && (TREE_CODE (arg1) == NOP_EXPR
9346 || TREE_CODE (arg1) == CONVERT_EXPR)
9347 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9348 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9349 return
9350 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9351 fold_convert (TREE_TYPE (arg0),
9352 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9353 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9354 1))),
9355 fold_convert (TREE_TYPE (arg0), integer_zero_node));
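/* E.g. for "unsigned x", "x < (1 << y)" becomes
"(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0",
replacing the shift of the constant with a test against
zero. */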
9356
9357 /* Simplify comparison of something with itself. (For IEEE
9358 floating-point, we can only do some of these simplifications.) */
9359 if (operand_equal_p (arg0, arg1, 0))
9360 {
9361 switch (code)
9362 {
9363 case EQ_EXPR:
9364 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9365 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9366 return constant_boolean_node (1, type);
9367 break;
9368
9369 case GE_EXPR:
9370 case LE_EXPR:
9371 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9372 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9373 return constant_boolean_node (1, type);
9374 return fold (build2 (EQ_EXPR, type, arg0, arg1));
9375
9376 case NE_EXPR:
9377 /* For NE, we can only do this simplification if the operands are
9378 integral or we don't honor IEEE floating point NaNs. */
9379 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9380 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9381 break;
9382 /* ... fall through ... */
9383 case GT_EXPR:
9384 case LT_EXPR:
9385 return constant_boolean_node (0, type);
9386 default:
9387 gcc_unreachable ();
9388 }
9389 }
9390
9391 /* If we are comparing an expression that just has comparisons
9392 of two integer values, arithmetic expressions of those comparisons,
9393 and constants, we can simplify it. There are only three cases
9394 to check: the two values can either be equal, the first can be
9395 greater, or the second can be greater. Fold the expression for
9396 those three values. Since each value must be 0 or 1, we have
9397 eight possibilities, each of which corresponds to the constant 0
9398 or 1 or one of the six possible comparisons.
9399
9400 This handles common cases like (a > b) == 0 but also handles
9401 expressions like ((x > y) - (y > x)) > 0, which supposedly
9402 occur in macroized code. */
9403
9404 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9405 {
9406 tree cval1 = 0, cval2 = 0;
9407 int save_p = 0;
9408
9409 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9410 /* Don't handle degenerate cases here; they should already
9411 have been handled anyway. */
9412 && cval1 != 0 && cval2 != 0
9413 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9414 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9415 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9416 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9417 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9418 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9419 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9420 {
9421 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9422 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9423
9424 /* We can't just pass T to eval_subst in case cval1 or cval2
9425 was the same as ARG1. */
9426
9427 tree high_result
9428 = fold (build2 (code, type,
9429 eval_subst (arg0, cval1, maxval,
9430 cval2, minval),
9431 arg1));
9432 tree equal_result
9433 = fold (build2 (code, type,
9434 eval_subst (arg0, cval1, maxval,
9435 cval2, maxval),
9436 arg1));
9437 tree low_result
9438 = fold (build2 (code, type,
9439 eval_subst (arg0, cval1, minval,
9440 cval2, maxval),
9441 arg1));
9442
9443 /* All three of these results should be 0 or 1. Confirm they
9444 are. Then use those values to select the proper code
9445 to use. */
9446
9447 if ((integer_zerop (high_result)
9448 || integer_onep (high_result))
9449 && (integer_zerop (equal_result)
9450 || integer_onep (equal_result))
9451 && (integer_zerop (low_result)
9452 || integer_onep (low_result)))
9453 {
9454 /* Make a 3-bit mask with the high-order bit being the
9455 value for `>', the next for `=', and the low for `<'. */
9456 switch ((integer_onep (high_result) * 4)
9457 + (integer_onep (equal_result) * 2)
9458 + integer_onep (low_result))
9459 {
9460 case 0:
9461 /* Always false. */
9462 return omit_one_operand (type, integer_zero_node, arg0);
9463 case 1:
9464 code = LT_EXPR;
9465 break;
9466 case 2:
9467 code = EQ_EXPR;
9468 break;
9469 case 3:
9470 code = LE_EXPR;
9471 break;
9472 case 4:
9473 code = GT_EXPR;
9474 break;
9475 case 5:
9476 code = NE_EXPR;
9477 break;
9478 case 6:
9479 code = GE_EXPR;
9480 break;
9481 case 7:
9482 /* Always true. */
9483 return omit_one_operand (type, integer_one_node, arg0);
9484 }
9485
9486 tem = build2 (code, type, cval1, cval2);
9487 if (save_p)
9488 return save_expr (tem);
9489 else
9490 return fold (tem);
9491 }
9492 }
9493 }
9494
9495 /* If this is a comparison of a field, we may be able to simplify it. */
9496 if (((TREE_CODE (arg0) == COMPONENT_REF
9497 && lang_hooks.can_use_bit_fields_p ())
9498 || TREE_CODE (arg0) == BIT_FIELD_REF)
9499 && (code == EQ_EXPR || code == NE_EXPR)
9500 /* Handle the constant case even without -O
9501 to make sure the warnings are given. */
9502 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9503 {
9504 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9505 if (t1)
9506 return t1;
9507 }
9508
9509 /* If this is a comparison of complex values and either or both sides
9510 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9511 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9512 This may prevent needless evaluations. */
9513 if ((code == EQ_EXPR || code == NE_EXPR)
9514 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9515 && (TREE_CODE (arg0) == COMPLEX_EXPR
9516 || TREE_CODE (arg1) == COMPLEX_EXPR
9517 || TREE_CODE (arg0) == COMPLEX_CST
9518 || TREE_CODE (arg1) == COMPLEX_CST))
9519 {
9520 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9521 tree real0, imag0, real1, imag1;
9522
9523 arg0 = save_expr (arg0);
9524 arg1 = save_expr (arg1);
9525 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
9526 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
9527 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
9528 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
9529
9530 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9531 : TRUTH_ORIF_EXPR),
9532 type,
9533 fold (build2 (code, type, real0, real1)),
9534 fold (build2 (code, type, imag0, imag1))));
9535 }
9536
9537 /* Optimize comparisons of strlen vs zero to a compare of the
9538 first character of the string vs zero. To wit,
9539 strlen(ptr) == 0 => *ptr == 0
9540 strlen(ptr) != 0 => *ptr != 0
9541 Other cases should reduce to one of these two (or a constant)
9542 due to the return value of strlen being unsigned. */
9543 if ((code == EQ_EXPR || code == NE_EXPR)
9544 && integer_zerop (arg1)
9545 && TREE_CODE (arg0) == CALL_EXPR)
9546 {
9547 tree fndecl = get_callee_fndecl (arg0);
9548 tree arglist;
9549
9550 if (fndecl
9551 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9552 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9553 && (arglist = TREE_OPERAND (arg0, 1))
9554 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9555 && ! TREE_CHAIN (arglist))
9556 return fold (build2 (code, type,
9557 build1 (INDIRECT_REF, char_type_node,
9558 TREE_VALUE (arglist)),
9559 fold_convert (char_type_node,
9560 integer_zero_node)));
9561 }
9562
9563 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9564 into a single range test. */
9565 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9566 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9567 && TREE_CODE (arg1) == INTEGER_CST
9568 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9569 && !integer_zerop (TREE_OPERAND (arg0, 1))
9570 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9571 && !TREE_OVERFLOW (arg1))
9572 {
9573 t1 = fold_div_compare (code, type, arg0, arg1);
9574 if (t1 != NULL_TREE)
9575 return t1;
9576 }
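/* E.g. with truncating division and x >= 0, "x / 4 == 3" holds
exactly when 12 <= x <= 15, so fold_div_compare can replace
the division with a single range test. */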
9577
9578 if ((code == EQ_EXPR || code == NE_EXPR)
9579 && !TREE_SIDE_EFFECTS (arg0)
9580 && integer_zerop (arg1)
9581 && tree_expr_nonzero_p (arg0))
9582 return constant_boolean_node (code == NE_EXPR, type);
9583
9584 t1 = fold_relational_const (code, type, arg0, arg1);
9585 return t1 == NULL_TREE ? NULL_TREE : t1;
9586
9587 case UNORDERED_EXPR:
9588 case ORDERED_EXPR:
9589 case UNLT_EXPR:
9590 case UNLE_EXPR:
9591 case UNGT_EXPR:
9592 case UNGE_EXPR:
9593 case UNEQ_EXPR:
9594 case LTGT_EXPR:
9595 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9596 {
9597 t1 = fold_relational_const (code, type, arg0, arg1);
9598 if (t1 != NULL_TREE)
9599 return t1;
9600 }
9601
9602 /* If the first operand is NaN, the result is constant. */
9603 if (TREE_CODE (arg0) == REAL_CST
9604 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9605 && (code != LTGT_EXPR || ! flag_trapping_math))
9606 {
9607 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9608 ? integer_zero_node
9609 : integer_one_node;
9610 return omit_one_operand (type, t1, arg1);
9611 }
9612
9613 /* If the second operand is NaN, the result is constant. */
9614 if (TREE_CODE (arg1) == REAL_CST
9615 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9616 && (code != LTGT_EXPR || ! flag_trapping_math))
9617 {
9618 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9619 ? integer_zero_node
9620 : integer_one_node;
9621 return omit_one_operand (type, t1, arg0);
9622 }
9623
9624 /* Simplify unordered comparison of something with itself. */
9625 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9626 && operand_equal_p (arg0, arg1, 0))
9627 return constant_boolean_node (1, type);
9628
9629 if (code == LTGT_EXPR
9630 && !flag_trapping_math
9631 && operand_equal_p (arg0, arg1, 0))
9632 return constant_boolean_node (0, type);
9633
9634 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9635 {
9636 tree targ0 = strip_float_extensions (arg0);
9637 tree targ1 = strip_float_extensions (arg1);
9638 tree newtype = TREE_TYPE (targ0);
9639
9640 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9641 newtype = TREE_TYPE (targ1);
9642
9643 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9644 return fold (build2 (code, type, fold_convert (newtype, targ0),
9645 fold_convert (newtype, targ1)));
9646 }
9647
9648 return NULL_TREE;
9649
9650 case COMPOUND_EXPR:
9651 /* When pedantic, a compound expression can be neither an lvalue
9652 nor an integer constant expression. */
9653 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9654 return NULL_TREE;
9655 /* Don't let (0, 0) be a null pointer constant. */
9656 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9657 : fold_convert (type, arg1);
9658 return pedantic_non_lvalue (tem);
9659
9660 case COMPLEX_EXPR:
9661 if (wins)
9662 return build_complex (type, arg0, arg1);
9663 return NULL_TREE;
9664
9665 default:
9666 return NULL_TREE;
9667 } /* switch (code) */
9668 }
9669
9670 /* Fold a ternary expression EXPR. Return the folded expression if
9671 folding is successful. Otherwise, return the original
9672 expression. */
9673
9674 static tree
9675 fold_ternary (tree expr)
9676 {
9677 const tree t = expr;
9678 const tree type = TREE_TYPE (expr);
9679 tree tem;
9680 tree op0, op1, op2;
9681 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9682 enum tree_code code = TREE_CODE (t);
9683 enum tree_code_class kind = TREE_CODE_CLASS (code);
9684
9685 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9686 && TREE_CODE_LENGTH (code) == 3);
9687
9688 op0 = TREE_OPERAND (t, 0);
9689 op1 = TREE_OPERAND (t, 1);
9690 op2 = TREE_OPERAND (t, 2);
9691
9692 /* Strip any conversions that don't change the mode. This is safe
9693 for every expression, except for a comparison expression because
9694 its signedness is derived from its operands. So, in the latter
9695 case, only strip conversions that don't change the signedness.
9696
9697 Note that this is done as an internal manipulation within the
9698 constant folder, in order to find the simplest representation of
9699 the arguments so that their form can be studied. In any case,
9700 the appropriate type conversions should be put back in the tree
9701 that will get out of the constant folder. */
9702 if (op0)
9703 {
9704 arg0 = op0;
9705 STRIP_NOPS (arg0);
9706 }
9707
9708 if (op1)
9709 {
9710 arg1 = op1;
9711 STRIP_NOPS (arg1);
9712 }
9713
9714 switch (code)
9715 {
9716 case COMPONENT_REF:
9717 if (TREE_CODE (arg0) == CONSTRUCTOR
9718 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9719 {
9720 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
9721 if (m)
9722 return TREE_VALUE (m);
9723 }
9724 return NULL_TREE;
9725
9726 case COND_EXPR:
9727 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9728 so all simple results must be passed through pedantic_non_lvalue. */
9729 if (TREE_CODE (arg0) == INTEGER_CST)
9730 {
9731 tem = integer_zerop (arg0) ? op2 : op1;
9732 /* Only optimize constant conditions when the selected branch
9733 has the same type as the COND_EXPR. This avoids optimizing
9734 away "c ? x : throw", where the throw has a void type. */
9735 if (! VOID_TYPE_P (TREE_TYPE (tem))
9736 || VOID_TYPE_P (type))
9737 return pedantic_non_lvalue (tem);
9738 return NULL_TREE;
9739 }
9740 if (operand_equal_p (arg1, op2, 0))
9741 return pedantic_omit_one_operand (type, arg1, arg0);
9742
9743 /* If we have A op B ? A : C, we may be able to convert this to a
9744 simpler expression, depending on the operation and the values
9745 of B and C. Signed zeros prevent all of these transformations,
9746 for reasons given above each one.
9747
9748 Also try swapping the arguments and inverting the conditional. */
9749 if (COMPARISON_CLASS_P (arg0)
9750 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9751 arg1, TREE_OPERAND (arg0, 1))
9752 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9753 {
9754 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9755 if (tem)
9756 return tem;
9757 }
9758
9759 if (COMPARISON_CLASS_P (arg0)
9760 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9761 op2,
9762 TREE_OPERAND (arg0, 1))
9763 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9764 {
9765 tem = invert_truthvalue (arg0);
9766 if (COMPARISON_CLASS_P (tem))
9767 {
9768 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9769 if (tem)
9770 return tem;
9771 }
9772 }
9773
9774 /* If the second operand is simpler than the third, swap them
9775 since that produces better jump optimization results. */
9776 if (tree_swap_operands_p (op1, op2, false))
9777 {
9778 /* See if this can be inverted. If it can't, possibly because
9779 it was a floating-point inequality comparison, don't do
9780 anything. */
9781 tem = invert_truthvalue (arg0);
9782
9783 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9784 return fold (build3 (code, type, tem, op2, op1));
9785 }
9786
9787 /* Convert A ? 1 : 0 to simply A. */
9788 if (integer_onep (op1)
9789 && integer_zerop (op2)
9790 /* If we try to convert OP0 to our type, the
9791 call to fold will try to move the conversion inside
9792 a COND, which will recurse. In that case, the COND_EXPR
9793 is probably the best choice, so leave it alone. */
9794 && type == TREE_TYPE (arg0))
9795 return pedantic_non_lvalue (arg0);
9796
9797 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9798 over COND_EXPR in cases such as floating point comparisons. */
9799 if (integer_zerop (op1)
9800 && integer_onep (op2)
9801 && truth_value_p (TREE_CODE (arg0)))
9802 return pedantic_non_lvalue (fold_convert (type,
9803 invert_truthvalue (arg0)));
9804
9805 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9806 if (TREE_CODE (arg0) == LT_EXPR
9807 && integer_zerop (TREE_OPERAND (arg0, 1))
9808 && integer_zerop (op2)
9809 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9810 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9811 TREE_TYPE (tem), tem, arg1)));
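/* Sketch: for a 32-bit int x, "x < 0 ? 0x80000000 : 0" becomes
"x & 0x80000000", since the condition selects exactly the
sign bit. */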
9812
9813 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9814 already handled above. */
9815 if (TREE_CODE (arg0) == BIT_AND_EXPR
9816 && integer_onep (TREE_OPERAND (arg0, 1))
9817 && integer_zerop (op2)
9818 && integer_pow2p (arg1))
9819 {
9820 tree tem = TREE_OPERAND (arg0, 0);
9821 STRIP_NOPS (tem);
9822 if (TREE_CODE (tem) == RSHIFT_EXPR
9823 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9824 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9825 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9826 return fold (build2 (BIT_AND_EXPR, type,
9827 TREE_OPERAND (tem, 0), arg1));
9828 }
9829
9830 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9831 is probably obsolete because the first operand should be a
9832 truth value (that's why we have the two cases above), but let's
9833 leave it in until we can confirm this for all front-ends. */
9834 if (integer_zerop (op2)
9835 && TREE_CODE (arg0) == NE_EXPR
9836 && integer_zerop (TREE_OPERAND (arg0, 1))
9837 && integer_pow2p (arg1)
9838 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9839 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9840 arg1, OEP_ONLY_CONST))
9841 return pedantic_non_lvalue (fold_convert (type,
9842 TREE_OPERAND (arg0, 0)));
9843
9844 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9845 if (integer_zerop (op2)
9846 && truth_value_p (TREE_CODE (arg0))
9847 && truth_value_p (TREE_CODE (arg1)))
9848 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9849
9850 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9851 if (integer_onep (op2)
9852 && truth_value_p (TREE_CODE (arg0))
9853 && truth_value_p (TREE_CODE (arg1)))
9854 {
9855 /* Only perform transformation if ARG0 is easily inverted. */
9856 tem = invert_truthvalue (arg0);
9857 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9858 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9859 }
9860
9861 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9862 if (integer_zerop (arg1)
9863 && truth_value_p (TREE_CODE (arg0))
9864 && truth_value_p (TREE_CODE (op2)))
9865 {
9866 /* Only perform transformation if ARG0 is easily inverted. */
9867 tem = invert_truthvalue (arg0);
9868 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9869 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem, op2));
9870 }
9871
9872 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9873 if (integer_onep (arg1)
9874 && truth_value_p (TREE_CODE (arg0))
9875 && truth_value_p (TREE_CODE (op2)))
9876 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0, op2));
9877
9878 return NULL_TREE;
9879
9880 case CALL_EXPR:
9881 /* Check for a built-in function. */
9882 if (TREE_CODE (op0) == ADDR_EXPR
9883 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
9884 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
9885 {
9886 tree tmp = fold_builtin (t, false);
9887 if (tmp)
9888 return tmp;
9889 }
9890 return NULL_TREE;
9891
9892 default:
9893 return NULL_TREE;
9894 } /* switch (code) */
9895 }
9896
9897 /* Perform constant folding and related simplification of EXPR.
9898 The related simplifications include x*1 => x, x*0 => 0, etc.,
9899 and application of the associative law.
9900 NOP_EXPR conversions may be removed freely (as long as we
9901 are careful not to change the type of the overall expression).
9902 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
9903 but we can constant-fold them if they have constant operands. */
9904
9905 #ifdef ENABLE_FOLD_CHECKING
9906 # define fold(x) fold_1 (x)
9907 static tree fold_1 (tree);
9908 static
9909 #endif
9910 tree
9911 fold (tree expr)
9912 {
9913 const tree t = expr;
9914 enum tree_code code = TREE_CODE (t);
9915 enum tree_code_class kind = TREE_CODE_CLASS (code);
9916 tree tem;
9917
9918 /* Return right away if a constant. */
9919 if (kind == tcc_constant)
9920 return t;
9921
9922 if (IS_EXPR_CODE_CLASS (kind))
9923 {
9924 tree type = TREE_TYPE (t);
9925 tree op0, op1;
9926
9927 switch (TREE_CODE_LENGTH (code))
9928 {
9929 case 1:
9930 op0 = TREE_OPERAND (t, 0);
9931 tem = fold_unary (code, type, op0);
9932 return tem ? tem : expr;
9933 case 2:
9934 op0 = TREE_OPERAND (t, 0);
9935 op1 = TREE_OPERAND (t, 1);
9936 tem = fold_binary (code, type, op0, op1);
9937 return tem ? tem : expr;
9938 case 3:
9939 tem = fold_ternary (expr);
9940 return tem ? tem : expr;
9941 default:
9942 break;
9943 }
9944 }
9945
9946 switch (code)
9947 {
9948 case CONST_DECL:
9949 return fold (DECL_INITIAL (t));
9950
9951 default:
9952 return t;
9953 } /* switch (code) */
9954 }
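/* Usage sketch (hypothetical caller): fold applied to a GENERIC
tree such as build2 (PLUS_EXPR, integer_type_node, x,
integer_zero_node) returns x (possibly wrapped in a
NON_LVALUE_EXPR); when nothing simplifies, the original tree
is returned unchanged. */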
9955
9956 #ifdef ENABLE_FOLD_CHECKING
9957 #undef fold
9958
9959 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9960 static void fold_check_failed (tree, tree);
9961 void print_fold_checksum (tree);
9962
9963 /* When --enable-checking=fold, compute a digest of expr before
9964 and after the actual fold call to verify that fold did not
9965 accidentally change the original expr. */
9966
9967 tree
9968 fold (tree expr)
9969 {
9970 tree ret;
9971 struct md5_ctx ctx;
9972 unsigned char checksum_before[16], checksum_after[16];
9973 htab_t ht;
9974
9975 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9976 md5_init_ctx (&ctx);
9977 fold_checksum_tree (expr, &ctx, ht);
9978 md5_finish_ctx (&ctx, checksum_before);
9979 htab_empty (ht);
9980
9981 ret = fold_1 (expr);
9982
9983 md5_init_ctx (&ctx);
9984 fold_checksum_tree (expr, &ctx, ht);
9985 md5_finish_ctx (&ctx, checksum_after);
9986 htab_delete (ht);
9987
9988 if (memcmp (checksum_before, checksum_after, 16))
9989 fold_check_failed (expr, ret);
9990
9991 return ret;
9992 }
9993
9994 void
9995 print_fold_checksum (tree expr)
9996 {
9997 struct md5_ctx ctx;
9998 unsigned char checksum[16], cnt;
9999 htab_t ht;
10000
10001 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10002 md5_init_ctx (&ctx);
10003 fold_checksum_tree (expr, &ctx, ht);
10004 md5_finish_ctx (&ctx, checksum);
10005 htab_delete (ht);
10006 for (cnt = 0; cnt < 16; ++cnt)
10007 fprintf (stderr, "%02x", checksum[cnt]);
10008 putc ('\n', stderr);
10009 }
10010
10011 static void
10012 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10013 {
10014 internal_error ("fold check: original tree changed by fold");
10015 }
10016
10017 static void
10018 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10019 {
10020 void **slot;
10021 enum tree_code code;
10022 char buf[sizeof (struct tree_decl)];
10023 int i, len;
10024
10025 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10026 <= sizeof (struct tree_decl))
10027 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10028 if (expr == NULL)
10029 return;
10030 slot = htab_find_slot (ht, expr, INSERT);
10031 if (*slot != NULL)
10032 return;
10033 *slot = expr;
10034 code = TREE_CODE (expr);
10035 if (TREE_CODE_CLASS (code) == tcc_declaration
10036 && DECL_ASSEMBLER_NAME_SET_P (expr))
10037 {
10038 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10039 memcpy (buf, expr, tree_size (expr));
10040 expr = (tree) buf;
10041 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10042 }
10043 else if (TREE_CODE_CLASS (code) == tcc_type
10044 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10045 || TYPE_CACHED_VALUES_P (expr)))
10046 {
10047 /* Allow these fields to be modified. */
10048 memcpy (buf, expr, tree_size (expr));
10049 expr = (tree) buf;
10050 TYPE_POINTER_TO (expr) = NULL;
10051 TYPE_REFERENCE_TO (expr) = NULL;
10052 TYPE_CACHED_VALUES_P (expr) = 0;
10053 TYPE_CACHED_VALUES (expr) = NULL;
10054 }
10055 md5_process_bytes (expr, tree_size (expr), ctx);
10056 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10057 if (TREE_CODE_CLASS (code) != tcc_type
10058 && TREE_CODE_CLASS (code) != tcc_declaration)
10059 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10060 switch (TREE_CODE_CLASS (code))
10061 {
10062 case tcc_constant:
10063 switch (code)
10064 {
10065 case STRING_CST:
10066 md5_process_bytes (TREE_STRING_POINTER (expr),
10067 TREE_STRING_LENGTH (expr), ctx);
10068 break;
10069 case COMPLEX_CST:
10070 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10071 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10072 break;
10073 case VECTOR_CST:
10074 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10075 break;
10076 default:
10077 break;
10078 }
10079 break;
10080 case tcc_exceptional:
10081 switch (code)
10082 {
10083 case TREE_LIST:
10084 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10085 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10086 break;
10087 case TREE_VEC:
10088 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10089 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10090 break;
10091 default:
10092 break;
10093 }
10094 break;
10095 case tcc_expression:
10096 case tcc_reference:
10097 case tcc_comparison:
10098 case tcc_unary:
10099 case tcc_binary:
10100 case tcc_statement:
10101 len = TREE_CODE_LENGTH (code);
10102 for (i = 0; i < len; ++i)
10103 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10104 break;
10105 case tcc_declaration:
10106 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10107 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10108 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10109 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10110 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10111 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10112 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10113 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10114 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10115 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10116 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10117 break;
10118 case tcc_type:
10119 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10120 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10121 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10122 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10123 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10124 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10125 if (INTEGRAL_TYPE_P (expr)
10126 || SCALAR_FLOAT_TYPE_P (expr))
10127 {
10128 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10129 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10130 }
10131 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10132 if (TREE_CODE (expr) == RECORD_TYPE
10133 || TREE_CODE (expr) == UNION_TYPE
10134 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10135 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10136 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10137 break;
10138 default:
10139 break;
10140 }
10141 }
10142
10143 #endif
10144
10145 /* Perform constant folding and related simplification of initializer
10146 expression EXPR. This behaves identically to "fold" but ignores
10147 potential run-time traps and exceptions that fold must preserve. */
10148
10149 tree
10150 fold_initializer (tree expr)
10151 {
10152 int saved_signaling_nans = flag_signaling_nans;
10153 int saved_trapping_math = flag_trapping_math;
10154 int saved_rounding_math = flag_rounding_math;
10155 int saved_trapv = flag_trapv;
10156 tree result;
10157
10158 flag_signaling_nans = 0;
10159 flag_trapping_math = 0;
10160 flag_rounding_math = 0;
10161 flag_trapv = 0;
10162
10163 result = fold (expr);
10164
10165 flag_signaling_nans = saved_signaling_nans;
10166 flag_trapping_math = saved_trapping_math;
10167 flag_rounding_math = saved_rounding_math;
10168 flag_trapv = saved_trapv;
10169
10170 return result;
10171 }
10172
10173 /* Determine if first argument is a multiple of second argument. Return 0 if
10174 it is not, or if we cannot easily determine it to be.
10175
10176 An example of the sort of thing we care about (at this point; this routine
10177 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10178 fold cases do now) is discovering that
10179
10180 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10181
10182 is a multiple of
10183
10184 SAVE_EXPR (J * 8)
10185
10186 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10187
10188 This code also handles discovering that
10189
10190 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10191
10192 is a multiple of 8 so we don't have to worry about dealing with a
10193 possible remainder.
10194
10195 Note that we *look* inside a SAVE_EXPR only to determine how it was
10196 calculated; it is not safe for fold to do much of anything else with the
10197 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10198 at run time. For example, the latter example above *cannot* be implemented
10199 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10200 evaluation time of the original SAVE_EXPR is not necessarily the same at
10201 the time the new expression is evaluated. The only optimization of this
10202 sort that would be valid is changing
10203
10204 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10205
10206 divided by 8 to
10207
10208 SAVE_EXPR (I) * SAVE_EXPR (J)
10209
10210 (where the same SAVE_EXPR (J) is used in the original and the
10211 transformed version). */
10212
10213 static int
10214 multiple_of_p (tree type, tree top, tree bottom)
10215 {
10216 if (operand_equal_p (top, bottom, 0))
10217 return 1;
10218
10219 if (TREE_CODE (type) != INTEGER_TYPE)
10220 return 0;
10221
10222 switch (TREE_CODE (top))
10223 {
10224 case BIT_AND_EXPR:
10225 /* Bitwise and provides a power of two multiple. If the mask is
10226 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10227 if (!integer_pow2p (bottom))
10228 return 0;
10229 /* FALLTHRU */
10230
10231 case MULT_EXPR:
10232 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10233 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10234
10235 case PLUS_EXPR:
10236 case MINUS_EXPR:
10237 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10238 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10239
10240 case LSHIFT_EXPR:
10241 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10242 {
10243 tree op1, t1;
10244
10245 op1 = TREE_OPERAND (top, 1);
10246 /* const_binop may not detect overflow correctly,
10247 so check for it explicitly here. */
10248 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10249 > TREE_INT_CST_LOW (op1)
10250 && TREE_INT_CST_HIGH (op1) == 0
10251 && 0 != (t1 = fold_convert (type,
10252 const_binop (LSHIFT_EXPR,
10253 size_one_node,
10254 op1, 0)))
10255 && ! TREE_OVERFLOW (t1))
10256 return multiple_of_p (type, t1, bottom);
10257 }
10258 return 0;
10259
10260 case NOP_EXPR:
10261 /* Can't handle conversions from non-integral or wider integral type. */
10262 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10263 || (TYPE_PRECISION (type)
10264 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10265 return 0;
10266
10267 /* ... fall through ... */
10268
10269 case SAVE_EXPR:
10270 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10271
10272 case INTEGER_CST:
10273 if (TREE_CODE (bottom) != INTEGER_CST
10274 || (TYPE_UNSIGNED (type)
10275 && (tree_int_cst_sgn (top) < 0
10276 || tree_int_cst_sgn (bottom) < 0)))
10277 return 0;
10278 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10279 top, bottom, 0));
10280
10281 default:
10282 return 0;
10283 }
10284 }
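
#if 0
/* Illustrative sketch, not part of the build: the MULT_EXPR and
   PLUS_EXPR rules above, restated in plain C.  A product is a multiple
   of BOTTOM when either factor is; a sum only when both operands are.
   demo_multiple_rules is a hypothetical name used for illustration.  */
static int
demo_multiple_rules (long i, long j)
{
  long mult = i * (j * 8);         /* SAVE_EXPR (I) * SAVE_EXPR (J * 8) */
  long plus = (i * 8) + (j * 8);   /* both addends are multiples of 8 */
  return mult % 8 == 0 && plus % 8 == 0;   /* always 1 */
}
#endif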
10285
10286 /* Return true if `t' is known to be non-negative. */
10287
10288 int
10289 tree_expr_nonnegative_p (tree t)
10290 {
10291 switch (TREE_CODE (t))
10292 {
10293 case ABS_EXPR:
10294 return 1;
10295
10296 case INTEGER_CST:
10297 return tree_int_cst_sgn (t) >= 0;
10298
10299 case REAL_CST:
10300 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10301
10302 case PLUS_EXPR:
10303 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10304 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10305 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10306
10307 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10308 both unsigned and at least 2 bits shorter than the result. */
10309 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10310 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10311 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10312 {
10313 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10314 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10315 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10316 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10317 {
10318 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10319 TYPE_PRECISION (inner2)) + 1;
10320 return prec < TYPE_PRECISION (TREE_TYPE (t));
10321 }
10322 }
10323 break;
10324
10325 case MULT_EXPR:
10326 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10327 {
10328 /* x * x for floating point x is always non-negative. */
10329 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10330 return 1;
10331 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10332 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10333 }
10334
10335 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10336 both unsigned and their combined precision is smaller than that of the result. */
10337 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10338 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10339 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10340 {
10341 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10342 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10343 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10344 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10345 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10346 < TYPE_PRECISION (TREE_TYPE (t));
10347 }
10348 return 0;
10349
10350 case TRUNC_DIV_EXPR:
10351 case CEIL_DIV_EXPR:
10352 case FLOOR_DIV_EXPR:
10353 case ROUND_DIV_EXPR:
10354 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10355 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10356
10357 case TRUNC_MOD_EXPR:
10358 case CEIL_MOD_EXPR:
10359 case FLOOR_MOD_EXPR:
10360 case ROUND_MOD_EXPR:
10361 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10362
10363 case RDIV_EXPR:
10364 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10365 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10366
10367 case BIT_AND_EXPR:
10368 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10369 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10370 case BIT_IOR_EXPR:
10371 case BIT_XOR_EXPR:
10372 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10373 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10374
10375 case NOP_EXPR:
10376 {
10377 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10378 tree outer_type = TREE_TYPE (t);
10379
10380 if (TREE_CODE (outer_type) == REAL_TYPE)
10381 {
10382 if (TREE_CODE (inner_type) == REAL_TYPE)
10383 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10384 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10385 {
10386 if (TYPE_UNSIGNED (inner_type))
10387 return 1;
10388 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10389 }
10390 }
10391 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10392 {
10393 if (TREE_CODE (inner_type) == REAL_TYPE)
10394 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10395 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10396 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10397 && TYPE_UNSIGNED (inner_type);
10398 }
10399 }
10400 break;
10401
10402 case COND_EXPR:
10403 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10404 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10405 case COMPOUND_EXPR:
10406 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10407 case MIN_EXPR:
10408 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10409 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10410 case MAX_EXPR:
10411 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10412 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10413 case MODIFY_EXPR:
10414 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10415 case BIND_EXPR:
10416 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10417 case SAVE_EXPR:
10418 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10419 case NON_LVALUE_EXPR:
10420 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10421 case FLOAT_EXPR:
10422 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10423
10424 case TARGET_EXPR:
10425 {
10426 tree temp = TARGET_EXPR_SLOT (t);
10427 t = TARGET_EXPR_INITIAL (t);
10428
10429 /* If the initializer is non-void, then it's a normal expression
10430 that will be assigned to the slot. */
10431 if (!VOID_TYPE_P (t))
10432 return tree_expr_nonnegative_p (t);
10433
10434 /* Otherwise, the initializer sets the slot in some way. One common
10435 way is an assignment statement at the end of the initializer. */
10436 while (1)
10437 {
10438 if (TREE_CODE (t) == BIND_EXPR)
10439 t = expr_last (BIND_EXPR_BODY (t));
10440 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10441 || TREE_CODE (t) == TRY_CATCH_EXPR)
10442 t = expr_last (TREE_OPERAND (t, 0));
10443 else if (TREE_CODE (t) == STATEMENT_LIST)
10444 t = expr_last (t);
10445 else
10446 break;
10447 }
10448 if (TREE_CODE (t) == MODIFY_EXPR
10449 && TREE_OPERAND (t, 0) == temp)
10450 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10451
10452 return 0;
10453 }
10454
10455 case CALL_EXPR:
10456 {
10457 tree fndecl = get_callee_fndecl (t);
10458 tree arglist = TREE_OPERAND (t, 1);
10459 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10460 switch (DECL_FUNCTION_CODE (fndecl))
10461 {
10462 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10463 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10464 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10465 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10466
10467 CASE_BUILTIN_F (BUILT_IN_ACOS)
10468 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10469 CASE_BUILTIN_F (BUILT_IN_CABS)
10470 CASE_BUILTIN_F (BUILT_IN_COSH)
10471 CASE_BUILTIN_F (BUILT_IN_ERFC)
10472 CASE_BUILTIN_F (BUILT_IN_EXP)
10473 CASE_BUILTIN_F (BUILT_IN_EXP10)
10474 CASE_BUILTIN_F (BUILT_IN_EXP2)
10475 CASE_BUILTIN_F (BUILT_IN_FABS)
10476 CASE_BUILTIN_F (BUILT_IN_FDIM)
10477 CASE_BUILTIN_F (BUILT_IN_FREXP)
10478 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10479 CASE_BUILTIN_F (BUILT_IN_POW10)
10480 CASE_BUILTIN_I (BUILT_IN_FFS)
10481 CASE_BUILTIN_I (BUILT_IN_PARITY)
10482 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10483 /* Always true. */
10484 return 1;
10485
10486 CASE_BUILTIN_F (BUILT_IN_SQRT)
10487 /* sqrt(-0.0) is -0.0. */
10488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10489 return 1;
10490 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10491
10492 CASE_BUILTIN_F (BUILT_IN_ASINH)
10493 CASE_BUILTIN_F (BUILT_IN_ATAN)
10494 CASE_BUILTIN_F (BUILT_IN_ATANH)
10495 CASE_BUILTIN_F (BUILT_IN_CBRT)
10496 CASE_BUILTIN_F (BUILT_IN_CEIL)
10497 CASE_BUILTIN_F (BUILT_IN_ERF)
10498 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10499 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10500 CASE_BUILTIN_F (BUILT_IN_FMOD)
10501 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10502 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10503 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10504 CASE_BUILTIN_F (BUILT_IN_LRINT)
10505 CASE_BUILTIN_F (BUILT_IN_LROUND)
10506 CASE_BUILTIN_F (BUILT_IN_MODF)
10507 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10508 CASE_BUILTIN_F (BUILT_IN_POW)
10509 CASE_BUILTIN_F (BUILT_IN_RINT)
10510 CASE_BUILTIN_F (BUILT_IN_ROUND)
10511 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10512 CASE_BUILTIN_F (BUILT_IN_SINH)
10513 CASE_BUILTIN_F (BUILT_IN_TANH)
10514 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10515 /* True if the 1st argument is nonnegative. */
10516 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10517
10518 CASE_BUILTIN_F (BUILT_IN_FMAX)
10519 /* True if the 1st OR 2nd arguments are nonnegative. */
10520 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10521 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10522
10523 CASE_BUILTIN_F (BUILT_IN_FMIN)
10524 /* True if the 1st AND 2nd arguments are nonnegative. */
10525 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10526 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10527
10528 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10529 /* True if the 2nd argument is nonnegative. */
10530 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10531
10532 default:
10533 break;
10534 #undef CASE_BUILTIN_F
10535 #undef CASE_BUILTIN_I
10536 }
10537 }
10538
10539 /* ... fall through ... */
10540
10541 default:
10542 if (truth_value_p (TREE_CODE (t)))
10543 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10544 return 1;
10545 }
10546
10547 /* We don't know the sign of `t', so be conservative and return false. */
10548 return 0;
10549 }
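
#if 0
/* Illustrative sketch, not part of the build: the widening rules above
   in plain C.  Two zero-extended 8-bit values added in a 32-bit int
   need at most 9 bits, and their product at most 16 bits, so neither
   result can be negative.  demo_nonnegative is a hypothetical name
   used for illustration.  */
static int
demo_nonnegative (unsigned char a, unsigned char b)
{
  int sum = (int) a + (int) b;    /* at most 255 + 255 = 510   */
  int prod = (int) a * (int) b;   /* at most 255 * 255 = 65025 */
  return sum >= 0 && prod >= 0;   /* always 1 */
}
#endif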
10550
10551 /* Return true when T is an address and is known to be nonzero.
10552 For floating point we further ensure that T is not denormal.
10553 Similar logic is present in nonzero_address_p in rtlanal.c. */
10554
10555 static bool
10556 tree_expr_nonzero_p (tree t)
10557 {
10558 tree type = TREE_TYPE (t);
10559
10560 /* Doing something useful for floating point would need more work. */
10561 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10562 return false;
10563
10564 switch (TREE_CODE (t))
10565 {
10566 case ABS_EXPR:
10567 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10568 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10569 break;
10570 case INTEGER_CST:
10571 /* We used to test for !integer_zerop here. This does not work correctly
10572 if TREE_CONSTANT_OVERFLOW (t). */
10573 return (TREE_INT_CST_LOW (t) != 0
10574 || TREE_INT_CST_HIGH (t) != 0);
10575
10576 case PLUS_EXPR:
10577 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10578 {
10579 /* In the presence of negative values it is hard
10580 to say anything definite. */
10581 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10582 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10583 return false;
10584 /* One of the operands must be positive and the other non-negative. */
10585 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10586 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10587 }
10588 break;
10589
10590 case MULT_EXPR:
10591 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10592 {
10593 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10594 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10595 }
10596 break;
10597
10598 case NOP_EXPR:
10599 {
10600 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10601 tree outer_type = TREE_TYPE (t);
10602
10603 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10604 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10605 }
10606 break;
10607
10608 case ADDR_EXPR:
10609 {
10610 tree base = get_base_address (TREE_OPERAND (t, 0));
10611
10612 if (!base)
10613 return false;
10614
10615 /* Weak declarations may link to NULL. */
10616 if (DECL_P (base))
10617 return !DECL_WEAK (base);
10618
10619 /* Constants are never weak. */
10620 if (CONSTANT_CLASS_P (base))
10621 return true;
10622
10623 return false;
10624 }
10625
10626 case COND_EXPR:
10627 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10628 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10629
10630 case MIN_EXPR:
10631 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10632 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10633
10634 case MAX_EXPR:
10635 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10636 {
10637 /* When both operands are nonzero, then MAX must be too. */
10638 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10639 return true;
10640
10641 /* MAX where operand 0 is positive is positive. */
10642 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10643 }
10644 /* MAX where operand 1 is positive is positive. */
10645 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10646 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10647 return true;
10648 break;
10649
10650 case COMPOUND_EXPR:
10651 case MODIFY_EXPR:
10652 case BIND_EXPR:
10653 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10654
10655 case SAVE_EXPR:
10656 case NON_LVALUE_EXPR:
10657 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10658
10659 case BIT_IOR_EXPR:
10660 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10661 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10662
10663 default:
10664 break;
10665 }
10666 return false;
10667 }
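
#if 0
/* Illustrative sketch, not part of the build: why the ADDR_EXPR case
   above refuses to treat the address of a weak declaration as nonzero.
   With the GCC extension below, &maybe_absent can legitimately compare
   equal to 0 at run time, so folding the test away would be wrong.
   maybe_absent is a hypothetical symbol used for illustration.  */
extern int maybe_absent __attribute__ ((weak));

static int
have_symbol (void)
{
  return &maybe_absent != 0;   /* must be evaluated at run time */
}
#endif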
10668
10669 /* See if we are applying CODE, a relational operator, to the highest
10670 or lowest possible integer of TYPE. If so, then the result is a
10671 compile-time constant. */
10672
10673 static tree
10674 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10675 tree *op1_p)
10676 {
10677 tree op0 = *op0_p;
10678 tree op1 = *op1_p;
10679 enum tree_code code = *code_p;
10680 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10681
10682 if (TREE_CODE (op1) == INTEGER_CST
10683 && ! TREE_CONSTANT_OVERFLOW (op1)
10684 && width <= HOST_BITS_PER_WIDE_INT
10685 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10686 || POINTER_TYPE_P (TREE_TYPE (op1))))
10687 {
10688 unsigned HOST_WIDE_INT signed_max;
10689 unsigned HOST_WIDE_INT max, min;
10690
10691 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10692
10693 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10694 {
10695 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10696 min = 0;
10697 }
10698 else
10699 {
10700 max = signed_max;
10701 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10702 }
10703
10704 if (TREE_INT_CST_HIGH (op1) == 0
10705 && TREE_INT_CST_LOW (op1) == max)
10706 switch (code)
10707 {
10708 case GT_EXPR:
10709 return omit_one_operand (type, integer_zero_node, op0);
10710
10711 case GE_EXPR:
10712 *code_p = EQ_EXPR;
10713 break;
10714 case LE_EXPR:
10715 return omit_one_operand (type, integer_one_node, op0);
10716
10717 case LT_EXPR:
10718 *code_p = NE_EXPR;
10719 break;
10720
10721 /* The GE_EXPR and LT_EXPR cases above are not normally
10722 reached because of previous transformations. */
10723
10724 default:
10725 break;
10726 }
10727 else if (TREE_INT_CST_HIGH (op1) == 0
10728 && TREE_INT_CST_LOW (op1) == max - 1)
10729 switch (code)
10730 {
10731 case GT_EXPR:
10732 *code_p = EQ_EXPR;
10733 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10734 break;
10735 case LE_EXPR:
10736 *code_p = NE_EXPR;
10737 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10738 break;
10739 default:
10740 break;
10741 }
10742 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10743 && TREE_INT_CST_LOW (op1) == min)
10744 switch (code)
10745 {
10746 case LT_EXPR:
10747 return omit_one_operand (type, integer_zero_node, op0);
10748
10749 case LE_EXPR:
10750 *code_p = EQ_EXPR;
10751 break;
10752
10753 case GE_EXPR:
10754 return omit_one_operand (type, integer_one_node, op0);
10755
10756 case GT_EXPR:
10757 *code_p = NE_EXPR;
10758 break;
10759
10760 default:
10761 break;
10762 }
10763 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10764 && TREE_INT_CST_LOW (op1) == min + 1)
10765 switch (code)
10766 {
10767 case GE_EXPR:
10768 *code_p = NE_EXPR;
10769 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10770 break;
10771 case LT_EXPR:
10772 *code_p = EQ_EXPR;
10773 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10774 break;
10775 default:
10776 break;
10777 }
10778
10779 else if (TREE_INT_CST_HIGH (op1) == 0
10780 && TREE_INT_CST_LOW (op1) == signed_max
10781 && TYPE_UNSIGNED (TREE_TYPE (op1))
10782 /* signed_type does not work on pointer types. */
10783 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10784 {
10785 /* The following case also applies to X < signed_max+1
10786 and X >= signed_max+1 because of previous transformations. */
10787 if (code == LE_EXPR || code == GT_EXPR)
10788 {
10789 tree st0, st1, exp, retval;
10790 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10791 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10792
10793 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10794 type,
10795 fold_convert (st0, op0),
10796 fold_convert (st1, integer_zero_node));
10797
10798 retval = fold_binary_to_constant (TREE_CODE (exp),
10799 TREE_TYPE (exp),
10800 TREE_OPERAND (exp, 0),
10801 TREE_OPERAND (exp, 1));
10802
10803 /* If we are in gimple form, then returning EXP would create
10804 non-gimple expressions. Clearing it is safe and ensures
10805 we do not allow a non-gimple expression to escape. */
10806 if (in_gimple_form)
10807 exp = NULL;
10808
10809 return (retval ? retval : exp);
10810 }
10811 }
10812 }
10813
10814 return NULL_TREE;
10815 }
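
#if 0
/* Illustrative sketch, not part of the build: the boundary folds above
   for a 32-bit unsigned type.  Comparisons against the extreme values
   either have a known result or degenerate into equality tests.
   demo_hi_lo is a hypothetical name used for illustration.  */
static int
demo_hi_lo (unsigned int x)
{
  int a = x > 0xffffffffu;    /* always 0: nothing exceeds the maximum */
  int b = x >= 0xffffffffu;   /* same as x == 0xffffffffu */
  int c = x >= 0u;            /* always 1: nothing is below the minimum */
  int d = x < 1u;             /* same as x == 0u (the min + 1 case) */
  return a == 0 && b == (x == 0xffffffffu) && c == 1 && d == (x == 0u);
}
#endif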
10816
10817
10818 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10819 attempt to fold the expression to a constant without modifying TYPE,
10820 OP0 or OP1.
10821
10822 If the expression can be simplified to a constant, then return
10823 the constant. If the expression cannot be simplified to a
10824 constant, then return NULL_TREE.
10825
10826 Note this is primarily designed to be called after gimplification
10827 of the tree structures and when at least one operand is a constant.
10828 As a result of those simplifying assumptions this routine is far
10829 simpler than the generic fold routine. */
10830
10831 tree
10832 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10833 {
10834 int wins = 1;
10835 tree subop0;
10836 tree subop1;
10837 tree tem;
10838
10839 /* If this is a commutative operation, and ARG0 is a constant, move it
10840 to ARG1 to reduce the number of tests below. */
10841 if (commutative_tree_code (code)
10842 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10843 {
10844 tem = op0;
10845 op0 = op1;
10846 op1 = tem;
10847 }
10848
10849 /* If either operand is a complex type, extract its real component. */
10850 if (TREE_CODE (op0) == COMPLEX_CST)
10851 subop0 = TREE_REALPART (op0);
10852 else
10853 subop0 = op0;
10854
10855 if (TREE_CODE (op1) == COMPLEX_CST)
10856 subop1 = TREE_REALPART (op1);
10857 else
10858 subop1 = op1;
10859
10860 /* Note if either argument is not a real or integer constant.
10861 With a few exceptions, simplification is limited to cases
10862 where both arguments are constants. */
10863 if ((TREE_CODE (subop0) != INTEGER_CST
10864 && TREE_CODE (subop0) != REAL_CST)
10865 || (TREE_CODE (subop1) != INTEGER_CST
10866 && TREE_CODE (subop1) != REAL_CST))
10867 wins = 0;
10868
10869 switch (code)
10870 {
10871 case PLUS_EXPR:
10872 /* (plus (address) (const_int)) is a constant. */
10873 if (TREE_CODE (op0) == PLUS_EXPR
10874 && TREE_CODE (op1) == INTEGER_CST
10875 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10876 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10877 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10878 == ADDR_EXPR)))
10879 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10880 {
10881 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10882 const_binop (PLUS_EXPR, op1,
10883 TREE_OPERAND (op0, 1), 0));
10884 }
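/* FALLTHRU */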
10885 case BIT_XOR_EXPR:
10886
10887 binary:
10888 if (!wins)
10889 return NULL_TREE;
10890
10891 /* Both arguments are constants. Simplify. */
10892 tem = const_binop (code, op0, op1, 0);
10893 if (tem != NULL_TREE)
10894 {
10895 /* The return value should always have the same type as
10896 the original expression. */
10897 if (TREE_TYPE (tem) != type)
10898 tem = fold_convert (type, tem);
10899
10900 return tem;
10901 }
10902 return NULL_TREE;
10903
10904 case MINUS_EXPR:
10905 /* Fold &x - &x. This can happen from &x.foo - &x.
10906 This is unsafe for certain floats even in non-IEEE formats.
10907 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10908 Also note that operand_equal_p is always false if an
10909 operand is volatile. */
10910 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10911 return fold_convert (type, integer_zero_node);
10912
10913 goto binary;
10914
10915 case MULT_EXPR:
10916 case BIT_AND_EXPR:
10917 /* Special case multiplication or bitwise AND where one argument
10918 is zero. */
10919 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10920 return omit_one_operand (type, op1, op0);
10921 else
10922 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10923 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10924 && real_zerop (op1))
10925 return omit_one_operand (type, op1, op0);
10926
10927 goto binary;
10928
10929 case BIT_IOR_EXPR:
10930 /* Special case when we know the result will be all ones. */
10931 if (integer_all_onesp (op1))
10932 return omit_one_operand (type, op1, op0);
10933
10934 goto binary;
10935
10936 case TRUNC_DIV_EXPR:
10937 case ROUND_DIV_EXPR:
10938 case FLOOR_DIV_EXPR:
10939 case CEIL_DIV_EXPR:
10940 case EXACT_DIV_EXPR:
10941 case TRUNC_MOD_EXPR:
10942 case ROUND_MOD_EXPR:
10943 case FLOOR_MOD_EXPR:
10944 case CEIL_MOD_EXPR:
10945 case RDIV_EXPR:
10946 /* Division by zero is undefined. */
10947 if (integer_zerop (op1))
10948 return NULL_TREE;
10949
10950 if (TREE_CODE (op1) == REAL_CST
10951 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10952 && real_zerop (op1))
10953 return NULL_TREE;
10954
10955 goto binary;
10956
10957 case MIN_EXPR:
10958 if (INTEGRAL_TYPE_P (type)
10959 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10960 return omit_one_operand (type, op1, op0);
10961
10962 goto binary;
10963
10964 case MAX_EXPR:
10965 if (INTEGRAL_TYPE_P (type)
10966 && TYPE_MAX_VALUE (type)
10967 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10968 return omit_one_operand (type, op1, op0);
10969
10970 goto binary;
10971
10972 case RSHIFT_EXPR:
10973 /* Optimize -1 >> x for arithmetic right shifts. */
10974 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10975 return omit_one_operand (type, op0, op1);
10976 /* ... fall through ... */
10977
10978 case LSHIFT_EXPR:
10979 if (integer_zerop (op0))
10980 return omit_one_operand (type, op0, op1);
10981
10982 /* Since a negative shift count is not well-defined, don't
10983 try to compute it in the compiler. */
10984 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10985 return NULL_TREE;
10986
10987 goto binary;
10988
10989 case LROTATE_EXPR:
10990 case RROTATE_EXPR:
10991 /* -1 rotated either direction by any amount is still -1. */
10992 if (integer_all_onesp (op0))
10993 return omit_one_operand (type, op0, op1);
10994
10995 /* 0 rotated either direction by any amount is still zero. */
10996 if (integer_zerop (op0))
10997 return omit_one_operand (type, op0, op1);
10998
10999 goto binary;
11000
11001 case COMPLEX_EXPR:
11002 if (wins)
11003 return build_complex (type, op0, op1);
11004 return NULL_TREE;
11005
11006 case LT_EXPR:
11007 case LE_EXPR:
11008 case GT_EXPR:
11009 case GE_EXPR:
11010 case EQ_EXPR:
11011 case NE_EXPR:
11012 /* If one arg is a real or integer constant, put it last. */
11013 if ((TREE_CODE (op0) == INTEGER_CST
11014 && TREE_CODE (op1) != INTEGER_CST)
11015 || (TREE_CODE (op0) == REAL_CST
11016 && TREE_CODE (op1) != REAL_CST))
11017 {
11018 tree temp;
11019
11020 temp = op0;
11021 op0 = op1;
11022 op1 = temp;
11023 code = swap_tree_comparison (code);
11024 }
11025
11026 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11027 This transformation affects the cases which are handled in later
11028 optimizations involving comparisons with non-negative constants. */
11029 if (TREE_CODE (op1) == INTEGER_CST
11030 && TREE_CODE (op0) != INTEGER_CST
11031 && tree_int_cst_sgn (op1) > 0)
11032 {
11033 switch (code)
11034 {
11035 case GE_EXPR:
11036 code = GT_EXPR;
11037 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11038 break;
11039
11040 case LT_EXPR:
11041 code = LE_EXPR;
11042 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11043 break;
11044
11045 default:
11046 break;
11047 }
11048 }
11049
11050 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
11051 if (tem)
11052 return tem;
11053
11054 /* Fall through. */
11055
11056 case ORDERED_EXPR:
11057 case UNORDERED_EXPR:
11058 case UNLT_EXPR:
11059 case UNLE_EXPR:
11060 case UNGT_EXPR:
11061 case UNGE_EXPR:
11062 case UNEQ_EXPR:
11063 case LTGT_EXPR:
11064 if (!wins)
11065 return NULL_TREE;
11066
11067 return fold_relational_const (code, type, op0, op1);
11068
11069 case RANGE_EXPR:
11070 /* This could probably be handled. */
11071 return NULL_TREE;
11072
11073 case TRUTH_AND_EXPR:
11074 /* If second arg is constant zero, result is zero, but first arg
11075 must be evaluated. */
11076 if (integer_zerop (op1))
11077 return omit_one_operand (type, op1, op0);
11078 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11079 case will be handled here. */
11080 if (integer_zerop (op0))
11081 return omit_one_operand (type, op0, op1);
11082 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11083 return constant_boolean_node (true, type);
11084 return NULL_TREE;
11085
11086 case TRUTH_OR_EXPR:
11087 /* If second arg is constant true, result is true, but we must
11088 evaluate first arg. */
11089 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
11090 return omit_one_operand (type, op1, op0);
11091 /* Likewise for first arg, but note this only occurs here for
11092 TRUTH_OR_EXPR. */
11093 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
11094 return omit_one_operand (type, op0, op1);
11095 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11096 return constant_boolean_node (false, type);
11097 return NULL_TREE;
11098
11099 case TRUTH_XOR_EXPR:
11100 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11101 {
11102 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
11103 return constant_boolean_node (x, type);
11104 }
11105 return NULL_TREE;
11106
11107 default:
11108 return NULL_TREE;
11109 }
11110 }
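
#if 0
/* Illustrative sketch, not part of the build: the shift and rotate
   special cases above, in plain C.  An arithmetic right shift of -1 is
   still -1 for any valid count, and 0 shifted either way stays 0.
   demo_shift_folds is a hypothetical name; right-shifting a negative
   value is implementation-defined in ISO C but is arithmetic in GCC.  */
static int
demo_shift_folds (int count)
{
  int c = count & 31;        /* keep the count well-defined */
  return (-1 >> c) == -1     /* -1 >> x folds to -1 */
         && (0 << c) == 0    /* 0 << x folds to 0 */
         && (0 >> c) == 0;   /* 0 >> x folds to 0 */
}
#endif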
11111
11112 /* Given the components of a unary expression CODE, TYPE and OP0,
11113 attempt to fold the expression to a constant without modifying
11114 TYPE or OP0.
11115
11116 If the expression can be simplified to a constant, then return
11117 the constant. If the expression cannot be simplified to a
11118 constant, then return NULL_TREE.
11119
11120 Note this is primarily designed to be called after gimplification
11121 of the tree structures and when op0 is a constant. As a result
11122 of those simplifying assumptions this routine is far simpler than
11123 the generic fold routine. */
11124
11125 tree
11126 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11127 {
11128 /* Make sure we have a suitable constant argument. */
11129 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
11130 {
11131 tree subop;
11132
11133 if (TREE_CODE (op0) == COMPLEX_CST)
11134 subop = TREE_REALPART (op0);
11135 else
11136 subop = op0;
11137
11138 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
11139 return NULL_TREE;
11140 }
11141
11142 switch (code)
11143 {
11144 case NOP_EXPR:
11145 case FLOAT_EXPR:
11146 case CONVERT_EXPR:
11147 case FIX_TRUNC_EXPR:
11148 case FIX_FLOOR_EXPR:
11149 case FIX_CEIL_EXPR:
11150 case FIX_ROUND_EXPR:
11151 return fold_convert_const (code, type, op0);
11152
11153 case NEGATE_EXPR:
11154 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11155 return fold_negate_const (op0, type);
11156 else
11157 return NULL_TREE;
11158
11159 case ABS_EXPR:
11160 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11161 return fold_abs_const (op0, type);
11162 else
11163 return NULL_TREE;
11164
11165 case BIT_NOT_EXPR:
11166 if (TREE_CODE (op0) == INTEGER_CST)
11167 return fold_not_const (op0, type);
11168 else
11169 return NULL_TREE;
11170
11171 case REALPART_EXPR:
11172 if (TREE_CODE (op0) == COMPLEX_CST)
11173 return TREE_REALPART (op0);
11174 else
11175 return NULL_TREE;
11176
11177 case IMAGPART_EXPR:
11178 if (TREE_CODE (op0) == COMPLEX_CST)
11179 return TREE_IMAGPART (op0);
11180 else
11181 return NULL_TREE;
11182
11183 case CONJ_EXPR:
11184 if (TREE_CODE (op0) == COMPLEX_CST
11185 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
11186 return build_complex (type, TREE_REALPART (op0),
11187 negate_expr (TREE_IMAGPART (op0)));
11188 return NULL_TREE;
11189
11190 default:
11191 return NULL_TREE;
11192 }
11193 }
11194
11195 /* If EXP represents referencing an element in a constant string
11196 (either via pointer arithmetic or array indexing), return the
11197 tree representing the value accessed, otherwise return NULL. */
11198
11199 tree
11200 fold_read_from_constant_string (tree exp)
11201 {
11202 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11203 {
11204 tree exp1 = TREE_OPERAND (exp, 0);
11205 tree index;
11206 tree string;
11207
11208 if (TREE_CODE (exp) == INDIRECT_REF)
11209 string = string_constant (exp1, &index);
11210 else
11211 {
11212 tree low_bound = array_ref_low_bound (exp);
11213 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11214
11215 /* Optimize the special case of a zero lower bound.
11216 
11217 We convert the low_bound to sizetype to avoid some problems
11218 with constant folding. (E.g. suppose the lower bound is 1,
11219 and its mode is QI. Without the conversion, (ARRAY
11220 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11221 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11222 if (! integer_zerop (low_bound))
11223 index = size_diffop (index, fold_convert (sizetype, low_bound));
11224
11225 string = exp1;
11226 }
11227
11228 if (string
11229 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11230 && TREE_CODE (string) == STRING_CST
11231 && TREE_CODE (index) == INTEGER_CST
11232 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11233 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11234 == MODE_INT)
11235 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11236 return fold_convert (TREE_TYPE (exp),
11237 build_int_cst (NULL_TREE,
11238 (TREE_STRING_POINTER (string)
11239 [TREE_INT_CST_LOW (index)])));
11240 }
11241 return NULL;
11242 }
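
#if 0
/* Illustrative sketch, not part of the build: the two source forms the
   routine above recognizes.  Both read a known byte of a string
   constant, so both can be replaced by the character constant 'c' at
   compile time.  demo_string_read is a hypothetical name.  */
static int
demo_string_read (void)
{
  return "abc"[2] == 'c'          /* array indexing of a STRING_CST  */
         && *("abc" + 2) == 'c';  /* pointer arithmetic, same result */
}
#endif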
11243
11244 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11245 an integer constant or real constant.
11246
11247 TYPE is the type of the result. */
11248
11249 static tree
11250 fold_negate_const (tree arg0, tree type)
11251 {
11252 tree t = NULL_TREE;
11253
11254 switch (TREE_CODE (arg0))
11255 {
11256 case INTEGER_CST:
11257 {
11258 unsigned HOST_WIDE_INT low;
11259 HOST_WIDE_INT high;
11260 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11261 TREE_INT_CST_HIGH (arg0),
11262 &low, &high);
11263 t = build_int_cst_wide (type, low, high);
11264 t = force_fit_type (t, 1,
11265 (overflow | TREE_OVERFLOW (arg0))
11266 && !TYPE_UNSIGNED (type),
11267 TREE_CONSTANT_OVERFLOW (arg0));
11268 break;
11269 }
11270
11271 case REAL_CST:
11272 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11273 break;
11274
11275 default:
11276 gcc_unreachable ();
11277 }
11278
11279 return t;
11280 }
11281
11282 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11283 an integer constant or real constant.
11284
11285 TYPE is the type of the result. */
11286
11287 tree
11288 fold_abs_const (tree arg0, tree type)
11289 {
11290 tree t = NULL_TREE;
11291
11292 switch (TREE_CODE (arg0))
11293 {
11294 case INTEGER_CST:
11295 /* If the value is unsigned, then the absolute value is
11296 the same as the ordinary value. */
11297 if (TYPE_UNSIGNED (type))
11298 t = arg0;
11299 /* Similarly, if the value is non-negative. */
11300 else if (INT_CST_LT (integer_minus_one_node, arg0))
11301 t = arg0;
11302 /* If the value is negative, then the absolute value is
11303 its negation. */
11304 else
11305 {
11306 unsigned HOST_WIDE_INT low;
11307 HOST_WIDE_INT high;
11308 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11309 TREE_INT_CST_HIGH (arg0),
11310 &low, &high);
11311 t = build_int_cst_wide (type, low, high);
11312 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11313 TREE_CONSTANT_OVERFLOW (arg0));
11314 }
11315 break;
11316
11317 case REAL_CST:
11318 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11319 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11320 else
11321 t = arg0;
11322 break;
11323
11324 default:
11325 gcc_unreachable ();
11326 }
11327
11328 return t;
11329 }
11330
11331 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11332 constant. TYPE is the type of the result. */
11333
11334 static tree
11335 fold_not_const (tree arg0, tree type)
11336 {
11337 tree t = NULL_TREE;
11338
11339 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11340
11341 t = build_int_cst_wide (type,
11342 ~ TREE_INT_CST_LOW (arg0),
11343 ~ TREE_INT_CST_HIGH (arg0));
11344 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11345 TREE_CONSTANT_OVERFLOW (arg0));
11346
11347 return t;
11348 }
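
#if 0
/* Illustrative sketch, not part of the build: the two's-complement
   identities behind fold_negate_const and fold_not_const.  Negation is
   bitwise NOT plus one, so NOT can be computed as negate-minus-one;
   neg_double applies the same identity across a double-word constant.
   demo_twos_complement is a hypothetical name; unsigned arithmetic is
   used so the most negative value wraps instead of overflowing.  */
static int
demo_twos_complement (unsigned int x)
{
  return ~x == -x - 1u && -x == ~x + 1u;   /* always 1 */
}
#endif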
11349
11350 /* Given CODE, a relational operator, the target type, TYPE and two
11351 constant operands OP0 and OP1, return the result of the
11352 relational operation. If the result is not a compile time
11353 constant, then return NULL_TREE. */
11354
11355 static tree
11356 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11357 {
11358 int result, invert;
11359
11360 /* From here on, the only cases we handle are when the result is
11361 known to be a constant. */
11362
11363 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11364 {
11365 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11366 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11367
11368 /* Handle the cases where either operand is a NaN. */
11369 if (real_isnan (c0) || real_isnan (c1))
11370 {
11371 switch (code)
11372 {
11373 case EQ_EXPR:
11374 case ORDERED_EXPR:
11375 result = 0;
11376 break;
11377
11378 case NE_EXPR:
11379 case UNORDERED_EXPR:
11380 case UNLT_EXPR:
11381 case UNLE_EXPR:
11382 case UNGT_EXPR:
11383 case UNGE_EXPR:
11384 case UNEQ_EXPR:
11385 result = 1;
11386 break;
11387
11388 case LT_EXPR:
11389 case LE_EXPR:
11390 case GT_EXPR:
11391 case GE_EXPR:
11392 case LTGT_EXPR:
11393 if (flag_trapping_math)
11394 return NULL_TREE;
11395 result = 0;
11396 break;
11397
11398 default:
11399 gcc_unreachable ();
11400 }
11401
11402 return constant_boolean_node (result, type);
11403 }
11404
11405 return constant_boolean_node (real_compare (code, c0, c1), type);
11406 }
11407
11408 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11409
11410 To compute GT, swap the arguments and do LT.
11411 To compute GE, do LT and invert the result.
11412 To compute LE, swap the arguments, do LT and invert the result.
11413 To compute NE, do EQ and invert the result.
11414
11415 Therefore, the code below must handle only EQ and LT. */
11416
11417 if (code == LE_EXPR || code == GT_EXPR)
11418 {
11419 tree tem = op0;
11420 op0 = op1;
11421 op1 = tem;
11422 code = swap_tree_comparison (code);
11423 }
11424
11425 /* Note that it is safe to invert for real values here because we
11426 have already handled the one case where it matters. */
11427
11428 invert = 0;
11429 if (code == NE_EXPR || code == GE_EXPR)
11430 {
11431 invert = 1;
11432 code = invert_tree_comparison (code, false);
11433 }
11434
11435 /* Compute a result for LT or EQ if the arguments permit;
11436 otherwise return NULL_TREE. */
11437 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11438 {
11439 if (code == EQ_EXPR)
11440 result = tree_int_cst_equal (op0, op1);
11441 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11442 result = INT_CST_LT_UNSIGNED (op0, op1);
11443 else
11444 result = INT_CST_LT (op0, op1);
11445 }
11446 else
11447 return NULL_TREE;
11448
11449 if (invert)
11450 result ^= 1;
11451 return constant_boolean_node (result, type);
11452 }
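
#if 0
/* Illustrative sketch, not part of the build: the NaN rows of the
   switch above, in plain C.  When X is a NaN, every ordered comparison
   and == is false while != is true, which is exactly the constant the
   folder produces; under flag_trapping_math the <, <=, >, >= cases are
   left unfolded because they raise an invalid-operation exception.
   demo_nan_compare is a hypothetical name.  */
static int
demo_nan_compare (double x)
{
  /* Returns 1 exactly when X is a NaN.  */
  return !(x < x) && !(x == x) && x != x;
}
#endif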
11453
11454 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11455 Don't build a cleanup point expression for an EXPR which doesn't have
11456 side effects. */
11457
11458 tree
11459 fold_build_cleanup_point_expr (tree type, tree expr)
11460 {
11461 /* If the expression does not have side effects then we don't have to wrap
11462 it with a cleanup point expression. */
11463 if (!TREE_SIDE_EFFECTS (expr))
11464 return expr;
11465
11466 /* If the expression is a return, check whether the expression inside the
11467 return, or the right-hand side of the modify expression inside the
11468 return, has side effects. If neither has side effects set, we don't need
11469 to wrap the expression in a cleanup point expression. Note we don't check
11470 the left-hand side of the modify because it should always be a return decl. */
11471 if (TREE_CODE (expr) == RETURN_EXPR)
11472 {
11473 tree op = TREE_OPERAND (expr, 0);
11474 if (!op || !TREE_SIDE_EFFECTS (op))
11475 return expr;
11476 op = TREE_OPERAND (op, 1);
11477 if (!TREE_SIDE_EFFECTS (op))
11478 return expr;
11479 }
11480
11481 return build1 (CLEANUP_POINT_EXPR, type, expr);
11482 }
11483
11484 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11485 avoid confusing the gimplify process. */
11486
11487 tree
11488 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11489 {
11490 /* The size of the object is not relevant when talking about its address. */
11491 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11492 t = TREE_OPERAND (t, 0);
11493
11494 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
11495 if (TREE_CODE (t) == INDIRECT_REF
11496 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11497 {
11498 t = TREE_OPERAND (t, 0);
11499 if (TREE_TYPE (t) != ptrtype)
11500 t = build1 (NOP_EXPR, ptrtype, t);
11501 }
11502 else
11503 {
11504 tree base = t;
11505
11506 while (handled_component_p (base))
11507 base = TREE_OPERAND (base, 0);
11508 if (DECL_P (base))
11509 TREE_ADDRESSABLE (base) = 1;
11510
11511 t = build1 (ADDR_EXPR, ptrtype, t);
11512 }
11513
11514 return t;
11515 }
11516
11517 tree
11518 build_fold_addr_expr (tree t)
11519 {
11520 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11521 }
11522
11523 /* Given a pointer value T, return a simplified version of an indirection
11524 through T, or NULL_TREE if no simplification is possible. */
11525
11526 static tree
11527 fold_indirect_ref_1 (tree t)
11528 {
11529 tree type = TREE_TYPE (TREE_TYPE (t));
11530 tree sub = t;
11531 tree subtype;
11532
11533 STRIP_NOPS (sub);
11534 subtype = TREE_TYPE (sub);
11535 if (!POINTER_TYPE_P (subtype))
11536 return NULL_TREE;
11537
11538 if (TREE_CODE (sub) == ADDR_EXPR)
11539 {
11540 tree op = TREE_OPERAND (sub, 0);
11541 tree optype = TREE_TYPE (op);
11542 /* *&p => p */
11543 if (lang_hooks.types_compatible_p (type, optype))
11544 return op;
11545 /* *(foo *)&fooarray => fooarray[0] */
11546 else if (TREE_CODE (optype) == ARRAY_TYPE
11547 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11548 {
11549 tree type_domain = TYPE_DOMAIN (optype);
11550 tree min_val = size_zero_node;
11551 if (type_domain && TYPE_MIN_VALUE (type_domain))
11552 min_val = TYPE_MIN_VALUE (type_domain);
11553 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11554 }
11555 }
11556
11557 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11558 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11559 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11560 {
11561 tree type_domain;
11562 tree min_val = size_zero_node;
11563 sub = build_fold_indirect_ref (sub);
11564 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11565 if (type_domain && TYPE_MIN_VALUE (type_domain))
11566 min_val = TYPE_MIN_VALUE (type_domain);
11567 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11568 }
11569
11570 return NULL_TREE;
11571 }
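
#if 0
/* Illustrative sketch, not part of the build: the source forms the
   simplifications above correspond to.  demo_indirect is a
   hypothetical name used for illustration.  */
static int
demo_indirect (void)
{
  int fooarray[2] = { 42, 0 };
  int (*fooarrptr)[2] = &fooarray;
  return *&fooarray[0] == 42           /* *&p => p                         */
         && *(int *) &fooarray == 42   /* *(foo *)&fooarray => fooarray[0] */
         && (*fooarrptr)[0] == 42;     /* *(foo *)fooarrptr => (*fooarrptr)[0] */
}
#endif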
11572
11573 /* Builds an expression for an indirection through T, simplifying some
11574 cases. */
11575
11576 tree
11577 build_fold_indirect_ref (tree t)
11578 {
11579 tree sub = fold_indirect_ref_1 (t);
11580
11581 if (sub)
11582 return sub;
11583 else
11584 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
11585 }
11586
11587 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11588
11589 tree
11590 fold_indirect_ref (tree t)
11591 {
11592 tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
11593
11594 if (sub)
11595 return sub;
11596 else
11597 return t;
11598 }
11599
11600 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11601 whose result is ignored. The type of the returned tree need not be
11602 the same as the original expression. */
11603
11604 tree
11605 fold_ignored_result (tree t)
11606 {
11607 if (!TREE_SIDE_EFFECTS (t))
11608 return integer_zero_node;
11609
11610 for (;;)
11611 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11612 {
11613 case tcc_unary:
11614 t = TREE_OPERAND (t, 0);
11615 break;
11616
11617 case tcc_binary:
11618 case tcc_comparison:
11619 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11620 t = TREE_OPERAND (t, 0);
11621 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11622 t = TREE_OPERAND (t, 1);
11623 else
11624 return t;
11625 break;
11626
11627 case tcc_expression:
11628 switch (TREE_CODE (t))
11629 {
11630 case COMPOUND_EXPR:
11631 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11632 return t;
11633 t = TREE_OPERAND (t, 0);
11634 break;
11635
11636 case COND_EXPR:
11637 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11638 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11639 return t;
11640 t = TREE_OPERAND (t, 0);
11641 break;
11642
11643 default:
11644 return t;
11645 }
11646 break;
11647
11648 default:
11649 return t;
11650 }
11651 }
11652
11653 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11654 This can only be applied to objects of a sizetype. */
11655
11656 tree
11657 round_up (tree value, int divisor)
11658 {
11659 tree div = NULL_TREE;
11660
11661 gcc_assert (divisor > 0);
11662 if (divisor == 1)
11663 return value;
11664
11665 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11666 have to do anything. Only do this check when VALUE is not a
11667 constant, because for a constant the check is more expensive than
11668 simply doing the rounding. */
11669 if (TREE_CODE (value) != INTEGER_CST)
11670 {
11671 div = build_int_cst (TREE_TYPE (value), divisor);
11672
11673 if (multiple_of_p (TREE_TYPE (value), value, div))
11674 return value;
11675 }
11676
11677 /* If divisor is a power of two, simplify this to bit manipulation. */
11678 if (divisor == (divisor & -divisor))
11679 {
11680 tree t;
11681
11682 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11683 value = size_binop (PLUS_EXPR, value, t);
11684 t = build_int_cst (TREE_TYPE (value), -divisor);
11685 value = size_binop (BIT_AND_EXPR, value, t);
11686 }
11687 else
11688 {
11689 if (!div)
11690 div = build_int_cst (TREE_TYPE (value), divisor);
11691 value = size_binop (CEIL_DIV_EXPR, value, div);
11692 value = size_binop (MULT_EXPR, value, div);
11693 }
11694
11695 return value;
11696 }
11697
11698 /* Likewise, but round down. */
11699
11700 tree
11701 round_down (tree value, int divisor)
11702 {
11703 tree div = NULL_TREE;
11704
11705 gcc_assert (divisor > 0);
11706 if (divisor == 1)
11707 return value;
11708
11709 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11710 have to do anything. Only do this check when VALUE is not a
11711 constant, because for a constant the check is more expensive than
11712 simply doing the rounding. */
11713 if (TREE_CODE (value) != INTEGER_CST)
11714 {
11715 div = build_int_cst (TREE_TYPE (value), divisor);
11716
11717 if (multiple_of_p (TREE_TYPE (value), value, div))
11718 return value;
11719 }
11720
11721 /* If divisor is a power of two, simplify this to bit manipulation. */
11722 if (divisor == (divisor & -divisor))
11723 {
11724 tree t;
11725
11726 t = build_int_cst (TREE_TYPE (value), -divisor);
11727 value = size_binop (BIT_AND_EXPR, value, t);
11728 }
11729 else
11730 {
11731 if (!div)
11732 div = build_int_cst (TREE_TYPE (value), divisor);
11733 value = size_binop (FLOOR_DIV_EXPR, value, div);
11734 value = size_binop (MULT_EXPR, value, div);
11735 }
11736
11737 return value;
11738 }
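
#if 0
/* Illustrative sketch, not part of the build: the power-of-two fast
   paths used by round_up and round_down above.  For a power-of-two
   DIVISOR, -DIVISOR is a mask that clears the low bits, so no division
   is needed.  round_up_pow2 and round_down_pow2 are hypothetical names
   used for illustration.  */
static unsigned int
round_up_pow2 (unsigned int value, unsigned int divisor)
{
  return (value + divisor - 1) & -divisor;   /* e.g. round_up_pow2 (13, 8) == 16 */
}

static unsigned int
round_down_pow2 (unsigned int value, unsigned int divisor)
{
  return value & -divisor;                   /* e.g. round_down_pow2 (13, 8) == 8 */
}
#endif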
11739
11740 /* Returns the pointer to the base of the object addressed by EXP and
11741 extracts the information about the offset of the access, storing it
11742 in *PBITPOS and *POFFSET. */
11743
11744 static tree
11745 split_address_to_core_and_offset (tree exp,
11746 HOST_WIDE_INT *pbitpos, tree *poffset)
11747 {
11748 tree core;
11749 enum machine_mode mode;
11750 int unsignedp, volatilep;
11751 HOST_WIDE_INT bitsize;
11752
11753 if (TREE_CODE (exp) == ADDR_EXPR)
11754 {
11755 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11756 poffset, &mode, &unsignedp, &volatilep,
11757 false);
11758
11759 if (TREE_CODE (core) == INDIRECT_REF)
11760 core = TREE_OPERAND (core, 0);
11761 }
11762 else
11763 {
11764 core = exp;
11765 *pbitpos = 0;
11766 *poffset = NULL_TREE;
11767 }
11768
11769 return core;
11770 }
11771
11772 /* Returns true if addresses of E1 and E2 differ by a constant, false
11773 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11774
11775 bool
11776 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11777 {
11778 tree core1, core2;
11779 HOST_WIDE_INT bitpos1, bitpos2;
11780 tree toffset1, toffset2, tdiff, type;
11781
11782 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11783 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11784
11785 if (bitpos1 % BITS_PER_UNIT != 0
11786 || bitpos2 % BITS_PER_UNIT != 0
11787 || !operand_equal_p (core1, core2, 0))
11788 return false;
11789
11790 if (toffset1 && toffset2)
11791 {
11792 type = TREE_TYPE (toffset1);
11793 if (type != TREE_TYPE (toffset2))
11794 toffset2 = fold_convert (type, toffset2);
11795
11796 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
11797 if (!host_integerp (tdiff, 0))
11798 return false;
11799
11800 *diff = tree_low_cst (tdiff, 0);
11801 }
11802 else if (toffset1 || toffset2)
11803 {
11804 /* If only one of the offsets is non-constant, the difference cannot
11805 be a constant. */
11806 return false;
11807 }
11808 else
11809 *diff = 0;
11810
11811 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11812 return true;
11813 }
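
#if 0
/* Illustrative sketch, not part of the build: the kind of address pair
   the routine above resolves.  Both addresses share the core object A
   and differ only by constant byte offsets, so their difference folds
   to a compile-time constant.  demo_ptr_diff is a hypothetical name.  */
static long
demo_ptr_diff (void)
{
  static int a[10];
  return (char *) &a[7] - (char *) &a[2];   /* folds to 5 * sizeof (int) */
}
#endif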
11814
11815 /* Simplify the floating point expression EXP when the sign of the
11816 result is not significant. Return NULL_TREE if no simplification
11817 is possible. */
11818
11819 tree
11820 fold_strip_sign_ops (tree exp)
11821 {
11822 tree arg0, arg1;
11823
11824 switch (TREE_CODE (exp))
11825 {
11826 case ABS_EXPR:
11827 case NEGATE_EXPR:
11828 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11829 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11830
11831 case MULT_EXPR:
11832 case RDIV_EXPR:
11833 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11834 return NULL_TREE;
11835 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11836 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11837 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11838 return fold (build2 (TREE_CODE (exp), TREE_TYPE (exp),
11839 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11840 arg1 ? arg1 : TREE_OPERAND (exp, 1)));
11841 break;
11842
11843 default:
11844 break;
11845 }
11846 return NULL_TREE;
11847 }
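
#if 0
/* Illustrative sketch, not part of the build: the rewrite the routine
   above enables when only the magnitude of the result matters.  Inside
   fabs, negating a factor cannot change the answer, so fabs (-x * y)
   may be simplified to fabs (x * y).  demo_strip_sign is a hypothetical
   name; fabs is declared locally to keep the sketch self-contained.  */
extern double fabs (double);

static int
demo_strip_sign (double x, double y)
{
  return fabs (-x * y) == fabs (x * y);   /* 1 for non-NaN inputs */
}
#endif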
11848