/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
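
/* Illustrative example, using 8-bit two's complement for brevity:
   with a = 100, b = 100 and sum = -56 (the wrapped result of 200),
   a and b agree in sign while a and sum differ, so ~(a ^ b) & (a ^ sum)
   has the sign bit set and the macro yields nonzero.  If a and b differ
   in sign, no overflow is possible and the macro yields zero.  */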
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
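
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32: BASE is
   0x10000, LOWPART (0x12345678) is 0x5678 and HIGHPART (0x12345678)
   is 0x1234, so 0x5678 + 0x1234 * BASE recovers the original word.  */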

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
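
/* Example round trip (assuming HOST_BITS_PER_WIDE_INT == 32):
     encode (w, 0x12345678, 0x0000abcd)
       sets w[0] = 0x5678, w[1] = 0x1234, w[2] = 0xabcd, w[3] = 0;
     decode (w, &lo, &hi)
       then recovers lo == 0x12345678 and hi == 0x0000abcd.  */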
\f
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates whether we are
   interested in overflow of the value: when it is > 0 we are only
   interested in signed overflow, and when it is < 0 we are interested
   in any overflow.  OVERFLOWED indicates whether overflow has already
   occurred.  CONST_OVERFLOWED indicates whether constant overflow has
   already occurred.  We force T's value to be within range of T's type
   (by setting to 0 or 1 all the bits outside the type's range).  We set
   TREE_OVERFLOW if:
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is > 0 and signed overflow occurs,
        or OVERFLOWABLE is < 0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if:
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
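
/* For example (illustrative only), if T is an INTEGER_CST of an 8-bit
   unsigned type whose low word holds 0x1ff, the bits beyond the 8-bit
   precision are cleared, the value becomes 0xff, and because the value
   changed a fresh node is returned; whether TREE_OVERFLOW is also set
   on it depends on OVERFLOWABLE and OVERFLOWED as described above.  */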
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
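
/* For example, with HOST_BITS_PER_WIDE_INT == 32 (illustrative):
   adding l1 = 0xffffffff, h1 = 0 to l2 = 1, h2 = 0 gives l = 0 with a
   carry into h, so *lv = 0 and *hv = 1; OVERFLOW_SUM_SIGN reports no
   signed overflow since h1, h2 and h all have the sign bit clear.  */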

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
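
/* The only signed overflow case is negating the most negative value;
   for example, l1 == 0 with h1 having only its sign bit set negates
   to itself, and (*hv & h1) < 0 then reports the overflow.  */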
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);  /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
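
/* For example, assuming HOST_BITS_PER_WIDE_INT == 32 (a 64-bit
   doubleword): multiplying 2^32 by 2^32 (l = 0, h = 1 on both sides)
   yields 2^64, which does not fit; the low doubleword decodes to zero
   and the nonzero top half prod[4..7] makes the function return 1.  */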
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
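
/* For example, with prec == 16 (illustrative): shifting l1 = 0x8000,
   h1 = 0 left by one drops the bit that leaves the 16-bit precision,
   and after the sign-extension step *lv == 0 and *hv == 0.  */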

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
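
/* For example, assuming HOST_BITS_PER_WIDE_INT == 32: an 8-bit -128
   stored sign-extended (l1 = ~0x7f, h1 = -1) shifted right by 2 with
   prec == 8 and ARITH nonzero yields -32, the sign bits filling from
   the left; with ARITH zero the same shift zero-fills instead and the
   low 6 bits hold 0x20.  */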
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
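
/* Rotating right by COUNT within PREC bits is the same as rotating
   left by PREC - COUNT, which is why both routines are built from the
   same pair of shifts; e.g. with prec == 8, rotating 0x43 right by one
   yields 0xa1 (stored sign-extended in the doubleword).  */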
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra (5th) element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0)
                den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
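
/* Rounding examples (illustrative): for num = -7, den = 2 the trial
   quotient is -3 with remainder -1, so TRUNC_DIV_EXPR returns -3,
   FLOOR_DIV_EXPR adjusts to -4 (remainder 1), CEIL_DIV_EXPR keeps -3,
   and ROUND_DIV_EXPR adjusts to -4 since 2 * |rem| >= |den|.  */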

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
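
/* For example, for a signed 32-bit type this returns false only for
   INT_MIN (-2147483648), whose negation is not representable; every
   other value of the type negates without overflow.  */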

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
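
/* Example foldings performed above: -(a - b) becomes b - a when the
   operands may be reordered, -(a + 5) becomes (-5) - a, and for a
   32-bit int x, -(x >> 31) becomes (unsigned) x >> 31.  */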
\f
/* Split a tree IN into constant, literal, and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
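
/* For example, splitting IN = a + 4 with CODE == PLUS_EXPR sets *LITP
   to 4 and returns a as the variable part; splitting IN = a - 4 sets
   *MINUS_LITP to 4 instead, recording that the literal was
   subtracted.  */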

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ... */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ... */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ... */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ... */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
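
/* For example, combining 7 and 3 under TRUNC_DIV_EXPR yields 2 via the
   single-precision shortcut above, while MINUS_EXPR is implemented as
   negation followed by add_double, with overflow detected through
   OVERFLOW_SUM_SIGN.  */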

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
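
/* For example, with sizetype operands 2 and 6, ARG0 < ARG1, so the
   result is computed as -(6 - 2) == -4 in ssizetype; both intermediate
   operations are known not to overflow.  */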
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                              type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
1987 {
1988 case INTEGER_TYPE: case CHAR_TYPE:
1989 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1990 case POINTER_TYPE: case REFERENCE_TYPE:
1991 case REAL_TYPE:
1992 return build2 (COMPLEX_EXPR, type,
1993 fold_convert (TREE_TYPE (type), arg),
1994 fold_convert (TREE_TYPE (type), integer_zero_node));
1995 case COMPLEX_TYPE:
1996 {
1997 tree rpart, ipart;
1998
1999 if (TREE_CODE (arg) == COMPLEX_EXPR)
2000 {
2001 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2002 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2003 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2004 }
2005
2006 arg = save_expr (arg);
2007 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2008 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2009 rpart = fold_convert (TREE_TYPE (type), rpart);
2010 ipart = fold_convert (TREE_TYPE (type), ipart);
2011 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2012 }
2013
2014 default:
2015 gcc_unreachable ();
2016 }
2017
2018 case VECTOR_TYPE:
2019 if (integer_zerop (arg))
2020 return build_zero_vector (type);
2021 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2022 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2023 || TREE_CODE (orig) == VECTOR_TYPE);
2024 return fold_build1 (NOP_EXPR, type, arg);
2025
2026 case VOID_TYPE:
2027 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2028
2029 default:
2030 gcc_unreachable ();
2031 }
2032 }
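
/* As an illustration of the REAL_TYPE/COMPLEX_TYPE leg above, a
   conversion from a complex value to a real type reduces to taking the
   real part and converting that.  A hypothetical helper, shown only as
   a sketch of the recursion: */

static tree
example_complex_to_real (tree real_type, tree complex_arg)
{
  /* TREE_TYPE of a COMPLEX_TYPE is its component type.  */
  tree re = fold_build1 (REALPART_EXPR,
                         TREE_TYPE (TREE_TYPE (complex_arg)), complex_arg);
  return fold_convert (real_type, re);
}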
2033 \f
2034 /* Return false if expr can be assumed not to be an lvalue, true
2035 otherwise. */
2036
2037 static bool
2038 maybe_lvalue_p (tree x)
2039 {
2040 /* We only need to wrap lvalue tree codes. */
2041 switch (TREE_CODE (x))
2042 {
2043 case VAR_DECL:
2044 case PARM_DECL:
2045 case RESULT_DECL:
2046 case LABEL_DECL:
2047 case FUNCTION_DECL:
2048 case SSA_NAME:
2049
2050 case COMPONENT_REF:
2051 case INDIRECT_REF:
2052 case ALIGN_INDIRECT_REF:
2053 case MISALIGNED_INDIRECT_REF:
2054 case ARRAY_REF:
2055 case ARRAY_RANGE_REF:
2056 case BIT_FIELD_REF:
2057 case OBJ_TYPE_REF:
2058
2059 case REALPART_EXPR:
2060 case IMAGPART_EXPR:
2061 case PREINCREMENT_EXPR:
2062 case PREDECREMENT_EXPR:
2063 case SAVE_EXPR:
2064 case TRY_CATCH_EXPR:
2065 case WITH_CLEANUP_EXPR:
2066 case COMPOUND_EXPR:
2067 case MODIFY_EXPR:
2068 case TARGET_EXPR:
2069 case COND_EXPR:
2070 case BIND_EXPR:
2071 case MIN_EXPR:
2072 case MAX_EXPR:
2073 break;
2074
2075 default:
2076 /* Assume the worst for front-end tree codes. */
2077 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2078 break;
2079 return false;
2080 }
2081
2082 return true;
2083 }
2084
2085 /* Return an expr equal to X but certainly not valid as an lvalue. */
2086
2087 tree
2088 non_lvalue (tree x)
2089 {
2090 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2091 us. */
2092 if (in_gimple_form)
2093 return x;
2094
2095 if (! maybe_lvalue_p (x))
2096 return x;
2097 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2098 }
2099
2100 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2101 Zero means allow extended lvalues. */
2102
2103 int pedantic_lvalues;
2104
2105 /* When pedantic, return an expr equal to X but certainly not valid as a
2106 pedantic lvalue. Otherwise, return X. */
2107
2108 static tree
2109 pedantic_non_lvalue (tree x)
2110 {
2111 if (pedantic_lvalues)
2112 return non_lvalue (x);
2113 else
2114 return x;
2115 }
2116 \f
2117 /* Given a tree comparison code, return the code that is the logical inverse
2118 of the given code. It is not safe to do this for floating-point
2119 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2120 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2121
2122 static enum tree_code
2123 invert_tree_comparison (enum tree_code code, bool honor_nans)
2124 {
2125 if (honor_nans && flag_trapping_math)
2126 return ERROR_MARK;
2127
2128 switch (code)
2129 {
2130 case EQ_EXPR:
2131 return NE_EXPR;
2132 case NE_EXPR:
2133 return EQ_EXPR;
2134 case GT_EXPR:
2135 return honor_nans ? UNLE_EXPR : LE_EXPR;
2136 case GE_EXPR:
2137 return honor_nans ? UNLT_EXPR : LT_EXPR;
2138 case LT_EXPR:
2139 return honor_nans ? UNGE_EXPR : GE_EXPR;
2140 case LE_EXPR:
2141 return honor_nans ? UNGT_EXPR : GT_EXPR;
2142 case LTGT_EXPR:
2143 return UNEQ_EXPR;
2144 case UNEQ_EXPR:
2145 return LTGT_EXPR;
2146 case UNGT_EXPR:
2147 return LE_EXPR;
2148 case UNGE_EXPR:
2149 return LT_EXPR;
2150 case UNLT_EXPR:
2151 return GE_EXPR;
2152 case UNLE_EXPR:
2153 return GT_EXPR;
2154 case ORDERED_EXPR:
2155 return UNORDERED_EXPR;
2156 case UNORDERED_EXPR:
2157 return ORDERED_EXPR;
2158 default:
2159 gcc_unreachable ();
2160 }
2161 }
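
/* The NaN-aware inversions above rest on the IEEE identity that the
   negation of an ordered comparison is the matching unordered one.  A
   plain C self-check of one case (illustrative only): */

static int
example_invert_lt_identity (double x, double y)
{
  /* !(x < y) is "unordered or greater-or-equal", i.e. UNGE_EXPR.  */
  return !(x < y) == (__builtin_isunordered (x, y) || x >= y);
}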
2162
2163 /* Similar, but return the comparison that results if the operands are
2164 swapped. This is safe for floating-point. */
2165
2166 enum tree_code
2167 swap_tree_comparison (enum tree_code code)
2168 {
2169 switch (code)
2170 {
2171 case EQ_EXPR:
2172 case NE_EXPR:
2173 return code;
2174 case GT_EXPR:
2175 return LT_EXPR;
2176 case GE_EXPR:
2177 return LE_EXPR;
2178 case LT_EXPR:
2179 return GT_EXPR;
2180 case LE_EXPR:
2181 return GE_EXPR;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 }
2186
2187
2188 /* Convert a comparison tree code from an enum tree_code representation
2189 into a compcode bit-based encoding. This function is the inverse of
2190 compcode_to_comparison. */
2191
2192 static enum comparison_code
2193 comparison_to_compcode (enum tree_code code)
2194 {
2195 switch (code)
2196 {
2197 case LT_EXPR:
2198 return COMPCODE_LT;
2199 case EQ_EXPR:
2200 return COMPCODE_EQ;
2201 case LE_EXPR:
2202 return COMPCODE_LE;
2203 case GT_EXPR:
2204 return COMPCODE_GT;
2205 case NE_EXPR:
2206 return COMPCODE_NE;
2207 case GE_EXPR:
2208 return COMPCODE_GE;
2209 case ORDERED_EXPR:
2210 return COMPCODE_ORD;
2211 case UNORDERED_EXPR:
2212 return COMPCODE_UNORD;
2213 case UNLT_EXPR:
2214 return COMPCODE_UNLT;
2215 case UNEQ_EXPR:
2216 return COMPCODE_UNEQ;
2217 case UNLE_EXPR:
2218 return COMPCODE_UNLE;
2219 case UNGT_EXPR:
2220 return COMPCODE_UNGT;
2221 case LTGT_EXPR:
2222 return COMPCODE_LTGT;
2223 case UNGE_EXPR:
2224 return COMPCODE_UNGE;
2225 default:
2226 gcc_unreachable ();
2227 }
2228 }
2229
2230 /* Convert a compcode bit-based encoding of a comparison operator back
2231 to GCC's enum tree_code representation. This function is the
2232 inverse of comparison_to_compcode. */
2233
2234 static enum tree_code
2235 compcode_to_comparison (enum comparison_code code)
2236 {
2237 switch (code)
2238 {
2239 case COMPCODE_LT:
2240 return LT_EXPR;
2241 case COMPCODE_EQ:
2242 return EQ_EXPR;
2243 case COMPCODE_LE:
2244 return LE_EXPR;
2245 case COMPCODE_GT:
2246 return GT_EXPR;
2247 case COMPCODE_NE:
2248 return NE_EXPR;
2249 case COMPCODE_GE:
2250 return GE_EXPR;
2251 case COMPCODE_ORD:
2252 return ORDERED_EXPR;
2253 case COMPCODE_UNORD:
2254 return UNORDERED_EXPR;
2255 case COMPCODE_UNLT:
2256 return UNLT_EXPR;
2257 case COMPCODE_UNEQ:
2258 return UNEQ_EXPR;
2259 case COMPCODE_UNLE:
2260 return UNLE_EXPR;
2261 case COMPCODE_UNGT:
2262 return UNGT_EXPR;
2263 case COMPCODE_LTGT:
2264 return LTGT_EXPR;
2265 case COMPCODE_UNGE:
2266 return UNGE_EXPR;
2267 default:
2268 gcc_unreachable ();
2269 }
2270 }
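
/* The encoding treats bit 0 as LT, bit 1 as EQ, bit 2 as GT and bit 3
   as UNORD, so combining predicates is plain bit arithmetic.  A small
   sanity sketch (illustrative only, always returns 1): */

static int
example_compcode_encoding (void)
{
  return (COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
	  && COMPCODE_GE == (COMPCODE_GT | COMPCODE_EQ)
	  && COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
	  && COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT));
}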
2271
2272 /* Return a tree for the comparison which is the combination of
2273 doing the AND or OR (depending on CODE) of the two operations LCODE
2274 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2275 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2276 if this makes the transformation invalid. */
2277
2278 tree
2279 combine_comparisons (enum tree_code code, enum tree_code lcode,
2280 enum tree_code rcode, tree truth_type,
2281 tree ll_arg, tree lr_arg)
2282 {
2283 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2284 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2285 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2286 enum comparison_code compcode;
2287
2288 switch (code)
2289 {
2290 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2291 compcode = lcompcode & rcompcode;
2292 break;
2293
2294 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2295 compcode = lcompcode | rcompcode;
2296 break;
2297
2298 default:
2299 return NULL_TREE;
2300 }
2301
2302 if (!honor_nans)
2303 {
2304 /* Eliminate unordered comparisons, as well as LTGT and ORD
2305 which are not used unless the mode has NaNs. */
2306 compcode &= ~COMPCODE_UNORD;
2307 if (compcode == COMPCODE_LTGT)
2308 compcode = COMPCODE_NE;
2309 else if (compcode == COMPCODE_ORD)
2310 compcode = COMPCODE_TRUE;
2311 }
2312 else if (flag_trapping_math)
2313 {
2314 /* Check that the original operation and the optimized ones will trap
2315 under the same condition. */
2316 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2317 && (lcompcode != COMPCODE_EQ)
2318 && (lcompcode != COMPCODE_ORD);
2319 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2320 && (rcompcode != COMPCODE_EQ)
2321 && (rcompcode != COMPCODE_ORD);
2322 bool trap = (compcode & COMPCODE_UNORD) == 0
2323 && (compcode != COMPCODE_EQ)
2324 && (compcode != COMPCODE_ORD);
2325
2326 /* In a short-circuited boolean expression the LHS might be
2327 such that the RHS, if evaluated, will never trap. For
2328 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2329 if neither x nor y is NaN. (This is a mixed blessing: for
2330 example, the expression above will never trap, hence
2331 optimizing it to x < y would be invalid). */
2332 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2333 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2334 rtrap = false;
2335
2336 /* If the comparison was short-circuited, and only the RHS
2337 trapped, we may now generate a spurious trap. */
2338 if (rtrap && !ltrap
2339 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2340 return NULL_TREE;
2341
2342 /* If we changed the conditions that cause a trap, we lose. */
2343 if ((ltrap || rtrap) != trap)
2344 return NULL_TREE;
2345 }
2346
2347 if (compcode == COMPCODE_TRUE)
2348 return constant_boolean_node (true, truth_type);
2349 else if (compcode == COMPCODE_FALSE)
2350 return constant_boolean_node (false, truth_type);
2351 else
2352 return fold_build2 (compcode_to_comparison (compcode),
2353 truth_type, ll_arg, lr_arg);
2354 }
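
/* For example, with identical operands A and B, (A < B) || (A == B)
   combines to A <= B, because COMPCODE_LT | COMPCODE_EQ is COMPCODE_LE.
   A sketch of such a call (illustrative usage only; assumes
   boolean_type_node as the truth type): */

static tree
example_combine_lt_or_eq (tree a, tree b)
{
  /* Returns the tree for A <= B, or NULL_TREE if the NaN/trap rules
     above forbid the combination.  */
  return combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type_node, a, b);
}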
2355
2356 /* Return nonzero if CODE is a tree code that represents a truth value. */
2357
2358 static int
2359 truth_value_p (enum tree_code code)
2360 {
2361 return (TREE_CODE_CLASS (code) == tcc_comparison
2362 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2363 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2364 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2365 }
2366 \f
2367 /* Return nonzero if two operands (typically of the same tree node)
2368 are necessarily equal. If either argument has side-effects this
2369 function returns zero. FLAGS modifies behavior as follows:
2370
2371 If OEP_ONLY_CONST is set, only return nonzero for constants.
2372 This function tests whether the operands are indistinguishable;
2373 it does not test whether they are equal using C's == operation.
2374 The distinction is important for IEEE floating point, because
2375 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2376 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2377
2378 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2379 even though it may hold multiple values during a function.
2380 This is because a GCC tree node guarantees that nothing else is
2381 executed between the evaluation of its "operands" (which may often
2382 be evaluated in arbitrary order). Hence if the operands themselves
2383 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2384 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2385 unset means assuming isochronic (or instantaneous) tree equivalence.
2386 Unless comparing arbitrary expression trees, such as from different
2387 statements, this flag can usually be left unset.
2388
2389 If OEP_PURE_SAME is set, then pure functions with identical arguments
2390 are considered the same. It is used when the caller has other ways
2391 to ensure that global memory is unchanged in between. */
2392
2393 int
2394 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2395 {
2396 /* If either is ERROR_MARK, they aren't equal. */
2397 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2398 return 0;
2399
2400 /* If both types don't have the same signedness, then we can't consider
2401 them equal. We must check this before the STRIP_NOPS calls
2402 because they may change the signedness of the arguments. */
2403 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2404 return 0;
2405
2406 STRIP_NOPS (arg0);
2407 STRIP_NOPS (arg1);
2408
2409 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2410 /* This is needed for conversions and for COMPONENT_REF.
2411 Might as well play it safe and always test this. */
2412 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2413 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2414 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2415 return 0;
2416
2417 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2418 We don't care about side effects in that case because the SAVE_EXPR
2419 takes care of that for us. In all other cases, two expressions are
2420 equal if they have no side effects. If we have two identical
2421 expressions with side effects that should be treated the same due
2422 to the only side effects being identical SAVE_EXPR's, that will
2423 be detected in the recursive calls below. */
2424 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2425 && (TREE_CODE (arg0) == SAVE_EXPR
2426 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2427 return 1;
2428
2429 /* Next handle constant cases, those for which we can return 1 even
2430 if ONLY_CONST is set. */
2431 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2432 switch (TREE_CODE (arg0))
2433 {
2434 case INTEGER_CST:
2435 return (! TREE_CONSTANT_OVERFLOW (arg0)
2436 && ! TREE_CONSTANT_OVERFLOW (arg1)
2437 && tree_int_cst_equal (arg0, arg1));
2438
2439 case REAL_CST:
2440 return (! TREE_CONSTANT_OVERFLOW (arg0)
2441 && ! TREE_CONSTANT_OVERFLOW (arg1)
2442 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2443 TREE_REAL_CST (arg1)));
2444
2445 case VECTOR_CST:
2446 {
2447 tree v1, v2;
2448
2449 if (TREE_CONSTANT_OVERFLOW (arg0)
2450 || TREE_CONSTANT_OVERFLOW (arg1))
2451 return 0;
2452
2453 v1 = TREE_VECTOR_CST_ELTS (arg0);
2454 v2 = TREE_VECTOR_CST_ELTS (arg1);
2455 while (v1 && v2)
2456 {
2457 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2458 flags))
2459 return 0;
2460 v1 = TREE_CHAIN (v1);
2461 v2 = TREE_CHAIN (v2);
2462 }
2463
2464 return 1;
2465 }
2466
2467 case COMPLEX_CST:
2468 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2469 flags)
2470 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2471 flags));
2472
2473 case STRING_CST:
2474 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2475 && ! memcmp (TREE_STRING_POINTER (arg0),
2476 TREE_STRING_POINTER (arg1),
2477 TREE_STRING_LENGTH (arg0)));
2478
2479 case ADDR_EXPR:
2480 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2481 0);
2482 default:
2483 break;
2484 }
2485
2486 if (flags & OEP_ONLY_CONST)
2487 return 0;
2488
2489 /* Define macros to test an operand from arg0 and arg1 for equality and a
2490 variant that allows null and views null as being different from any
2491 non-null value. In the latter case, if either is null, they both
2492 must be; otherwise, do the normal comparison. */
2493 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2494 TREE_OPERAND (arg1, N), flags)
2495
2496 #define OP_SAME_WITH_NULL(N) \
2497 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2498 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2499
2500 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2501 {
2502 case tcc_unary:
2503 /* Two conversions are equal only if signedness and modes match. */
2504 switch (TREE_CODE (arg0))
2505 {
2506 case NOP_EXPR:
2507 case CONVERT_EXPR:
2508 case FIX_CEIL_EXPR:
2509 case FIX_TRUNC_EXPR:
2510 case FIX_FLOOR_EXPR:
2511 case FIX_ROUND_EXPR:
2512 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2513 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2514 return 0;
2515 break;
2516 default:
2517 break;
2518 }
2519
2520 return OP_SAME (0);
2521
2522
2523 case tcc_comparison:
2524 case tcc_binary:
2525 if (OP_SAME (0) && OP_SAME (1))
2526 return 1;
2527
2528 /* For commutative ops, allow the other order. */
2529 return (commutative_tree_code (TREE_CODE (arg0))
2530 && operand_equal_p (TREE_OPERAND (arg0, 0),
2531 TREE_OPERAND (arg1, 1), flags)
2532 && operand_equal_p (TREE_OPERAND (arg0, 1),
2533 TREE_OPERAND (arg1, 0), flags));
2534
2535 case tcc_reference:
2536 /* If either of the pointer (or reference) expressions we are
2537 dereferencing contain a side effect, these cannot be equal. */
2538 if (TREE_SIDE_EFFECTS (arg0)
2539 || TREE_SIDE_EFFECTS (arg1))
2540 return 0;
2541
2542 switch (TREE_CODE (arg0))
2543 {
2544 case INDIRECT_REF:
2545 case ALIGN_INDIRECT_REF:
2546 case MISALIGNED_INDIRECT_REF:
2547 case REALPART_EXPR:
2548 case IMAGPART_EXPR:
2549 return OP_SAME (0);
2550
2551 case ARRAY_REF:
2552 case ARRAY_RANGE_REF:
2553 /* Operands 2 and 3 may be null. */
2554 return (OP_SAME (0)
2555 && OP_SAME (1)
2556 && OP_SAME_WITH_NULL (2)
2557 && OP_SAME_WITH_NULL (3));
2558
2559 case COMPONENT_REF:
2560 /* Handle operand 2 the same as for ARRAY_REF. */
2561 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2562
2563 case BIT_FIELD_REF:
2564 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2565
2566 default:
2567 return 0;
2568 }
2569
2570 case tcc_expression:
2571 switch (TREE_CODE (arg0))
2572 {
2573 case ADDR_EXPR:
2574 case TRUTH_NOT_EXPR:
2575 return OP_SAME (0);
2576
2577 case TRUTH_ANDIF_EXPR:
2578 case TRUTH_ORIF_EXPR:
2579 return OP_SAME (0) && OP_SAME (1);
2580
2581 case TRUTH_AND_EXPR:
2582 case TRUTH_OR_EXPR:
2583 case TRUTH_XOR_EXPR:
2584 if (OP_SAME (0) && OP_SAME (1))
2585 return 1;
2586
2587 /* Otherwise take into account this is a commutative operation. */
2588 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2589 TREE_OPERAND (arg1, 1), flags)
2590 && operand_equal_p (TREE_OPERAND (arg0, 1),
2591 TREE_OPERAND (arg1, 0), flags));
2592
2593 case CALL_EXPR:
2594 /* If the CALL_EXPRs call different functions, then they
2595 clearly cannot be equal. */
2596 if (!OP_SAME (0))
2597 return 0;
2598
2599 {
2600 unsigned int cef = call_expr_flags (arg0);
2601 if (flags & OEP_PURE_SAME)
2602 cef &= ECF_CONST | ECF_PURE;
2603 else
2604 cef &= ECF_CONST;
2605 if (!cef)
2606 return 0;
2607 }
2608
2609 /* Now see if all the arguments are the same. operand_equal_p
2610 does not handle TREE_LIST, so we walk the operands here
2611 feeding them to operand_equal_p. */
2612 arg0 = TREE_OPERAND (arg0, 1);
2613 arg1 = TREE_OPERAND (arg1, 1);
2614 while (arg0 && arg1)
2615 {
2616 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2617 flags))
2618 return 0;
2619
2620 arg0 = TREE_CHAIN (arg0);
2621 arg1 = TREE_CHAIN (arg1);
2622 }
2623
2624 /* If we get here and both argument lists are exhausted
2625 then the CALL_EXPRs are equal. */
2626 return ! (arg0 || arg1);
2627
2628 default:
2629 return 0;
2630 }
2631
2632 case tcc_declaration:
2633 /* Consider __builtin_sqrt equal to sqrt. */
2634 return (TREE_CODE (arg0) == FUNCTION_DECL
2635 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2636 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2637 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2638
2639 default:
2640 return 0;
2641 }
2642
2643 #undef OP_SAME
2644 #undef OP_SAME_WITH_NULL
2645 }
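
/* The REAL_CST rule above deliberately uses bit-identity rather than
   C's ==.  A plain C sketch of why that matters for IEEE values
   (illustrative only, always returns 1): */

static int
example_ieee_identity_vs_equality (void)
{
  double pz = 0.0, nz = -0.0;
  /* == cannot distinguish the two zeros, but the sign bit differs;
     operand_equal_p follows the bit-identical view.  */
  return pz == nz
	 && __builtin_copysign (1.0, pz) != __builtin_copysign (1.0, nz);
}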
2646 \f
2647 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2648 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2649
2650 When in doubt, return 0. */
2651
2652 static int
2653 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2654 {
2655 int unsignedp1, unsignedpo;
2656 tree primarg0, primarg1, primother;
2657 unsigned int correct_width;
2658
2659 if (operand_equal_p (arg0, arg1, 0))
2660 return 1;
2661
2662 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2663 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2664 return 0;
2665
2666 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2667 and see if the inner values are the same. This removes any
2668 signedness comparison, which doesn't matter here. */
2669 primarg0 = arg0, primarg1 = arg1;
2670 STRIP_NOPS (primarg0);
2671 STRIP_NOPS (primarg1);
2672 if (operand_equal_p (primarg0, primarg1, 0))
2673 return 1;
2674
2675 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2676 actual comparison operand, ARG0.
2677
2678 First throw away any conversions to wider types
2679 already present in the operands. */
2680
2681 primarg1 = get_narrower (arg1, &unsignedp1);
2682 primother = get_narrower (other, &unsignedpo);
2683
2684 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2685 if (unsignedp1 == unsignedpo
2686 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2687 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2688 {
2689 tree type = TREE_TYPE (arg0);
2690
2691 /* Make sure shorter operand is extended the right way
2692 to match the longer operand. */
2693 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2694 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2695
2696 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2697 return 1;
2698 }
2699
2700 return 0;
2701 }
2702 \f
2703 /* See if ARG is an expression that is either a comparison or is performing
2704 arithmetic on comparisons. The comparisons must only be comparing
2705 two different values, which will be stored in *CVAL1 and *CVAL2; if
2706 they are nonzero it means that some operands have already been found.
2707 No variables may be used anywhere else in the expression except in the
2708 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2709 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2710
2711 If this is true, return 1. Otherwise, return zero. */
2712
2713 static int
2714 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2715 {
2716 enum tree_code code = TREE_CODE (arg);
2717 enum tree_code_class class = TREE_CODE_CLASS (code);
2718
2719 /* We can handle some of the tcc_expression cases here. */
2720 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2721 class = tcc_unary;
2722 else if (class == tcc_expression
2723 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2724 || code == COMPOUND_EXPR))
2725 class = tcc_binary;
2726
2727 else if (class == tcc_expression && code == SAVE_EXPR
2728 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2729 {
2730 /* If we've already found a CVAL1 or CVAL2, this expression is
2731 too complex to handle. */
2732 if (*cval1 || *cval2)
2733 return 0;
2734
2735 class = tcc_unary;
2736 *save_p = 1;
2737 }
2738
2739 switch (class)
2740 {
2741 case tcc_unary:
2742 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2743
2744 case tcc_binary:
2745 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2746 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2747 cval1, cval2, save_p));
2748
2749 case tcc_constant:
2750 return 1;
2751
2752 case tcc_expression:
2753 if (code == COND_EXPR)
2754 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2755 cval1, cval2, save_p)
2756 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2757 cval1, cval2, save_p)
2758 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2759 cval1, cval2, save_p));
2760 return 0;
2761
2762 case tcc_comparison:
2763 /* First see if we can handle the first operand, then the second. For
2764 the second operand, we know *CVAL1 can't be zero. It must be that
2765 one side of the comparison is each of the values; test for the
2766 case where this isn't true by failing if the two operands
2767 are the same. */
2768
2769 if (operand_equal_p (TREE_OPERAND (arg, 0),
2770 TREE_OPERAND (arg, 1), 0))
2771 return 0;
2772
2773 if (*cval1 == 0)
2774 *cval1 = TREE_OPERAND (arg, 0);
2775 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2776 ;
2777 else if (*cval2 == 0)
2778 *cval2 = TREE_OPERAND (arg, 0);
2779 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2780 ;
2781 else
2782 return 0;
2783
2784 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2785 ;
2786 else if (*cval2 == 0)
2787 *cval2 = TREE_OPERAND (arg, 1);
2788 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2789 ;
2790 else
2791 return 0;
2792
2793 return 1;
2794
2795 default:
2796 return 0;
2797 }
2798 }
2799 \f
2800 /* ARG is a tree that is known to contain just arithmetic operations and
2801 comparisons. Evaluate the operations in the tree substituting NEW0 for
2802 any occurrence of OLD0 as an operand of a comparison and likewise for
2803 NEW1 and OLD1. */
2804
2805 static tree
2806 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2807 {
2808 tree type = TREE_TYPE (arg);
2809 enum tree_code code = TREE_CODE (arg);
2810 enum tree_code_class class = TREE_CODE_CLASS (code);
2811
2812 /* We can handle some of the tcc_expression cases here. */
2813 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2814 class = tcc_unary;
2815 else if (class == tcc_expression
2816 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2817 class = tcc_binary;
2818
2819 switch (class)
2820 {
2821 case tcc_unary:
2822 return fold_build1 (code, type,
2823 eval_subst (TREE_OPERAND (arg, 0),
2824 old0, new0, old1, new1));
2825
2826 case tcc_binary:
2827 return fold_build2 (code, type,
2828 eval_subst (TREE_OPERAND (arg, 0),
2829 old0, new0, old1, new1),
2830 eval_subst (TREE_OPERAND (arg, 1),
2831 old0, new0, old1, new1));
2832
2833 case tcc_expression:
2834 switch (code)
2835 {
2836 case SAVE_EXPR:
2837 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2838
2839 case COMPOUND_EXPR:
2840 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2841
2842 case COND_EXPR:
2843 return fold_build3 (code, type,
2844 eval_subst (TREE_OPERAND (arg, 0),
2845 old0, new0, old1, new1),
2846 eval_subst (TREE_OPERAND (arg, 1),
2847 old0, new0, old1, new1),
2848 eval_subst (TREE_OPERAND (arg, 2),
2849 old0, new0, old1, new1));
2850 default:
2851 break;
2852 }
2853 /* Fall through - ??? */
2854
2855 case tcc_comparison:
2856 {
2857 tree arg0 = TREE_OPERAND (arg, 0);
2858 tree arg1 = TREE_OPERAND (arg, 1);
2859
2860 /* We need to check both for exact equality and tree equality. The
2861 former will be true if the operand has a side-effect. In that
2862 case, we know the operand occurred exactly once. */
2863
2864 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2865 arg0 = new0;
2866 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2867 arg0 = new1;
2868
2869 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2870 arg1 = new0;
2871 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2872 arg1 = new1;
2873
2874 return fold_build2 (code, type, arg0, arg1);
2875 }
2876
2877 default:
2878 return arg;
2879 }
2880 }
2881 \f
2882 /* Return a tree for the case when the result of an expression is RESULT
2883 converted to TYPE and OMITTED was previously an operand of the expression
2884 but is now not needed (e.g., we folded OMITTED * 0).
2885
2886 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2887 the conversion of RESULT to TYPE. */
2888
2889 tree
2890 omit_one_operand (tree type, tree result, tree omitted)
2891 {
2892 tree t = fold_convert (type, result);
2893
2894 if (TREE_SIDE_EFFECTS (omitted))
2895 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2896
2897 return non_lvalue (t);
2898 }
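
/* For instance, when folding X * 0 the operand X may still have side
   effects that must be preserved.  A sketch of typical usage
   (illustrative only): */

static tree
example_fold_mult_by_zero (tree type, tree x)
{
  /* Yields plain 0 if X is side-effect free, otherwise the
     COMPOUND_EXPR (X, 0) so X is still evaluated.  */
  return omit_one_operand (type, build_int_cst (type, 0), x);
}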
2899
2900 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2901
2902 static tree
2903 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2904 {
2905 tree t = fold_convert (type, result);
2906
2907 if (TREE_SIDE_EFFECTS (omitted))
2908 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2909
2910 return pedantic_non_lvalue (t);
2911 }
2912
2913 /* Return a tree for the case when the result of an expression is RESULT
2914 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2915 of the expression but are now not needed.
2916
2917 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2918 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2919 evaluated before OMITTED2. Otherwise, if neither has side effects,
2920 just do the conversion of RESULT to TYPE. */
2921
2922 tree
2923 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2924 {
2925 tree t = fold_convert (type, result);
2926
2927 if (TREE_SIDE_EFFECTS (omitted2))
2928 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2929 if (TREE_SIDE_EFFECTS (omitted1))
2930 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2931
2932 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2933 }
2934
2935 \f
2936 /* Return a simplified tree node for the truth-negation of ARG. This
2937 never alters ARG itself. We assume that ARG is an operation that
2938 returns a truth value (0 or 1).
2939
2940 FIXME: one would think we would fold the result, but it causes
2941 problems with the dominator optimizer. */
2942 tree
2943 invert_truthvalue (tree arg)
2944 {
2945 tree type = TREE_TYPE (arg);
2946 enum tree_code code = TREE_CODE (arg);
2947
2948 if (code == ERROR_MARK)
2949 return arg;
2950
2951 /* If this is a comparison, we can simply invert it, except for
2952 floating-point non-equality comparisons, in which case we just
2953 enclose a TRUTH_NOT_EXPR around what we have. */
2954
2955 if (TREE_CODE_CLASS (code) == tcc_comparison)
2956 {
2957 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2958 if (FLOAT_TYPE_P (op_type)
2959 && flag_trapping_math
2960 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2961 && code != NE_EXPR && code != EQ_EXPR)
2962 return build1 (TRUTH_NOT_EXPR, type, arg);
2963 else
2964 {
2965 code = invert_tree_comparison (code,
2966 HONOR_NANS (TYPE_MODE (op_type)));
2967 if (code == ERROR_MARK)
2968 return build1 (TRUTH_NOT_EXPR, type, arg);
2969 else
2970 return build2 (code, type,
2971 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2972 }
2973 }
2974
2975 switch (code)
2976 {
2977 case INTEGER_CST:
2978 return constant_boolean_node (integer_zerop (arg), type);
2979
2980 case TRUTH_AND_EXPR:
2981 return build2 (TRUTH_OR_EXPR, type,
2982 invert_truthvalue (TREE_OPERAND (arg, 0)),
2983 invert_truthvalue (TREE_OPERAND (arg, 1)));
2984
2985 case TRUTH_OR_EXPR:
2986 return build2 (TRUTH_AND_EXPR, type,
2987 invert_truthvalue (TREE_OPERAND (arg, 0)),
2988 invert_truthvalue (TREE_OPERAND (arg, 1)));
2989
2990 case TRUTH_XOR_EXPR:
2991 /* Here we can invert either operand. We invert the first operand
2992 unless the second operand is a TRUTH_NOT_EXPR in which case our
2993 result is the XOR of the first operand with the inside of the
2994 negation of the second operand. */
2995
2996 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2997 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2998 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2999 else
3000 return build2 (TRUTH_XOR_EXPR, type,
3001 invert_truthvalue (TREE_OPERAND (arg, 0)),
3002 TREE_OPERAND (arg, 1));
3003
3004 case TRUTH_ANDIF_EXPR:
3005 return build2 (TRUTH_ORIF_EXPR, type,
3006 invert_truthvalue (TREE_OPERAND (arg, 0)),
3007 invert_truthvalue (TREE_OPERAND (arg, 1)));
3008
3009 case TRUTH_ORIF_EXPR:
3010 return build2 (TRUTH_ANDIF_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)),
3012 invert_truthvalue (TREE_OPERAND (arg, 1)));
3013
3014 case TRUTH_NOT_EXPR:
3015 return TREE_OPERAND (arg, 0);
3016
3017 case COND_EXPR:
3018 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3019 invert_truthvalue (TREE_OPERAND (arg, 1)),
3020 invert_truthvalue (TREE_OPERAND (arg, 2)));
3021
3022 case COMPOUND_EXPR:
3023 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3024 invert_truthvalue (TREE_OPERAND (arg, 1)));
3025
3026 case NON_LVALUE_EXPR:
3027 return invert_truthvalue (TREE_OPERAND (arg, 0));
3028
3029 case NOP_EXPR:
3030 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3031 break;
3032
3033 case CONVERT_EXPR:
3034 case FLOAT_EXPR:
3035 return build1 (TREE_CODE (arg), type,
3036 invert_truthvalue (TREE_OPERAND (arg, 0)));
3037
3038 case BIT_AND_EXPR:
3039 if (!integer_onep (TREE_OPERAND (arg, 1)))
3040 break;
3041 return build2 (EQ_EXPR, type, arg,
3042 fold_convert (type, integer_zero_node));
3043
3044 case SAVE_EXPR:
3045 return build1 (TRUTH_NOT_EXPR, type, arg);
3046
3047 case CLEANUP_POINT_EXPR:
3048 return build1 (CLEANUP_POINT_EXPR, type,
3049 invert_truthvalue (TREE_OPERAND (arg, 0)));
3050
3051 default:
3052 break;
3053 }
3054 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3055 return build1 (TRUTH_NOT_EXPR, type, arg);
3056 }
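
/* The TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are just De
   Morgan's laws.  A sketch of the effect (illustrative usage only,
   with boolean operands A and B): */

static tree
example_invert_conjunction (tree a, tree b)
{
  /* invert_truthvalue (A && B) builds !A || !B.  */
  return invert_truthvalue (build2 (TRUTH_AND_EXPR, boolean_type_node,
				    a, b));
}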
3057
3058 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3059 operands are another bit-wise operation with a common input. If so,
3060 distribute the bit operations to save an operation and possibly two if
3061 constants are involved. For example, convert
3062 (A | B) & (A | C) into A | (B & C)
3063 Further simplification will occur if B and C are constants.
3064
3065 If this optimization cannot be done, 0 will be returned. */
3066
3067 static tree
3068 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3069 {
3070 tree common;
3071 tree left, right;
3072
3073 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3074 || TREE_CODE (arg0) == code
3075 || (TREE_CODE (arg0) != BIT_AND_EXPR
3076 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3077 return 0;
3078
3079 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3080 {
3081 common = TREE_OPERAND (arg0, 0);
3082 left = TREE_OPERAND (arg0, 1);
3083 right = TREE_OPERAND (arg1, 1);
3084 }
3085 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3086 {
3087 common = TREE_OPERAND (arg0, 0);
3088 left = TREE_OPERAND (arg0, 1);
3089 right = TREE_OPERAND (arg1, 0);
3090 }
3091 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3092 {
3093 common = TREE_OPERAND (arg0, 1);
3094 left = TREE_OPERAND (arg0, 0);
3095 right = TREE_OPERAND (arg1, 1);
3096 }
3097 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3098 {
3099 common = TREE_OPERAND (arg0, 1);
3100 left = TREE_OPERAND (arg0, 0);
3101 right = TREE_OPERAND (arg1, 0);
3102 }
3103 else
3104 return 0;
3105
3106 return fold_build2 (TREE_CODE (arg0), type, common,
3107 fold_build2 (code, type, left, right));
3108 }
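
/* The distribution law used above holds bit-wise for any operands.  A
   plain C self-check (illustrative only, always returns 1): */

static int
example_bit_distribution (unsigned a, unsigned b, unsigned c)
{
  /* (A | B) & (A | C) == A | (B & C), and dually with & and |
     exchanged.  */
  return ((a | b) & (a | c)) == (a | (b & c))
	 && ((a & b) | (a & c)) == (a & (b | c));
}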
3109
3110 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3111 with code CODE. This optimization is unsafe, as it can change IEEE rounding. */
3112 static tree
3113 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3114 {
3115 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3116 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3117
3118 /* (A / C) +- (B / C) -> (A +- B) / C. */
3119 if (mul0 == mul1
3120 && operand_equal_p (TREE_OPERAND (arg0, 1),
3121 TREE_OPERAND (arg1, 1), 0))
3122 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3123 fold_build2 (code, type,
3124 TREE_OPERAND (arg0, 0),
3125 TREE_OPERAND (arg1, 0)),
3126 TREE_OPERAND (arg0, 1));
3127
3128 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3129 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3130 TREE_OPERAND (arg1, 0), 0)
3131 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3132 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3133 {
3134 REAL_VALUE_TYPE r0, r1;
3135 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3136 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3137 if (!mul0)
3138 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3139 if (!mul1)
3140 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3141 real_arithmetic (&r0, code, &r0, &r1);
3142 return fold_build2 (MULT_EXPR, type,
3143 TREE_OPERAND (arg0, 0),
3144 build_real (type, r0));
3145 }
3146
3147 return NULL_TREE;
3148 }
3149 \f
3150 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3151 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3152
3153 static tree
3154 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3155 int unsignedp)
3156 {
3157 tree result;
3158
3159 if (bitpos == 0)
3160 {
3161 tree size = TYPE_SIZE (TREE_TYPE (inner));
3162 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3163 || POINTER_TYPE_P (TREE_TYPE (inner)))
3164 && host_integerp (size, 0)
3165 && tree_low_cst (size, 0) == bitsize)
3166 return fold_convert (type, inner);
3167 }
3168
3169 result = build3 (BIT_FIELD_REF, type, inner,
3170 size_int (bitsize), bitsize_int (bitpos));
3171
3172 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3173
3174 return result;
3175 }
3176
3177 /* Optimize a bit-field compare.
3178
3179 There are two cases: First is a compare against a constant and the
3180 second is a comparison of two items where the fields are at the same
3181 bit position relative to the start of a chunk (byte, halfword, word)
3182 large enough to contain it. In these cases we can avoid the shift
3183 implicit in bitfield extractions.
3184
3185 For constants, we emit a compare of the shifted constant with the
3186 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3187 compared. For two fields at the same position, we do the ANDs with the
3188 similar mask and compare the result of the ANDs.
3189
3190 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3191 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3192 are the left and right operands of the comparison, respectively.
3193
3194 If the optimization described above can be done, we return the resulting
3195 tree. Otherwise we return zero. */
3196
3197 static tree
3198 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3199 tree lhs, tree rhs)
3200 {
3201 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3202 tree type = TREE_TYPE (lhs);
3203 tree signed_type, unsigned_type;
3204 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3205 enum machine_mode lmode, rmode, nmode;
3206 int lunsignedp, runsignedp;
3207 int lvolatilep = 0, rvolatilep = 0;
3208 tree linner, rinner = NULL_TREE;
3209 tree mask;
3210 tree offset;
3211
3212 /* Get all the information about the extractions being done. If the bit size
3213 is the same as the size of the underlying object, we aren't doing an
3214 extraction at all and so can do nothing. We also don't want to
3215 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3216 then will no longer be able to replace it. */
3217 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3218 &lunsignedp, &lvolatilep, false);
3219 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3220 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3221 return 0;
3222
3223 if (!const_p)
3224 {
3225 /* If this is not a constant, we can only do something if bit positions,
3226 sizes, and signedness are the same. */
3227 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3228 &runsignedp, &rvolatilep, false);
3229
3230 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3231 || lunsignedp != runsignedp || offset != 0
3232 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3233 return 0;
3234 }
3235
3236 /* See if we can find a mode to refer to this field. We should be able to,
3237 but fail if we can't. */
3238 nmode = get_best_mode (lbitsize, lbitpos,
3239 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3240 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3241 TYPE_ALIGN (TREE_TYPE (rinner))),
3242 word_mode, lvolatilep || rvolatilep);
3243 if (nmode == VOIDmode)
3244 return 0;
3245
3246 /* Set signed and unsigned types of the precision of this mode for the
3247 shifts below. */
3248 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3249 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3250
3251 /* Compute the bit position and size for the new reference and our offset
3252 within it. If the new reference is the same size as the original, we
3253 won't optimize anything, so return zero. */
3254 nbitsize = GET_MODE_BITSIZE (nmode);
3255 nbitpos = lbitpos & ~ (nbitsize - 1);
3256 lbitpos -= nbitpos;
3257 if (nbitsize == lbitsize)
3258 return 0;
3259
3260 if (BYTES_BIG_ENDIAN)
3261 lbitpos = nbitsize - lbitsize - lbitpos;
3262
3263 /* Make the mask to be used against the extracted field. */
3264 mask = build_int_cst (unsigned_type, -1);
3265 mask = force_fit_type (mask, 0, false, false);
3266 mask = fold_convert (unsigned_type, mask);
3267 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3268 mask = const_binop (RSHIFT_EXPR, mask,
3269 size_int (nbitsize - lbitsize - lbitpos), 0);
3270
3271 if (! const_p)
3272 /* If not comparing with constant, just rework the comparison
3273 and return. */
3274 return build2 (code, compare_type,
3275 build2 (BIT_AND_EXPR, unsigned_type,
3276 make_bit_field_ref (linner, unsigned_type,
3277 nbitsize, nbitpos, 1),
3278 mask),
3279 build2 (BIT_AND_EXPR, unsigned_type,
3280 make_bit_field_ref (rinner, unsigned_type,
3281 nbitsize, nbitpos, 1),
3282 mask));
3283
3284 /* Otherwise, we are handling the constant case. See if the constant is too
3285 big for the field. Warn and return a tree for 0 (false) if so. We do
3286 this not only for its own sake, but to avoid having to test for this
3287 error case below. If we didn't, we might generate wrong code.
3288
3289 For unsigned fields, the constant shifted right by the field length should
3290 be all zero. For signed fields, the high-order bits should agree with
3291 the sign bit. */
3292
3293 if (lunsignedp)
3294 {
3295 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3296 fold_convert (unsigned_type, rhs),
3297 size_int (lbitsize), 0)))
3298 {
3299 warning (0, "comparison is always %d due to width of bit-field",
3300 code == NE_EXPR);
3301 return constant_boolean_node (code == NE_EXPR, compare_type);
3302 }
3303 }
3304 else
3305 {
3306 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3307 size_int (lbitsize - 1), 0);
3308 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3309 {
3310 warning (0, "comparison is always %d due to width of bit-field",
3311 code == NE_EXPR);
3312 return constant_boolean_node (code == NE_EXPR, compare_type);
3313 }
3314 }
3315
3316 /* Single-bit compares should always be against zero. */
3317 if (lbitsize == 1 && ! integer_zerop (rhs))
3318 {
3319 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3320 rhs = fold_convert (type, integer_zero_node);
3321 }
3322
3323 /* Make a new bitfield reference, shift the constant over the
3324 appropriate number of bits and mask it with the computed mask
3325 (in case this was a signed field). If we changed it, make a new one. */
3326 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3327 if (lvolatilep)
3328 {
3329 TREE_SIDE_EFFECTS (lhs) = 1;
3330 TREE_THIS_VOLATILE (lhs) = 1;
3331 }
3332
3333 rhs = fold (const_binop (BIT_AND_EXPR,
3334 const_binop (LSHIFT_EXPR,
3335 fold_convert (unsigned_type, rhs),
3336 size_int (lbitpos), 0),
3337 mask, 0));
3338
3339 return build2 (code, compare_type,
3340 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3341 rhs);
3342 }
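
/* A plain C sketch of the constant case above: for a 3-bit field at
   bit position 2 of a byte, the extract-and-compare and the
   mask-and-compare forms agree (illustrative only; the position and
   masks are made up for the example; always returns 1): */

static int
example_bit_field_compare (unsigned char word)
{
  /* ((word >> 2) & 7) == 5 is tested without the shift by comparing
     the masked word against the constant shifted into place.  */
  return (((word >> 2) & 7) == 5) == ((word & (7u << 2)) == (5u << 2));
}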
3343 \f
3344 /* Subroutine for fold_truthop: decode a field reference.
3345
3346 If EXP is a comparison reference, we return the innermost reference.
3347
3348 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3349 set to the starting bit number.
3350
3351 If the innermost field can be completely contained in a mode-sized
3352 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3353
3354 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3355 otherwise it is not changed.
3356
3357 *PUNSIGNEDP is set to the signedness of the field.
3358
3359 *PMASK is set to the mask used. This is either contained in a
3360 BIT_AND_EXPR or derived from the width of the field.
3361
3362 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3363
3364 Return 0 if this is not a component reference or is one that we can't
3365 do anything with. */
3366
3367 static tree
3368 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3369 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3370 int *punsignedp, int *pvolatilep,
3371 tree *pmask, tree *pand_mask)
3372 {
3373 tree outer_type = 0;
3374 tree and_mask = 0;
3375 tree mask, inner, offset;
3376 tree unsigned_type;
3377 unsigned int precision;
3378
3379 /* All the optimizations using this function assume integer fields.
3380 There are problems with FP fields since the type_for_size call
3381 below can fail for, e.g., XFmode. */
3382 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3383 return 0;
3384
3385 /* We are interested in the bare arrangement of bits, so strip everything
3386 that doesn't affect the machine mode. However, record the type of the
3387 outermost expression if it may matter below. */
3388 if (TREE_CODE (exp) == NOP_EXPR
3389 || TREE_CODE (exp) == CONVERT_EXPR
3390 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3391 outer_type = TREE_TYPE (exp);
3392 STRIP_NOPS (exp);
3393
3394 if (TREE_CODE (exp) == BIT_AND_EXPR)
3395 {
3396 and_mask = TREE_OPERAND (exp, 1);
3397 exp = TREE_OPERAND (exp, 0);
3398 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3399 if (TREE_CODE (and_mask) != INTEGER_CST)
3400 return 0;
3401 }
3402
3403 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3404 punsignedp, pvolatilep, false);
3405 if ((inner == exp && and_mask == 0)
3406 || *pbitsize < 0 || offset != 0
3407 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3408 return 0;
3409
3410 /* If the number of bits in the reference is the same as the bitsize of
3411 the outer type, then the outer type gives the signedness. Otherwise
3412 (in case of a small bitfield) the signedness is unchanged. */
3413 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3414 *punsignedp = TYPE_UNSIGNED (outer_type);
3415
3416 /* Compute the mask to access the bitfield. */
3417 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3418 precision = TYPE_PRECISION (unsigned_type);
3419
3420 mask = build_int_cst (unsigned_type, -1);
3421 mask = force_fit_type (mask, 0, false, false);
3422
3423 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3424 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3425
3426 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3427 if (and_mask != 0)
3428 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3429 fold_convert (unsigned_type, and_mask), mask);
3430
3431 *pmask = mask;
3432 *pand_mask = and_mask;
3433 return inner;
3434 }
3435
3436 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3437 bit positions. */
3438
3439 static int
3440 all_ones_mask_p (tree mask, int size)
3441 {
3442 tree type = TREE_TYPE (mask);
3443 unsigned int precision = TYPE_PRECISION (type);
3444 tree tmask;
3445
3446 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3447 tmask = force_fit_type (tmask, 0, false, false);
3448
3449 return
3450 tree_int_cst_equal (mask,
3451 const_binop (RSHIFT_EXPR,
3452 const_binop (LSHIFT_EXPR, tmask,
3453 size_int (precision - size),
3454 0),
3455 size_int (precision - size), 0));
3456 }
3457
3458 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3459 represents the sign bit of EXP's type. If EXP represents a sign
3460 or zero extension, also test VAL against the unextended type.
3461 The return value is the (sub)expression whose sign bit is VAL,
3462 or NULL_TREE otherwise. */
3463
3464 static tree
3465 sign_bit_p (tree exp, tree val)
3466 {
3467 unsigned HOST_WIDE_INT mask_lo, lo;
3468 HOST_WIDE_INT mask_hi, hi;
3469 int width;
3470 tree t;
3471
3472 /* Tree EXP must have an integral type. */
3473 t = TREE_TYPE (exp);
3474 if (! INTEGRAL_TYPE_P (t))
3475 return NULL_TREE;
3476
3477 /* Tree VAL must be an integer constant. */
3478 if (TREE_CODE (val) != INTEGER_CST
3479 || TREE_CONSTANT_OVERFLOW (val))
3480 return NULL_TREE;
3481
3482 width = TYPE_PRECISION (t);
3483 if (width > HOST_BITS_PER_WIDE_INT)
3484 {
3485 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3486 lo = 0;
3487
3488 mask_hi = ((unsigned HOST_WIDE_INT) -1
3489 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3490 mask_lo = -1;
3491 }
3492 else
3493 {
3494 hi = 0;
3495 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3496
3497 mask_hi = 0;
3498 mask_lo = ((unsigned HOST_WIDE_INT) -1
3499 >> (HOST_BITS_PER_WIDE_INT - width));
3500 }
3501
3502 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3503 treat VAL as if it were unsigned. */
3504 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3505 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3506 return exp;
3507
3508 /* Handle extension from a narrower type. */
3509 if (TREE_CODE (exp) == NOP_EXPR
3510 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3511 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3512
3513 return NULL_TREE;
3514 }
3515
3516 /* Subroutine for fold_truthop: determine if an operand is simple enough
3517 to be evaluated unconditionally. */
3518
3519 static int
3520 simple_operand_p (tree exp)
3521 {
3522 /* Strip any conversions that don't change the machine mode. */
3523 STRIP_NOPS (exp);
3524
3525 return (CONSTANT_CLASS_P (exp)
3526 || TREE_CODE (exp) == SSA_NAME
3527 || (DECL_P (exp)
3528 && ! TREE_ADDRESSABLE (exp)
3529 && ! TREE_THIS_VOLATILE (exp)
3530 && ! DECL_NONLOCAL (exp)
3531 /* Don't regard global variables as simple. They may be
3532 allocated in ways unknown to the compiler (shared memory,
3533 #pragma weak, etc). */
3534 && ! TREE_PUBLIC (exp)
3535 && ! DECL_EXTERNAL (exp)
3536 /* Loading a static variable is unduly expensive, but global
3537 registers aren't expensive. */
3538 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3539 }
3540 \f
3541 /* The following functions are subroutines to fold_range_test and allow it to
3542 try to change a logical combination of comparisons into a range test.
3543
3544 For example, both
3545 X == 2 || X == 3 || X == 4 || X == 5
3546 and
3547 X >= 2 && X <= 5
3548 are converted to
3549 (unsigned) (X - 2) <= 3
3550
3551 We describe each set of comparisons as being either inside or outside
3552 a range, using a variable named like IN_P, and then describe the
3553 range with a lower and upper bound. If one of the bounds is omitted,
3554 it represents either the highest or lowest value of the type.
3555
3556 In the comments below, we represent a range by two numbers in brackets
3557 preceded by a "+" to designate being inside that range, or a "-" to
3558 designate being outside that range, so the condition can be inverted by
3559 flipping the prefix. An omitted bound is represented by a "-". For
3560 example, "- [-, 10]" means being outside the range starting at the lowest
3561 possible value and ending at 10, in other words, being greater than 10.
3562 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3563 always false.
3564
3565 We set up things so that the missing bounds are handled in a consistent
3566 manner so neither a missing bound nor "true" and "false" need to be
3567 handled using a special case. */
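
/* A plain C check of the example transformation above (illustrative
   only; ignores the signed-overflow corner case of X - 2 at the very
   bottom of the type's range; always returns 1): */

static int
example_range_test (int x)
{
  return (x == 2 || x == 3 || x == 4 || x == 5)
	 == ((unsigned) (x - 2) <= 3);
}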
3568
3569 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3570 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3571 and UPPER1_P are nonzero if the respective argument is an upper bound
3572 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3573 must be specified for a comparison. ARG1 will be converted to ARG0's
3574 type if both are specified. */
3575
3576 static tree
3577 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3578 tree arg1, int upper1_p)
3579 {
3580 tree tem;
3581 int result;
3582 int sgn0, sgn1;
3583
3584 /* If neither arg represents infinity, do the normal operation.
3585 Else, if not a comparison, return infinity. Else handle the special
3586 comparison rules. Note that most of the cases below won't occur, but
3587 are handled for consistency. */
3588
3589 if (arg0 != 0 && arg1 != 0)
3590 {
3591 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3592 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3593 STRIP_NOPS (tem);
3594 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3595 }
3596
3597 if (TREE_CODE_CLASS (code) != tcc_comparison)
3598 return 0;
3599
3600 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3601 for neither. In real maths, we cannot assume open ended ranges are
3602 the same. But, this is computer arithmetic, where numbers are finite.
3603 We can therefore model any unbounded bound with the value Z, Z being
3604 greater than any representable number. This permits
3605 us to treat unbounded ranges as equal. */
3606 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3607 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3608 switch (code)
3609 {
3610 case EQ_EXPR:
3611 result = sgn0 == sgn1;
3612 break;
3613 case NE_EXPR:
3614 result = sgn0 != sgn1;
3615 break;
3616 case LT_EXPR:
3617 result = sgn0 < sgn1;
3618 break;
3619 case LE_EXPR:
3620 result = sgn0 <= sgn1;
3621 break;
3622 case GT_EXPR:
3623 result = sgn0 > sgn1;
3624 break;
3625 case GE_EXPR:
3626 result = sgn0 >= sgn1;
3627 break;
3628 default:
3629 gcc_unreachable ();
3630 }
3631
3632 return constant_boolean_node (result, type);
3633 }
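
/* For instance, a call such as

     range_binop (LT_EXPR, integer_type_node, c, 0, NULL_TREE, 1)

   asks whether a finite bound C is below a missing upper bound:
   SGN0 is 0 and SGN1 is 1, so the result is always true, every
   finite value being less than the implicit Z. Likewise two missing
   upper bounds compare equal under EQ_EXPR.  */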
3634 \f
3635 /* Given EXP, a logical expression, set the range it is testing into
3636 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3637 actually being tested. *PLOW and *PHIGH will be made of the same type
3638 as the returned expression. If EXP is not a comparison, we will most
3639 likely not be returning a useful value and range. */
3640
3641 static tree
3642 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3643 {
3644 enum tree_code code;
3645 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3646 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3647 int in_p, n_in_p;
3648 tree low, high, n_low, n_high;
3649
3650 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3651 and see if we can refine the range. Some of the cases below may not
3652 happen, but it doesn't seem worth worrying about this. We "continue"
3653 the outer loop when we've changed something; otherwise we "break"
3654 the switch, which will "break" the while. */
3655
3656 in_p = 0;
3657 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3658
3659 while (1)
3660 {
3661 code = TREE_CODE (exp);
3662 exp_type = TREE_TYPE (exp);
3663
3664 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3665 {
3666 if (TREE_CODE_LENGTH (code) > 0)
3667 arg0 = TREE_OPERAND (exp, 0);
3668 if (TREE_CODE_CLASS (code) == tcc_comparison
3669 || TREE_CODE_CLASS (code) == tcc_unary
3670 || TREE_CODE_CLASS (code) == tcc_binary)
3671 arg0_type = TREE_TYPE (arg0);
3672 if (TREE_CODE_CLASS (code) == tcc_binary
3673 || TREE_CODE_CLASS (code) == tcc_comparison
3674 || (TREE_CODE_CLASS (code) == tcc_expression
3675 && TREE_CODE_LENGTH (code) > 1))
3676 arg1 = TREE_OPERAND (exp, 1);
3677 }
3678
3679 switch (code)
3680 {
3681 case TRUTH_NOT_EXPR:
3682 in_p = ! in_p, exp = arg0;
3683 continue;
3684
3685 case EQ_EXPR: case NE_EXPR:
3686 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3687 /* We can only do something if the range is testing for zero
3688 and if the second operand is an integer constant. Note that
3689 saying something is "in" the range we make is done by
3690 complementing IN_P, since it is initially set up for the case of
3691 being not equal to zero; "out" means leaving it alone. */
3692 if (low == 0 || high == 0
3693 || ! integer_zerop (low) || ! integer_zerop (high)
3694 || TREE_CODE (arg1) != INTEGER_CST)
3695 break;
3696
3697 switch (code)
3698 {
3699 case NE_EXPR: /* - [c, c] */
3700 low = high = arg1;
3701 break;
3702 case EQ_EXPR: /* + [c, c] */
3703 in_p = ! in_p, low = high = arg1;
3704 break;
3705 case GT_EXPR: /* - [-, c] */
3706 low = 0, high = arg1;
3707 break;
3708 case GE_EXPR: /* + [c, -] */
3709 in_p = ! in_p, low = arg1, high = 0;
3710 break;
3711 case LT_EXPR: /* - [c, -] */
3712 low = arg1, high = 0;
3713 break;
3714 case LE_EXPR: /* + [-, c] */
3715 in_p = ! in_p, low = 0, high = arg1;
3716 break;
3717 default:
3718 gcc_unreachable ();
3719 }
3720
3721 /* If this is an unsigned comparison, we also know that EXP is
3722 greater than or equal to zero. We base the range tests we make
3723 on that fact, so we record it here so we can parse existing
3724 range tests. We test arg0_type since often the return type
3725 of, e.g. EQ_EXPR, is boolean. */
3726 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3727 {
3728 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3729 in_p, low, high, 1,
3730 fold_convert (arg0_type, integer_zero_node),
3731 NULL_TREE))
3732 break;
3733
3734 in_p = n_in_p, low = n_low, high = n_high;
3735
3736 /* If the high bound is missing, but we have a nonzero low
3737 bound, reverse the range so it goes from zero to the low bound
3738 minus 1. */
3739 if (high == 0 && low && ! integer_zerop (low))
3740 {
3741 in_p = ! in_p;
3742 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3743 integer_one_node, 0);
3744 low = fold_convert (arg0_type, integer_zero_node);
3745 }
3746 }
3747
3748 exp = arg0;
3749 continue;
3750
3751 case NEGATE_EXPR:
3752 /* (-x) IN [a,b] -> x in [-b, -a] */
3753 n_low = range_binop (MINUS_EXPR, exp_type,
3754 fold_convert (exp_type, integer_zero_node),
3755 0, high, 1);
3756 n_high = range_binop (MINUS_EXPR, exp_type,
3757 fold_convert (exp_type, integer_zero_node),
3758 0, low, 0);
3759 low = n_low, high = n_high;
3760 exp = arg0;
3761 continue;
3762
3763 case BIT_NOT_EXPR:
3764 /* ~ X -> -X - 1 */
3765 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3766 fold_convert (exp_type, integer_one_node));
3767 continue;
3768
3769 case PLUS_EXPR: case MINUS_EXPR:
3770 if (TREE_CODE (arg1) != INTEGER_CST)
3771 break;
3772
3773 /* If EXP is signed, any overflow in the computation is undefined,
3774 so we don't worry about it so long as our computations on
3775 the bounds don't overflow. For unsigned, overflow is defined
3776 and this is exactly the right thing. */
3777 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3778 arg0_type, low, 0, arg1, 0);
3779 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3780 arg0_type, high, 1, arg1, 0);
3781 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3782 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3783 break;
3784
3785 /* Check for an unsigned range which has wrapped around the maximum
3786 value thus making n_high < n_low, and normalize it. */
3787 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3788 {
3789 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3790 integer_one_node, 0);
3791 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3792 integer_one_node, 0);
3793
3794 /* If the range is of the form +/- [ x+1, x ], we won't
3795 be able to normalize it. But then, it represents the
3796 whole range or the empty set, so make it
3797 +/- [ -, - ]. */
3798 if (tree_int_cst_equal (n_low, low)
3799 && tree_int_cst_equal (n_high, high))
3800 low = high = 0;
3801 else
3802 in_p = ! in_p;
3803 }
3804 else
3805 low = n_low, high = n_high;
3806
3807 exp = arg0;
3808 continue;
3809
3810 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3811 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3812 break;
3813
3814 if (! INTEGRAL_TYPE_P (arg0_type)
3815 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3816 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3817 break;
3818
3819 n_low = low, n_high = high;
3820
3821 if (n_low != 0)
3822 n_low = fold_convert (arg0_type, n_low);
3823
3824 if (n_high != 0)
3825 n_high = fold_convert (arg0_type, n_high);
3826
3827
3828 /* If we're converting arg0, which has an unsigned type, to the
3829 signed type of exp, we will be doing the comparison as unsigned.
3830 The tests above have already verified that LOW and HIGH
3831 are both positive.
3832
3833 So we have to ensure that we will handle large unsigned
3834 values the same way that the current signed bounds treat
3835 negative values. */
3836
3837 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3838 {
3839 tree high_positive;
3840 tree equiv_type = lang_hooks.types.type_for_mode
3841 (TYPE_MODE (arg0_type), 1);
3842
3843 /* A range without an upper bound is, naturally, unbounded.
3844 Since convert would have cropped a very large value, use
3845 the max value for the destination type. */
3846 high_positive
3847 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3848 : TYPE_MAX_VALUE (arg0_type);
3849
3850 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3851 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3852 fold_convert (arg0_type,
3853 high_positive),
3854 fold_convert (arg0_type,
3855 integer_one_node));
3856
3857 /* If the low bound is specified, "and" the range with the
3858 range for which the original unsigned value will be
3859 positive. */
3860 if (low != 0)
3861 {
3862 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3863 1, n_low, n_high, 1,
3864 fold_convert (arg0_type,
3865 integer_zero_node),
3866 high_positive))
3867 break;
3868
3869 in_p = (n_in_p == in_p);
3870 }
3871 else
3872 {
3873 /* Otherwise, "or" the range with the range of the input
3874 that will be interpreted as negative. */
3875 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3876 0, n_low, n_high, 1,
3877 fold_convert (arg0_type,
3878 integer_zero_node),
3879 high_positive))
3880 break;
3881
3882 in_p = (in_p != n_in_p);
3883 }
3884 }
3885
3886 exp = arg0;
3887 low = n_low, high = n_high;
3888 continue;
3889
3890 default:
3891 break;
3892 }
3893
3894 break;
3895 }
3896
3897 /* If EXP is a constant, we can evaluate whether this is true or false. */
3898 if (TREE_CODE (exp) == INTEGER_CST)
3899 {
3900 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3901 exp, 0, low, 0))
3902 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3903 exp, 1, high, 1)));
3904 low = high = 0;
3905 exp = 0;
3906 }
3907
3908 *pin_p = in_p, *plow = low, *phigh = high;
3909 return exp;
3910 }
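
/* A short trace, for a signed X (where overflow is undefined): given
   EXP = -X > 10, the comparison case first records - [-, 10] and
   recurses into -X; the NEGATE_EXPR case then negates and swaps the
   bounds, so we return X with the range - [-10, -], i.e. X < -10,
   which is indeed equivalent to -X > 10.  */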
3911 \f
3912 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3913 type, TYPE, return an expression to test if EXP is in (or out of, depending
3914 on IN_P) the range. Return 0 if the test couldn't be created. */
3915
3916 static tree
3917 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3918 {
3919 tree etype = TREE_TYPE (exp);
3920 tree value;
3921
3922 if (! in_p)
3923 {
3924 value = build_range_check (type, exp, 1, low, high);
3925 if (value != 0)
3926 return invert_truthvalue (value);
3927
3928 return 0;
3929 }
3930
3931 if (low == 0 && high == 0)
3932 return fold_convert (type, integer_one_node);
3933
3934 if (low == 0)
3935 return fold_build2 (LE_EXPR, type, exp, high);
3936
3937 if (high == 0)
3938 return fold_build2 (GE_EXPR, type, exp, low);
3939
3940 if (operand_equal_p (low, high, 0))
3941 return fold_build2 (EQ_EXPR, type, exp, low);
3942
3943 if (integer_zerop (low))
3944 {
3945 if (! TYPE_UNSIGNED (etype))
3946 {
3947 etype = lang_hooks.types.unsigned_type (etype);
3948 high = fold_convert (etype, high);
3949 exp = fold_convert (etype, exp);
3950 }
3951 return build_range_check (type, exp, 1, 0, high);
3952 }
3953
3954 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3955 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3956 {
3957 unsigned HOST_WIDE_INT lo;
3958 HOST_WIDE_INT hi;
3959 int prec;
3960
3961 prec = TYPE_PRECISION (etype);
3962 if (prec <= HOST_BITS_PER_WIDE_INT)
3963 {
3964 hi = 0;
3965 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3966 }
3967 else
3968 {
3969 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3970 lo = (unsigned HOST_WIDE_INT) -1;
3971 }
3972
3973 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3974 {
3975 if (TYPE_UNSIGNED (etype))
3976 {
3977 etype = lang_hooks.types.signed_type (etype);
3978 exp = fold_convert (etype, exp);
3979 }
3980 return fold_build2 (GT_EXPR, type, exp,
3981 fold_convert (etype, integer_zero_node));
3982 }
3983 }
3984
3985 value = const_binop (MINUS_EXPR, high, low, 0);
3986 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3987 {
3988 tree utype, minv, maxv;
3989
3990 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3991 for the type in question, as we rely on this here. */
3992 switch (TREE_CODE (etype))
3993 {
3994 case INTEGER_TYPE:
3995 case ENUMERAL_TYPE:
3996 case CHAR_TYPE:
3997 utype = lang_hooks.types.unsigned_type (etype);
3998 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3999 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4000 integer_one_node, 1);
4001 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4002 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4003 minv, 1, maxv, 1)))
4004 {
4005 etype = utype;
4006 high = fold_convert (etype, high);
4007 low = fold_convert (etype, low);
4008 exp = fold_convert (etype, exp);
4009 value = const_binop (MINUS_EXPR, high, low, 0);
4010 }
4011 break;
4012 default:
4013 break;
4014 }
4015 }
4016
4017 if (value != 0 && ! TREE_OVERFLOW (value))
4018 return build_range_check (type,
4019 fold_build2 (MINUS_EXPR, etype, exp, low),
4020 1, fold_convert (etype, integer_zero_node),
4021 value);
4022
4023 return 0;
4024 }
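
/* For example, with IN_P = 1, LOW = 2 and HIGH = 5 for an int EXP,
   none of the early cases match, so we compute HIGH - LOW = 3 and
   recurse on EXP - 2 against [0, 3]; the integer_zerop (low) case
   then switches to the unsigned type, yielding the familiar
   (unsigned) (EXP - 2) <= 3.  */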
4025 \f
4026 /* Given two ranges, see if we can merge them into one. Return 1 if we
4027 can, 0 if we can't. Set the output range into the specified parameters. */
4028
4029 static int
4030 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4031 tree high0, int in1_p, tree low1, tree high1)
4032 {
4033 int no_overlap;
4034 int subset;
4035 int temp;
4036 tree tem;
4037 int in_p;
4038 tree low, high;
4039 int lowequal = ((low0 == 0 && low1 == 0)
4040 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4041 low0, 0, low1, 0)));
4042 int highequal = ((high0 == 0 && high1 == 0)
4043 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4044 high0, 1, high1, 1)));
4045
4046 /* Make range 0 be the range that starts first, or ends last if they
4047 start at the same value. Swap the ranges if that isn't so. */
4048 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4049 low0, 0, low1, 0))
4050 || (lowequal
4051 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4052 high1, 1, high0, 1))))
4053 {
4054 temp = in0_p, in0_p = in1_p, in1_p = temp;
4055 tem = low0, low0 = low1, low1 = tem;
4056 tem = high0, high0 = high1, high1 = tem;
4057 }
4058
4059 /* Now flag two cases, whether the ranges are disjoint or whether the
4060 second range is totally subsumed in the first. Note that the tests
4061 below are simplified by the ones above. */
4062 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4063 high0, 1, low1, 0));
4064 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4065 high1, 1, high0, 1));
4066
4067 /* We now have four cases, depending on whether we are including or
4068 excluding the two ranges. */
4069 if (in0_p && in1_p)
4070 {
4071 /* If they don't overlap, the result is false. If the second range
4072 is a subset it is the result. Otherwise, the range is from the start
4073 of the second to the end of the first. */
4074 if (no_overlap)
4075 in_p = 0, low = high = 0;
4076 else if (subset)
4077 in_p = 1, low = low1, high = high1;
4078 else
4079 in_p = 1, low = low1, high = high0;
4080 }
4081
4082 else if (in0_p && ! in1_p)
4083 {
4084 /* If they don't overlap, the result is the first range. If they are
4085 equal, the result is false. If the second range is a subset of the
4086 first, and the ranges begin at the same place, we go from just after
4087 the end of the second range to the end of the first. If the second
4088 range is not a subset of the first, or if it is a subset and both
4089 ranges end at the same place, the range starts at the start of the
4090 first range and ends just before the second range.
4091 Otherwise, we can't describe this as a single range. */
4092 if (no_overlap)
4093 in_p = 1, low = low0, high = high0;
4094 else if (lowequal && highequal)
4095 in_p = 0, low = high = 0;
4096 else if (subset && lowequal)
4097 {
4098 in_p = 1, high = high0;
4099 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4100 integer_one_node, 0);
4101 }
4102 else if (! subset || highequal)
4103 {
4104 in_p = 1, low = low0;
4105 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4106 integer_one_node, 0);
4107 }
4108 else
4109 return 0;
4110 }
4111
4112 else if (! in0_p && in1_p)
4113 {
4114 /* If they don't overlap, the result is the second range. If the second
4115 is a subset of the first, the result is false. Otherwise,
4116 the range starts just after the first range and ends at the
4117 end of the second. */
4118 if (no_overlap)
4119 in_p = 1, low = low1, high = high1;
4120 else if (subset || highequal)
4121 in_p = 0, low = high = 0;
4122 else
4123 {
4124 in_p = 1, high = high1;
4125 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4126 integer_one_node, 0);
4127 }
4128 }
4129
4130 else
4131 {
4132 /* The case where we are excluding both ranges. Here the complex case
4133 is if they don't overlap. In that case, the only time we have a
4134 range is if they are adjacent. If the second is a subset of the
4135 first, the result is the first. Otherwise, the range to exclude
4136 starts at the beginning of the first range and ends at the end of the
4137 second. */
4138 if (no_overlap)
4139 {
4140 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4141 range_binop (PLUS_EXPR, NULL_TREE,
4142 high0, 1,
4143 integer_one_node, 1),
4144 1, low1, 0)))
4145 in_p = 0, low = low0, high = high1;
4146 else
4147 {
4148 /* Canonicalize - [min, x] into - [-, x]. */
4149 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4150 switch (TREE_CODE (TREE_TYPE (low0)))
4151 {
4152 case ENUMERAL_TYPE:
4153 if (TYPE_PRECISION (TREE_TYPE (low0))
4154 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4155 break;
4156 /* FALLTHROUGH */
4157 case INTEGER_TYPE:
4158 case CHAR_TYPE:
4159 if (tree_int_cst_equal (low0,
4160 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4161 low0 = 0;
4162 break;
4163 case POINTER_TYPE:
4164 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4165 && integer_zerop (low0))
4166 low0 = 0;
4167 break;
4168 default:
4169 break;
4170 }
4171
4172 /* Canonicalize - [x, max] into - [x, -]. */
4173 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4174 switch (TREE_CODE (TREE_TYPE (high1)))
4175 {
4176 case ENUMERAL_TYPE:
4177 if (TYPE_PRECISION (TREE_TYPE (high1))
4178 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4179 break;
4180 /* FALLTHROUGH */
4181 case INTEGER_TYPE:
4182 case CHAR_TYPE:
4183 if (tree_int_cst_equal (high1,
4184 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4185 high1 = 0;
4186 break;
4187 case POINTER_TYPE:
4188 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4189 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4190 high1, 1,
4191 integer_one_node, 1)))
4192 high1 = 0;
4193 break;
4194 default:
4195 break;
4196 }
4197
4198 /* The ranges might also be adjacent between the maximum and
4199 minimum values of the given type. For
4200 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4201 return + [x + 1, y - 1]. */
4202 if (low0 == 0 && high1 == 0)
4203 {
4204 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4205 integer_one_node, 1);
4206 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4207 integer_one_node, 0);
4208 if (low == 0 || high == 0)
4209 return 0;
4210
4211 in_p = 1;
4212 }
4213 else
4214 return 0;
4215 }
4216 }
4217 else if (subset)
4218 in_p = 0, low = low0, high = high0;
4219 else
4220 in_p = 0, low = low0, high = high1;
4221 }
4222
4223 *pin_p = in_p, *plow = low, *phigh = high;
4224 return 1;
4225 }
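
/* Two quick instances of the above: merging + [2, 5] with + [4, 9]
   (both included, overlapping, neither a subset) yields the
   intersection + [4, 5], while merging - [2, 5] with - [6, 9]
   (both excluded, adjacent) yields the single exclusion - [2, 9].  */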
4226 \f
4227
4228 /* Subroutine of fold, looking inside expressions of the form
4229 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4230 of the COND_EXPR. This function is being used also to optimize
4231 A op B ? C : A, by reversing the comparison first.
4232
4233 Return a folded expression whose code is not a COND_EXPR
4234 anymore, or NULL_TREE if no folding opportunity is found. */
4235
4236 static tree
4237 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4238 {
4239 enum tree_code comp_code = TREE_CODE (arg0);
4240 tree arg00 = TREE_OPERAND (arg0, 0);
4241 tree arg01 = TREE_OPERAND (arg0, 1);
4242 tree arg1_type = TREE_TYPE (arg1);
4243 tree tem;
4244
4245 STRIP_NOPS (arg1);
4246 STRIP_NOPS (arg2);
4247
4248 /* If we have A op 0 ? A : -A, consider applying the following
4249 transformations:
4250
4251 A == 0? A : -A same as -A
4252 A != 0? A : -A same as A
4253 A >= 0? A : -A same as abs (A)
4254 A > 0? A : -A same as abs (A)
4255 A <= 0? A : -A same as -abs (A)
4256 A < 0? A : -A same as -abs (A)
4257
4258 None of these transformations work for modes with signed
4259 zeros. If A is +/-0, the first two transformations will
4260 change the sign of the result (from +0 to -0, or vice
4261 versa). The last four will fix the sign of the result,
4262 even though the original expressions could be positive or
4263 negative, depending on the sign of A.
4264
4265 Note that all these transformations are correct if A is
4266 NaN, since the two alternatives (A and -A) are also NaNs. */
4267 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4268 ? real_zerop (arg01)
4269 : integer_zerop (arg01))
4270 && ((TREE_CODE (arg2) == NEGATE_EXPR
4271 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4272 /* In the case that A is of the form X-Y, '-A' (arg2) may
4273 have already been folded to Y-X, check for that. */
4274 || (TREE_CODE (arg1) == MINUS_EXPR
4275 && TREE_CODE (arg2) == MINUS_EXPR
4276 && operand_equal_p (TREE_OPERAND (arg1, 0),
4277 TREE_OPERAND (arg2, 1), 0)
4278 && operand_equal_p (TREE_OPERAND (arg1, 1),
4279 TREE_OPERAND (arg2, 0), 0))))
4280 switch (comp_code)
4281 {
4282 case EQ_EXPR:
4283 case UNEQ_EXPR:
4284 tem = fold_convert (arg1_type, arg1);
4285 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4286 case NE_EXPR:
4287 case LTGT_EXPR:
4288 return pedantic_non_lvalue (fold_convert (type, arg1));
4289 case UNGE_EXPR:
4290 case UNGT_EXPR:
4291 if (flag_trapping_math)
4292 break;
4293 /* Fall through. */
4294 case GE_EXPR:
4295 case GT_EXPR:
4296 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4297 arg1 = fold_convert (lang_hooks.types.signed_type
4298 (TREE_TYPE (arg1)), arg1);
4299 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4300 return pedantic_non_lvalue (fold_convert (type, tem));
4301 case UNLE_EXPR:
4302 case UNLT_EXPR:
4303 if (flag_trapping_math)
4304 break;
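/* Fall through. */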
4305 case LE_EXPR:
4306 case LT_EXPR:
4307 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4308 arg1 = fold_convert (lang_hooks.types.signed_type
4309 (TREE_TYPE (arg1)), arg1);
4310 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4311 return negate_expr (fold_convert (type, tem));
4312 default:
4313 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4314 break;
4315 }
4316
4317 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4318 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4319 both transformations are correct when A is NaN: A != 0
4320 is then true, and A == 0 is false. */
4321
4322 if (integer_zerop (arg01) && integer_zerop (arg2))
4323 {
4324 if (comp_code == NE_EXPR)
4325 return pedantic_non_lvalue (fold_convert (type, arg1));
4326 else if (comp_code == EQ_EXPR)
4327 return fold_convert (type, integer_zero_node);
4328 }
4329
4330 /* Try some transformations of A op B ? A : B.
4331
4332 A == B? A : B same as B
4333 A != B? A : B same as A
4334 A >= B? A : B same as max (A, B)
4335 A > B? A : B same as max (B, A)
4336 A <= B? A : B same as min (A, B)
4337 A < B? A : B same as min (B, A)
4338
4339 As above, these transformations don't work in the presence
4340 of signed zeros. For example, if A and B are zeros of
4341 opposite sign, the first two transformations will change
4342 the sign of the result. In the last four, the original
4343 expressions give different results for (A=+0, B=-0) and
4344 (A=-0, B=+0), but the transformed expressions do not.
4345
4346 The first two transformations are correct if either A or B
4347 is a NaN. In the first transformation, the condition will
4348 be false, and B will indeed be chosen. In the case of the
4349 second transformation, the condition A != B will be true,
4350 and A will be chosen.
4351
4352 The conversions to max() and min() are not correct if B is
4353 a number and A is not. The conditions in the original
4354 expressions will be false, so all four give B. The min()
4355 and max() versions would give a NaN instead. */
4356 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4357 /* Avoid these transformations if the COND_EXPR may be used
4358 as an lvalue in the C++ front-end. PR c++/19199. */
4359 && (in_gimple_form
4360 || strcmp (lang_hooks.name, "GNU C++") != 0
4361 || ! maybe_lvalue_p (arg1)
4362 || ! maybe_lvalue_p (arg2)))
4363 {
4364 tree comp_op0 = arg00;
4365 tree comp_op1 = arg01;
4366 tree comp_type = TREE_TYPE (comp_op0);
4367
4368 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4369 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4370 {
4371 comp_type = type;
4372 comp_op0 = arg1;
4373 comp_op1 = arg2;
4374 }
4375
4376 switch (comp_code)
4377 {
4378 case EQ_EXPR:
4379 return pedantic_non_lvalue (fold_convert (type, arg2));
4380 case NE_EXPR:
4381 return pedantic_non_lvalue (fold_convert (type, arg1));
4382 case LE_EXPR:
4383 case LT_EXPR:
4384 case UNLE_EXPR:
4385 case UNLT_EXPR:
4386 /* In C++ a ?: expression can be an lvalue, so put the
4387 operand which will be used if they are equal first
4388 so that we can convert this back to the
4389 corresponding COND_EXPR. */
4390 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4391 {
4392 comp_op0 = fold_convert (comp_type, comp_op0);
4393 comp_op1 = fold_convert (comp_type, comp_op1);
4394 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4395 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4396 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4397 return pedantic_non_lvalue (fold_convert (type, tem));
4398 }
4399 break;
4400 case GE_EXPR:
4401 case GT_EXPR:
4402 case UNGE_EXPR:
4403 case UNGT_EXPR:
4404 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4405 {
4406 comp_op0 = fold_convert (comp_type, comp_op0);
4407 comp_op1 = fold_convert (comp_type, comp_op1);
4408 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4409 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4410 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4411 return pedantic_non_lvalue (fold_convert (type, tem));
4412 }
4413 break;
4414 case UNEQ_EXPR:
4415 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4416 return pedantic_non_lvalue (fold_convert (type, arg2));
4417 break;
4418 case LTGT_EXPR:
4419 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4420 return pedantic_non_lvalue (fold_convert (type, arg1));
4421 break;
4422 default:
4423 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4424 break;
4425 }
4426 }
4427
4428 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4429 we might still be able to simplify this. For example,
4430 if C1 is one less or one more than C2, this might have started
4431 out as a MIN or MAX and been transformed by this function.
4432 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4433
4434 if (INTEGRAL_TYPE_P (type)
4435 && TREE_CODE (arg01) == INTEGER_CST
4436 && TREE_CODE (arg2) == INTEGER_CST)
4437 switch (comp_code)
4438 {
4439 case EQ_EXPR:
4440 /* We can replace A with C1 in this case. */
4441 arg1 = fold_convert (type, arg01);
4442 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4443
4444 case LT_EXPR:
4445 /* If C1 is C2 + 1, this is min(A, C2). */
4446 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4447 OEP_ONLY_CONST)
4448 && operand_equal_p (arg01,
4449 const_binop (PLUS_EXPR, arg2,
4450 integer_one_node, 0),
4451 OEP_ONLY_CONST))
4452 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4453 type, arg1, arg2));
4454 break;
4455
4456 case LE_EXPR:
4457 /* If C1 is C2 - 1, this is min(A, C2). */
4458 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4459 OEP_ONLY_CONST)
4460 && operand_equal_p (arg01,
4461 const_binop (MINUS_EXPR, arg2,
4462 integer_one_node, 0),
4463 OEP_ONLY_CONST))
4464 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4465 type, arg1, arg2));
4466 break;
4467
4468 case GT_EXPR:
4469 /* If C1 is C2 - 1, this is max(A, C2). */
4470 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4471 OEP_ONLY_CONST)
4472 && operand_equal_p (arg01,
4473 const_binop (MINUS_EXPR, arg2,
4474 integer_one_node, 0),
4475 OEP_ONLY_CONST))
4476 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4477 type, arg1, arg2));
4478 break;
4479
4480 case GE_EXPR:
4481 /* If C1 is C2 + 1, this is max(A, C2). */
4482 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4483 OEP_ONLY_CONST)
4484 && operand_equal_p (arg01,
4485 const_binop (PLUS_EXPR, arg2,
4486 integer_one_node, 0),
4487 OEP_ONLY_CONST))
4488 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4489 type, arg1, arg2));
4490 break;
4491 case NE_EXPR:
4492 break;
4493 default:
4494 gcc_unreachable ();
4495 }
4496
4497 return NULL_TREE;
4498 }
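
/* Summarizing the above with two instances (assuming signed zeros
   need not be preserved, and -fno-trapping-math where the unordered
   codes are involved):

     X > 0 ? X : -X    becomes  ABS_EXPR <X>
     X >= Y ? X : Y    becomes  MAX_EXPR <X, Y>, if NaNs need not
                                be honored

   while X != 0 ? X : 0 becomes simply X.  */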
4499
4500
4501 \f
4502 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4503 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4504 #endif
4505
4506 /* EXP is some logical combination of boolean tests. See if we can
4507 merge it into some range test. Return the new tree if so. */
4508
4509 static tree
4510 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4511 {
4512 int or_op = (code == TRUTH_ORIF_EXPR
4513 || code == TRUTH_OR_EXPR);
4514 int in0_p, in1_p, in_p;
4515 tree low0, low1, low, high0, high1, high;
4516 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4517 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4518 tree tem;
4519
4520 /* If this is an OR operation, invert both sides; we will invert
4521 again at the end. */
4522 if (or_op)
4523 in0_p = ! in0_p, in1_p = ! in1_p;
4524
4525 /* If both expressions are the same, if we can merge the ranges, and we
4526 can build the range test, return it or it inverted. If one of the
4527 ranges is always true or always false, consider it to be the same
4528 expression as the other. */
4529 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4530 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4531 in1_p, low1, high1)
4532 && 0 != (tem = (build_range_check (type,
4533 lhs != 0 ? lhs
4534 : rhs != 0 ? rhs : integer_zero_node,
4535 in_p, low, high))))
4536 return or_op ? invert_truthvalue (tem) : tem;
4537
4538 /* On machines where branches are expensive, if this is a
4539 short-circuited branch and the underlying object on both sides
4540 is the same, make a non-short-circuit operation. */
4541 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4542 && lhs != 0 && rhs != 0
4543 && (code == TRUTH_ANDIF_EXPR
4544 || code == TRUTH_ORIF_EXPR)
4545 && operand_equal_p (lhs, rhs, 0))
4546 {
4547 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4548 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4549 which cases we can't do this. */
4550 if (simple_operand_p (lhs))
4551 return build2 (code == TRUTH_ANDIF_EXPR
4552 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4553 type, op0, op1);
4554
4555 else if (lang_hooks.decls.global_bindings_p () == 0
4556 && ! CONTAINS_PLACEHOLDER_P (lhs))
4557 {
4558 tree common = save_expr (lhs);
4559
4560 if (0 != (lhs = build_range_check (type, common,
4561 or_op ? ! in0_p : in0_p,
4562 low0, high0))
4563 && (0 != (rhs = build_range_check (type, common,
4564 or_op ? ! in1_p : in1_p,
4565 low1, high1))))
4566 return build2 (code == TRUTH_ANDIF_EXPR
4567 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4568 type, lhs, rhs);
4569 }
4570 }
4571
4572 return 0;
4573 }
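
/* The classic instance: for "ch >= '0' && ch <= '9'", make_range
   gives + ['0', -] and + [-, '9'] over the same operand CH, these
   merge to + ['0', '9'], and build_range_check then emits the
   single test (unsigned) (ch - '0') <= 9.  */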
4574 \f
4575 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4576 bit value. Arrange things so the extra bits will be set to zero if and
4577 only if C is sign-extended to its full width. If MASK is nonzero,
4578 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4579
4580 static tree
4581 unextend (tree c, int p, int unsignedp, tree mask)
4582 {
4583 tree type = TREE_TYPE (c);
4584 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4585 tree temp;
4586
4587 if (p == modesize || unsignedp)
4588 return c;
4589
4590 /* We work by getting just the sign bit into the low-order bit, then
4591 into the high-order bit, then sign-extend. We then XOR that value
4592 with C. */
4593 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4594 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4595
4596 /* We must use a signed type in order to get an arithmetic right shift.
4597 However, we must also avoid introducing accidental overflows, so that
4598 a subsequent call to integer_zerop will work. Hence we must
4599 do the type conversion here. At this point, the constant is either
4600 zero or one, and the conversion to a signed type can never overflow.
4601 We could get an overflow if this conversion is done anywhere else. */
4602 if (TYPE_UNSIGNED (type))
4603 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4604
4605 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4606 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4607 if (mask != 0)
4608 temp = const_binop (BIT_AND_EXPR, temp,
4609 fold_convert (TREE_TYPE (c), mask), 0);
4610 /* If necessary, convert the type back to match the type of C. */
4611 if (TYPE_UNSIGNED (type))
4612 temp = fold_convert (type, temp);
4613
4614 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4615 }
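
/* A hand-worked case, written schematically for a 32-bit mode and
   two's complement arithmetic: unextend (0x80, 8, 0, NULL_TREE)
   isolates the sign bit ((0x80 >> 7) & 1 == 1), moves it to bit 31,
   arithmetic-shifts it back down to 0xffffff00, and XORs that with C,
   giving 0xffffff80 -- exactly 0x80 sign-extended from 8 bits. For
   0x7f the isolated bit is 0 and C comes back unchanged.  */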
4616 \f
4617 /* Find ways of folding logical expressions of LHS and RHS:
4618 Try to merge two comparisons to the same innermost item.
4619 Look for range tests like "ch >= '0' && ch <= '9'".
4620 Look for combinations of simple terms on machines with expensive branches
4621 and evaluate the RHS unconditionally.
4622
4623 For example, if we have p->a == 2 && p->b == 4 and we can make an
4624 object large enough to span both A and B, we can do this with a comparison
4625 against the object ANDed with a mask.
4626
4627 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4628 operations to do this with one comparison.
4629
4630 We check for both normal comparisons and the BIT_AND_EXPRs made by
4631 this function and the one above.
4632
4633 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4634 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4635
4636 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4637 two operands.
4638
4639 We return the simplified tree or 0 if no optimization is possible. */
4640
4641 static tree
4642 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4643 {
4644 /* If this is the "or" of two comparisons, we can do something if
4645 the comparisons are NE_EXPR. If this is the "and", we can do something
4646 if the comparisons are EQ_EXPR. I.e.,
4647 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4648
4649 WANTED_CODE is this operation code. For single bit fields, we can
4650 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4651 comparison for one-bit fields. */
4652
4653 enum tree_code wanted_code;
4654 enum tree_code lcode, rcode;
4655 tree ll_arg, lr_arg, rl_arg, rr_arg;
4656 tree ll_inner, lr_inner, rl_inner, rr_inner;
4657 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4658 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4659 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4660 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4661 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4662 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4663 enum machine_mode lnmode, rnmode;
4664 tree ll_mask, lr_mask, rl_mask, rr_mask;
4665 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4666 tree l_const, r_const;
4667 tree lntype, rntype, result;
4668 int first_bit, end_bit;
4669 int volatilep;
4670
4671 /* Start by getting the comparison codes. Fail if anything is volatile.
4672 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4673 it were surrounded with a NE_EXPR. */
4674
4675 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4676 return 0;
4677
4678 lcode = TREE_CODE (lhs);
4679 rcode = TREE_CODE (rhs);
4680
4681 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4682 {
4683 lhs = build2 (NE_EXPR, truth_type, lhs,
4684 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4685 lcode = NE_EXPR;
4686 }
4687
4688 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4689 {
4690 rhs = build2 (NE_EXPR, truth_type, rhs,
4691 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4692 rcode = NE_EXPR;
4693 }
4694
4695 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4696 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4697 return 0;
4698
4699 ll_arg = TREE_OPERAND (lhs, 0);
4700 lr_arg = TREE_OPERAND (lhs, 1);
4701 rl_arg = TREE_OPERAND (rhs, 0);
4702 rr_arg = TREE_OPERAND (rhs, 1);
4703
4704 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4705 if (simple_operand_p (ll_arg)
4706 && simple_operand_p (lr_arg))
4707 {
4708 tree result;
4709 if (operand_equal_p (ll_arg, rl_arg, 0)
4710 && operand_equal_p (lr_arg, rr_arg, 0))
4711 {
4712 result = combine_comparisons (code, lcode, rcode,
4713 truth_type, ll_arg, lr_arg);
4714 if (result)
4715 return result;
4716 }
4717 else if (operand_equal_p (ll_arg, rr_arg, 0)
4718 && operand_equal_p (lr_arg, rl_arg, 0))
4719 {
4720 result = combine_comparisons (code, lcode,
4721 swap_tree_comparison (rcode),
4722 truth_type, ll_arg, lr_arg);
4723 if (result)
4724 return result;
4725 }
4726 }
4727
4728 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4729 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4730
4731 /* If the RHS can be evaluated unconditionally and its operands are
4732 simple, it wins to evaluate the RHS unconditionally on machines
4733 with expensive branches. In this case, this isn't a comparison
4734 that can be merged. Avoid doing this if the RHS is a floating-point
4735 comparison since those can trap. */
4736
4737 if (BRANCH_COST >= 2
4738 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4739 && simple_operand_p (rl_arg)
4740 && simple_operand_p (rr_arg))
4741 {
4742 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4743 if (code == TRUTH_OR_EXPR
4744 && lcode == NE_EXPR && integer_zerop (lr_arg)
4745 && rcode == NE_EXPR && integer_zerop (rr_arg)
4746 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4747 return build2 (NE_EXPR, truth_type,
4748 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4749 ll_arg, rl_arg),
4750 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4751
4752 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4753 if (code == TRUTH_AND_EXPR
4754 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4755 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4756 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4757 return build2 (EQ_EXPR, truth_type,
4758 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4759 ll_arg, rl_arg),
4760 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4761
4762 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4763 return build2 (code, truth_type, lhs, rhs);
4764 }
4765
4766 /* See if the comparisons can be merged. Then get all the parameters for
4767 each side. */
4768
4769 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4770 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4771 return 0;
4772
4773 volatilep = 0;
4774 ll_inner = decode_field_reference (ll_arg,
4775 &ll_bitsize, &ll_bitpos, &ll_mode,
4776 &ll_unsignedp, &volatilep, &ll_mask,
4777 &ll_and_mask);
4778 lr_inner = decode_field_reference (lr_arg,
4779 &lr_bitsize, &lr_bitpos, &lr_mode,
4780 &lr_unsignedp, &volatilep, &lr_mask,
4781 &lr_and_mask);
4782 rl_inner = decode_field_reference (rl_arg,
4783 &rl_bitsize, &rl_bitpos, &rl_mode,
4784 &rl_unsignedp, &volatilep, &rl_mask,
4785 &rl_and_mask);
4786 rr_inner = decode_field_reference (rr_arg,
4787 &rr_bitsize, &rr_bitpos, &rr_mode,
4788 &rr_unsignedp, &volatilep, &rr_mask,
4789 &rr_and_mask);
4790
4791 /* The inner operation on the lhs of each comparison must be the
4792 same if we are to be able to do anything.
4793 Then see if we have constants. If not, the same must be true for
4794 the rhs's. */
4795 if (volatilep || ll_inner == 0 || rl_inner == 0
4796 || ! operand_equal_p (ll_inner, rl_inner, 0))
4797 return 0;
4798
4799 if (TREE_CODE (lr_arg) == INTEGER_CST
4800 && TREE_CODE (rr_arg) == INTEGER_CST)
4801 l_const = lr_arg, r_const = rr_arg;
4802 else if (lr_inner == 0 || rr_inner == 0
4803 || ! operand_equal_p (lr_inner, rr_inner, 0))
4804 return 0;
4805 else
4806 l_const = r_const = 0;
4807
4808 /* If either comparison code is not correct for our logical operation,
4809 fail. However, we can convert a one-bit comparison against zero into
4810 the opposite comparison against that bit being set in the field. */
4811
4812 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4813 if (lcode != wanted_code)
4814 {
4815 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4816 {
4817 /* Make the left operand unsigned, since we are only interested
4818 in the value of one bit. Otherwise we are doing the wrong
4819 thing below. */
4820 ll_unsignedp = 1;
4821 l_const = ll_mask;
4822 }
4823 else
4824 return 0;
4825 }
4826
4827 /* This is analogous to the code for l_const above. */
4828 if (rcode != wanted_code)
4829 {
4830 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4831 {
4832 rl_unsignedp = 1;
4833 r_const = rl_mask;
4834 }
4835 else
4836 return 0;
4837 }
4838
4839 /* After this point all optimizations will generate bit-field
4840 references, which we might not want. */
4841 if (! lang_hooks.can_use_bit_fields_p ())
4842 return 0;
4843
4844 /* See if we can find a mode that contains both fields being compared on
4845 the left. If we can't, fail. Otherwise, update all constants and masks
4846 to be relative to a field of that size. */
4847 first_bit = MIN (ll_bitpos, rl_bitpos);
4848 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4849 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4850 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4851 volatilep);
4852 if (lnmode == VOIDmode)
4853 return 0;
4854
4855 lnbitsize = GET_MODE_BITSIZE (lnmode);
4856 lnbitpos = first_bit & ~ (lnbitsize - 1);
4857 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4858 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4859
4860 if (BYTES_BIG_ENDIAN)
4861 {
4862 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4863 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4864 }
4865
4866 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4867 size_int (xll_bitpos), 0);
4868 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4869 size_int (xrl_bitpos), 0);
4870
4871 if (l_const)
4872 {
4873 l_const = fold_convert (lntype, l_const);
4874 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4875 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4876 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4877 fold_build1 (BIT_NOT_EXPR,
4878 lntype, ll_mask),
4879 0)))
4880 {
4881 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4882
4883 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4884 }
4885 }
4886 if (r_const)
4887 {
4888 r_const = fold_convert (lntype, r_const);
4889 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4890 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4891 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4892 fold_build1 (BIT_NOT_EXPR,
4893 lntype, rl_mask),
4894 0)))
4895 {
4896 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4897
4898 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4899 }
4900 }
4901
4902 /* If the right sides are not constant, do the same for them. Also,
4903 disallow this optimization if a size or signedness mismatch occurs
4904 between the left and right sides. */
4905 if (l_const == 0)
4906 {
4907 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4908 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4909 /* Make sure the two fields on the right
4910 correspond to the left without being swapped. */
4911 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4912 return 0;
4913
4914 first_bit = MIN (lr_bitpos, rr_bitpos);
4915 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4916 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4917 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4918 volatilep);
4919 if (rnmode == VOIDmode)
4920 return 0;
4921
4922 rnbitsize = GET_MODE_BITSIZE (rnmode);
4923 rnbitpos = first_bit & ~ (rnbitsize - 1);
4924 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4925 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4926
4927 if (BYTES_BIG_ENDIAN)
4928 {
4929 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4930 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4931 }
4932
4933 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4934 size_int (xlr_bitpos), 0);
4935 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4936 size_int (xrr_bitpos), 0);
4937
4938 /* Make a mask that corresponds to both fields being compared.
4939 Do this for both items being compared. If the operands are the
4940 same size and the bits being compared are in the same position
4941 then we can do this by masking both and comparing the masked
4942 results. */
4943 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4944 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4945 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4946 {
4947 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4948 ll_unsignedp || rl_unsignedp);
4949 if (! all_ones_mask_p (ll_mask, lnbitsize))
4950 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4951
4952 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4953 lr_unsignedp || rr_unsignedp);
4954 if (! all_ones_mask_p (lr_mask, rnbitsize))
4955 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4956
4957 return build2 (wanted_code, truth_type, lhs, rhs);
4958 }
4959
4960 /* There is still another way we can do something: If both pairs of
4961 fields being compared are adjacent, we may be able to make a wider
4962 field containing them both.
4963
4964 Note that we still must mask the lhs/rhs expressions. Furthermore,
4965 the mask must be shifted to account for the shift done by
4966 make_bit_field_ref. */
4967 if ((ll_bitsize + ll_bitpos == rl_bitpos
4968 && lr_bitsize + lr_bitpos == rr_bitpos)
4969 || (ll_bitpos == rl_bitpos + rl_bitsize
4970 && lr_bitpos == rr_bitpos + rr_bitsize))
4971 {
4972 tree type;
4973
4974 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4975 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4976 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4977 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4978
4979 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4980 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4981 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4982 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4983
4984 /* Convert to the smaller type before masking out unwanted bits. */
4985 type = lntype;
4986 if (lntype != rntype)
4987 {
4988 if (lnbitsize > rnbitsize)
4989 {
4990 lhs = fold_convert (rntype, lhs);
4991 ll_mask = fold_convert (rntype, ll_mask);
4992 type = rntype;
4993 }
4994 else if (lnbitsize < rnbitsize)
4995 {
4996 rhs = fold_convert (lntype, rhs);
4997 lr_mask = fold_convert (lntype, lr_mask);
4998 type = lntype;
4999 }
5000 }
5001
5002 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5003 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5004
5005 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5006 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5007
5008 return build2 (wanted_code, truth_type, lhs, rhs);
5009 }
5010
5011 return 0;
5012 }
5013
5014 /* Handle the case of comparisons with constants. If there is something in
5015 common between the masks, those bits of the constants must be the same.
5016 If not, the condition is always false. Test for this to avoid generating
5017 incorrect code below. */
5018 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5019 if (! integer_zerop (result)
5020 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5021 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5022 {
5023 if (wanted_code == NE_EXPR)
5024 {
5025 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5026 return constant_boolean_node (true, truth_type);
5027 }
5028 else
5029 {
5030 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5031 return constant_boolean_node (false, truth_type);
5032 }
5033 }
5034
5035 /* Construct the expression we will return. First get the component
5036 reference we will make. Unless the mask is all ones the width of
5037 that field, perform the mask operation. Then compare with the
5038 merged constant. */
5039 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5040 ll_unsignedp || rl_unsignedp);
5041
5042 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5043 if (! all_ones_mask_p (ll_mask, lnbitsize))
5044 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5045
5046 return build2 (wanted_code, truth_type, result,
5047 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5048 }
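
/* To make the constant case concrete: for a struct with adjacent
   8-bit fields A (at bit 0) and B (at bit 8), "p->a == 2 && p->b == 4"
   loads the 16 bits once and, on a little-endian target, becomes
   roughly (w & 0xffff) == 0x0402, where W is the combined word --
   one load and one comparison instead of two of each.  */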
5049 \f
5050 /* Optimize a comparison, with code CODE and result type TYPE, of a
5051 MIN_EXPR or MAX_EXPR (OP0) with a constant (OP1). */
5052
5053 static tree
5054 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5055 {
5056 tree arg0 = op0;
5057 enum tree_code op_code;
5058 tree comp_const = op1;
5059 tree minmax_const;
5060 int consts_equal, consts_lt;
5061 tree inner;
5062
5063 STRIP_SIGN_NOPS (arg0);
5064
5065 op_code = TREE_CODE (arg0);
5066 minmax_const = TREE_OPERAND (arg0, 1);
5067 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5068 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5069 inner = TREE_OPERAND (arg0, 0);
5070
5071 /* If something does not permit us to optimize, return NULL_TREE. */
5072 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5073 || TREE_CODE (comp_const) != INTEGER_CST
5074 || TREE_CONSTANT_OVERFLOW (comp_const)
5075 || TREE_CODE (minmax_const) != INTEGER_CST
5076 || TREE_CONSTANT_OVERFLOW (minmax_const))
5077 return NULL_TREE;
5078
5079 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5080 and GT_EXPR, doing the rest with recursive calls using logical
5081 simplifications. */
5082 switch (code)
5083 {
5084 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5085 {
5086 /* FIXME: We should be able to invert code without building a
5087 scratch tree node, but doing so would require us to
5088 duplicate a part of invert_truthvalue here. */
5089 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5090 tem = optimize_minmax_comparison (TREE_CODE (tem),
5091 TREE_TYPE (tem),
5092 TREE_OPERAND (tem, 0),
5093 TREE_OPERAND (tem, 1));
5094 return invert_truthvalue (tem);
5095 }
5096
5097 case GE_EXPR:
5098 return
5099 fold_build2 (TRUTH_ORIF_EXPR, type,
5100 optimize_minmax_comparison
5101 (EQ_EXPR, type, arg0, comp_const),
5102 optimize_minmax_comparison
5103 (GT_EXPR, type, arg0, comp_const));
5104
5105 case EQ_EXPR:
5106 if (op_code == MAX_EXPR && consts_equal)
5107 /* MAX (X, 0) == 0 -> X <= 0 */
5108 return fold_build2 (LE_EXPR, type, inner, comp_const);
5109
5110 else if (op_code == MAX_EXPR && consts_lt)
5111 /* MAX (X, 0) == 5 -> X == 5 */
5112 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5113
5114 else if (op_code == MAX_EXPR)
5115 /* MAX (X, 0) == -1 -> false */
5116 return omit_one_operand (type, integer_zero_node, inner);
5117
5118 else if (consts_equal)
5119 /* MIN (X, 0) == 0 -> X >= 0 */
5120 return fold_build2 (GE_EXPR, type, inner, comp_const);
5121
5122 else if (consts_lt)
5123 /* MIN (X, 0) == 5 -> false */
5124 return omit_one_operand (type, integer_zero_node, inner);
5125
5126 else
5127 /* MIN (X, 0) == -1 -> X == -1 */
5128 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5129
5130 case GT_EXPR:
5131 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5132 /* MAX (X, 0) > 0 -> X > 0
5133 MAX (X, 0) > 5 -> X > 5 */
5134 return fold_build2 (GT_EXPR, type, inner, comp_const);
5135
5136 else if (op_code == MAX_EXPR)
5137 /* MAX (X, 0) > -1 -> true */
5138 return omit_one_operand (type, integer_one_node, inner);
5139
5140 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5141 /* MIN (X, 0) > 0 -> false
5142 MIN (X, 0) > 5 -> false */
5143 return omit_one_operand (type, integer_zero_node, inner);
5144
5145 else
5146 /* MIN (X, 0) > -1 -> X > -1 */
5147 return fold_build2 (GT_EXPR, type, inner, comp_const);
5148
5149 default:
5150 return NULL_TREE;
5151 }
5152 }
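
/* Two of the entries above, spelled out: "MAX (x, 0) == 5" has
   CONSTS_LT (0 < 5), so it folds to "x == 5"; "MIN (x, 0) > 5"
   likewise has CONSTS_LT, and since MIN (x, 0) can never exceed
   zero it folds to constant false via omit_one_operand.  */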
5153 \f
5154 /* T is an integer expression that is being multiplied by, divided by, or
5155 taken modulo a constant C (CODE says which operation and what kind of
5156 division or modulus). See if we can eliminate that operation by folding it with
5157 other operations already in T. WIDE_TYPE, if non-null, is a type that
5158 should be used for the computation if wider than our type.
5159
5160 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5161 (X * 2) + (Y * 4). We must, however, be assured that either the original
5162 expression would not overflow or that overflow is undefined for the type
5163 in the language in question.
5164
5165 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5166 the machine has a multiply-accumulate insn or that this is part of an
5167 addressing calculation.
5168
5169 If we return a non-null expression, it is an equivalent form of the
5170 original computation, but need not be in the original type. */
5171
5172 static tree
5173 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5174 {
5175 /* To avoid exponential search depth, refuse to allow recursion past
5176 three levels. Beyond that (1) it's highly unlikely that we'll find
5177 something interesting and (2) we've probably processed it before
5178 when we built the inner expression. */
5179
5180 static int depth;
5181 tree ret;
5182
5183 if (depth > 3)
5184 return NULL;
5185
5186 depth++;
5187 ret = extract_muldiv_1 (t, c, code, wide_type);
5188 depth--;
5189
5190 return ret;
5191 }
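
/* Restating the example from the comment above: dividing
   (X * 8) + (Y * 16) by 4 distributes to (X * 2) + (Y * 4), both
   addends being multiples of 4; by contrast (X * 8) + 5 does not
   distribute under TRUNC_DIV_EXPR, since 5 is not.  */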
5192
5193 static tree
5194 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5195 {
5196 tree type = TREE_TYPE (t);
5197 enum tree_code tcode = TREE_CODE (t);
5198 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5199 > GET_MODE_SIZE (TYPE_MODE (type)))
5200 ? wide_type : type);
5201 tree t1, t2;
5202 int same_p = tcode == code;
5203 tree op0 = NULL_TREE, op1 = NULL_TREE;
5204
5205 /* Don't deal with constants of zero here; they confuse the code below. */
5206 if (integer_zerop (c))
5207 return NULL_TREE;
5208
5209 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5210 op0 = TREE_OPERAND (t, 0);
5211
5212 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5213 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5214
5215 /* Note that we need not handle conditional operations here since fold
5216 already handles those cases. So just do arithmetic here. */
5217 switch (tcode)
5218 {
5219 case INTEGER_CST:
5220 /* For a constant, we can always simplify if we are a multiply
5221 or (for divide and modulus) if it is a multiple of our constant. */
5222 if (code == MULT_EXPR
5223 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5224 return const_binop (code, fold_convert (ctype, t),
5225 fold_convert (ctype, c), 0);
5226 break;
5227
5228 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5229 /* If op0 is an expression ... */
5230 if ((COMPARISON_CLASS_P (op0)
5231 || UNARY_CLASS_P (op0)
5232 || BINARY_CLASS_P (op0)
5233 || EXPRESSION_CLASS_P (op0))
5234 /* ... and is unsigned, and its type is smaller than ctype,
5235 then we cannot pass through as widening. */
5236 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5237 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5238 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5239 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5240 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5241 /* ... or this is a truncation (t is narrower than op0),
5242 then we cannot pass through this narrowing. */
5243 || (GET_MODE_SIZE (TYPE_MODE (type))
5244 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5245 /* ... or signedness changes for division or modulus,
5246 then we cannot pass through this conversion. */
5247 || (code != MULT_EXPR
5248 && (TYPE_UNSIGNED (ctype)
5249 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5250 break;
5251
5252 /* Pass the constant down and see if we can make a simplification. If
5253 we can, replace this expression with the inner simplification for
5254 possible later conversion to our or some other type. */
5255 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5256 && TREE_CODE (t2) == INTEGER_CST
5257 && ! TREE_CONSTANT_OVERFLOW (t2)
5258 && (0 != (t1 = extract_muldiv (op0, t2, code,
5259 code == MULT_EXPR
5260 ? ctype : NULL_TREE))))
5261 return t1;
5262 break;
5263
5264 case ABS_EXPR:
5265 /* If widening the type changes it from signed to unsigned, then we
5266 must avoid building ABS_EXPR itself as unsigned. */
5267 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5268 {
5269 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5270 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5271 {
5272 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5273 return fold_convert (ctype, t1);
5274 }
5275 break;
5276 }
5277 /* FALLTHROUGH */
5278 case NEGATE_EXPR:
5279 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5280 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5281 break;
5282
5283 case MIN_EXPR: case MAX_EXPR:
5284 /* If widening the type changes the signedness, then we can't perform
5285 this optimization as that changes the result. */
5286 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5287 break;
5288
5289 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5290 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5291 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5292 {
5293 if (tree_int_cst_sgn (c) < 0)
5294 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5295
5296 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5297 fold_convert (ctype, t2));
5298 }
5299 break;
5300
5301 case LSHIFT_EXPR: case RSHIFT_EXPR:
5302 /* If the second operand is constant, this is a multiplication
5303 or floor division by a power of two, so we can treat it that
5304 way unless the multiplier or divisor overflows. Signed
5305 left-shift overflow is implementation-defined rather than
5306 undefined in C90, so do not convert signed left shift into
5307 multiplication. */
5308 if (TREE_CODE (op1) == INTEGER_CST
5309 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5310 /* const_binop may not detect overflow correctly,
5311 so check for it explicitly here. */
5312 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5313 && TREE_INT_CST_HIGH (op1) == 0
5314 && 0 != (t1 = fold_convert (ctype,
5315 const_binop (LSHIFT_EXPR,
5316 size_one_node,
5317 op1, 0)))
5318 && ! TREE_OVERFLOW (t1))
5319 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5320 ? MULT_EXPR : FLOOR_DIV_EXPR,
5321 ctype, fold_convert (ctype, op0), t1),
5322 c, code, wide_type);
5323 break;
5324
5325 case PLUS_EXPR: case MINUS_EXPR:
5326 /* See if we can eliminate the operation on both sides. If we can, we
5327 can return a new PLUS or MINUS. If we can't, the only remaining
5328 cases where we can do anything are if the second operand is a
5329 constant. */
5330 t1 = extract_muldiv (op0, c, code, wide_type);
5331 t2 = extract_muldiv (op1, c, code, wide_type);
5332 if (t1 != 0 && t2 != 0
5333 && (code == MULT_EXPR
5334 /* If not multiplication, we can only do this if both operands
5335 are divisible by c. */
5336 || (multiple_of_p (ctype, op0, c)
5337 && multiple_of_p (ctype, op1, c))))
5338 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5339 fold_convert (ctype, t2));
5340
5341 /* If this was a subtraction, negate OP1 and set it to be an addition.
5342 This simplifies the logic below. */
5343 if (tcode == MINUS_EXPR)
5344 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5345
5346 if (TREE_CODE (op1) != INTEGER_CST)
5347 break;
5348
5349 /* If either OP1 or C is negative, this optimization is not safe for
5350 some of the division and remainder types while for others we need
5351 to change the code. */
5352 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5353 {
5354 if (code == CEIL_DIV_EXPR)
5355 code = FLOOR_DIV_EXPR;
5356 else if (code == FLOOR_DIV_EXPR)
5357 code = CEIL_DIV_EXPR;
5358 else if (code != MULT_EXPR
5359 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5360 break;
5361 }
5362
5363 /* If it's a multiply or a division/modulus operation of a multiple
5364 of our constant, do the operation and verify it doesn't overflow. */
5365 if (code == MULT_EXPR
5366 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5367 {
5368 op1 = const_binop (code, fold_convert (ctype, op1),
5369 fold_convert (ctype, c), 0);
5370 /* We allow the constant to overflow with wrapping semantics. */
5371 if (op1 == 0
5372 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5373 break;
5374 }
5375 else
5376 break;
5377
5378 /* If we have an unsigned type that is not a sizetype, we cannot widen
5379 the operation since it will change the result if the original
5380 computation overflowed. */
5381 if (TYPE_UNSIGNED (ctype)
5382 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5383 && ctype != type)
5384 break;
5385
5386 /* If we were able to eliminate our operation from the first side,
5387 apply our operation to the second side and reform the PLUS. */
5388 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5389 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5390
5391 /* The last case is if we are a multiply. In that case, we can
5392 apply the distributive law to commute the multiply and addition
5393 if the multiplication of the constants doesn't overflow. */
5394 if (code == MULT_EXPR)
5395 return fold_build2 (tcode, ctype,
5396 fold_build2 (code, ctype,
5397 fold_convert (ctype, op0),
5398 fold_convert (ctype, c)),
5399 op1);
5400
5401 break;
5402
5403 case MULT_EXPR:
5404 /* We have a special case here if we are doing something like
5405 (C * 8) % 4 since we know that's zero. */
5406 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5407 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5408 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5409 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5410 return omit_one_operand (type, integer_zero_node, op0);
5411
5412 /* ... fall through ... */
5413
5414 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5415 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5416 /* If we can extract our operation from the LHS, do so and return a
5417 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5418 do something only if the second operand is a constant. */
5419 if (same_p
5420 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5421 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5422 fold_convert (ctype, op1));
5423 else if (tcode == MULT_EXPR && code == MULT_EXPR
5424 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5425 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5426 fold_convert (ctype, t1));
5427 else if (TREE_CODE (op1) != INTEGER_CST)
5428 return 0;
5429
5430 /* If these are the same operation types, we can associate them
5431 assuming no overflow. */
5432 if (tcode == code
5433 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5434 fold_convert (ctype, c), 0))
5435 && ! TREE_OVERFLOW (t1))
5436 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5437
5438 /* If these operations "cancel" each other, we have the main
5439 optimizations of this pass, which occur when either constant is a
5440 multiple of the other, in which case we replace this with either an
5441 operation of either CODE or TCODE.
5442
5443 If we have an unsigned type that is not a sizetype, we cannot do
5444 this since it will change the result if the original computation
5445 overflowed. */
5446 if ((! TYPE_UNSIGNED (ctype)
5447 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5448 && ! flag_wrapv
5449 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5450 || (tcode == MULT_EXPR
5451 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5452 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5453 {
5454 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5455 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5456 fold_convert (ctype,
5457 const_binop (TRUNC_DIV_EXPR,
5458 op1, c, 0)));
5459 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5460 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5461 fold_convert (ctype,
5462 const_binop (TRUNC_DIV_EXPR,
5463 c, op1, 0)));
5464 }
5465 break;
5466
5467 default:
5468 break;
5469 }
5470
5471 return 0;
5472 }
5473 \f
5474 /* Return a node which has the indicated constant VALUE (either 0 or
5475 1), and is of the indicated TYPE. */
5476
5477 tree
5478 constant_boolean_node (int value, tree type)
5479 {
5480 if (type == integer_type_node)
5481 return value ? integer_one_node : integer_zero_node;
5482 else if (type == boolean_type_node)
5483 return value ? boolean_true_node : boolean_false_node;
5484 else
5485 return build_int_cst (type, value);
5486 }
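/* Illustrative uses: constant_boolean_node (1, boolean_type_node)
   yields boolean_true_node, and constant_boolean_node (0,
   integer_type_node) yields integer_zero_node.  */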
5487
5488
5489 /* Return true if expr looks like an ARRAY_REF and set base and
5490 offset to the appropriate trees. If there is no offset,
5491 offset is set to NULL_TREE. Base will be canonicalized to
5492 something you can get the element type from using
5493 TREE_TYPE (TREE_TYPE (base)). */
5494
5495 static bool
5496 extract_array_ref (tree expr, tree *base, tree *offset)
5497 {
5498 /* One canonical form is a PLUS_EXPR with the first
5499 argument being an ADDR_EXPR with a possible NOP_EXPR
5500 attached. */
5501 if (TREE_CODE (expr) == PLUS_EXPR)
5502 {
5503 tree op0 = TREE_OPERAND (expr, 0);
5504 tree inner_base, dummy1;
5505 /* Strip NOP_EXPRs here because the C frontends and/or
5506 folders may present us with expressions like (int *)&x.a + 4B. */
5507 STRIP_NOPS (op0);
5508 if (extract_array_ref (op0, &inner_base, &dummy1))
5509 {
5510 *base = inner_base;
5511 if (dummy1 == NULL_TREE)
5512 *offset = TREE_OPERAND (expr, 1);
5513 else
5514 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5515 dummy1, TREE_OPERAND (expr, 1));
5516 return true;
5517 }
5518 }
5519 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5520 which we transform into an ADDR_EXPR with appropriate
5521 offset. For other arguments to the ADDR_EXPR we assume
5522 zero offset and as such do not care about the ADDR_EXPR
5523 type and strip possible nops from it. */
5524 else if (TREE_CODE (expr) == ADDR_EXPR)
5525 {
5526 tree op0 = TREE_OPERAND (expr, 0);
5527 if (TREE_CODE (op0) == ARRAY_REF)
5528 {
5529 *base = TREE_OPERAND (op0, 0);
5530 *offset = TREE_OPERAND (op0, 1);
5531 }
5532 else
5533 {
5534 /* Handle array-to-pointer decay as &a. */
5535 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5536 *base = TREE_OPERAND (expr, 0);
5537 else
5538 *base = expr;
5539 *offset = NULL_TREE;
5540 }
5541 return true;
5542 }
5543 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5544 else if (SSA_VAR_P (expr)
5545 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5546 {
5547 *base = expr;
5548 *offset = NULL_TREE;
5549 return true;
5550 }
5551
5552 return false;
5553 }
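/* Illustrative examples of the three canonical forms handled above
   (assumed, not from the original source): for &a[3] we return base a
   and offset 3; for (int *)&x.a + 4 the inner ADDR_EXPR supplies the
   base and the offset is 4; for a plain pointer p we return base p
   and a NULL_TREE offset.  */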
5554
5555
5556 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5557 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5558 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5559 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5560 COND is the first argument to CODE; otherwise (as in the example
5561 given here), it is the second argument. TYPE is the type of the
5562 original expression. Return NULL_TREE if no simplification is
5563 possible. */
5564
5565 static tree
5566 fold_binary_op_with_conditional_arg (enum tree_code code,
5567 tree type, tree op0, tree op1,
5568 tree cond, tree arg, int cond_first_p)
5569 {
5570 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5571 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5572 tree test, true_value, false_value;
5573 tree lhs = NULL_TREE;
5574 tree rhs = NULL_TREE;
5575
5576 /* This transformation is only worthwhile if we don't have to wrap
5577 arg in a SAVE_EXPR, and the operation can be simplified on at least
5578 one of the branches once it's pushed inside the COND_EXPR. */
5579 if (!TREE_CONSTANT (arg))
5580 return NULL_TREE;
5581
5582 if (TREE_CODE (cond) == COND_EXPR)
5583 {
5584 test = TREE_OPERAND (cond, 0);
5585 true_value = TREE_OPERAND (cond, 1);
5586 false_value = TREE_OPERAND (cond, 2);
5587 /* If this operand is an expression that throws (and hence has
5588 void type), it does not make sense to try to perform a logical
5589 or arithmetic operation involving it. */
5590 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5591 lhs = true_value;
5592 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5593 rhs = false_value;
5594 }
5595 else
5596 {
5597 tree testtype = TREE_TYPE (cond);
5598 test = cond;
5599 true_value = constant_boolean_node (true, testtype);
5600 false_value = constant_boolean_node (false, testtype);
5601 }
5602
5603 arg = fold_convert (arg_type, arg);
5604 if (lhs == 0)
5605 {
5606 true_value = fold_convert (cond_type, true_value);
5607 if (cond_first_p)
5608 lhs = fold_build2 (code, type, true_value, arg);
5609 else
5610 lhs = fold_build2 (code, type, arg, true_value);
5611 }
5612 if (rhs == 0)
5613 {
5614 false_value = fold_convert (cond_type, false_value);
5615 if (cond_first_p)
5616 rhs = fold_build2 (code, type, false_value, arg);
5617 else
5618 rhs = fold_build2 (code, type, arg, false_value);
5619 }
5620
5621 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5622 return fold_convert (type, test);
5623 }
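/* A concrete instance (illustrative only): with CODE = PLUS_EXPR,
   ARG = 1 and COND = (x < y), we build
   (x < y) ? (1 + 1) : (1 + 0), which folds to (x < y) ? 2 : 1.  */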
5624
5625 \f
5626 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5627
5628 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5629 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5630 ADDEND is the same as X.
5631
5632 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5633 and finite. The problematic cases are when X is zero, and its mode
5634 has signed zeros. In the case of rounding towards -infinity,
5635 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5636 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5637
5638 static bool
5639 fold_real_zero_addition_p (tree type, tree addend, int negate)
5640 {
5641 if (!real_zerop (addend))
5642 return false;
5643
5644 /* Don't allow the fold with -fsignaling-nans. */
5645 if (HONOR_SNANS (TYPE_MODE (type)))
5646 return false;
5647
5648 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5649 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5650 return true;
5651
5652 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5653 if (TREE_CODE (addend) == REAL_CST
5654 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5655 negate = !negate;
5656
5657 /* The mode has signed zeros, and we have to honor their sign.
5658 In this situation, there is only one case we can return true for.
5659 X - 0 is the same as X unless rounding towards -infinity is
5660 supported. */
5661 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5662 }
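/* Illustrative cases: when signed zeros are ignored, X + 0.0 and
   X - 0.0 both qualify.  When they are honored, X + (+0.0) is
   rejected because (-0.0) + (+0.0) is +0.0, while X - (+0.0)
   qualifies provided sign-dependent rounding need not be honored.  */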
5663
5664 /* Subroutine of fold() that checks comparisons of built-in math
5665 functions against real constants.
5666
5667 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5668 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5669 is the type of the result and ARG0 and ARG1 are the operands of the
5670 comparison. ARG1 must be a TREE_REAL_CST.
5671
5672 The function returns the constant folded tree if a simplification
5673 can be made, and NULL_TREE otherwise. */
5674
5675 static tree
5676 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5677 tree type, tree arg0, tree arg1)
5678 {
5679 REAL_VALUE_TYPE c;
5680
5681 if (BUILTIN_SQRT_P (fcode))
5682 {
5683 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5684 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5685
5686 c = TREE_REAL_CST (arg1);
5687 if (REAL_VALUE_NEGATIVE (c))
5688 {
5689 /* sqrt(x) < y is always false, if y is negative. */
5690 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5691 return omit_one_operand (type, integer_zero_node, arg);
5692
5693 /* sqrt(x) > y is always true, if y is negative and we
5694 don't care about NaNs, i.e. negative values of x. */
5695 if (code == NE_EXPR || !HONOR_NANS (mode))
5696 return omit_one_operand (type, integer_one_node, arg);
5697
5698 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5699 return fold_build2 (GE_EXPR, type, arg,
5700 build_real (TREE_TYPE (arg), dconst0));
5701 }
5702 else if (code == GT_EXPR || code == GE_EXPR)
5703 {
5704 REAL_VALUE_TYPE c2;
5705
5706 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5707 real_convert (&c2, mode, &c2);
5708
5709 if (REAL_VALUE_ISINF (c2))
5710 {
5711 /* sqrt(x) > y is x == +Inf, when y is very large. */
5712 if (HONOR_INFINITIES (mode))
5713 return fold_build2 (EQ_EXPR, type, arg,
5714 build_real (TREE_TYPE (arg), c2));
5715
5716 /* sqrt(x) > y is always false, when y is very large
5717 and we don't care about infinities. */
5718 return omit_one_operand (type, integer_zero_node, arg);
5719 }
5720
5721 /* sqrt(x) > c is the same as x > c*c. */
5722 return fold_build2 (code, type, arg,
5723 build_real (TREE_TYPE (arg), c2));
5724 }
5725 else if (code == LT_EXPR || code == LE_EXPR)
5726 {
5727 REAL_VALUE_TYPE c2;
5728
5729 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5730 real_convert (&c2, mode, &c2);
5731
5732 if (REAL_VALUE_ISINF (c2))
5733 {
5734 /* sqrt(x) < y is always true, when y is a very large
5735 value and we don't care about NaNs or Infinities. */
5736 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5737 return omit_one_operand (type, integer_one_node, arg);
5738
5739 /* sqrt(x) < y is x != +Inf when y is very large and we
5740 don't care about NaNs. */
5741 if (! HONOR_NANS (mode))
5742 return fold_build2 (NE_EXPR, type, arg,
5743 build_real (TREE_TYPE (arg), c2));
5744
5745 /* sqrt(x) < y is x >= 0 when y is very large and we
5746 don't care about Infinities. */
5747 if (! HONOR_INFINITIES (mode))
5748 return fold_build2 (GE_EXPR, type, arg,
5749 build_real (TREE_TYPE (arg), dconst0));
5750
5751 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5752 if (lang_hooks.decls.global_bindings_p () != 0
5753 || CONTAINS_PLACEHOLDER_P (arg))
5754 return NULL_TREE;
5755
5756 arg = save_expr (arg);
5757 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5758 fold_build2 (GE_EXPR, type, arg,
5759 build_real (TREE_TYPE (arg),
5760 dconst0)),
5761 fold_build2 (NE_EXPR, type, arg,
5762 build_real (TREE_TYPE (arg),
5763 c2)));
5764 }
5765
5766 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5767 if (! HONOR_NANS (mode))
5768 return fold_build2 (code, type, arg,
5769 build_real (TREE_TYPE (arg), c2));
5770
5771 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5772 if (lang_hooks.decls.global_bindings_p () == 0
5773 && ! CONTAINS_PLACEHOLDER_P (arg))
5774 {
5775 arg = save_expr (arg);
5776 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5777 fold_build2 (GE_EXPR, type, arg,
5778 build_real (TREE_TYPE (arg),
5779 dconst0)),
5780 fold_build2 (code, type, arg,
5781 build_real (TREE_TYPE (arg),
5782 c2)));
5783 }
5784 }
5785 }
5786
5787 return NULL_TREE;
5788 }
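/* Worked examples (illustrative): sqrt(x) > 2.0 becomes x > 4.0,
   while sqrt(x) > -1.0 becomes x >= 0.0 when NaNs are honored and
   constant true under -ffast-math.  */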
5789
5790 /* Subroutine of fold() that optimizes comparisons against Infinities,
5791 either +Inf or -Inf.
5792
5793 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5794 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5795 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5796
5797 The function returns the constant folded tree if a simplification
5798 can be made, and NULL_TREE otherwise. */
5799
5800 static tree
5801 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5802 {
5803 enum machine_mode mode;
5804 REAL_VALUE_TYPE max;
5805 tree temp;
5806 bool neg;
5807
5808 mode = TYPE_MODE (TREE_TYPE (arg0));
5809
5810 /* For negative infinity swap the sense of the comparison. */
5811 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5812 if (neg)
5813 code = swap_tree_comparison (code);
5814
5815 switch (code)
5816 {
5817 case GT_EXPR:
5818 /* x > +Inf is always false, if we ignore sNaNs. */
5819 if (HONOR_SNANS (mode))
5820 return NULL_TREE;
5821 return omit_one_operand (type, integer_zero_node, arg0);
5822
5823 case LE_EXPR:
5824 /* x <= +Inf is always true, if we don't care about NaNs. */
5825 if (! HONOR_NANS (mode))
5826 return omit_one_operand (type, integer_one_node, arg0);
5827
5828 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5829 if (lang_hooks.decls.global_bindings_p () == 0
5830 && ! CONTAINS_PLACEHOLDER_P (arg0))
5831 {
5832 arg0 = save_expr (arg0);
5833 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5834 }
5835 break;
5836
5837 case EQ_EXPR:
5838 case GE_EXPR:
5839 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5840 real_maxval (&max, neg, mode);
5841 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5842 arg0, build_real (TREE_TYPE (arg0), max));
5843
5844 case LT_EXPR:
5845 /* x < +Inf is always equal to x <= DBL_MAX. */
5846 real_maxval (&max, neg, mode);
5847 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5848 arg0, build_real (TREE_TYPE (arg0), max));
5849
5850 case NE_EXPR:
5851 /* x != +Inf is always equal to !(x > DBL_MAX). */
5852 real_maxval (&max, neg, mode);
5853 if (! HONOR_NANS (mode))
5854 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5855 arg0, build_real (TREE_TYPE (arg0), max));
5856
5857 /* The transformation below creates non-gimple code and thus is
5858 not appropriate if we are in gimple form. */
5859 if (in_gimple_form)
5860 return NULL_TREE;
5861
5862 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5863 arg0, build_real (TREE_TYPE (arg0), max));
5864 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5865
5866 default:
5867 break;
5868 }
5869
5870 return NULL_TREE;
5871 }
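/* Worked examples (illustrative, for double X): X >= +Inf folds to
   X > DBL_MAX, X < +Inf folds to X <= DBL_MAX, and when NaNs are
   not honored X != +Inf folds to X <= DBL_MAX as well.  */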
5872
5873 /* Subroutine of fold() that optimizes comparisons of a division by
5874 a nonzero integer constant against an integer constant, i.e.
5875 X/C1 op C2.
5876
5877 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5878 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5879 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5880
5881 The function returns the constant folded tree if a simplification
5882 can be made, and NULL_TREE otherwise. */
5883
5884 static tree
5885 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5886 {
5887 tree prod, tmp, hi, lo;
5888 tree arg00 = TREE_OPERAND (arg0, 0);
5889 tree arg01 = TREE_OPERAND (arg0, 1);
5890 unsigned HOST_WIDE_INT lpart;
5891 HOST_WIDE_INT hpart;
5892 int overflow;
5893
5894 /* We have to do this the hard way to detect unsigned overflow.
5895 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5896 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5897 TREE_INT_CST_HIGH (arg01),
5898 TREE_INT_CST_LOW (arg1),
5899 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5900 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5901 prod = force_fit_type (prod, -1, overflow, false);
5902
5903 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5904 {
5905 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5906 lo = prod;
5907
5908 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5909 overflow = add_double (TREE_INT_CST_LOW (prod),
5910 TREE_INT_CST_HIGH (prod),
5911 TREE_INT_CST_LOW (tmp),
5912 TREE_INT_CST_HIGH (tmp),
5913 &lpart, &hpart);
5914 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5915 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5916 TREE_CONSTANT_OVERFLOW (prod));
5917 }
5918 else if (tree_int_cst_sgn (arg01) >= 0)
5919 {
5920 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5921 switch (tree_int_cst_sgn (arg1))
5922 {
5923 case -1:
5924 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5925 hi = prod;
5926 break;
5927
5928 case 0:
5929 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5930 hi = tmp;
5931 break;
5932
5933 case 1:
5934 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5935 lo = prod;
5936 break;
5937
5938 default:
5939 gcc_unreachable ();
5940 }
5941 }
5942 else
5943 {
5944 /* A negative divisor reverses the relational operators. */
5945 code = swap_tree_comparison (code);
5946
5947 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5948 switch (tree_int_cst_sgn (arg1))
5949 {
5950 case -1:
5951 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5952 lo = prod;
5953 break;
5954
5955 case 0:
5956 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5957 lo = tmp;
5958 break;
5959
5960 case 1:
5961 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5962 hi = prod;
5963 break;
5964
5965 default:
5966 gcc_unreachable ();
5967 }
5968 }
5969
5970 switch (code)
5971 {
5972 case EQ_EXPR:
5973 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5974 return omit_one_operand (type, integer_zero_node, arg00);
5975 if (TREE_OVERFLOW (hi))
5976 return fold_build2 (GE_EXPR, type, arg00, lo);
5977 if (TREE_OVERFLOW (lo))
5978 return fold_build2 (LE_EXPR, type, arg00, hi);
5979 return build_range_check (type, arg00, 1, lo, hi);
5980
5981 case NE_EXPR:
5982 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5983 return omit_one_operand (type, integer_one_node, arg00);
5984 if (TREE_OVERFLOW (hi))
5985 return fold_build2 (LT_EXPR, type, arg00, lo);
5986 if (TREE_OVERFLOW (lo))
5987 return fold_build2 (GT_EXPR, type, arg00, hi);
5988 return build_range_check (type, arg00, 0, lo, hi);
5989
5990 case LT_EXPR:
5991 if (TREE_OVERFLOW (lo))
5992 return omit_one_operand (type, integer_zero_node, arg00);
5993 return fold_build2 (LT_EXPR, type, arg00, lo);
5994
5995 case LE_EXPR:
5996 if (TREE_OVERFLOW (hi))
5997 return omit_one_operand (type, integer_one_node, arg00);
5998 return fold_build2 (LE_EXPR, type, arg00, hi);
5999
6000 case GT_EXPR:
6001 if (TREE_OVERFLOW (hi))
6002 return omit_one_operand (type, integer_zero_node, arg00);
6003 return fold_build2 (GT_EXPR, type, arg00, hi);
6004
6005 case GE_EXPR:
6006 if (TREE_OVERFLOW (lo))
6007 return omit_one_operand (type, integer_one_node, arg00);
6008 return fold_build2 (GE_EXPR, type, arg00, lo);
6009
6010 default:
6011 break;
6012 }
6013
6014 return NULL_TREE;
6015 }
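/* A worked example (illustrative): for signed X, X/4 == 3 holds
   exactly when X is in [12, 15], so the EQ_EXPR case above builds
   the range check 12 <= X && X <= 15; likewise X/4 < 3 becomes
   X < 12.  */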
6016
6017
6018 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6019 equality/inequality test, then return a simplified form of the test
6020 using a sign test. Otherwise return NULL. TYPE is the desired
6021 result type. */
6022
6023 static tree
6024 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6025 tree result_type)
6026 {
6027 /* If this is testing a single bit, we can optimize the test. */
6028 if ((code == NE_EXPR || code == EQ_EXPR)
6029 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6030 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6031 {
6032 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6033 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6034 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6035
6036 if (arg00 != NULL_TREE
6037 /* This is only a win if casting to a signed type is cheap,
6038 i.e. when arg00's type is not a partial mode. */
6039 && TYPE_PRECISION (TREE_TYPE (arg00))
6040 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6041 {
6042 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6043 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6044 result_type, fold_convert (stype, arg00),
6045 fold_convert (stype, integer_zero_node));
6046 }
6047 }
6048
6049 return NULL_TREE;
6050 }
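/* Illustrative example: for a signed char A, (A & 0x80) != 0 is the
   sign-bit test and becomes A < 0; (A & 0x80) == 0 becomes A >= 0.  */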
6051
6052 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6053 equality/inequality test, then return a simplified form of
6054 the test using shifts and logical operations. Otherwise return
6055 NULL. TYPE is the desired result type. */
6056
6057 tree
6058 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6059 tree result_type)
6060 {
6061 /* If this is testing a single bit, we can optimize the test. */
6062 if ((code == NE_EXPR || code == EQ_EXPR)
6063 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6064 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6065 {
6066 tree inner = TREE_OPERAND (arg0, 0);
6067 tree type = TREE_TYPE (arg0);
6068 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6069 enum machine_mode operand_mode = TYPE_MODE (type);
6070 int ops_unsigned;
6071 tree signed_type, unsigned_type, intermediate_type;
6072 tree tem;
6073
6074 /* First, see if we can fold the single bit test into a sign-bit
6075 test. */
6076 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6077 result_type);
6078 if (tem)
6079 return tem;
6080
6081 /* Otherwise we have (A & C) != 0 where C is a single bit,
6082 convert that into ((A >> C2) & 1), where C2 = log2(C).
6083 Similarly for (A & C) == 0. */
6084
6085 /* If INNER is a right shift by a constant and it plus BITNUM does
6086 not overflow, adjust BITNUM and INNER. */
6087 if (TREE_CODE (inner) == RSHIFT_EXPR
6088 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6089 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6090 && bitnum < TYPE_PRECISION (type)
6091 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6092 bitnum - TYPE_PRECISION (type)))
6093 {
6094 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6095 inner = TREE_OPERAND (inner, 0);
6096 }
6097
6098 /* If we are going to be able to omit the AND below, we must do our
6099 operations as unsigned. If we must use the AND, we have a choice.
6100 Normally unsigned is faster, but for some machines signed is. */
6101 #ifdef LOAD_EXTEND_OP
6102 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6103 && !flag_syntax_only) ? 0 : 1;
6104 #else
6105 ops_unsigned = 1;
6106 #endif
6107
6108 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6109 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6110 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6111 inner = fold_convert (intermediate_type, inner);
6112
6113 if (bitnum != 0)
6114 inner = build2 (RSHIFT_EXPR, intermediate_type,
6115 inner, size_int (bitnum));
6116
6117 if (code == EQ_EXPR)
6118 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6119 inner, integer_one_node);
6120
6121 /* Put the AND last so it can combine with more things. */
6122 inner = build2 (BIT_AND_EXPR, intermediate_type,
6123 inner, integer_one_node);
6124
6125 /* Make sure to return the proper type. */
6126 inner = fold_convert (result_type, inner);
6127
6128 return inner;
6129 }
6130 return NULL_TREE;
6131 }
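/* Illustrative example (assuming the target prefers unsigned
   operations): (A & 8) != 0 becomes ((unsigned) A >> 3) & 1, and
   (A & 8) == 0 becomes (((unsigned) A >> 3) ^ 1) & 1.  */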
6132
6133 /* Check whether we are allowed to reorder operands arg0 and arg1,
6134 such that the evaluation of arg1 occurs before arg0. */
6135
6136 static bool
6137 reorder_operands_p (tree arg0, tree arg1)
6138 {
6139 if (! flag_evaluation_order)
6140 return true;
6141 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6142 return true;
6143 return ! TREE_SIDE_EFFECTS (arg0)
6144 && ! TREE_SIDE_EFFECTS (arg1);
6145 }
6146
6147 /* Test whether it is preferable to swap two operands, ARG0 and
6148 ARG1, for example because ARG0 is an integer constant and ARG1
6149 isn't. If REORDER is true, only recommend swapping if we can
6150 evaluate the operands in reverse order. */
6151
6152 bool
6153 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6154 {
6155 STRIP_SIGN_NOPS (arg0);
6156 STRIP_SIGN_NOPS (arg1);
6157
6158 if (TREE_CODE (arg1) == INTEGER_CST)
6159 return 0;
6160 if (TREE_CODE (arg0) == INTEGER_CST)
6161 return 1;
6162
6163 if (TREE_CODE (arg1) == REAL_CST)
6164 return 0;
6165 if (TREE_CODE (arg0) == REAL_CST)
6166 return 1;
6167
6168 if (TREE_CODE (arg1) == COMPLEX_CST)
6169 return 0;
6170 if (TREE_CODE (arg0) == COMPLEX_CST)
6171 return 1;
6172
6173 if (TREE_CONSTANT (arg1))
6174 return 0;
6175 if (TREE_CONSTANT (arg0))
6176 return 1;
6177
6178 if (optimize_size)
6179 return 0;
6180
6181 if (reorder && flag_evaluation_order
6182 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6183 return 0;
6184
6185 if (DECL_P (arg1))
6186 return 0;
6187 if (DECL_P (arg0))
6188 return 1;
6189
6190 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6191 for commutative and comparison operators. Ensuring a canonical
6192 form allows the optimizers to find additional redundancies without
6193 having to explicitly check for both orderings. */
6194 if (TREE_CODE (arg0) == SSA_NAME
6195 && TREE_CODE (arg1) == SSA_NAME
6196 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6197 return 1;
6198
6199 return 0;
6200 }
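/* Illustrative example: for 5 + x this returns 1, so fold
   canonicalizes the sum to x + 5, keeping constants as the second
   operand of commutative operations.  */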
6201
6202 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6203 ARG0 is extended to a wider type. */
6204
6205 static tree
6206 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6207 {
6208 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6209 tree arg1_unw;
6210 tree shorter_type, outer_type;
6211 tree min, max;
6212 bool above, below;
6213
6214 if (arg0_unw == arg0)
6215 return NULL_TREE;
6216 shorter_type = TREE_TYPE (arg0_unw);
6217
6218 #ifdef HAVE_canonicalize_funcptr_for_compare
6219 /* Disable this optimization if we're casting a function pointer
6220 type on targets that require function pointer canonicalization. */
6221 if (HAVE_canonicalize_funcptr_for_compare
6222 && TREE_CODE (shorter_type) == POINTER_TYPE
6223 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6224 return NULL_TREE;
6225 #endif
6226
6227 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6228 return NULL_TREE;
6229
6230 arg1_unw = get_unwidened (arg1, shorter_type);
6231 if (!arg1_unw)
6232 return NULL_TREE;
6233
6234 /* If possible, express the comparison in the shorter mode. */
6235 if ((code == EQ_EXPR || code == NE_EXPR
6236 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6237 && (TREE_TYPE (arg1_unw) == shorter_type
6238 || (TREE_CODE (arg1_unw) == INTEGER_CST
6239 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6240 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6241 && int_fits_type_p (arg1_unw, shorter_type))))
6242 return fold_build2 (code, type, arg0_unw,
6243 fold_convert (shorter_type, arg1_unw));
6244
6245 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6246 return NULL_TREE;
6247
6248 /* If we are comparing with an integer that does not fit in the range
6249 of the shorter type, the result is known. */
6250 outer_type = TREE_TYPE (arg1_unw);
6251 min = lower_bound_in_type (outer_type, shorter_type);
6252 max = upper_bound_in_type (outer_type, shorter_type);
6253
6254 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6255 max, arg1_unw));
6256 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6257 arg1_unw, min));
6258
6259 switch (code)
6260 {
6261 case EQ_EXPR:
6262 if (above || below)
6263 return omit_one_operand (type, integer_zero_node, arg0);
6264 break;
6265
6266 case NE_EXPR:
6267 if (above || below)
6268 return omit_one_operand (type, integer_one_node, arg0);
6269 break;
6270
6271 case LT_EXPR:
6272 case LE_EXPR:
6273 if (above)
6274 return omit_one_operand (type, integer_one_node, arg0);
6275 else if (below)
6276 return omit_one_operand (type, integer_zero_node, arg0);
6277
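      /* Fall through is harmless here: if neither ABOVE nor BELOW
	 held, the GT_EXPR/GE_EXPR checks below fail the same way
	 and we reach the default case.  */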
6278 case GT_EXPR:
6279 case GE_EXPR:
6280 if (above)
6281 return omit_one_operand (type, integer_zero_node, arg0);
6282 else if (below)
6283 return omit_one_operand (type, integer_one_node, arg0);
6284
6285 default:
6286 break;
6287 }
6288
6289 return NULL_TREE;
6290 }
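/* Illustrative example: for unsigned char C, (int) C == 300 can
   never hold since 300 lies above [0, 255], the range of the
   narrower type, so the comparison folds to constant false (with
   any side effects of the operand preserved).  */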
6291
6292 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6293 ARG0 just the signedness is changed. */
6294
6295 static tree
6296 fold_sign_changed_comparison (enum tree_code code, tree type,
6297 tree arg0, tree arg1)
6298 {
6299 tree arg0_inner, tmp;
6300 tree inner_type, outer_type;
6301
6302 if (TREE_CODE (arg0) != NOP_EXPR
6303 && TREE_CODE (arg0) != CONVERT_EXPR)
6304 return NULL_TREE;
6305
6306 outer_type = TREE_TYPE (arg0);
6307 arg0_inner = TREE_OPERAND (arg0, 0);
6308 inner_type = TREE_TYPE (arg0_inner);
6309
6310 #ifdef HAVE_canonicalize_funcptr_for_compare
6311 /* Disable this optimization if we're casting a function pointer
6312 type on targets that require function pointer canonicalization. */
6313 if (HAVE_canonicalize_funcptr_for_compare
6314 && TREE_CODE (inner_type) == POINTER_TYPE
6315 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6316 return NULL_TREE;
6317 #endif
6318
6319 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6320 return NULL_TREE;
6321
6322 if (TREE_CODE (arg1) != INTEGER_CST
6323 && !((TREE_CODE (arg1) == NOP_EXPR
6324 || TREE_CODE (arg1) == CONVERT_EXPR)
6325 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6326 return NULL_TREE;
6327
6328 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6329 && code != NE_EXPR
6330 && code != EQ_EXPR)
6331 return NULL_TREE;
6332
6333 if (TREE_CODE (arg1) == INTEGER_CST)
6334 {
6335 tmp = build_int_cst_wide (inner_type,
6336 TREE_INT_CST_LOW (arg1),
6337 TREE_INT_CST_HIGH (arg1));
6338 arg1 = force_fit_type (tmp, 0,
6339 TREE_OVERFLOW (arg1),
6340 TREE_CONSTANT_OVERFLOW (arg1));
6341 }
6342 else
6343 arg1 = fold_convert (inner_type, arg1);
6344
6345 return fold_build2 (code, type, arg0_inner, arg1);
6346 }
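/* Illustrative example: (unsigned int) i == 5u has the same truth
   value as i == 5 because the cast changes only the signedness, so
   the comparison is rewritten on the inner operand.  */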
6347
6348 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6349 the step of the array. Reconstructs s and delta in the case of s * delta
6350 being an integer constant (and thus already folded).
6351 ADDR is the address. MULT is the multiplicative expression.
6352 If the function succeeds, the new address expression is returned. Otherwise
6353 NULL_TREE is returned. */
6354
6355 static tree
6356 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6357 {
6358 tree s, delta, step;
6359 tree ref = TREE_OPERAND (addr, 0), pref;
6360 tree ret, pos;
6361 tree itype;
6362
6363 /* Canonicalize op1 into a possibly non-constant delta
6364 and an INTEGER_CST s. */
6365 if (TREE_CODE (op1) == MULT_EXPR)
6366 {
6367 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6368
6369 STRIP_NOPS (arg0);
6370 STRIP_NOPS (arg1);
6371
6372 if (TREE_CODE (arg0) == INTEGER_CST)
6373 {
6374 s = arg0;
6375 delta = arg1;
6376 }
6377 else if (TREE_CODE (arg1) == INTEGER_CST)
6378 {
6379 s = arg1;
6380 delta = arg0;
6381 }
6382 else
6383 return NULL_TREE;
6384 }
6385 else if (TREE_CODE (op1) == INTEGER_CST)
6386 {
6387 delta = op1;
6388 s = NULL_TREE;
6389 }
6390 else
6391 {
6392 /* Treat op1 as if it were delta * 1. */
6393 delta = op1;
6394 s = integer_one_node;
6395 }
6396
6397 for (;; ref = TREE_OPERAND (ref, 0))
6398 {
6399 if (TREE_CODE (ref) == ARRAY_REF)
6400 {
6401 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6402 if (! itype)
6403 continue;
6404
6405 step = array_ref_element_size (ref);
6406 if (TREE_CODE (step) != INTEGER_CST)
6407 continue;
6408
6409 if (s)
6410 {
6411 if (! tree_int_cst_equal (step, s))
6412 continue;
6413 }
6414 else
6415 {
6416 /* Check whether delta is a multiple of step. */
6417 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6418 if (! tmp)
6419 continue;
6420 delta = tmp;
6421 }
6422
6423 break;
6424 }
6425
6426 if (!handled_component_p (ref))
6427 return NULL_TREE;
6428 }
6429
6430 /* We found a suitable array reference. So copy everything up to it,
6431 and replace the index. */
6432
6433 pref = TREE_OPERAND (addr, 0);
6434 ret = copy_node (pref);
6435 pos = ret;
6436
6437 while (pref != ref)
6438 {
6439 pref = TREE_OPERAND (pref, 0);
6440 TREE_OPERAND (pos, 0) = copy_node (pref);
6441 pos = TREE_OPERAND (pos, 0);
6442 }
6443
6444 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6445 fold_convert (itype,
6446 TREE_OPERAND (pos, 1)),
6447 fold_convert (itype, delta));
6448
6449 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6450 }
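/* Illustrative example (assuming sizeof (int) == 4): for int a[N],
   the caller hands us ADDR = &a[i] and OP1 = j * 4, and we rebuild
   the address as &a[i + j], folding the scaled offset back into
   the array index.  */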
6451
6452
6453 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6454 means A >= Y && A != MAX, but in this case we know that
6455 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6456
6457 static tree
6458 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6459 {
6460 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6461
6462 if (TREE_CODE (bound) == LT_EXPR)
6463 a = TREE_OPERAND (bound, 0);
6464 else if (TREE_CODE (bound) == GT_EXPR)
6465 a = TREE_OPERAND (bound, 1);
6466 else
6467 return NULL_TREE;
6468
6469 typea = TREE_TYPE (a);
6470 if (!INTEGRAL_TYPE_P (typea)
6471 && !POINTER_TYPE_P (typea))
6472 return NULL_TREE;
6473
6474 if (TREE_CODE (ineq) == LT_EXPR)
6475 {
6476 a1 = TREE_OPERAND (ineq, 1);
6477 y = TREE_OPERAND (ineq, 0);
6478 }
6479 else if (TREE_CODE (ineq) == GT_EXPR)
6480 {
6481 a1 = TREE_OPERAND (ineq, 0);
6482 y = TREE_OPERAND (ineq, 1);
6483 }
6484 else
6485 return NULL_TREE;
6486
6487 if (TREE_TYPE (a1) != typea)
6488 return NULL_TREE;
6489
6490 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6491 if (!integer_onep (diff))
6492 return NULL_TREE;
6493
6494 return fold_build2 (GE_EXPR, type, a, y);
6495 }
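/* Illustrative example: with BOUND = A < X and INEQ = A + 1 > Y,
   the difference (A + 1) - A folds to 1, so the result is A >= Y.  */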
6496
6497 /* Fold complex addition when both components are accessible by parts.
6498 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6499 or MINUS_EXPR for subtraction. */
6500
6501 static tree
6502 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6503 {
6504 tree ar, ai, br, bi, rr, ri, inner_type;
6505
6506 if (TREE_CODE (ac) == COMPLEX_EXPR)
6507 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6508 else if (TREE_CODE (ac) == COMPLEX_CST)
6509 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6510 else
6511 return NULL;
6512
6513 if (TREE_CODE (bc) == COMPLEX_EXPR)
6514 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6515 else if (TREE_CODE (bc) == COMPLEX_CST)
6516 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6517 else
6518 return NULL;
6519
6520 inner_type = TREE_TYPE (type);
6521
6522 rr = fold_build2 (code, inner_type, ar, br);
6523 ri = fold_build2 (code, inner_type, ai, bi);
6524
6525 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6526 }
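/* Illustrative example: COMPLEX_EXPR <a, b> + COMPLEX_EXPR <c, d>
   folds componentwise to COMPLEX_EXPR <a + c, b + d>.  */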
6527
6528 /* Perform some simplifications of complex multiplication when one or more
6529 of the components are constants or zeros. Return non-null if successful. */
6530
6531 tree
6532 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6533 {
6534 tree rr, ri, inner_type, zero;
6535 bool ar0, ai0, br0, bi0, bi1;
6536
6537 inner_type = TREE_TYPE (type);
6538 zero = NULL;
6539
6540 if (SCALAR_FLOAT_TYPE_P (inner_type))
6541 {
6542 ar0 = ai0 = br0 = bi0 = bi1 = false;
6543
6544 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6545
6546 if (TREE_CODE (ar) == REAL_CST
6547 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6548 ar0 = true, zero = ar;
6549
6550 if (TREE_CODE (ai) == REAL_CST
6551 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6552 ai0 = true, zero = ai;
6553
6554 if (TREE_CODE (br) == REAL_CST
6555 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6556 br0 = true, zero = br;
6557
6558 if (TREE_CODE (bi) == REAL_CST)
6559 {
6560 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6561 bi0 = true, zero = bi;
6562 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6563 bi1 = true;
6564 }
6565 }
6566 else
6567 {
6568 ar0 = integer_zerop (ar);
6569 if (ar0)
6570 zero = ar;
6571 ai0 = integer_zerop (ai);
6572 if (ai0)
6573 zero = ai;
6574 br0 = integer_zerop (br);
6575 if (br0)
6576 zero = br;
6577 bi0 = integer_zerop (bi);
6578 if (bi0)
6579 {
6580 zero = bi;
6581 bi1 = false;
6582 }
6583 else
6584 bi1 = integer_onep (bi);
6585 }
6586
6587 /* We won't optimize anything below unless something is zero. */
6588 if (zero == NULL)
6589 return NULL;
6590
6591 if (ai0 && br0 && bi1)
6592 {
6593 rr = zero;
6594 ri = ar;
6595 }
6596 else if (ai0 && bi0)
6597 {
6598 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6599 ri = zero;
6600 }
6601 else if (ai0 && br0)
6602 {
6603 rr = zero;
6604 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6605 }
6606 else if (ar0 && bi0)
6607 {
6608 rr = zero;
6609 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6610 }
6611 else if (ar0 && br0)
6612 {
6613 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6614 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6615 ri = zero;
6616 }
6617 else if (bi0)
6618 {
6619 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6620 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6621 }
6622 else if (ai0)
6623 {
6624 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6625 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6626 }
6627 else if (br0)
6628 {
6629 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6630 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6631 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6632 }
6633 else if (ar0)
6634 {
6635 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6636 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6637 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6638 }
6639 else
6640 return NULL;
6641
6642 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6643 }
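/* Illustrative example: multiplying by the imaginary unit,
   (ar + 0i) * (0 + 1i), matches the ai0 && br0 && bi1 case above
   and folds to COMPLEX_EXPR <0, ar> with no multiplications.  */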
6644
6645 static tree
6646 fold_complex_mult (tree type, tree ac, tree bc)
6647 {
6648 tree ar, ai, br, bi;
6649
6650 if (TREE_CODE (ac) == COMPLEX_EXPR)
6651 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6652 else if (TREE_CODE (ac) == COMPLEX_CST)
6653 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6654 else
6655 return NULL;
6656
6657 if (TREE_CODE (bc) == COMPLEX_EXPR)
6658 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6659 else if (TREE_CODE (bc) == COMPLEX_CST)
6660 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6661 else
6662 return NULL;
6663
6664 return fold_complex_mult_parts (type, ar, ai, br, bi);
6665 }
6666
6667 /* Perform some simplifications of complex division when one or more of
6668 the components are constants or zeros. Return non-null if successful. */
6669
6670 tree
6671 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6672 enum tree_code code)
6673 {
6674 tree rr, ri, inner_type, zero;
6675 bool ar0, ai0, br0, bi0, bi1;
6676
6677 inner_type = TREE_TYPE (type);
6678 zero = NULL;
6679
6680 if (SCALAR_FLOAT_TYPE_P (inner_type))
6681 {
6682 ar0 = ai0 = br0 = bi0 = bi1 = false;
6683
6684 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6685
6686 if (TREE_CODE (ar) == REAL_CST
6687 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6688 ar0 = true, zero = ar;
6689
6690 if (TREE_CODE (ai) == REAL_CST
6691 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6692 ai0 = true, zero = ai;
6693
6694 if (TREE_CODE (br) == REAL_CST
6695 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6696 br0 = true, zero = br;
6697
6698 if (TREE_CODE (bi) == REAL_CST)
6699 {
6700 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6701 bi0 = true, zero = bi;
6702 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6703 bi1 = true;
6704 }
6705 }
6706 else
6707 {
6708 ar0 = integer_zerop (ar);
6709 if (ar0)
6710 zero = ar;
6711 ai0 = integer_zerop (ai);
6712 if (ai0)
6713 zero = ai;
6714 br0 = integer_zerop (br);
6715 if (br0)
6716 zero = br;
6717 bi0 = integer_zerop (bi);
6718 if (bi0)
6719 {
6720 zero = bi;
6721 bi1 = false;
6722 }
6723 else
6724 bi1 = integer_onep (bi);
6725 }
6726
6727 /* We won't optimize anything below unless something is zero. */
6728 if (zero == NULL)
6729 return NULL;
6730
6731 if (ai0 && bi0)
6732 {
6733 rr = fold_build2 (code, inner_type, ar, br);
6734 ri = zero;
6735 }
6736 else if (ai0 && br0)
6737 {
6738 rr = zero;
6739 ri = fold_build2 (code, inner_type, ar, bi);
6740 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6741 }
6742 else if (ar0 && bi0)
6743 {
6744 rr = zero;
6745 ri = fold_build2 (code, inner_type, ai, br);
6746 }
6747 else if (ar0 && br0)
6748 {
6749 rr = fold_build2 (code, inner_type, ai, bi);
6750 ri = zero;
6751 }
6752 else if (bi0)
6753 {
6754 rr = fold_build2 (code, inner_type, ar, br);
6755 ri = fold_build2 (code, inner_type, ai, br);
6756 }
6757 else if (br0)
6758 {
6759 rr = fold_build2 (code, inner_type, ai, bi);
6760 ri = fold_build2 (code, inner_type, ar, bi);
6761 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6762 }
6763 else
6764 return NULL;
6765
6766 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6767 }
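/* Illustrative example: dividing a pure imaginary by a pure real,
   (0 + ai*i) / (br + 0i), matches the ar0 && bi0 case above and
   folds to COMPLEX_EXPR <0, ai / br>.  */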
6768
6769 static tree
6770 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6771 {
6772 tree ar, ai, br, bi;
6773
6774 if (TREE_CODE (ac) == COMPLEX_EXPR)
6775 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6776 else if (TREE_CODE (ac) == COMPLEX_CST)
6777 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6778 else
6779 return NULL;
6780
6781 if (TREE_CODE (bc) == COMPLEX_EXPR)
6782 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6783 else if (TREE_CODE (bc) == COMPLEX_CST)
6784 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6785 else
6786 return NULL;
6787
6788 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6789 }
6790
6791 /* Fold a unary expression of code CODE and type TYPE with operand
6792 OP0. Return the folded expression if folding is successful.
6793 Otherwise, return NULL_TREE. */
6794
6795 tree
6796 fold_unary (enum tree_code code, tree type, tree op0)
6797 {
6798 tree tem;
6799 tree arg0;
6800 enum tree_code_class kind = TREE_CODE_CLASS (code);
6801
6802 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6803 && TREE_CODE_LENGTH (code) == 1);
6804
6805 arg0 = op0;
6806 if (arg0)
6807 {
6808 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6809 {
6810 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6811 STRIP_SIGN_NOPS (arg0);
6812 }
6813 else
6814 {
6815 /* Strip any conversions that don't change the mode. This
6816 is safe for every expression, except for a comparison
6817 expression because its signedness is derived from its
6818 operands.
6819
6820 Note that this is done as an internal manipulation within
6821 the constant folder, in order to find the simplest
6822 representation of the arguments so that their form can be
6823 studied. In any cases, the appropriate type conversions
6824 should be put back in the tree that will get out of the
6825 constant folder. */
6826 STRIP_NOPS (arg0);
6827 }
6828 }
6829
6830 if (TREE_CODE_CLASS (code) == tcc_unary)
6831 {
6832 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6833 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6834 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6835 else if (TREE_CODE (arg0) == COND_EXPR)
6836 {
6837 tree arg01 = TREE_OPERAND (arg0, 1);
6838 tree arg02 = TREE_OPERAND (arg0, 2);
6839 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6840 arg01 = fold_build1 (code, type, arg01);
6841 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6842 arg02 = fold_build1 (code, type, arg02);
6843 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6844 arg01, arg02);
6845
6846 /* If this was a conversion, and all we did was to move it
6847 inside the COND_EXPR, bring it back out. But leave it if
6848 it is a conversion from integer to integer and the
6849 result precision is no wider than a word since such a
6850 conversion is cheap and may be optimized away by combine,
6851 while it couldn't if it were outside the COND_EXPR. Then return
6852 so we don't get into an infinite recursion loop taking the
6853 conversion out and then back in. */
6854
6855 if ((code == NOP_EXPR || code == CONVERT_EXPR
6856 || code == NON_LVALUE_EXPR)
6857 && TREE_CODE (tem) == COND_EXPR
6858 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6859 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6860 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6861 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6862 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6863 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6864 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6865 && (INTEGRAL_TYPE_P
6866 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6867 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6868 || flag_syntax_only))
6869 tem = build1 (code, type,
6870 build3 (COND_EXPR,
6871 TREE_TYPE (TREE_OPERAND
6872 (TREE_OPERAND (tem, 1), 0)),
6873 TREE_OPERAND (tem, 0),
6874 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6875 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6876 return tem;
6877 }
6878 else if (COMPARISON_CLASS_P (arg0))
6879 {
6880 if (TREE_CODE (type) == BOOLEAN_TYPE)
6881 {
6882 arg0 = copy_node (arg0);
6883 TREE_TYPE (arg0) = type;
6884 return arg0;
6885 }
6886 else if (TREE_CODE (type) != INTEGER_TYPE)
6887 return fold_build3 (COND_EXPR, type, arg0,
6888 fold_build1 (code, type,
6889 integer_one_node),
6890 fold_build1 (code, type,
6891 integer_zero_node));
6892 }
6893 }
6894
6895 switch (code)
6896 {
6897 case NOP_EXPR:
6898 case FLOAT_EXPR:
6899 case CONVERT_EXPR:
6900 case FIX_TRUNC_EXPR:
6901 case FIX_CEIL_EXPR:
6902 case FIX_FLOOR_EXPR:
6903 case FIX_ROUND_EXPR:
6904 if (TREE_TYPE (op0) == type)
6905 return op0;
6906
6907 /* Handle cases of two conversions in a row. */
6908 if (TREE_CODE (op0) == NOP_EXPR
6909 || TREE_CODE (op0) == CONVERT_EXPR)
6910 {
6911 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6912 tree inter_type = TREE_TYPE (op0);
6913 int inside_int = INTEGRAL_TYPE_P (inside_type);
6914 int inside_ptr = POINTER_TYPE_P (inside_type);
6915 int inside_float = FLOAT_TYPE_P (inside_type);
6916 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6917 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6918 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6919 int inter_int = INTEGRAL_TYPE_P (inter_type);
6920 int inter_ptr = POINTER_TYPE_P (inter_type);
6921 int inter_float = FLOAT_TYPE_P (inter_type);
6922 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6923 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6924 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6925 int final_int = INTEGRAL_TYPE_P (type);
6926 int final_ptr = POINTER_TYPE_P (type);
6927 int final_float = FLOAT_TYPE_P (type);
6928 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6929 unsigned int final_prec = TYPE_PRECISION (type);
6930 int final_unsignedp = TYPE_UNSIGNED (type);
6931
6932 /* In addition to the cases of two conversions in a row
6933 handled below, if we are converting something to its own
6934 type via an object of identical or wider precision, neither
6935 conversion is needed. */
6936 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6937 && ((inter_int && final_int) || (inter_float && final_float))
6938 && inter_prec >= final_prec)
6939 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6940
6941 /* Likewise, if the intermediate and final types are either both
6942 float or both integer, we don't need the middle conversion if
6943 it is wider than the final type and doesn't change the signedness
6944 (for integers). Avoid this if the final type is a pointer
6945 since then we sometimes need the inner conversion. Likewise if
6946 the outer has a precision not equal to the size of its mode. */
6947 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6948 || (inter_float && inside_float)
6949 || (inter_vec && inside_vec))
6950 && inter_prec >= inside_prec
6951 && (inter_float || inter_vec
6952 || inter_unsignedp == inside_unsignedp)
6953 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6954 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6955 && ! final_ptr
6956 && (! final_vec || inter_prec == inside_prec))
6957 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6958
6959 /* If we have a sign-extension of a zero-extended value, we can
6960 replace that by a single zero-extension. */
6961 if (inside_int && inter_int && final_int
6962 && inside_prec < inter_prec && inter_prec < final_prec
6963 && inside_unsignedp && !inter_unsignedp)
6964 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6965
6966 /* Two conversions in a row are not needed unless:
6967 - some conversion is floating-point (overstrict for now), or
6968 - some conversion is a vector (overstrict for now), or
6969 - the intermediate type is narrower than both initial and
6970 final, or
6971 - the intermediate type and innermost type differ in signedness,
6972 and the outermost type is wider than the intermediate, or
6973 - the initial type is a pointer type and the precisions of the
6974 intermediate and final types differ, or
6975 - the final type is a pointer type and the precisions of the
6976 initial and intermediate types differ. */
6977 if (! inside_float && ! inter_float && ! final_float
6978 && ! inside_vec && ! inter_vec && ! final_vec
6979 && (inter_prec > inside_prec || inter_prec > final_prec)
6980 && ! (inside_int && inter_int
6981 && inter_unsignedp != inside_unsignedp
6982 && inter_prec < final_prec)
6983 && ((inter_unsignedp && inter_prec > inside_prec)
6984 == (final_unsignedp && final_prec > inter_prec))
6985 && ! (inside_ptr && inter_prec != final_prec)
6986 && ! (final_ptr && inside_prec != inter_prec)
6987 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6988 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6989 && ! final_ptr)
6990 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6991 }
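      /* Illustrative consequences of the rules above (assumed, on a
	 typical 16/32/64-bit target): for short s, (int)(long)s drops
	 the intermediate widening and folds to (int)s, while
	 (int)(short)i must keep the narrowing to short.  */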
6992
6993 if (TREE_CODE (op0) == MODIFY_EXPR
6994 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6995 /* Detect assigning a bitfield. */
6996 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6997 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6998 {
6999 /* Don't leave an assignment inside a conversion
7000 unless assigning a bitfield. */
7001 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7002 /* First do the assignment, then return converted constant. */
7003 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7004 TREE_NO_WARNING (tem) = 1;
7005 TREE_USED (tem) = 1;
7006 return tem;
7007 }
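/* For example, (long) (i = 10) becomes (i = 10, (long) 10): the
   assignment's side effect is kept in the COMPOUND_EXPR while the
   converted constant is exposed for further folding. */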
7008
7009 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7010 constant (if x has signed type, the sign bit cannot be set
7011 in c). This folds the extension into the BIT_AND_EXPR. */
7012 if (INTEGRAL_TYPE_P (type)
7013 && TREE_CODE (type) != BOOLEAN_TYPE
7014 && TREE_CODE (op0) == BIT_AND_EXPR
7015 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7016 {
7017 tree and = op0;
7018 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7019 int change = 0;
7020
7021 if (TYPE_UNSIGNED (TREE_TYPE (and))
7022 || (TYPE_PRECISION (type)
7023 <= TYPE_PRECISION (TREE_TYPE (and))))
7024 change = 1;
7025 else if (TYPE_PRECISION (TREE_TYPE (and1))
7026 <= HOST_BITS_PER_WIDE_INT
7027 && host_integerp (and1, 1))
7028 {
7029 unsigned HOST_WIDE_INT cst;
7030
7031 cst = tree_low_cst (and1, 1);
7032 cst &= (HOST_WIDE_INT) -1
7033 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7034 change = (cst == 0);
7035 #ifdef LOAD_EXTEND_OP
7036 if (change
7037 && !flag_syntax_only
7038 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7039 == ZERO_EXTEND))
7040 {
7041 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7042 and0 = fold_convert (uns, and0);
7043 and1 = fold_convert (uns, and1);
7044 }
7045 #endif
7046 }
7047 if (change)
7048 {
7049 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7050 TREE_INT_CST_HIGH (and1));
7051 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7052 TREE_CONSTANT_OVERFLOW (and1));
7053 return fold_build2 (BIT_AND_EXPR, type,
7054 fold_convert (type, and0), tem);
7055 }
7056 }
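/* For example, if X is a signed char, (int) (X & 0x7f) becomes
   (int) X & 0x7f: the mask 0x7f proves the sign bit of the narrow
   value is clear, so extending before masking gives the same bits. */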
7057
7058 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7059 T2 being pointers to types of the same size. */
7060 if (POINTER_TYPE_P (type)
7061 && BINARY_CLASS_P (arg0)
7062 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7063 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7064 {
7065 tree arg00 = TREE_OPERAND (arg0, 0);
7066 tree t0 = type;
7067 tree t1 = TREE_TYPE (arg00);
7068 tree tt0 = TREE_TYPE (t0);
7069 tree tt1 = TREE_TYPE (t1);
7070 tree s0 = TYPE_SIZE (tt0);
7071 tree s1 = TYPE_SIZE (tt1);
7072
7073 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7074 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7075 TREE_OPERAND (arg0, 1));
7076 }
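/* For example, given two record types of the same size,
   (struct a *) ((struct b *) p + i) can be rebuilt directly as an
   addition on (struct a *) p, dropping the inner pointer conversion
   (struct a and struct b here are just illustrative names). */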
7077
7078 tem = fold_convert_const (code, type, arg0);
7079 return tem ? tem : NULL_TREE;
7080
7081 case VIEW_CONVERT_EXPR:
7082 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7083 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7084 return NULL_TREE;
7085
7086 case NEGATE_EXPR:
7087 if (negate_expr_p (arg0))
7088 return fold_convert (type, negate_expr (arg0));
7089 /* Convert - (~A) to A + 1. */
7090 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
7091 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
7092 build_int_cst (type, 1));
7093 return NULL_TREE;
7094
7095 case ABS_EXPR:
7096 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7097 return fold_abs_const (arg0, type);
7098 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7099 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7100 /* Convert fabs((double)float) into (double)fabsf(float). */
7101 else if (TREE_CODE (arg0) == NOP_EXPR
7102 && TREE_CODE (type) == REAL_TYPE)
7103 {
7104 tree targ0 = strip_float_extensions (arg0);
7105 if (targ0 != arg0)
7106 return fold_convert (type, fold_build1 (ABS_EXPR,
7107 TREE_TYPE (targ0),
7108 targ0));
7109 }
7110 else if (tree_expr_nonnegative_p (arg0))
7111 return arg0;
7112
7113 /* Strip sign ops from argument. */
7114 if (TREE_CODE (type) == REAL_TYPE)
7115 {
7116 tem = fold_strip_sign_ops (arg0);
7117 if (tem)
7118 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7119 }
7120 return NULL_TREE;
7121
7122 case CONJ_EXPR:
7123 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7124 return fold_convert (type, arg0);
7125 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7126 return build2 (COMPLEX_EXPR, type,
7127 TREE_OPERAND (arg0, 0),
7128 negate_expr (TREE_OPERAND (arg0, 1)));
7129 else if (TREE_CODE (arg0) == COMPLEX_CST)
7130 return build_complex (type, TREE_REALPART (arg0),
7131 negate_expr (TREE_IMAGPART (arg0)));
7132 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7133 return fold_build2 (TREE_CODE (arg0), type,
7134 fold_build1 (CONJ_EXPR, type,
7135 TREE_OPERAND (arg0, 0)),
7136 fold_build1 (CONJ_EXPR, type,
7137 TREE_OPERAND (arg0, 1)));
7138 else if (TREE_CODE (arg0) == CONJ_EXPR)
7139 return TREE_OPERAND (arg0, 0);
7140 return NULL_TREE;
7141
7142 case BIT_NOT_EXPR:
7143 if (TREE_CODE (arg0) == INTEGER_CST)
7144 return fold_not_const (arg0, type);
7145 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7146 return TREE_OPERAND (arg0, 0);
7147 /* Convert ~ (-A) to A - 1. */
7148 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7149 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7150 build_int_cst (type, 1));
7151 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7152 else if (INTEGRAL_TYPE_P (type)
7153 && ((TREE_CODE (arg0) == MINUS_EXPR
7154 && integer_onep (TREE_OPERAND (arg0, 1)))
7155 || (TREE_CODE (arg0) == PLUS_EXPR
7156 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7157 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7158 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7159 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7160 && (tem = fold_unary (BIT_NOT_EXPR, type,
7161 fold_convert (type,
7162 TREE_OPERAND (arg0, 0)))))
7163 return fold_build2 (BIT_XOR_EXPR, type, tem,
7164 fold_convert (type, TREE_OPERAND (arg0, 1)));
7165 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7166 && (tem = fold_unary (BIT_NOT_EXPR, type,
7167 fold_convert (type,
7168 TREE_OPERAND (arg0, 1)))))
7169 return fold_build2 (BIT_XOR_EXPR, type,
7170 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
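/* For example, ~(X ^ 0xff) becomes X ^ ~0xff: the BIT_NOT_EXPR of
   the constant operand folds to another constant, so the recursive
   fold_unary call above succeeds for that operand. */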
7171
7172 return NULL_TREE;
7173
7174 case TRUTH_NOT_EXPR:
7175 /* The argument to invert_truthvalue must have Boolean type. */
7176 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7177 arg0 = fold_convert (boolean_type_node, arg0);
7178
7179 /* Note that the operand of this must be an int
7180 and its values must be 0 or 1.
7181 ("true" is a fixed value perhaps depending on the language,
7182 but we don't handle values other than 1 correctly yet.) */
7183 tem = invert_truthvalue (arg0);
7184 /* Avoid infinite recursion. */
7185 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7186 return NULL_TREE;
7187 return fold_convert (type, tem);
7188
7189 case REALPART_EXPR:
7190 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7191 return NULL_TREE;
7192 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7193 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7194 TREE_OPERAND (arg0, 1));
7195 else if (TREE_CODE (arg0) == COMPLEX_CST)
7196 return TREE_REALPART (arg0);
7197 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7198 return fold_build2 (TREE_CODE (arg0), type,
7199 fold_build1 (REALPART_EXPR, type,
7200 TREE_OPERAND (arg0, 0)),
7201 fold_build1 (REALPART_EXPR, type,
7202 TREE_OPERAND (arg0, 1)));
7203 return NULL_TREE;
7204
7205 case IMAGPART_EXPR:
7206 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7207 return fold_convert (type, integer_zero_node);
7208 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7209 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7210 TREE_OPERAND (arg0, 0));
7211 else if (TREE_CODE (arg0) == COMPLEX_CST)
7212 return TREE_IMAGPART (arg0);
7213 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7214 return fold_build2 (TREE_CODE (arg0), type,
7215 fold_build1 (IMAGPART_EXPR, type,
7216 TREE_OPERAND (arg0, 0)),
7217 fold_build1 (IMAGPART_EXPR, type,
7218 TREE_OPERAND (arg0, 1)));
7219 return NULL_TREE;
7220
7221 default:
7222 return NULL_TREE;
7223 } /* switch (code) */
7224 }
7225
7226 /* Fold a binary expression of code CODE and type TYPE with operands
7227 OP0 and OP1. Return the folded expression if folding is
7228 successful. Otherwise, return NULL_TREE. */
7229
7230 tree
7231 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7232 {
7233 tree t1 = NULL_TREE;
7234 tree tem;
7235 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7236 enum tree_code_class kind = TREE_CODE_CLASS (code);
7237
7238 /* WINS will be nonzero when the switch is done
7239 if all operands are constant. */
7240 int wins = 1;
7241
7242 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7243 && TREE_CODE_LENGTH (code) == 2);
7244
7245 arg0 = op0;
7246 arg1 = op1;
7247
7248 if (arg0)
7249 {
7250 tree subop;
7251
7252 /* Strip any conversions that don't change the mode. This is
7253 safe for every expression, except for a comparison expression
7254 because its signedness is derived from its operands. So, in
7255 the latter case, only strip conversions that don't change the
7256 signedness.
7257
7258 Note that this is done as an internal manipulation within the
7259 constant folder, in order to find the simplest representation
7260 of the arguments so that their form can be studied. In any
7261 case, the appropriate type conversions should be put back into
7262 the tree that is returned by the constant folder. */
7263 if (kind == tcc_comparison)
7264 STRIP_SIGN_NOPS (arg0);
7265 else
7266 STRIP_NOPS (arg0);
7267
7268 if (TREE_CODE (arg0) == COMPLEX_CST)
7269 subop = TREE_REALPART (arg0);
7270 else
7271 subop = arg0;
7272
7273 if (TREE_CODE (subop) != INTEGER_CST
7274 && TREE_CODE (subop) != REAL_CST)
7275 /* Note that TREE_CONSTANT isn't enough:
7276 static var addresses are constant but we can't
7277 do arithmetic on them. */
7278 wins = 0;
7279 }
7280
7281 if (arg1)
7282 {
7283 tree subop;
7284
7285 /* Strip any conversions that don't change the mode. This is
7286 safe for every expression, except for a comparison expression
7287 because its signedness is derived from its operands. So, in
7288 the latter case, only strip conversions that don't change the
7289 signedness.
7290
7291 Note that this is done as an internal manipulation within the
7292 constant folder, in order to find the simplest representation
7293 of the arguments so that their form can be studied. In any
7294 case, the appropriate type conversions should be put back into
7295 the tree that is returned by the constant folder. */
7296 if (kind == tcc_comparison)
7297 STRIP_SIGN_NOPS (arg1);
7298 else
7299 STRIP_NOPS (arg1);
7300
7301 if (TREE_CODE (arg1) == COMPLEX_CST)
7302 subop = TREE_REALPART (arg1);
7303 else
7304 subop = arg1;
7305
7306 if (TREE_CODE (subop) != INTEGER_CST
7307 && TREE_CODE (subop) != REAL_CST)
7308 /* Note that TREE_CONSTANT isn't enough:
7309 static var addresses are constant but we can't
7310 do arithmetic on them. */
7311 wins = 0;
7312 }
7313
7314 /* If this is a commutative operation, and ARG0 is a constant, move it
7315 to ARG1 to reduce the number of tests below. */
7316 if (commutative_tree_code (code)
7317 && tree_swap_operands_p (arg0, arg1, true))
7318 return fold_build2 (code, type, op1, op0);
7319
7320 /* Now WINS is set as described above,
7321 ARG0 is the first operand of the expression being folded,
7322 and ARG1 is the second operand.
7323
7324 First check for cases where an arithmetic operation is applied to a
7325 compound, conditional, or comparison operation. Push the arithmetic
7326 operation inside the compound or conditional to see if any folding
7327 can then be done. Convert comparison to conditional for this purpose.
7328 This also optimizes non-constant cases that used to be done in
7329 expand_expr.
7330
7331 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
7332 where one of the operands is a truth value and the other is a truth
7333 value or a BIT_AND_EXPR with the constant 1. In that case, the
7334 code below would make the expression more complex. Change it to a
7335 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7336 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7337
7338 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7339 || code == EQ_EXPR || code == NE_EXPR)
7340 && ((truth_value_p (TREE_CODE (arg0))
7341 && (truth_value_p (TREE_CODE (arg1))
7342 || (TREE_CODE (arg1) == BIT_AND_EXPR
7343 && integer_onep (TREE_OPERAND (arg1, 1)))))
7344 || (truth_value_p (TREE_CODE (arg1))
7345 && (truth_value_p (TREE_CODE (arg0))
7346 || (TREE_CODE (arg0) == BIT_AND_EXPR
7347 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7348 {
7349 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7350 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7351 : TRUTH_XOR_EXPR,
7352 boolean_type_node,
7353 fold_convert (boolean_type_node, arg0),
7354 fold_convert (boolean_type_node, arg1));
7355
7356 if (code == EQ_EXPR)
7357 tem = invert_truthvalue (tem);
7358
7359 return fold_convert (type, tem);
7360 }
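/* For example, (a < b) & (c < d) becomes the boolean TRUTH_AND_EXPR
   of the two comparisons, and (a < b) == (c < d) becomes the
   inversion of their TRUTH_XOR_EXPR, forms the truth-value folders
   below handle much better. */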
7361
7362 if (TREE_CODE_CLASS (code) == tcc_comparison
7363 && TREE_CODE (arg0) == COMPOUND_EXPR)
7364 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7365 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7366 else if (TREE_CODE_CLASS (code) == tcc_comparison
7367 && TREE_CODE (arg1) == COMPOUND_EXPR)
7368 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7369 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7370 else if (TREE_CODE_CLASS (code) == tcc_binary
7371 || TREE_CODE_CLASS (code) == tcc_comparison)
7372 {
7373 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7374 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7375 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7376 arg1));
7377 if (TREE_CODE (arg1) == COMPOUND_EXPR
7378 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7379 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7380 fold_build2 (code, type,
7381 arg0, TREE_OPERAND (arg1, 1)));
7382
7383 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7384 {
7385 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7386 arg0, arg1,
7387 /*cond_first_p=*/1);
7388 if (tem != NULL_TREE)
7389 return tem;
7390 }
7391
7392 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7393 {
7394 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7395 arg1, arg0,
7396 /*cond_first_p=*/0);
7397 if (tem != NULL_TREE)
7398 return tem;
7399 }
7400 }
7401
7402 switch (code)
7403 {
7404 case PLUS_EXPR:
7405 /* A + (-B) -> A - B */
7406 if (TREE_CODE (arg1) == NEGATE_EXPR)
7407 return fold_build2 (MINUS_EXPR, type,
7408 fold_convert (type, arg0),
7409 fold_convert (type, TREE_OPERAND (arg1, 0)));
7410 /* (-A) + B -> B - A */
7411 if (TREE_CODE (arg0) == NEGATE_EXPR
7412 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7413 return fold_build2 (MINUS_EXPR, type,
7414 fold_convert (type, arg1),
7415 fold_convert (type, TREE_OPERAND (arg0, 0)));
7416 /* Convert ~A + 1 to -A. */
7417 if (INTEGRAL_TYPE_P (type)
7418 && TREE_CODE (arg0) == BIT_NOT_EXPR
7419 && integer_onep (arg1))
7420 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7421
7422 if (TREE_CODE (type) == COMPLEX_TYPE)
7423 {
7424 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7425 if (tem)
7426 return tem;
7427 }
7428
7429 if (! FLOAT_TYPE_P (type))
7430 {
7431 if (integer_zerop (arg1))
7432 return non_lvalue (fold_convert (type, arg0));
7433
7434 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7435 with a constant, and the two constants have no bits in common,
7436 we should treat this as a BIT_IOR_EXPR since this may produce more
7437 simplifications. */
7438 if (TREE_CODE (arg0) == BIT_AND_EXPR
7439 && TREE_CODE (arg1) == BIT_AND_EXPR
7440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7441 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7442 && integer_zerop (const_binop (BIT_AND_EXPR,
7443 TREE_OPERAND (arg0, 1),
7444 TREE_OPERAND (arg1, 1), 0)))
7445 {
7446 code = BIT_IOR_EXPR;
7447 goto bit_ior;
7448 }
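/* For example, (X & 0xf0) + (Y & 0x0f) can produce no carries
   between the two masked halves, so it equals
   (X & 0xf0) | (Y & 0x0f) and is handled by the BIT_IOR_EXPR code. */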
7449
7450 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7451 (plus (plus (mult) (mult)) (foo)) so that we can
7452 take advantage of the factoring cases below. */
7453 if (((TREE_CODE (arg0) == PLUS_EXPR
7454 || TREE_CODE (arg0) == MINUS_EXPR)
7455 && TREE_CODE (arg1) == MULT_EXPR)
7456 || ((TREE_CODE (arg1) == PLUS_EXPR
7457 || TREE_CODE (arg1) == MINUS_EXPR)
7458 && TREE_CODE (arg0) == MULT_EXPR))
7459 {
7460 tree parg0, parg1, parg, marg;
7461 enum tree_code pcode;
7462
7463 if (TREE_CODE (arg1) == MULT_EXPR)
7464 parg = arg0, marg = arg1;
7465 else
7466 parg = arg1, marg = arg0;
7467 pcode = TREE_CODE (parg);
7468 parg0 = TREE_OPERAND (parg, 0);
7469 parg1 = TREE_OPERAND (parg, 1);
7470 STRIP_NOPS (parg0);
7471 STRIP_NOPS (parg1);
7472
7473 if (TREE_CODE (parg0) == MULT_EXPR
7474 && TREE_CODE (parg1) != MULT_EXPR)
7475 return fold_build2 (pcode, type,
7476 fold_build2 (PLUS_EXPR, type,
7477 fold_convert (type, parg0),
7478 fold_convert (type, marg)),
7479 fold_convert (type, parg1));
7480 if (TREE_CODE (parg0) != MULT_EXPR
7481 && TREE_CODE (parg1) == MULT_EXPR)
7482 return fold_build2 (PLUS_EXPR, type,
7483 fold_convert (type, parg0),
7484 fold_build2 (pcode, type,
7485 fold_convert (type, marg),
7486 fold_convert (type,
7487 parg1)));
7488 }
7489
7490 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7491 {
7492 tree arg00, arg01, arg10, arg11;
7493 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7494
7495 /* (A * C) + (B * C) -> (A+B) * C.
7496 We are most concerned about the case where C is a constant,
7497 but other combinations show up during loop reduction. Since
7498 it is not difficult, try all four possibilities. */
7499
7500 arg00 = TREE_OPERAND (arg0, 0);
7501 arg01 = TREE_OPERAND (arg0, 1);
7502 arg10 = TREE_OPERAND (arg1, 0);
7503 arg11 = TREE_OPERAND (arg1, 1);
7504 same = NULL_TREE;
7505
7506 if (operand_equal_p (arg01, arg11, 0))
7507 same = arg01, alt0 = arg00, alt1 = arg10;
7508 else if (operand_equal_p (arg00, arg10, 0))
7509 same = arg00, alt0 = arg01, alt1 = arg11;
7510 else if (operand_equal_p (arg00, arg11, 0))
7511 same = arg00, alt0 = arg01, alt1 = arg10;
7512 else if (operand_equal_p (arg01, arg10, 0))
7513 same = arg01, alt0 = arg00, alt1 = arg11;
7514
7515 /* No identical multiplicands; see if we can find a common
7516 power-of-two factor in non-power-of-two multiplies. This
7517 can help in multi-dimensional array access. */
7518 else if (TREE_CODE (arg01) == INTEGER_CST
7519 && TREE_CODE (arg11) == INTEGER_CST
7520 && TREE_INT_CST_HIGH (arg01) == 0
7521 && TREE_INT_CST_HIGH (arg11) == 0)
7522 {
7523 HOST_WIDE_INT int01, int11, tmp;
7524 int01 = TREE_INT_CST_LOW (arg01);
7525 int11 = TREE_INT_CST_LOW (arg11);
7526
7527 /* Move min of absolute values to int11. */
7528 if ((int01 >= 0 ? int01 : -int01)
7529 < (int11 >= 0 ? int11 : -int11))
7530 {
7531 tmp = int01, int01 = int11, int11 = tmp;
7532 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7533 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7534 }
7535
7536 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7537 {
7538 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7539 build_int_cst (NULL_TREE,
7540 int01 / int11));
7541 alt1 = arg10;
7542 same = arg11;
7543 }
7544 }
7545
7546 if (same)
7547 return fold_build2 (MULT_EXPR, type,
7548 fold_build2 (PLUS_EXPR, type,
7549 fold_convert (type, alt0),
7550 fold_convert (type, alt1)),
7551 fold_convert (type, same));
7552 }
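/* For example, A*C + B*C becomes (A + B) * C, and the power-of-two
   case above turns I*12 + J*4 into (I*3 + J) * 4. */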
7553
7554 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7555 of the array. The loop optimizer sometimes produces this type of
7556 expression. */
7557 if (TREE_CODE (arg0) == ADDR_EXPR)
7558 {
7559 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7560 if (tem)
7561 return fold_convert (type, fold (tem));
7562 }
7563 else if (TREE_CODE (arg1) == ADDR_EXPR)
7564 {
7565 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7566 if (tem)
7567 return fold_convert (type, fold (tem));
7568 }
7569 }
7570 else
7571 {
7572 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7573 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7574 return non_lvalue (fold_convert (type, arg0));
7575
7576 /* Likewise if the operands are reversed. */
7577 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7578 return non_lvalue (fold_convert (type, arg1));
7579
7580 /* Convert X + -C into X - C. */
7581 if (TREE_CODE (arg1) == REAL_CST
7582 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7583 {
7584 tem = fold_negate_const (arg1, type);
7585 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7586 return fold_build2 (MINUS_EXPR, type,
7587 fold_convert (type, arg0),
7588 fold_convert (type, tem));
7589 }
7590
7591 if (flag_unsafe_math_optimizations
7592 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7593 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7594 && (tem = distribute_real_division (code, type, arg0, arg1)))
7595 return tem;
7596
7597 /* Convert x+x into x*2.0. */
7598 if (operand_equal_p (arg0, arg1, 0)
7599 && SCALAR_FLOAT_TYPE_P (type))
7600 return fold_build2 (MULT_EXPR, type, arg0,
7601 build_real (type, dconst2));
7602
7603 /* Convert x*c+x into x*(c+1). */
7604 if (flag_unsafe_math_optimizations
7605 && TREE_CODE (arg0) == MULT_EXPR
7606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7607 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7608 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7609 {
7610 REAL_VALUE_TYPE c;
7611
7612 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7613 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7614 return fold_build2 (MULT_EXPR, type, arg1,
7615 build_real (type, c));
7616 }
7617
7618 /* Convert x+x*c into x*(c+1). */
7619 if (flag_unsafe_math_optimizations
7620 && TREE_CODE (arg1) == MULT_EXPR
7621 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7622 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7623 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7624 {
7625 REAL_VALUE_TYPE c;
7626
7627 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7628 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7629 return fold_build2 (MULT_EXPR, type, arg0,
7630 build_real (type, c));
7631 }
7632
7633 /* Convert x*c1+x*c2 into x*(c1+c2). */
7634 if (flag_unsafe_math_optimizations
7635 && TREE_CODE (arg0) == MULT_EXPR
7636 && TREE_CODE (arg1) == MULT_EXPR
7637 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7638 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7639 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7640 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7641 && operand_equal_p (TREE_OPERAND (arg0, 0),
7642 TREE_OPERAND (arg1, 0), 0))
7643 {
7644 REAL_VALUE_TYPE c1, c2;
7645
7646 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7647 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7648 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7649 return fold_build2 (MULT_EXPR, type,
7650 TREE_OPERAND (arg0, 0),
7651 build_real (type, c1));
7652 }
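/* For example, X + X becomes X*2.0 unconditionally, since doubling
   is exact, while with -funsafe-math-optimizations X*2.0 + X becomes
   X*3.0 and X*2.0 + X*3.0 becomes X*5.0. */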
7653 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7654 if (flag_unsafe_math_optimizations
7655 && TREE_CODE (arg1) == PLUS_EXPR
7656 && TREE_CODE (arg0) != MULT_EXPR)
7657 {
7658 tree tree10 = TREE_OPERAND (arg1, 0);
7659 tree tree11 = TREE_OPERAND (arg1, 1);
7660 if (TREE_CODE (tree11) == MULT_EXPR
7661 && TREE_CODE (tree10) == MULT_EXPR)
7662 {
7663 tree tree0;
7664 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7665 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7666 }
7667 }
7668 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7669 if (flag_unsafe_math_optimizations
7670 && TREE_CODE (arg0) == PLUS_EXPR
7671 && TREE_CODE (arg1) != MULT_EXPR)
7672 {
7673 tree tree00 = TREE_OPERAND (arg0, 0);
7674 tree tree01 = TREE_OPERAND (arg0, 1);
7675 if (TREE_CODE (tree01) == MULT_EXPR
7676 && TREE_CODE (tree00) == MULT_EXPR)
7677 {
7678 tree tree0;
7679 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7680 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7681 }
7682 }
7683 }
7684
7685 bit_rotate:
7686 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
7687 of A, is a rotate of A by C1 bits. */
7688 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
7689 of A, is a rotate of A by B bits. */
7690 {
7691 enum tree_code code0, code1;
7692 code0 = TREE_CODE (arg0);
7693 code1 = TREE_CODE (arg1);
7694 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7695 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7696 && operand_equal_p (TREE_OPERAND (arg0, 0),
7697 TREE_OPERAND (arg1, 0), 0)
7698 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7699 {
7700 tree tree01, tree11;
7701 enum tree_code code01, code11;
7702
7703 tree01 = TREE_OPERAND (arg0, 1);
7704 tree11 = TREE_OPERAND (arg1, 1);
7705 STRIP_NOPS (tree01);
7706 STRIP_NOPS (tree11);
7707 code01 = TREE_CODE (tree01);
7708 code11 = TREE_CODE (tree11);
7709 if (code01 == INTEGER_CST
7710 && code11 == INTEGER_CST
7711 && TREE_INT_CST_HIGH (tree01) == 0
7712 && TREE_INT_CST_HIGH (tree11) == 0
7713 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7714 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7715 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7716 code0 == LSHIFT_EXPR ? tree01 : tree11);
7717 else if (code11 == MINUS_EXPR)
7718 {
7719 tree tree110, tree111;
7720 tree110 = TREE_OPERAND (tree11, 0);
7721 tree111 = TREE_OPERAND (tree11, 1);
7722 STRIP_NOPS (tree110);
7723 STRIP_NOPS (tree111);
7724 if (TREE_CODE (tree110) == INTEGER_CST
7725 && 0 == compare_tree_int (tree110,
7726 TYPE_PRECISION
7727 (TREE_TYPE (TREE_OPERAND
7728 (arg0, 0))))
7729 && operand_equal_p (tree01, tree111, 0))
7730 return build2 ((code0 == LSHIFT_EXPR
7731 ? LROTATE_EXPR
7732 : RROTATE_EXPR),
7733 type, TREE_OPERAND (arg0, 0), tree01);
7734 }
7735 else if (code01 == MINUS_EXPR)
7736 {
7737 tree tree010, tree011;
7738 tree010 = TREE_OPERAND (tree01, 0);
7739 tree011 = TREE_OPERAND (tree01, 1);
7740 STRIP_NOPS (tree010);
7741 STRIP_NOPS (tree011);
7742 if (TREE_CODE (tree010) == INTEGER_CST
7743 && 0 == compare_tree_int (tree010,
7744 TYPE_PRECISION
7745 (TREE_TYPE (TREE_OPERAND
7746 (arg0, 0))))
7747 && operand_equal_p (tree11, tree011, 0))
7748 return build2 ((code0 != LSHIFT_EXPR
7749 ? LROTATE_EXPR
7750 : RROTATE_EXPR),
7751 type, TREE_OPERAND (arg0, 0), tree11);
7752 }
7753 }
7754 }
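/* For example, assuming a 32-bit unsigned int X,
   (X << N) + (X >> (32 - N)) matches the second pattern and folds to
   a single LROTATE_EXPR; the same idiom spelled with | instead of +
   reaches this label from the BIT_IOR_EXPR case. */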
7755
7756 associate:
7757 /* In most languages, we can't reassociate floating-point operations
7758 across parentheses. Rather than remember where the parentheses were,
7759 we don't associate floats at all, unless the user has specified
7760 -funsafe-math-optimizations. */
7761
7762 if (! wins
7763 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7764 {
7765 tree var0, con0, lit0, minus_lit0;
7766 tree var1, con1, lit1, minus_lit1;
7767
7768 /* Split both trees into variables, constants, and literals. Then
7769 associate each group together, the constants with literals,
7770 then the result with variables. This increases the chances of
7771 literals being recombined later and of generating relocatable
7772 expressions for the sum of a constant and literal. */
7773 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7774 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7775 code == MINUS_EXPR);
7776
7777 /* Only do something if we found more than two objects. Otherwise,
7778 nothing has changed and we risk infinite recursion. */
7779 if (2 < ((var0 != 0) + (var1 != 0)
7780 + (con0 != 0) + (con1 != 0)
7781 + (lit0 != 0) + (lit1 != 0)
7782 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7783 {
7784 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7785 if (code == MINUS_EXPR)
7786 code = PLUS_EXPR;
7787
7788 var0 = associate_trees (var0, var1, code, type);
7789 con0 = associate_trees (con0, con1, code, type);
7790 lit0 = associate_trees (lit0, lit1, code, type);
7791 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7792
7793 /* Preserve the MINUS_EXPR if the negative part of the literal is
7794 greater than the positive part. Otherwise, the multiplicative
7795 folding code (i.e. extract_muldiv) may be fooled when
7796 unsigned constants are subtracted, as in the following
7797 example: ((X*2 + 4) - 8U)/2. */
7798 if (minus_lit0 && lit0)
7799 {
7800 if (TREE_CODE (lit0) == INTEGER_CST
7801 && TREE_CODE (minus_lit0) == INTEGER_CST
7802 && tree_int_cst_lt (lit0, minus_lit0))
7803 {
7804 minus_lit0 = associate_trees (minus_lit0, lit0,
7805 MINUS_EXPR, type);
7806 lit0 = 0;
7807 }
7808 else
7809 {
7810 lit0 = associate_trees (lit0, minus_lit0,
7811 MINUS_EXPR, type);
7812 minus_lit0 = 0;
7813 }
7814 }
7815 if (minus_lit0)
7816 {
7817 if (con0 == 0)
7818 return fold_convert (type,
7819 associate_trees (var0, minus_lit0,
7820 MINUS_EXPR, type));
7821 else
7822 {
7823 con0 = associate_trees (con0, minus_lit0,
7824 MINUS_EXPR, type);
7825 return fold_convert (type,
7826 associate_trees (var0, con0,
7827 PLUS_EXPR, type));
7828 }
7829 }
7830
7831 con0 = associate_trees (con0, lit0, code, type);
7832 return fold_convert (type, associate_trees (var0, con0,
7833 code, type));
7834 }
7835 }
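/* For example, (X + 1) + 2 splits into the variable X and the
   literals 1 and 2, which associate to X + 3; likewise
   (X - 4) + (Y + 5) becomes (X + Y) + 1. */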
7836
7837 binary:
7838 if (wins)
7839 t1 = const_binop (code, arg0, arg1, 0);
7840 if (t1 != NULL_TREE)
7841 {
7842 /* The return value should always have
7843 the same type as the original expression. */
7844 if (TREE_TYPE (t1) != type)
7845 t1 = fold_convert (type, t1);
7846
7847 return t1;
7848 }
7849 return NULL_TREE;
7850
7851 case MINUS_EXPR:
7852 /* A - (-B) -> A + B */
7853 if (TREE_CODE (arg1) == NEGATE_EXPR)
7854 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7855 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7856 if (TREE_CODE (arg0) == NEGATE_EXPR
7857 && (FLOAT_TYPE_P (type)
7858 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7859 && negate_expr_p (arg1)
7860 && reorder_operands_p (arg0, arg1))
7861 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7862 TREE_OPERAND (arg0, 0));
7863 /* Convert -A - 1 to ~A. */
7864 if (INTEGRAL_TYPE_P (type)
7865 && TREE_CODE (arg0) == NEGATE_EXPR
7866 && integer_onep (arg1))
7867 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7868
7869 /* Convert -1 - A to ~A. */
7870 if (INTEGRAL_TYPE_P (type)
7871 && integer_all_onesp (arg0))
7872 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7873
7874 if (TREE_CODE (type) == COMPLEX_TYPE)
7875 {
7876 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7877 if (tem)
7878 return tem;
7879 }
7880
7881 if (! FLOAT_TYPE_P (type))
7882 {
7883 if (! wins && integer_zerop (arg0))
7884 return negate_expr (fold_convert (type, arg1));
7885 if (integer_zerop (arg1))
7886 return non_lvalue (fold_convert (type, arg0));
7887
7888 /* Fold A - (A & B) into ~B & A. */
7889 if (!TREE_SIDE_EFFECTS (arg0)
7890 && TREE_CODE (arg1) == BIT_AND_EXPR)
7891 {
7892 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7893 return fold_build2 (BIT_AND_EXPR, type,
7894 fold_build1 (BIT_NOT_EXPR, type,
7895 TREE_OPERAND (arg1, 0)),
7896 arg0);
7897 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7898 return fold_build2 (BIT_AND_EXPR, type,
7899 fold_build1 (BIT_NOT_EXPR, type,
7900 TREE_OPERAND (arg1, 1)),
7901 arg0);
7902 }
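/* For example, X - (X & 7) clears the low three bits of X, which is
   what the resulting ~7 & X computes directly. */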
7903
7904 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7905 any power of 2 minus 1. */
7906 if (TREE_CODE (arg0) == BIT_AND_EXPR
7907 && TREE_CODE (arg1) == BIT_AND_EXPR
7908 && operand_equal_p (TREE_OPERAND (arg0, 0),
7909 TREE_OPERAND (arg1, 0), 0))
7910 {
7911 tree mask0 = TREE_OPERAND (arg0, 1);
7912 tree mask1 = TREE_OPERAND (arg1, 1);
7913 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7914
7915 if (operand_equal_p (tem, mask1, 0))
7916 {
7917 tem = fold_build2 (BIT_XOR_EXPR, type,
7918 TREE_OPERAND (arg0, 0), mask1);
7919 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7920 }
7921 }
7922 }
7923
7924 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7925 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7926 return non_lvalue (fold_convert (type, arg0));
7927
7928 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7929 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7930 (-ARG1 + ARG0) reduces to -ARG1. */
7931 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7932 return negate_expr (fold_convert (type, arg1));
7933
7934 /* Fold &x - &x. This can happen from &x.foo - &x.
7935 This is unsafe for certain floats even in non-IEEE formats.
7936 In IEEE, it is unsafe because it does the wrong thing for NaNs.
7937 Also note that operand_equal_p is always false if an operand
7938 is volatile. */
7939
7940 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7941 && operand_equal_p (arg0, arg1, 0))
7942 return fold_convert (type, integer_zero_node);
7943
7944 /* A - B -> A + (-B) if B is easily negatable. */
7945 if (!wins && negate_expr_p (arg1)
7946 && ((FLOAT_TYPE_P (type)
7947 /* Avoid this transformation if B is a positive REAL_CST. */
7948 && (TREE_CODE (arg1) != REAL_CST
7949 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7950 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7951 return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));
7952
7953 /* Try folding difference of addresses. */
7954 {
7955 HOST_WIDE_INT diff;
7956
7957 if ((TREE_CODE (arg0) == ADDR_EXPR
7958 || TREE_CODE (arg1) == ADDR_EXPR)
7959 && ptr_difference_const (arg0, arg1, &diff))
7960 return build_int_cst_type (type, diff);
7961 }
7962
7963 /* Fold &a[i] - &a[j] to i-j. */
7964 if (TREE_CODE (arg0) == ADDR_EXPR
7965 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7966 && TREE_CODE (arg1) == ADDR_EXPR
7967 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7968 {
7969 tree aref0 = TREE_OPERAND (arg0, 0);
7970 tree aref1 = TREE_OPERAND (arg1, 0);
7971 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7972 TREE_OPERAND (aref1, 0), 0))
7973 {
7974 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7975 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7976 tree esz = array_ref_element_size (aref0);
7977 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7978 return fold_build2 (MULT_EXPR, type, diff,
7979 fold_convert (type, esz));
7980
7981 }
7982 }
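/* For example, &a[i] - &a[j], which at this level is a byte
   difference, becomes (i - j) multiplied by the element size of a. */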
7983
7984 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7985 of the array. The loop optimizer sometimes produces this type of
7986 expression. */
7987 if (TREE_CODE (arg0) == ADDR_EXPR)
7988 {
7989 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7990 if (tem)
7991 return fold_convert (type, fold (tem));
7992 }
7993
7994 if (flag_unsafe_math_optimizations
7995 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7996 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7997 && (tem = distribute_real_division (code, type, arg0, arg1)))
7998 return tem;
7999
8000 if (TREE_CODE (arg0) == MULT_EXPR
8001 && TREE_CODE (arg1) == MULT_EXPR
8002 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8003 {
8004 /* (A * C) - (B * C) -> (A-B) * C. */
8005 if (operand_equal_p (TREE_OPERAND (arg0, 1),
8006 TREE_OPERAND (arg1, 1), 0))
8007 return fold_build2 (MULT_EXPR, type,
8008 fold_build2 (MINUS_EXPR, type,
8009 TREE_OPERAND (arg0, 0),
8010 TREE_OPERAND (arg1, 0)),
8011 TREE_OPERAND (arg0, 1));
8012 /* (A * C1) - (A * C2) -> A * (C1-C2). */
8013 if (operand_equal_p (TREE_OPERAND (arg0, 0),
8014 TREE_OPERAND (arg1, 0), 0))
8015 return fold_build2 (MULT_EXPR, type,
8016 TREE_OPERAND (arg0, 0),
8017 fold_build2 (MINUS_EXPR, type,
8018 TREE_OPERAND (arg0, 1),
8019 TREE_OPERAND (arg1, 1)));
8020 }
8021
8022 goto associate;
8023
8024 case MULT_EXPR:
8025 /* (-A) * (-B) -> A * B */
8026 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8027 return fold_build2 (MULT_EXPR, type,
8028 TREE_OPERAND (arg0, 0),
8029 negate_expr (arg1));
8030 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8031 return fold_build2 (MULT_EXPR, type,
8032 negate_expr (arg0),
8033 TREE_OPERAND (arg1, 0));
8034
8035 if (TREE_CODE (type) == COMPLEX_TYPE)
8036 {
8037 tem = fold_complex_mult (type, arg0, arg1);
8038 if (tem)
8039 return tem;
8040 }
8041
8042 if (! FLOAT_TYPE_P (type))
8043 {
8044 if (integer_zerop (arg1))
8045 return omit_one_operand (type, arg1, arg0);
8046 if (integer_onep (arg1))
8047 return non_lvalue (fold_convert (type, arg0));
8048 /* Transform x * -1 into -x. */
8049 if (integer_all_onesp (arg1))
8050 return fold_convert (type, negate_expr (arg0));
8051
8052 /* (a * (1 << b)) is (a << b) */
8053 if (TREE_CODE (arg1) == LSHIFT_EXPR
8054 && integer_onep (TREE_OPERAND (arg1, 0)))
8055 return fold_build2 (LSHIFT_EXPR, type, arg0,
8056 TREE_OPERAND (arg1, 1));
8057 if (TREE_CODE (arg0) == LSHIFT_EXPR
8058 && integer_onep (TREE_OPERAND (arg0, 0)))
8059 return fold_build2 (LSHIFT_EXPR, type, arg1,
8060 TREE_OPERAND (arg0, 1));
8061
8062 if (TREE_CODE (arg1) == INTEGER_CST
8063 && 0 != (tem = extract_muldiv (op0,
8064 fold_convert (type, arg1),
8065 code, NULL_TREE)))
8066 return fold_convert (type, tem);
8067
8068 }
8069 else
8070 {
8071 /* Maybe fold x * 0 to 0. The expressions aren't the same
8072 when x is NaN, since x * 0 is also NaN. Nor are they the
8073 same in modes with signed zeros, since multiplying a
8074 negative value by 0 gives -0, not +0. */
8075 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8076 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8077 && real_zerop (arg1))
8078 return omit_one_operand (type, arg1, arg0);
8079 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8080 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8081 && real_onep (arg1))
8082 return non_lvalue (fold_convert (type, arg0));
8083
8084 /* Transform x * -1.0 into -x. */
8085 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8086 && real_minus_onep (arg1))
8087 return fold_convert (type, negate_expr (arg0));
8088
8089 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8090 if (flag_unsafe_math_optimizations
8091 && TREE_CODE (arg0) == RDIV_EXPR
8092 && TREE_CODE (arg1) == REAL_CST
8093 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8094 {
8095 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8096 arg1, 0);
8097 if (tem)
8098 return fold_build2 (RDIV_EXPR, type, tem,
8099 TREE_OPERAND (arg0, 1));
8100 }
8101
8102 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8103 if (operand_equal_p (arg0, arg1, 0))
8104 {
8105 tree tem = fold_strip_sign_ops (arg0);
8106 if (tem != NULL_TREE)
8107 {
8108 tem = fold_convert (type, tem);
8109 return fold_build2 (MULT_EXPR, type, tem, tem);
8110 }
8111 }
8112
8113 if (flag_unsafe_math_optimizations)
8114 {
8115 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8116 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8117
8118 /* Optimizations of root(...)*root(...). */
8119 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8120 {
8121 tree rootfn, arg, arglist;
8122 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8123 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8124
8125 /* Optimize sqrt(x)*sqrt(x) as x. */
8126 if (BUILTIN_SQRT_P (fcode0)
8127 && operand_equal_p (arg00, arg10, 0)
8128 && ! HONOR_SNANS (TYPE_MODE (type)))
8129 return arg00;
8130
8131 /* Optimize root(x)*root(y) as root(x*y). */
8132 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8133 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8134 arglist = build_tree_list (NULL_TREE, arg);
8135 return build_function_call_expr (rootfn, arglist);
8136 }
8137
8138 /* Optimize expN(x)*expN(y) as expN(x+y). */
8139 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8140 {
8141 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8142 tree arg = fold_build2 (PLUS_EXPR, type,
8143 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8144 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8145 tree arglist = build_tree_list (NULL_TREE, arg);
8146 return build_function_call_expr (expfn, arglist);
8147 }
8148
8149 /* Optimizations of pow(...)*pow(...). */
8150 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8151 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8152 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8153 {
8154 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8155 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8156 1)));
8157 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8158 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8159 1)));
8160
8161 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8162 if (operand_equal_p (arg01, arg11, 0))
8163 {
8164 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8165 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8166 tree arglist = tree_cons (NULL_TREE, arg,
8167 build_tree_list (NULL_TREE,
8168 arg01));
8169 return build_function_call_expr (powfn, arglist);
8170 }
8171
8172 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8173 if (operand_equal_p (arg00, arg10, 0))
8174 {
8175 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8176 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8177 tree arglist = tree_cons (NULL_TREE, arg00,
8178 build_tree_list (NULL_TREE,
8179 arg));
8180 return build_function_call_expr (powfn, arglist);
8181 }
8182 }
8183
8184 /* Optimize tan(x)*cos(x) as sin(x). */
8185 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8186 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8187 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8188 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8189 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8190 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8191 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8192 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8193 {
8194 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8195
8196 if (sinfn != NULL_TREE)
8197 return build_function_call_expr (sinfn,
8198 TREE_OPERAND (arg0, 1));
8199 }
8200
8201 /* Optimize x*pow(x,c) as pow(x,c+1). */
8202 if (fcode1 == BUILT_IN_POW
8203 || fcode1 == BUILT_IN_POWF
8204 || fcode1 == BUILT_IN_POWL)
8205 {
8206 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8207 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8208 1)));
8209 if (TREE_CODE (arg11) == REAL_CST
8210 && ! TREE_CONSTANT_OVERFLOW (arg11)
8211 && operand_equal_p (arg0, arg10, 0))
8212 {
8213 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8214 REAL_VALUE_TYPE c;
8215 tree arg, arglist;
8216
8217 c = TREE_REAL_CST (arg11);
8218 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8219 arg = build_real (type, c);
8220 arglist = build_tree_list (NULL_TREE, arg);
8221 arglist = tree_cons (NULL_TREE, arg0, arglist);
8222 return build_function_call_expr (powfn, arglist);
8223 }
8224 }
8225
8226 /* Optimize pow(x,c)*x as pow(x,c+1). */
8227 if (fcode0 == BUILT_IN_POW
8228 || fcode0 == BUILT_IN_POWF
8229 || fcode0 == BUILT_IN_POWL)
8230 {
8231 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8232 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8233 1)));
8234 if (TREE_CODE (arg01) == REAL_CST
8235 && ! TREE_CONSTANT_OVERFLOW (arg01)
8236 && operand_equal_p (arg1, arg00, 0))
8237 {
8238 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8239 REAL_VALUE_TYPE c;
8240 tree arg, arglist;
8241
8242 c = TREE_REAL_CST (arg01);
8243 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8244 arg = build_real (type, c);
8245 arglist = build_tree_list (NULL_TREE, arg);
8246 arglist = tree_cons (NULL_TREE, arg1, arglist);
8247 return build_function_call_expr (powfn, arglist);
8248 }
8249 }
8250
8251 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8252 if (! optimize_size
8253 && operand_equal_p (arg0, arg1, 0))
8254 {
8255 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8256
8257 if (powfn)
8258 {
8259 tree arg = build_real (type, dconst2);
8260 tree arglist = build_tree_list (NULL_TREE, arg);
8261 arglist = tree_cons (NULL_TREE, arg0, arglist);
8262 return build_function_call_expr (powfn, arglist);
8263 }
8264 }
8265 }
8266 }
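/* For example, with -funsafe-math-optimizations the cases above
   rewrite sqrt(x)*sqrt(y) as sqrt(x*y), exp(x)*exp(y) as exp(x+y),
   pow(x,y)*pow(x,z) as pow(x,y+z) and tan(x)*cos(x) as sin(x). */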
8267 goto associate;
8268
8269 case BIT_IOR_EXPR:
8270 bit_ior:
8271 if (integer_all_onesp (arg1))
8272 return omit_one_operand (type, arg1, arg0);
8273 if (integer_zerop (arg1))
8274 return non_lvalue (fold_convert (type, arg0));
8275 if (operand_equal_p (arg0, arg1, 0))
8276 return non_lvalue (fold_convert (type, arg0));
8277
8278 /* ~X | X is -1. */
8279 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8280 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8281 {
8282 t1 = build_int_cst (type, -1);
8283 t1 = force_fit_type (t1, 0, false, false);
8284 return omit_one_operand (type, t1, arg1);
8285 }
8286
8287 /* X | ~X is -1. */
8288 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8289 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8290 {
8291 t1 = build_int_cst (type, -1);
8292 t1 = force_fit_type (t1, 0, false, false);
8293 return omit_one_operand (type, t1, arg0);
8294 }
8295
8296 t1 = distribute_bit_expr (code, type, arg0, arg1);
8297 if (t1 != NULL_TREE)
8298 return t1;
8299
8300 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8301
8302 This results in more efficient code for machines without a NAND
8303 instruction. Combine will canonicalize to the first form
8304 which will allow use of NAND instructions provided by the
8305 backend if they exist. */
8306 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8307 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8308 {
8309 return fold_build1 (BIT_NOT_EXPR, type,
8310 build2 (BIT_AND_EXPR, type,
8311 TREE_OPERAND (arg0, 0),
8312 TREE_OPERAND (arg1, 0)));
8313 }
8314
8315 /* See if this can be simplified into a rotate first. If that
8316 is unsuccessful continue in the association code. */
8317 goto bit_rotate;
8318
8319 case BIT_XOR_EXPR:
8320 if (integer_zerop (arg1))
8321 return non_lvalue (fold_convert (type, arg0));
8322 if (integer_all_onesp (arg1))
8323 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8324 if (operand_equal_p (arg0, arg1, 0))
8325 return omit_one_operand (type, integer_zero_node, arg0);
8326
8327 /* ~X ^ X is -1. */
8328 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8329 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8330 {
8331 t1 = build_int_cst (type, -1);
8332 t1 = force_fit_type (t1, 0, false, false);
8333 return omit_one_operand (type, t1, arg1);
8334 }
8335
8336 /* X ^ ~X is -1. */
8337 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8338 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8339 {
8340 t1 = build_int_cst (type, -1);
8341 t1 = force_fit_type (t1, 0, false, false);
8342 return omit_one_operand (type, t1, arg0);
8343 }
8344
8345 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8346 with a constant, and the two constants have no bits in common,
8347 we should treat this as a BIT_IOR_EXPR since this may produce more
8348 simplifications. */
8349 if (TREE_CODE (arg0) == BIT_AND_EXPR
8350 && TREE_CODE (arg1) == BIT_AND_EXPR
8351 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8352 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8353 && integer_zerop (const_binop (BIT_AND_EXPR,
8354 TREE_OPERAND (arg0, 1),
8355 TREE_OPERAND (arg1, 1), 0)))
8356 {
8357 code = BIT_IOR_EXPR;
8358 goto bit_ior;
8359 }
8360
8361 /* Convert ~X ^ ~Y to X ^ Y. */
8362 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8363 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8364 return fold_build2 (code, type,
8365 fold_convert (type, TREE_OPERAND (arg0, 0)),
8366 fold_convert (type, TREE_OPERAND (arg1, 0)));
8367
8368 /* See if this can be simplified into a rotate first. If that
8369 is unsuccessful continue in the association code. */
8370 goto bit_rotate;
8371
8372 case BIT_AND_EXPR:
8373 if (integer_all_onesp (arg1))
8374 return non_lvalue (fold_convert (type, arg0));
8375 if (integer_zerop (arg1))
8376 return omit_one_operand (type, arg1, arg0);
8377 if (operand_equal_p (arg0, arg1, 0))
8378 return non_lvalue (fold_convert (type, arg0));
8379
8380 /* ~X & X is always zero. */
8381 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8382 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8383 return omit_one_operand (type, integer_zero_node, arg1);
8384
8385 /* X & ~X is always zero. */
8386 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8387 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8388 return omit_one_operand (type, integer_zero_node, arg0);
8389
8390 t1 = distribute_bit_expr (code, type, arg0, arg1);
8391 if (t1 != NULL_TREE)
8392 return t1;
8393 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8394 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8395 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8396 {
8397 unsigned int prec
8398 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8399
8400 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8401 && (~TREE_INT_CST_LOW (arg1)
8402 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8403 return fold_convert (type, TREE_OPERAND (arg0, 0));
8404 }
8405
8406 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8407
8408 This results in more efficient code for machines without a NOR
8409 instruction. Combine will canonicalize to the first form
8410 which will allow use of NOR instructions provided by the
8411 backend if they exist. */
8412 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8413 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8414 {
8415 return fold_build1 (BIT_NOT_EXPR, type,
8416 build2 (BIT_IOR_EXPR, type,
8417 TREE_OPERAND (arg0, 0),
8418 TREE_OPERAND (arg1, 0)));
8419 }
8420
8421 goto associate;
8422
8423 case RDIV_EXPR:
8424 /* Don't touch a floating-point divide by zero unless the mode
8425 of the constant can represent infinity. */
8426 if (TREE_CODE (arg1) == REAL_CST
8427 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8428 && real_zerop (arg1))
8429 return NULL_TREE;
8430
8431 /* (-A) / (-B) -> A / B */
8432 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8433 return fold_build2 (RDIV_EXPR, type,
8434 TREE_OPERAND (arg0, 0),
8435 negate_expr (arg1));
8436 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8437 return fold_build2 (RDIV_EXPR, type,
8438 negate_expr (arg0),
8439 TREE_OPERAND (arg1, 0));
8440
8441 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8442 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8443 && real_onep (arg1))
8444 return non_lvalue (fold_convert (type, arg0));
8445
8446 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8447 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8448 && real_minus_onep (arg1))
8449 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8450
8451 /* If ARG1 is a constant, we can convert this to a multiply by the
8452 reciprocal. This does not have the same rounding properties,
8453 so only do this if -funsafe-math-optimizations. We can actually
8454 always safely do it if ARG1 is a power of two, but it's hard to
8455 tell if it is or not in a portable manner. */
8456 if (TREE_CODE (arg1) == REAL_CST)
8457 {
8458 if (flag_unsafe_math_optimizations
8459 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8460 arg1, 0)))
8461 return fold_build2 (MULT_EXPR, type, arg0, tem);
8462 /* Find the reciprocal if optimizing and the result is exact. */
8463 if (optimize)
8464 {
8465 REAL_VALUE_TYPE r;
8466 r = TREE_REAL_CST (arg1);
8467 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8468 {
8469 tem = build_real (type, r);
8470 return fold_build2 (MULT_EXPR, type, arg0, tem);
8471 }
8472 }
8473 }
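/* For example, X / 2.0 becomes X * 0.5 whenever we are optimizing,
   since 0.5 is the exact reciprocal of 2.0, whereas X / 3.0 becomes
   X * (1.0/3.0) only under -funsafe-math-optimizations, because that
   reciprocal is rounded. */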
8474 /* Convert A/B/C to A/(B*C). */
8475 if (flag_unsafe_math_optimizations
8476 && TREE_CODE (arg0) == RDIV_EXPR)
8477 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8478 fold_build2 (MULT_EXPR, type,
8479 TREE_OPERAND (arg0, 1), arg1));
8480
8481 /* Convert A/(B/C) to (A/B)*C. */
8482 if (flag_unsafe_math_optimizations
8483 && TREE_CODE (arg1) == RDIV_EXPR)
8484 return fold_build2 (MULT_EXPR, type,
8485 fold_build2 (RDIV_EXPR, type, arg0,
8486 TREE_OPERAND (arg1, 0)),
8487 TREE_OPERAND (arg1, 1));
8488
8489 /* Convert C1/(X*C2) into (C1/C2)/X. */
8490 if (flag_unsafe_math_optimizations
8491 && TREE_CODE (arg1) == MULT_EXPR
8492 && TREE_CODE (arg0) == REAL_CST
8493 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8494 {
8495 tree tem = const_binop (RDIV_EXPR, arg0,
8496 TREE_OPERAND (arg1, 1), 0);
8497 if (tem)
8498 return fold_build2 (RDIV_EXPR, type, tem,
8499 TREE_OPERAND (arg1, 0));
8500 }
8501
8502 if (TREE_CODE (type) == COMPLEX_TYPE)
8503 {
8504 tem = fold_complex_div (type, arg0, arg1, code);
8505 if (tem)
8506 return tem;
8507 }
8508
8509 if (flag_unsafe_math_optimizations)
8510 {
8511 enum built_in_function fcode = builtin_mathfn_code (arg1);
8512 /* Optimize x/expN(y) into x*expN(-y). */
8513 if (BUILTIN_EXPONENT_P (fcode))
8514 {
8515 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8516 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8517 tree arglist = build_tree_list (NULL_TREE,
8518 fold_convert (type, arg));
8519 arg1 = build_function_call_expr (expfn, arglist);
8520 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8521 }
8522
8523 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8524 if (fcode == BUILT_IN_POW
8525 || fcode == BUILT_IN_POWF
8526 || fcode == BUILT_IN_POWL)
8527 {
8528 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8529 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8530 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8531 tree neg11 = fold_convert (type, negate_expr (arg11));
8532 tree arglist = tree_cons (NULL_TREE, arg10,
8533 build_tree_list (NULL_TREE, neg11));
8534 arg1 = build_function_call_expr (powfn, arglist);
8535 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8536 }
8537 }
8538
8539 if (flag_unsafe_math_optimizations)
8540 {
8541 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8542 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8543
8544 /* Optimize sin(x)/cos(x) as tan(x). */
8545 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8546 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8547 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8548 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8549 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8550 {
8551 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8552
8553 if (tanfn != NULL_TREE)
8554 return build_function_call_expr (tanfn,
8555 TREE_OPERAND (arg0, 1));
8556 }
8557
8558 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8559 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8560 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8561 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8562 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8563 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8564 {
8565 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8566
8567 if (tanfn != NULL_TREE)
8568 {
8569 tree tmp = TREE_OPERAND (arg0, 1);
8570 tmp = build_function_call_expr (tanfn, tmp);
8571 return fold_build2 (RDIV_EXPR, type,
8572 build_real (type, dconst1), tmp);
8573 }
8574 }
8575
8576 /* Optimize pow(x,c)/x as pow(x,c-1). */
8577 if (fcode0 == BUILT_IN_POW
8578 || fcode0 == BUILT_IN_POWF
8579 || fcode0 == BUILT_IN_POWL)
8580 {
8581 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8582 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8583 if (TREE_CODE (arg01) == REAL_CST
8584 && ! TREE_CONSTANT_OVERFLOW (arg01)
8585 && operand_equal_p (arg1, arg00, 0))
8586 {
8587 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8588 REAL_VALUE_TYPE c;
8589 tree arg, arglist;
8590
8591 c = TREE_REAL_CST (arg01);
8592 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8593 arg = build_real (type, c);
8594 arglist = build_tree_list (NULL_TREE, arg);
8595 arglist = tree_cons (NULL_TREE, arg1, arglist);
8596 return build_function_call_expr (powfn, arglist);
8597 }
8598 }
8599 }
8600 goto binary;
8601
8602 case TRUNC_DIV_EXPR:
8603 case ROUND_DIV_EXPR:
8604 case FLOOR_DIV_EXPR:
8605 case CEIL_DIV_EXPR:
8606 case EXACT_DIV_EXPR:
8607 if (integer_onep (arg1))
8608 return non_lvalue (fold_convert (type, arg0));
8609 if (integer_zerop (arg1))
8610 return NULL_TREE;
8611 /* X / -1 is -X. */
8612 if (!TYPE_UNSIGNED (type)
8613 && TREE_CODE (arg1) == INTEGER_CST
8614 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8615 && TREE_INT_CST_HIGH (arg1) == -1)
8616 return fold_convert (type, negate_expr (arg0));
8617
8618 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8619 operation, EXACT_DIV_EXPR.
8620
8621 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8622 At one time others generated faster code; it's not clear if they
8623 still do after the last round of changes to the DIV code in expmed.c. */
8624 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8625 && multiple_of_p (type, arg0, arg1))
8626 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8627
8628 if (TREE_CODE (arg1) == INTEGER_CST
8629 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8630 return fold_convert (type, tem);
8631
8632 if (TREE_CODE (type) == COMPLEX_TYPE)
8633 {
8634 tem = fold_complex_div (type, arg0, arg1, code);
8635 if (tem)
8636 return tem;
8637 }
8638 goto binary;
8639
8640 case CEIL_MOD_EXPR:
8641 case FLOOR_MOD_EXPR:
8642 case ROUND_MOD_EXPR:
8643 case TRUNC_MOD_EXPR:
8644 /* X % 1 is always zero, but be sure to preserve any side
8645 effects in X. */
8646 if (integer_onep (arg1))
8647 return omit_one_operand (type, integer_zero_node, arg0);
8648
8649 /* For X % 0, return the expression unchanged so that we get the
8650 proper warnings and errors. */
8651 if (integer_zerop (arg1))
8652 return NULL_TREE;
8653
8654 /* 0 % X is always zero, but be sure to preserve any side
8655 effects in X. Place this after checking for X == 0. */
8656 if (integer_zerop (arg0))
8657 return omit_one_operand (type, integer_zero_node, arg1);
8658
8659 /* X % -1 is zero. */
8660 if (!TYPE_UNSIGNED (type)
8661 && TREE_CODE (arg1) == INTEGER_CST
8662 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8663 && TREE_INT_CST_HIGH (arg1) == -1)
8664 return omit_one_operand (type, integer_zero_node, arg0);
8665
8666 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8667 BIT_AND_EXPR, i.e. "X % C" into "X & (C - 1)". */
8668 if (code == TRUNC_MOD_EXPR
8669 && TYPE_UNSIGNED (type)
8670 && integer_pow2p (arg1))
8671 {
8672 unsigned HOST_WIDE_INT high, low;
8673 tree mask;
8674 int l;
8675
8676 l = tree_log2 (arg1);
8677 if (l >= HOST_BITS_PER_WIDE_INT)
8678 {
8679 high = ((unsigned HOST_WIDE_INT) 1
8680 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8681 low = -1;
8682 }
8683 else
8684 {
8685 high = 0;
8686 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8687 }
8688
8689 mask = build_int_cst_wide (type, low, high);
8690 return fold_build2 (BIT_AND_EXPR, type,
8691 fold_convert (type, arg0), mask);
8692 }
8693
8694 /* X % -C is the same as X % C. */
8695 if (code == TRUNC_MOD_EXPR
8696 && !TYPE_UNSIGNED (type)
8697 && TREE_CODE (arg1) == INTEGER_CST
8698 && !TREE_CONSTANT_OVERFLOW (arg1)
8699 && TREE_INT_CST_HIGH (arg1) < 0
8700 && !flag_trapv
8701 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8702 && !sign_bit_p (arg1, arg1))
8703 return fold_build2 (code, type, fold_convert (type, arg0),
8704 fold_convert (type, negate_expr (arg1)));
8705
8706 /* X % -Y is the same as X % Y. */
8707 if (code == TRUNC_MOD_EXPR
8708 && !TYPE_UNSIGNED (type)
8709 && TREE_CODE (arg1) == NEGATE_EXPR
8710 && !flag_trapv)
8711 return fold_build2 (code, type, fold_convert (type, arg0),
8712 fold_convert (type, TREE_OPERAND (arg1, 0)));
8713
8714 if (TREE_CODE (arg1) == INTEGER_CST
8715 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8716 return fold_convert (type, tem);
8717
8718 goto binary;
8719
8720 case LROTATE_EXPR:
8721 case RROTATE_EXPR:
8722 if (integer_all_onesp (arg0))
8723 return omit_one_operand (type, arg0, arg1);
8724 goto shift;
8725
8726 case RSHIFT_EXPR:
8727 /* Optimize -1 >> x for arithmetic right shifts. */
8728 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8729 return omit_one_operand (type, arg0, arg1);
8730 /* ... fall through ... */
8731
8732 case LSHIFT_EXPR:
8733 shift:
8734 if (integer_zerop (arg1))
8735 return non_lvalue (fold_convert (type, arg0));
8736 if (integer_zerop (arg0))
8737 return omit_one_operand (type, arg0, arg1);
8738
8739 /* Since a negative shift count is not well-defined,
8740 don't try to compute it in the compiler. */
8741 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8742 return NULL_TREE;
8743 /* Rewrite an LROTATE_EXPR by a constant into an
8744 RROTATE_EXPR by a new constant. */
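/* For instance, in a 32-bit type, X lrotate 5 becomes X rrotate 27:
   rotating left by N is the same as rotating right by (width - N). */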
8745 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8746 {
8747 tree tem = build_int_cst (NULL_TREE,
8748 GET_MODE_BITSIZE (TYPE_MODE (type)));
8749 tem = fold_convert (TREE_TYPE (arg1), tem);
8750 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8751 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8752 }
8753
8754 /* If we have a rotate of a bit operation with the rotate count and
8755 the second operand of the bit operation both constant,
8756 permute the two operations. */
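/* For instance, (X & C1) rrotate C2 becomes
   (X rrotate C2) & (C1 rrotate C2), where the rotated mask folds
   to a constant. */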
8757 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8758 && (TREE_CODE (arg0) == BIT_AND_EXPR
8759 || TREE_CODE (arg0) == BIT_IOR_EXPR
8760 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8761 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8762 return fold_build2 (TREE_CODE (arg0), type,
8763 fold_build2 (code, type,
8764 TREE_OPERAND (arg0, 0), arg1),
8765 fold_build2 (code, type,
8766 TREE_OPERAND (arg0, 1), arg1));
8767
8768 /* Two consecutive rotates adding up to the width of the mode can
8769 be ignored. */
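/* For instance, (X rrotate 10) rrotate 22 is simply X in a 32-bit
   type, since the two counts add up to the width of the mode. */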
8770 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8771 && TREE_CODE (arg0) == RROTATE_EXPR
8772 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8773 && TREE_INT_CST_HIGH (arg1) == 0
8774 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8775 && ((TREE_INT_CST_LOW (arg1)
8776 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8777 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8778 return TREE_OPERAND (arg0, 0);
8779
8780 goto binary;
8781
8782 case MIN_EXPR:
8783 if (operand_equal_p (arg0, arg1, 0))
8784 return omit_one_operand (type, arg0, arg1);
8785 if (INTEGRAL_TYPE_P (type)
8786 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8787 return omit_one_operand (type, arg1, arg0);
8788 goto associate;
8789
8790 case MAX_EXPR:
8791 if (operand_equal_p (arg0, arg1, 0))
8792 return omit_one_operand (type, arg0, arg1);
8793 if (INTEGRAL_TYPE_P (type)
8794 && TYPE_MAX_VALUE (type)
8795 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8796 return omit_one_operand (type, arg1, arg0);
8797 goto associate;
8798
8799 case TRUTH_ANDIF_EXPR:
8800 /* Note that the operands of this must be ints
8801 and their values must be 0 or 1.
8802 ("true" is a fixed value perhaps depending on the language.) */
8803 /* If first arg is constant zero, return it. */
8804 if (integer_zerop (arg0))
8805 return fold_convert (type, arg0);
8806 case TRUTH_AND_EXPR:
8807 /* If either arg is constant true, drop it. */
8808 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8809 return non_lvalue (fold_convert (type, arg1));
8810 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8811 /* Preserve sequence points. */
8812 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8813 return non_lvalue (fold_convert (type, arg0));
8814 /* If second arg is constant zero, result is zero, but first arg
8815 must be evaluated. */
8816 if (integer_zerop (arg1))
8817 return omit_one_operand (type, arg1, arg0);
8818 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8819 case will be handled here. */
8820 if (integer_zerop (arg0))
8821 return omit_one_operand (type, arg0, arg1);
8822
8823 /* !X && X is always false. */
8824 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8825 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8826 return omit_one_operand (type, integer_zero_node, arg1);
8827 /* X && !X is always false. */
8828 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8829 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8830 return omit_one_operand (type, integer_zero_node, arg0);
8831
8832 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8833 means A >= Y && A != MAX, but in this case we know that
8834 A < X <= MAX. */
8835
8836 if (!TREE_SIDE_EFFECTS (arg0)
8837 && !TREE_SIDE_EFFECTS (arg1))
8838 {
8839 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8840 if (tem)
8841 return fold_build2 (code, type, tem, arg1);
8842
8843 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8844 if (tem)
8845 return fold_build2 (code, type, arg0, tem);
8846 }
8847
8848 truth_andor:
8849 /* We only do these simplifications if we are optimizing. */
8850 if (!optimize)
8851 return NULL_TREE;
8852
8853 /* Check for things like (A || B) && (A || C). We can convert this
8854 to A || (B && C). Note that either operator can be any of the four
8855 truth and/or operations and the transformation will still be
8856 valid. Also note that we only care about order for the
8857 ANDIF and ORIF operators. If B contains side effects, this
8858 might change the truth-value of A. */
8859 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8860 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8861 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8862 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8863 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8864 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8865 {
8866 tree a00 = TREE_OPERAND (arg0, 0);
8867 tree a01 = TREE_OPERAND (arg0, 1);
8868 tree a10 = TREE_OPERAND (arg1, 0);
8869 tree a11 = TREE_OPERAND (arg1, 1);
8870 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8871 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8872 && (code == TRUTH_AND_EXPR
8873 || code == TRUTH_OR_EXPR));
8874
8875 if (operand_equal_p (a00, a10, 0))
8876 return fold_build2 (TREE_CODE (arg0), type, a00,
8877 fold_build2 (code, type, a01, a11));
8878 else if (commutative && operand_equal_p (a00, a11, 0))
8879 return fold_build2 (TREE_CODE (arg0), type, a00,
8880 fold_build2 (code, type, a01, a10));
8881 else if (commutative && operand_equal_p (a01, a10, 0))
8882 return fold_build2 (TREE_CODE (arg0), type, a01,
8883 fold_build2 (code, type, a00, a11));
8884
8885 /* This case is tricky because we must either have commutative
8886 operators or else A10 must not have side-effects. */
8887
8888 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8889 && operand_equal_p (a01, a11, 0))
8890 return fold_build2 (TREE_CODE (arg0), type,
8891 fold_build2 (code, type, a00, a10),
8892 a01);
8893 }
8894
8895 /* See if we can build a range comparison. */
8896 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8897 return tem;
8898
8899 /* Check for the possibility of merging component references. If our
8900 lhs is another similar operation, try to merge its rhs with our
8901 rhs. Then try to merge our lhs and rhs. */
8902 if (TREE_CODE (arg0) == code
8903 && 0 != (tem = fold_truthop (code, type,
8904 TREE_OPERAND (arg0, 1), arg1)))
8905 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8906
8907 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8908 return tem;
8909
8910 return NULL_TREE;
8911
8912 case TRUTH_ORIF_EXPR:
8913 /* Note that the operands of this must be ints
8914 and their values must be 0 or true.
8915 ("true" is a fixed value perhaps depending on the language.) */
8916 /* If first arg is constant true, return it. */
8917 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8918 return fold_convert (type, arg0);
8919 case TRUTH_OR_EXPR:
8920 /* If either arg is constant zero, drop it. */
8921 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8922 return non_lvalue (fold_convert (type, arg1));
8923 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8924 /* Preserve sequence points. */
8925 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8926 return non_lvalue (fold_convert (type, arg0));
8927 /* If second arg is constant true, result is true, but we must
8928 evaluate first arg. */
8929 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8930 return omit_one_operand (type, arg1, arg0);
8931 /* Likewise for first arg, but note this only occurs here for
8932 TRUTH_OR_EXPR. */
8933 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8934 return omit_one_operand (type, arg0, arg1);
8935
8936 /* !X || X is always true. */
8937 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8938 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8939 return omit_one_operand (type, integer_one_node, arg1);
8940 /* X || !X is always true. */
8941 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8942 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8943 return omit_one_operand (type, integer_one_node, arg0);
8944
8945 goto truth_andor;
8946
8947 case TRUTH_XOR_EXPR:
8948 /* If the second arg is constant zero, drop it. */
8949 if (integer_zerop (arg1))
8950 return non_lvalue (fold_convert (type, arg0));
8951 /* If the second arg is constant true, this is a logical inversion. */
8952 if (integer_onep (arg1))
8953 {
8954 /* Only call invert_truthvalue if operand is a truth value. */
8955 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8956 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8957 else
8958 tem = invert_truthvalue (arg0);
8959 return non_lvalue (fold_convert (type, tem));
8960 }
8961 /* Identical arguments cancel to zero. */
8962 if (operand_equal_p (arg0, arg1, 0))
8963 return omit_one_operand (type, integer_zero_node, arg0);
8964
8965 /* !X ^ X is always true. */
8966 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8967 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8968 return omit_one_operand (type, integer_one_node, arg1);
8969
8970 /* X ^ !X is always true. */
8971 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8972 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8973 return omit_one_operand (type, integer_one_node, arg0);
8974
8975 return NULL_TREE;
8976
8977 case EQ_EXPR:
8978 case NE_EXPR:
8979 case LT_EXPR:
8980 case GT_EXPR:
8981 case LE_EXPR:
8982 case GE_EXPR:
8983 /* If one arg is a real or integer constant, put it last. */
8984 if (tree_swap_operands_p (arg0, arg1, true))
8985 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8986
8987 /* bool_var != 0 becomes bool_var. */
8988 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8989 && code == NE_EXPR)
8990 return non_lvalue (fold_convert (type, arg0));
8991
8992 /* bool_var == 1 becomes bool_var. */
8993 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8994 && code == EQ_EXPR)
8995 return non_lvalue (fold_convert (type, arg0));
8996
8997 /* If this is an equality comparison of the address of a non-weak
8998 object against zero, then we know the result. */
8999 if ((code == EQ_EXPR || code == NE_EXPR)
9000 && TREE_CODE (arg0) == ADDR_EXPR
9001 && DECL_P (TREE_OPERAND (arg0, 0))
9002 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9003 && integer_zerop (arg1))
9004 return constant_boolean_node (code != EQ_EXPR, type);
9005
9006 /* If this is an equality comparison of the address of two non-weak,
9007 unaliased symbols, neither of which is extern (since we do not
9008 have access to attributes for externs), then we know the result. */
9009 if ((code == EQ_EXPR || code == NE_EXPR)
9010 && TREE_CODE (arg0) == ADDR_EXPR
9011 && DECL_P (TREE_OPERAND (arg0, 0))
9012 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9013 && ! lookup_attribute ("alias",
9014 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
9015 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
9016 && TREE_CODE (arg1) == ADDR_EXPR
9017 && DECL_P (TREE_OPERAND (arg1, 0))
9018 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
9019 && ! lookup_attribute ("alias",
9020 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
9021 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
9022 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
9023 ? code == EQ_EXPR : code != EQ_EXPR,
9024 type);
9025
9026 /* If this is a comparison of two exprs that look like an
9027 ARRAY_REF of the same object, then we can fold this to a
9028 comparison of the two offsets. */
9029 if (TREE_CODE_CLASS (code) == tcc_comparison)
9030 {
9031 tree base0, offset0, base1, offset1;
9032
9033 if (extract_array_ref (arg0, &base0, &offset0)
9034 && extract_array_ref (arg1, &base1, &offset1)
9035 && operand_equal_p (base0, base1, 0))
9036 {
9037 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
9038 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
9039 offset0 = NULL_TREE;
9040 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
9041 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
9042 offset1 = NULL_TREE;
9043 if (offset0 == NULL_TREE
9044 && offset1 == NULL_TREE)
9045 {
9046 offset0 = integer_zero_node;
9047 offset1 = integer_zero_node;
9048 }
9049 else if (offset0 == NULL_TREE)
9050 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9051 else if (offset1 == NULL_TREE)
9052 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9053
9054 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
9055 return fold_build2 (code, type, offset0, offset1);
9056 }
9057 }
9058
9059 /* Transform comparisons of the form X +- C CMP X. */
9060 if ((code != EQ_EXPR && code != NE_EXPR)
9061 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9063 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9064 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9065 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9066 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9067 && !(flag_wrapv || flag_trapv))))
9068 {
9069 tree arg01 = TREE_OPERAND (arg0, 1);
9070 enum tree_code code0 = TREE_CODE (arg0);
9071 int is_positive;
9072
9073 if (TREE_CODE (arg01) == REAL_CST)
9074 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9075 else
9076 is_positive = tree_int_cst_sgn (arg01);
9077
9078 /* (X - c) > X becomes false. */
9079 if (code == GT_EXPR
9080 && ((code0 == MINUS_EXPR && is_positive >= 0)
9081 || (code0 == PLUS_EXPR && is_positive <= 0)))
9082 return constant_boolean_node (0, type);
9083
9084 /* Likewise (X + c) < X becomes false. */
9085 if (code == LT_EXPR
9086 && ((code0 == PLUS_EXPR && is_positive >= 0)
9087 || (code0 == MINUS_EXPR && is_positive <= 0)))
9088 return constant_boolean_node (0, type);
9089
9090 /* Convert (X - c) <= X to true. */
9091 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9092 && code == LE_EXPR
9093 && ((code0 == MINUS_EXPR && is_positive >= 0)
9094 || (code0 == PLUS_EXPR && is_positive <= 0)))
9095 return constant_boolean_node (1, type);
9096
9097 /* Convert (X + c) >= X to true. */
9098 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9099 && code == GE_EXPR
9100 && ((code0 == PLUS_EXPR && is_positive >= 0)
9101 || (code0 == MINUS_EXPR && is_positive <= 0)))
9102 return constant_boolean_node (1, type);
9103
9104 if (TREE_CODE (arg01) == INTEGER_CST)
9105 {
9106 /* Convert X + c > X and X - c < X to true for integers. */
9107 if (code == GT_EXPR
9108 && ((code0 == PLUS_EXPR && is_positive > 0)
9109 || (code0 == MINUS_EXPR && is_positive < 0)))
9110 return constant_boolean_node (1, type);
9111
9112 if (code == LT_EXPR
9113 && ((code0 == MINUS_EXPR && is_positive > 0)
9114 || (code0 == PLUS_EXPR && is_positive < 0)))
9115 return constant_boolean_node (1, type);
9116
9117 /* Convert X + c <= X and X - c >= X to false for integers. */
9118 if (code == LE_EXPR
9119 && ((code0 == PLUS_EXPR && is_positive > 0)
9120 || (code0 == MINUS_EXPR && is_positive < 0)))
9121 return constant_boolean_node (0, type);
9122
9123 if (code == GE_EXPR
9124 && ((code0 == MINUS_EXPR && is_positive > 0)
9125 || (code0 == PLUS_EXPR && is_positive < 0)))
9126 return constant_boolean_node (0, type);
9127 }
9128 }
9129
9130 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9131 {
9132 tree targ0 = strip_float_extensions (arg0);
9133 tree targ1 = strip_float_extensions (arg1);
9134 tree newtype = TREE_TYPE (targ0);
9135
9136 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9137 newtype = TREE_TYPE (targ1);
9138
9139 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9140 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9141 return fold_build2 (code, type, fold_convert (newtype, targ0),
9142 fold_convert (newtype, targ1));
9143
9144 /* (-a) CMP (-b) -> b CMP a */
9145 if (TREE_CODE (arg0) == NEGATE_EXPR
9146 && TREE_CODE (arg1) == NEGATE_EXPR)
9147 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9148 TREE_OPERAND (arg0, 0));
9149
9150 if (TREE_CODE (arg1) == REAL_CST)
9151 {
9152 REAL_VALUE_TYPE cst;
9153 cst = TREE_REAL_CST (arg1);
9154
9155 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9156 if (TREE_CODE (arg0) == NEGATE_EXPR)
9157 return
9158 fold_build2 (swap_tree_comparison (code), type,
9159 TREE_OPERAND (arg0, 0),
9160 build_real (TREE_TYPE (arg1),
9161 REAL_VALUE_NEGATE (cst)));
9162
9163 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9164 /* a CMP (-0) -> a CMP 0 */
9165 if (REAL_VALUE_MINUS_ZERO (cst))
9166 return fold_build2 (code, type, arg0,
9167 build_real (TREE_TYPE (arg1), dconst0));
9168
9169 /* x != NaN is always true, other ops are always false. */
9170 if (REAL_VALUE_ISNAN (cst)
9171 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9172 {
9173 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9174 return omit_one_operand (type, tem, arg0);
9175 }
9176
9177 /* Fold comparisons against infinity. */
9178 if (REAL_VALUE_ISINF (cst))
9179 {
9180 tem = fold_inf_compare (code, type, arg0, arg1);
9181 if (tem != NULL_TREE)
9182 return tem;
9183 }
9184 }
9185
9186 /* If this is a comparison of a real constant with a PLUS_EXPR
9187 or a MINUS_EXPR of a real constant, we can convert it into a
9188 comparison with a revised real constant as long as no overflow
9189 occurs when unsafe_math_optimizations are enabled. */
9190 if (flag_unsafe_math_optimizations
9191 && TREE_CODE (arg1) == REAL_CST
9192 && (TREE_CODE (arg0) == PLUS_EXPR
9193 || TREE_CODE (arg0) == MINUS_EXPR)
9194 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9195 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9196 ? MINUS_EXPR : PLUS_EXPR,
9197 arg1, TREE_OPERAND (arg0, 1), 0))
9198 && ! TREE_CONSTANT_OVERFLOW (tem))
9199 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9200
9201 /* Likewise, we can simplify a comparison of a real constant with
9202 a MINUS_EXPR whose first operand is also a real constant, i.e.
9203 (c1 - x) < c2 becomes x > c1-c2. */
9204 if (flag_unsafe_math_optimizations
9205 && TREE_CODE (arg1) == REAL_CST
9206 && TREE_CODE (arg0) == MINUS_EXPR
9207 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9208 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9209 arg1, 0))
9210 && ! TREE_CONSTANT_OVERFLOW (tem))
9211 return fold_build2 (swap_tree_comparison (code), type,
9212 TREE_OPERAND (arg0, 1), tem);
9213
9214 /* Fold comparisons against built-in math functions. */
9215 if (TREE_CODE (arg1) == REAL_CST
9216 && flag_unsafe_math_optimizations
9217 && ! flag_errno_math)
9218 {
9219 enum built_in_function fcode = builtin_mathfn_code (arg0);
9220
9221 if (fcode != END_BUILTINS)
9222 {
9223 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9224 if (tem != NULL_TREE)
9225 return tem;
9226 }
9227 }
9228 }
9229
9230 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9231 if (TREE_CONSTANT (arg1)
9232 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9233 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9234 /* This optimization is invalid for ordered comparisons
9235 if CONST+INCR overflows or if foo+incr might overflow.
9236 This optimization is invalid for floating point due to rounding.
9237 For pointer types we assume overflow doesn't happen. */
9238 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9239 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9240 && (code == EQ_EXPR || code == NE_EXPR))))
9241 {
9242 tree varop, newconst;
9243
9244 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9245 {
9246 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9247 arg1, TREE_OPERAND (arg0, 1));
9248 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9249 TREE_OPERAND (arg0, 0),
9250 TREE_OPERAND (arg0, 1));
9251 }
9252 else
9253 {
9254 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9255 arg1, TREE_OPERAND (arg0, 1));
9256 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9257 TREE_OPERAND (arg0, 0),
9258 TREE_OPERAND (arg0, 1));
9259 }
9260
9261
9262 /* If VAROP is a reference to a bitfield, we must mask
9263 the constant by the width of the field. */
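/* The masking is done below by shifting NEWCONST left and then
   right again by (precision - field width), in effect discarding
   the bits that do not fit in the field. */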
9264 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9265 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9266 && host_integerp (DECL_SIZE (TREE_OPERAND
9267 (TREE_OPERAND (varop, 0), 1)), 1))
9268 {
9269 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9270 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9271 tree folded_compare, shift;
9272
9273 /* First check whether the comparison would always come out
9274 the same; if we skipped this check, the masking below
9275 could change the meaning. */
9276 folded_compare = fold_build2 (code, type,
9277 TREE_OPERAND (varop, 0), arg1);
9278 if (integer_zerop (folded_compare)
9279 || integer_onep (folded_compare))
9280 return omit_one_operand (type, folded_compare, varop);
9281
9282 shift = build_int_cst (NULL_TREE,
9283 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9284 shift = fold_convert (TREE_TYPE (varop), shift);
9285 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9286 newconst, shift);
9287 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9288 newconst, shift);
9289 }
9290
9291 return fold_build2 (code, type, varop, newconst);
9292 }
9293
9294 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9295 This transformation affects the cases which are handled in later
9296 optimizations involving comparisons with non-negative constants. */
9297 if (TREE_CODE (arg1) == INTEGER_CST
9298 && TREE_CODE (arg0) != INTEGER_CST
9299 && tree_int_cst_sgn (arg1) > 0)
9300 {
9301 switch (code)
9302 {
9303 case GE_EXPR:
9304 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9305 return fold_build2 (GT_EXPR, type, arg0, arg1);
9306
9307 case LT_EXPR:
9308 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9309 return fold_build2 (LE_EXPR, type, arg0, arg1);
9310
9311 default:
9312 break;
9313 }
9314 }
9315
9316 /* Comparisons with the highest or lowest possible integer of
9317 the specified size will have known values. */
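/* For instance, if ARG1 has an 8-bit unsigned type, X > 255 folds
   to false and X <= 255 folds to true. */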
9318 {
9319 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9320
9321 if (TREE_CODE (arg1) == INTEGER_CST
9322 && ! TREE_CONSTANT_OVERFLOW (arg1)
9323 && width <= 2 * HOST_BITS_PER_WIDE_INT
9324 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9325 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9326 {
9327 HOST_WIDE_INT signed_max_hi;
9328 unsigned HOST_WIDE_INT signed_max_lo;
9329 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9330
9331 if (width <= HOST_BITS_PER_WIDE_INT)
9332 {
9333 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9334 - 1;
9335 signed_max_hi = 0;
9336 max_hi = 0;
9337
9338 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9339 {
9340 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9341 min_lo = 0;
9342 min_hi = 0;
9343 }
9344 else
9345 {
9346 max_lo = signed_max_lo;
9347 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9348 min_hi = -1;
9349 }
9350 }
9351 else
9352 {
9353 width -= HOST_BITS_PER_WIDE_INT;
9354 signed_max_lo = -1;
9355 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9356 - 1;
9357 max_lo = -1;
9358 min_lo = 0;
9359
9360 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9361 {
9362 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9363 min_hi = 0;
9364 }
9365 else
9366 {
9367 max_hi = signed_max_hi;
9368 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9369 }
9370 }
9371
9372 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9373 && TREE_INT_CST_LOW (arg1) == max_lo)
9374 switch (code)
9375 {
9376 case GT_EXPR:
9377 return omit_one_operand (type, integer_zero_node, arg0);
9378
9379 case GE_EXPR:
9380 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9381
9382 case LE_EXPR:
9383 return omit_one_operand (type, integer_one_node, arg0);
9384
9385 case LT_EXPR:
9386 return fold_build2 (NE_EXPR, type, arg0, arg1);
9387
9388 /* The GE_EXPR and LT_EXPR cases above are not normally
9389 reached because of previous transformations. */
9390
9391 default:
9392 break;
9393 }
9394 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9395 == max_hi
9396 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9397 switch (code)
9398 {
9399 case GT_EXPR:
9400 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9401 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9402 case LE_EXPR:
9403 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9404 return fold_build2 (NE_EXPR, type, arg0, arg1);
9405 default:
9406 break;
9407 }
9408 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9409 == min_hi
9410 && TREE_INT_CST_LOW (arg1) == min_lo)
9411 switch (code)
9412 {
9413 case LT_EXPR:
9414 return omit_one_operand (type, integer_zero_node, arg0);
9415
9416 case LE_EXPR:
9417 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9418
9419 case GE_EXPR:
9420 return omit_one_operand (type, integer_one_node, arg0);
9421
9422 case GT_EXPR:
9423 return fold_build2 (NE_EXPR, type, arg0, arg1);
9424
9425 default:
9426 break;
9427 }
9428 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9429 == min_hi
9430 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9431 switch (code)
9432 {
9433 case GE_EXPR:
9434 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9435 return fold_build2 (NE_EXPR, type, arg0, arg1);
9436 case LT_EXPR:
9437 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9438 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9439 default:
9440 break;
9441 }
9442
9443 else if (!in_gimple_form
9444 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9445 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9446 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9447 /* signed_type does not work on pointer types. */
9448 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9449 {
9450 /* The following case also applies to X < signed_max+1
9451 and X >= signed_max+1 because of previous transformations. */
9452 if (code == LE_EXPR || code == GT_EXPR)
9453 {
9454 tree st0, st1;
9455 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9456 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9457 return fold
9458 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9459 type, fold_convert (st0, arg0),
9460 fold_convert (st1, integer_zero_node)));
9461 }
9462 }
9463 }
9464 }
9465
9466 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9467 a MINUS_EXPR of a constant, we can convert it into a comparison with
9468 a revised constant as long as no overflow occurs. */
9469 if ((code == EQ_EXPR || code == NE_EXPR)
9470 && TREE_CODE (arg1) == INTEGER_CST
9471 && (TREE_CODE (arg0) == PLUS_EXPR
9472 || TREE_CODE (arg0) == MINUS_EXPR)
9473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9474 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9475 ? MINUS_EXPR : PLUS_EXPR,
9476 arg1, TREE_OPERAND (arg0, 1), 0))
9477 && ! TREE_CONSTANT_OVERFLOW (tem))
9478 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9479
9480 /* Similarly for a NEGATE_EXPR. */
9481 else if ((code == EQ_EXPR || code == NE_EXPR)
9482 && TREE_CODE (arg0) == NEGATE_EXPR
9483 && TREE_CODE (arg1) == INTEGER_CST
9484 && 0 != (tem = negate_expr (arg1))
9485 && TREE_CODE (tem) == INTEGER_CST
9486 && ! TREE_CONSTANT_OVERFLOW (tem))
9487 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9488
9489 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9490 for !=. Don't do this for ordered comparisons due to overflow. */
9491 else if ((code == NE_EXPR || code == EQ_EXPR)
9492 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9493 return fold_build2 (code, type,
9494 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9495
9496 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9497 && (TREE_CODE (arg0) == NOP_EXPR
9498 || TREE_CODE (arg0) == CONVERT_EXPR))
9499 {
9500 /* If we are widening one operand of an integer comparison,
9501 see if the other operand is similarly being widened. Perhaps we
9502 can do the comparison in the narrower type. */
9503 tem = fold_widened_comparison (code, type, arg0, arg1);
9504 if (tem)
9505 return tem;
9506
9507 /* Or if we are changing signedness. */
9508 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9509 if (tem)
9510 return tem;
9511 }
9512
9513 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9514 constant, we can simplify it. */
9515 else if (TREE_CODE (arg1) == INTEGER_CST
9516 && (TREE_CODE (arg0) == MIN_EXPR
9517 || TREE_CODE (arg0) == MAX_EXPR)
9518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9519 {
9520 tem = optimize_minmax_comparison (code, type, op0, op1);
9521 if (tem)
9522 return tem;
9523
9524 return NULL_TREE;
9525 }
9526
9527 /* If we are comparing an ABS_EXPR with a constant, we can
9528 convert all the cases into explicit comparisons, but they may
9529 well not be faster than doing the ABS and one comparison.
9530 But ABS (X) <= C is a range comparison, which becomes a subtraction
9531 and a comparison, and is probably faster. */
9532 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9533 && TREE_CODE (arg0) == ABS_EXPR
9534 && ! TREE_SIDE_EFFECTS (arg0)
9535 && (0 != (tem = negate_expr (arg1)))
9536 && TREE_CODE (tem) == INTEGER_CST
9537 && ! TREE_CONSTANT_OVERFLOW (tem))
9538 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9539 build2 (GE_EXPR, type,
9540 TREE_OPERAND (arg0, 0), tem),
9541 build2 (LE_EXPR, type,
9542 TREE_OPERAND (arg0, 0), arg1));
9543
9544 /* Convert ABS_EXPR<x> >= 0 to true. */
9545 else if (code == GE_EXPR
9546 && tree_expr_nonnegative_p (arg0)
9547 && (integer_zerop (arg1)
9548 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9549 && real_zerop (arg1))))
9550 return omit_one_operand (type, integer_one_node, arg0);
9551
9552 /* Convert ABS_EXPR<x> < 0 to false. */
9553 else if (code == LT_EXPR
9554 && tree_expr_nonnegative_p (arg0)
9555 && (integer_zerop (arg1) || real_zerop (arg1)))
9556 return omit_one_operand (type, integer_zero_node, arg0);
9557
9558 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9559 else if ((code == EQ_EXPR || code == NE_EXPR)
9560 && TREE_CODE (arg0) == ABS_EXPR
9561 && (integer_zerop (arg1) || real_zerop (arg1)))
9562 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9563
9564 /* If this is an EQ or NE comparison with zero and ARG0 is
9565 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9566 two operations, but the latter can be done in one less insn
9567 on machines that have only two-operand insns or on which a
9568 constant cannot be the first operand. */
9569 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9570 && TREE_CODE (arg0) == BIT_AND_EXPR)
9571 {
9572 tree arg00 = TREE_OPERAND (arg0, 0);
9573 tree arg01 = TREE_OPERAND (arg0, 1);
9574 if (TREE_CODE (arg00) == LSHIFT_EXPR
9575 && integer_onep (TREE_OPERAND (arg00, 0)))
9576 return
9577 fold_build2 (code, type,
9578 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9579 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9580 arg01, TREE_OPERAND (arg00, 1)),
9581 fold_convert (TREE_TYPE (arg0),
9582 integer_one_node)),
9583 arg1);
9584 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9585 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9586 return
9587 fold_build2 (code, type,
9588 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9589 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9590 arg00, TREE_OPERAND (arg01, 1)),
9591 fold_convert (TREE_TYPE (arg0),
9592 integer_one_node)),
9593 arg1);
9594 }
9595
9596 /* If this is an NE or EQ comparison of zero against the result of a
9597 signed MOD operation whose second operand is a power of 2, make
9598 the MOD operation unsigned since it is simpler and equivalent. */
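/* For instance, (int) X % 4 == 0 becomes (unsigned int) X % 4 == 0;
   the unsigned MOD by a power of two can then be turned into a
   BIT_AND_EXPR by the TRUNC_MOD_EXPR case above. */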
9599 if ((code == NE_EXPR || code == EQ_EXPR)
9600 && integer_zerop (arg1)
9601 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9602 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9603 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9604 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9605 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9606 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9607 {
9608 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9609 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9610 fold_convert (newtype,
9611 TREE_OPERAND (arg0, 0)),
9612 fold_convert (newtype,
9613 TREE_OPERAND (arg0, 1)));
9614
9615 return fold_build2 (code, type, newmod,
9616 fold_convert (newtype, arg1));
9617 }
9618
9619 /* If this is an NE comparison of zero with an AND of one, remove the
9620 comparison since the AND will give the correct value. */
9621 if (code == NE_EXPR && integer_zerop (arg1)
9622 && TREE_CODE (arg0) == BIT_AND_EXPR
9623 && integer_onep (TREE_OPERAND (arg0, 1)))
9624 return fold_convert (type, arg0);
9625
9626 /* If we have (A & C) == C where C is a power of 2, convert this into
9627 (A & C) != 0. Similarly for NE_EXPR. */
9628 if ((code == EQ_EXPR || code == NE_EXPR)
9629 && TREE_CODE (arg0) == BIT_AND_EXPR
9630 && integer_pow2p (TREE_OPERAND (arg0, 1))
9631 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9632 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9633 arg0, fold_convert (TREE_TYPE (arg0),
9634 integer_zero_node));
9635
9636 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9637 bit, then fold the expression into A < 0 or A >= 0. */
9638 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9639 if (tem)
9640 return tem;
9641
9642 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9643 Similarly for NE_EXPR. */
9644 if ((code == EQ_EXPR || code == NE_EXPR)
9645 && TREE_CODE (arg0) == BIT_AND_EXPR
9646 && TREE_CODE (arg1) == INTEGER_CST
9647 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9648 {
9649 tree notc = fold_build1 (BIT_NOT_EXPR,
9650 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9651 TREE_OPERAND (arg0, 1));
9652 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9653 arg1, notc);
9654 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9655 if (integer_nonzerop (dandnotc))
9656 return omit_one_operand (type, rslt, arg0);
9657 }
9658
9659 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9660 Similarly for NE_EXPR. */
9661 if ((code == EQ_EXPR || code == NE_EXPR)
9662 && TREE_CODE (arg0) == BIT_IOR_EXPR
9663 && TREE_CODE (arg1) == INTEGER_CST
9664 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9665 {
9666 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9667 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9668 TREE_OPERAND (arg0, 1), notd);
9669 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9670 if (integer_nonzerop (candnotd))
9671 return omit_one_operand (type, rslt, arg0);
9672 }
9673
9674 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9675 and similarly for >= into !=. */
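/* This holds because an unsigned X is below 1 << Y exactly when
   none of the bits of X at position Y or above are set, i.e. when
   X >> Y is zero. */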
9676 if ((code == LT_EXPR || code == GE_EXPR)
9677 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9678 && TREE_CODE (arg1) == LSHIFT_EXPR
9679 && integer_onep (TREE_OPERAND (arg1, 0)))
9680 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9681 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9682 TREE_OPERAND (arg1, 1)),
9683 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9684
9685 else if ((code == LT_EXPR || code == GE_EXPR)
9686 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9687 && (TREE_CODE (arg1) == NOP_EXPR
9688 || TREE_CODE (arg1) == CONVERT_EXPR)
9689 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9690 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9691 return
9692 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9693 fold_convert (TREE_TYPE (arg0),
9694 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9695 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9696 1))),
9697 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9698
9699 /* Simplify comparison of something with itself. (For IEEE
9700 floating-point, we can only do some of these simplifications.) */
9701 if (operand_equal_p (arg0, arg1, 0))
9702 {
9703 switch (code)
9704 {
9705 case EQ_EXPR:
9706 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9707 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9708 return constant_boolean_node (1, type);
9709 break;
9710
9711 case GE_EXPR:
9712 case LE_EXPR:
9713 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9714 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9715 return constant_boolean_node (1, type);
9716 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9717
9718 case NE_EXPR:
9719 /* For NE, we can only do this simplification if integer
9720 or we don't honor IEEE floating point NaNs. */
9721 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9722 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9723 break;
9724 /* ... fall through ... */
9725 case GT_EXPR:
9726 case LT_EXPR:
9727 return constant_boolean_node (0, type);
9728 default:
9729 gcc_unreachable ();
9730 }
9731 }
9732
9733 /* If we are comparing an expression that just has comparisons
9734 of two integer values, arithmetic expressions of those comparisons,
9735 and constants, we can simplify it. There are only three cases
9736 to check: the two values can either be equal, the first can be
9737 greater, or the second can be greater. Fold the expression for
9738 those three values. Since each value must be 0 or 1, we have
9739 eight possibilities, each of which corresponds to the constant 0
9740 or 1 or one of the six possible comparisons.
9741
9742 This handles common cases like (a > b) == 0 but also handles
9743 expressions like ((x > y) - (y > x)) > 0, which supposedly
9744 occur in macroized code. */
9745
9746 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9747 {
9748 tree cval1 = 0, cval2 = 0;
9749 int save_p = 0;
9750
9751 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9752 /* Don't handle degenerate cases here; they should already
9753 have been handled anyway. */
9754 && cval1 != 0 && cval2 != 0
9755 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9756 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9757 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9758 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9759 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9760 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9761 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9762 {
9763 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9764 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9765
9766 /* We can't just pass T to eval_subst in case cval1 or cval2
9767 was the same as ARG1. */
9768
9769 tree high_result
9770 = fold_build2 (code, type,
9771 eval_subst (arg0, cval1, maxval,
9772 cval2, minval),
9773 arg1);
9774 tree equal_result
9775 = fold_build2 (code, type,
9776 eval_subst (arg0, cval1, maxval,
9777 cval2, maxval),
9778 arg1);
9779 tree low_result
9780 = fold_build2 (code, type,
9781 eval_subst (arg0, cval1, minval,
9782 cval2, maxval),
9783 arg1);
9784
9785 /* All three of these results should be 0 or 1. Confirm they
9786 are. Then use those values to select the proper code
9787 to use. */
9788
9789 if ((integer_zerop (high_result)
9790 || integer_onep (high_result))
9791 && (integer_zerop (equal_result)
9792 || integer_onep (equal_result))
9793 && (integer_zerop (low_result)
9794 || integer_onep (low_result)))
9795 {
9796 /* Make a 3-bit mask with the high-order bit being the
9797 value for `>', the next for '=', and the low for '<'. */
9798 switch ((integer_onep (high_result) * 4)
9799 + (integer_onep (equal_result) * 2)
9800 + integer_onep (low_result))
9801 {
9802 case 0:
9803 /* Always false. */
9804 return omit_one_operand (type, integer_zero_node, arg0);
9805 case 1:
9806 code = LT_EXPR;
9807 break;
9808 case 2:
9809 code = EQ_EXPR;
9810 break;
9811 case 3:
9812 code = LE_EXPR;
9813 break;
9814 case 4:
9815 code = GT_EXPR;
9816 break;
9817 case 5:
9818 code = NE_EXPR;
9819 break;
9820 case 6:
9821 code = GE_EXPR;
9822 break;
9823 case 7:
9824 /* Always true. */
9825 return omit_one_operand (type, integer_one_node, arg0);
9826 }
9827
9828 if (save_p)
9829 return save_expr (build2 (code, type, cval1, cval2));
9830 else
9831 return fold_build2 (code, type, cval1, cval2);
9832 }
9833 }
9834 }
9835
9836 /* If this is a comparison of a field, we may be able to simplify it. */
9837 if (((TREE_CODE (arg0) == COMPONENT_REF
9838 && lang_hooks.can_use_bit_fields_p ())
9839 || TREE_CODE (arg0) == BIT_FIELD_REF)
9840 && (code == EQ_EXPR || code == NE_EXPR)
9841 /* Handle the constant case even without -O
9842 to make sure the warnings are given. */
9843 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9844 {
9845 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9846 if (t1)
9847 return t1;
9848 }
9849
9850 /* Fold a comparison of the address of COMPONENT_REFs with the same
9851 type and component to a comparison of the address of the base
9852 object. In short, fold &x->a OP &y->a to x OP y and
9853 &x->a OP &y.a to x OP &y. */
9854 if (TREE_CODE (arg0) == ADDR_EXPR
9855 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9856 && TREE_CODE (arg1) == ADDR_EXPR
9857 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9858 {
9859 tree cref0 = TREE_OPERAND (arg0, 0);
9860 tree cref1 = TREE_OPERAND (arg1, 0);
9861 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9862 {
9863 tree op0 = TREE_OPERAND (cref0, 0);
9864 tree op1 = TREE_OPERAND (cref1, 0);
9865 return fold_build2 (code, type,
9866 build_fold_addr_expr (op0),
9867 build_fold_addr_expr (op1));
9868 }
9869 }
9870
9871 /* If this is a comparison of complex values and either or both sides
9872 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9873 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9874 This may prevent needless evaluations. */
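/* For instance, x == y on complex operands becomes
   REALPART (x) == REALPART (y) && IMAGPART (x) == IMAGPART (y),
   with the && built as a TRUTH_ANDIF_EXPR. */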
9875 if ((code == EQ_EXPR || code == NE_EXPR)
9876 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9877 && (TREE_CODE (arg0) == COMPLEX_EXPR
9878 || TREE_CODE (arg1) == COMPLEX_EXPR
9879 || TREE_CODE (arg0) == COMPLEX_CST
9880 || TREE_CODE (arg1) == COMPLEX_CST))
9881 {
9882 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9883 tree real0, imag0, real1, imag1;
9884
9885 arg0 = save_expr (arg0);
9886 arg1 = save_expr (arg1);
9887 real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
9888 imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
9889 real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
9890 imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
9891
9892 return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9893 : TRUTH_ORIF_EXPR),
9894 type,
9895 fold_build2 (code, type, real0, real1),
9896 fold_build2 (code, type, imag0, imag1));
9897 }
9898
9899 /* Optimize comparisons of strlen vs zero to a compare of the
9900 first character of the string vs zero. To wit,
9901 strlen(ptr) == 0 => *ptr == 0
9902 strlen(ptr) != 0 => *ptr != 0
9903 Other cases should reduce to one of these two (or a constant)
9904 due to the return value of strlen being unsigned. */
9905 if ((code == EQ_EXPR || code == NE_EXPR)
9906 && integer_zerop (arg1)
9907 && TREE_CODE (arg0) == CALL_EXPR)
9908 {
9909 tree fndecl = get_callee_fndecl (arg0);
9910 tree arglist;
9911
9912 if (fndecl
9913 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9914 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9915 && (arglist = TREE_OPERAND (arg0, 1))
9916 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9917 && ! TREE_CHAIN (arglist))
9918 {
9919 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9920 return fold_build2 (code, type, iref,
9921 build_int_cst (TREE_TYPE (iref), 0));
9922 }
9923 }
9924
9925 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9926 into a single range test. */
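/* For instance, assuming X is nonnegative, X / 4 == 2 is
   equivalent to the range test 8 <= X && X <= 11;
   fold_div_compare builds the corresponding comparisons. */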
9927 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9928 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9929 && TREE_CODE (arg1) == INTEGER_CST
9930 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9931 && !integer_zerop (TREE_OPERAND (arg0, 1))
9932 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9933 && !TREE_OVERFLOW (arg1))
9934 {
9935 t1 = fold_div_compare (code, type, arg0, arg1);
9936 if (t1 != NULL_TREE)
9937 return t1;
9938 }
9939
9940 if ((code == EQ_EXPR || code == NE_EXPR)
9941 && !TREE_SIDE_EFFECTS (arg0)
9942 && integer_zerop (arg1)
9943 && tree_expr_nonzero_p (arg0))
9944 return constant_boolean_node (code == NE_EXPR, type);
9945
9946 t1 = fold_relational_const (code, type, arg0, arg1);
9947 return t1;
9948
9949 case UNORDERED_EXPR:
9950 case ORDERED_EXPR:
9951 case UNLT_EXPR:
9952 case UNLE_EXPR:
9953 case UNGT_EXPR:
9954 case UNGE_EXPR:
9955 case UNEQ_EXPR:
9956 case LTGT_EXPR:
9957 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9958 {
9959 t1 = fold_relational_const (code, type, arg0, arg1);
9960 if (t1 != NULL_TREE)
9961 return t1;
9962 }
9963
9964 /* If the first operand is NaN, the result is constant. */
9965 if (TREE_CODE (arg0) == REAL_CST
9966 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9967 && (code != LTGT_EXPR || ! flag_trapping_math))
9968 {
9969 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9970 ? integer_zero_node
9971 : integer_one_node;
9972 return omit_one_operand (type, t1, arg1);
9973 }
9974
9975 /* If the second operand is NaN, the result is constant. */
9976 if (TREE_CODE (arg1) == REAL_CST
9977 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9978 && (code != LTGT_EXPR || ! flag_trapping_math))
9979 {
9980 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9981 ? integer_zero_node
9982 : integer_one_node;
9983 return omit_one_operand (type, t1, arg0);
9984 }
9985
9986 /* Simplify unordered comparison of something with itself. */
9987 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9988 && operand_equal_p (arg0, arg1, 0))
9989 return constant_boolean_node (1, type);
9990
9991 if (code == LTGT_EXPR
9992 && !flag_trapping_math
9993 && operand_equal_p (arg0, arg1, 0))
9994 return constant_boolean_node (0, type);
9995
9996 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9997 {
9998 tree targ0 = strip_float_extensions (arg0);
9999 tree targ1 = strip_float_extensions (arg1);
10000 tree newtype = TREE_TYPE (targ0);
10001
10002 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10003 newtype = TREE_TYPE (targ1);
10004
10005 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10006 return fold_build2 (code, type, fold_convert (newtype, targ0),
10007 fold_convert (newtype, targ1));
10008 }
10009
10010 return NULL_TREE;
10011
10012 case COMPOUND_EXPR:
10013 /* When pedantic, a compound expression can be neither an lvalue
10014 nor an integer constant expression. */
10015 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10016 return NULL_TREE;
10017 /* Don't let (0, 0) be a null pointer constant. */
10018 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10019 : fold_convert (type, arg1);
10020 return pedantic_non_lvalue (tem);
10021
10022 case COMPLEX_EXPR:
10023 if (wins)
10024 return build_complex (type, arg0, arg1);
10025 return NULL_TREE;
10026
10027 case ASSERT_EXPR:
10028 /* An ASSERT_EXPR should never be passed to fold_binary. */
10029 gcc_unreachable ();
10030
10031 default:
10032 return NULL_TREE;
10033 } /* switch (code) */
10034 }
10035
10036 /* Callback for walk_tree, looking for a LABEL_EXPR. Returns *TP if
10037 it is a LABEL_EXPR; otherwise returns NULL_TREE. Does not descend
10038 into the sub-tree of a GOTO_EXPR. */
10039
10040 static tree
10041 contains_label_1 (tree *tp,
10042 int *walk_subtrees,
10043 void *data ATTRIBUTE_UNUSED)
10044 {
10045 switch (TREE_CODE (*tp))
10046 {
10047 case LABEL_EXPR:
10048 return *tp;
10049 case GOTO_EXPR:
10050 *walk_subtrees = 0;
10051 /* no break */
10052 default:
10053 return NULL_TREE;
10054 }
10055 }
10056
10057 /* Checks whether the sub-tree ST contains a LABEL_EXPR which is
10058 accessible from outside the sub-tree. Returns true if such a
10059 label is found, false otherwise. */
10060
10061 static bool
10062 contains_label_p (tree st)
10063 {
10064 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
10065 }
10066
10067 /* Fold a ternary expression of code CODE and type TYPE with operands
10068 OP0, OP1, and OP2. Return the folded expression if folding is
10069 successful. Otherwise, return NULL_TREE. */
10070
10071 tree
10072 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10073 {
10074 tree tem;
10075 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10076 enum tree_code_class kind = TREE_CODE_CLASS (code);
10077
10078 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10079 && TREE_CODE_LENGTH (code) == 3);
10080
10081 /* Strip any conversions that don't change the mode. This is safe
10082 for every expression, except for a comparison expression because
10083 its signedness is derived from its operands. So, in the latter
10084 case, only strip conversions that don't change the signedness.
10085
10086 Note that this is done as an internal manipulation within the
10087 constant folder, in order to find the simplest representation of
10088 the arguments so that their form can be studied. In any cases,
10089 the appropriate type conversions should be put back in the tree
10090 that will get out of the constant folder. */
10091 if (op0)
10092 {
10093 arg0 = op0;
10094 STRIP_NOPS (arg0);
10095 }
10096
10097 if (op1)
10098 {
10099 arg1 = op1;
10100 STRIP_NOPS (arg1);
10101 }
10102
10103 switch (code)
10104 {
10105 case COMPONENT_REF:
10106 if (TREE_CODE (arg0) == CONSTRUCTOR
10107 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10108 {
10109 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
10110 if (m)
10111 return TREE_VALUE (m);
10112 }
10113 return NULL_TREE;
10114
10115 case COND_EXPR:
10116 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10117 so all simple results must be passed through pedantic_non_lvalue. */
10118 if (TREE_CODE (arg0) == INTEGER_CST)
10119 {
10120 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10121 tem = integer_zerop (arg0) ? op2 : op1;
10122 /* Only optimize constant conditions when the selected branch
10123 has the same type as the COND_EXPR. This avoids optimizing
10124 away "c ? x : throw", where the throw has a void type.
10125 Avoid throwing away an operand that contains a label. */
10126 if ((!TREE_SIDE_EFFECTS (unused_op)
10127 || !contains_label_p (unused_op))
10128 && (! VOID_TYPE_P (TREE_TYPE (tem))
10129 || VOID_TYPE_P (type)))
10130 return pedantic_non_lvalue (tem);
10131 return NULL_TREE;
10132 }
10133 if (operand_equal_p (arg1, op2, 0))
10134 return pedantic_omit_one_operand (type, arg1, arg0);
10135
10136 /* If we have A op B ? A : C, we may be able to convert this to a
10137 simpler expression, depending on the operation and the values
10138 of B and C. Signed zeros prevent all of these transformations,
10139 for reasons given above each one.
10140
10141 Also try swapping the arguments and inverting the conditional. */
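/* For instance, A > B ? A : B can become MAX_EXPR (A, B); this is
   one of the cases handled by fold_cond_expr_with_comparison. */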
10142 if (COMPARISON_CLASS_P (arg0)
10143 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10144 arg1, TREE_OPERAND (arg0, 1))
10145 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10146 {
10147 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10148 if (tem)
10149 return tem;
10150 }
10151
10152 if (COMPARISON_CLASS_P (arg0)
10153 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10154 op2,
10155 TREE_OPERAND (arg0, 1))
10156 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10157 {
10158 tem = invert_truthvalue (arg0);
10159 if (COMPARISON_CLASS_P (tem))
10160 {
10161 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10162 if (tem)
10163 return tem;
10164 }
10165 }
10166
10167 /* If the second operand is simpler than the third, swap them
10168 since that produces better jump optimization results. */
10169 if (tree_swap_operands_p (op1, op2, false))
10170 {
10171 /* See if this can be inverted. If it can't, possibly because
10172 it was a floating-point inequality comparison, don't do
10173 anything. */
10174 tem = invert_truthvalue (arg0);
10175
10176 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10177 return fold_build3 (code, type, tem, op2, op1);
10178 }
10179
10180 /* Convert A ? 1 : 0 to simply A. */
10181 if (integer_onep (op1)
10182 && integer_zerop (op2)
10183 /* If we try to convert OP0 to our type, the
10184 call to fold will try to move the conversion inside
10185 a COND, which will recurse. In that case, the COND_EXPR
10186 is probably the best choice, so leave it alone. */
10187 && type == TREE_TYPE (arg0))
10188 return pedantic_non_lvalue (arg0);
10189
10190 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10191 over COND_EXPR in cases such as floating point comparisons. */
10192 if (integer_zerop (op1)
10193 && integer_onep (op2)
10194 && truth_value_p (TREE_CODE (arg0)))
10195 return pedantic_non_lvalue (fold_convert (type,
10196 invert_truthvalue (arg0)));
10197
10198 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10199 if (TREE_CODE (arg0) == LT_EXPR
10200 && integer_zerop (TREE_OPERAND (arg0, 1))
10201 && integer_zerop (op2)
10202 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10203 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10204 TREE_TYPE (tem), tem, arg1));
10205
10206 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10207 already handled above. */
10208 if (TREE_CODE (arg0) == BIT_AND_EXPR
10209 && integer_onep (TREE_OPERAND (arg0, 1))
10210 && integer_zerop (op2)
10211 && integer_pow2p (arg1))
10212 {
10213 tree tem = TREE_OPERAND (arg0, 0);
10214 STRIP_NOPS (tem);
10215 if (TREE_CODE (tem) == RSHIFT_EXPR
10216 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10217 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
10218 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10219 return fold_build2 (BIT_AND_EXPR, type,
10220 TREE_OPERAND (tem, 0), arg1);
10221 }
10222
10223 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10224 is probably obsolete because the first operand should be a
10225 truth value (that's why we have the two cases above), but let's
10226 leave it in until we can confirm this for all front-ends. */
10227 if (integer_zerop (op2)
10228 && TREE_CODE (arg0) == NE_EXPR
10229 && integer_zerop (TREE_OPERAND (arg0, 1))
10230 && integer_pow2p (arg1)
10231 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10232 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10233 arg1, OEP_ONLY_CONST))
10234 return pedantic_non_lvalue (fold_convert (type,
10235 TREE_OPERAND (arg0, 0)));
10236
10237 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10238 if (integer_zerop (op2)
10239 && truth_value_p (TREE_CODE (arg0))
10240 && truth_value_p (TREE_CODE (arg1)))
10241 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10242
10243 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10244 if (integer_onep (op2)
10245 && truth_value_p (TREE_CODE (arg0))
10246 && truth_value_p (TREE_CODE (arg1)))
10247 {
10248 /* Only perform transformation if ARG0 is easily inverted. */
10249 tem = invert_truthvalue (arg0);
10250 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10251 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10252 }
10253
10254 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10255 if (integer_zerop (arg1)
10256 && truth_value_p (TREE_CODE (arg0))
10257 && truth_value_p (TREE_CODE (op2)))
10258 {
10259 /* Only perform transformation if ARG0 is easily inverted. */
10260 tem = invert_truthvalue (arg0);
10261 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10262 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10263 }
10264
10265 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10266 if (integer_onep (arg1)
10267 && truth_value_p (TREE_CODE (arg0))
10268 && truth_value_p (TREE_CODE (op2)))
10269 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10270
10271 return NULL_TREE;
10272
10273 case CALL_EXPR:
10274 /* Check for a built-in function. */
10275 if (TREE_CODE (op0) == ADDR_EXPR
10276 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10277 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10278 {
10279 tree fndecl = TREE_OPERAND (op0, 0);
10280 tree arglist = op1;
10281 tree tmp = fold_builtin (fndecl, arglist, false);
10282 if (tmp)
10283 return tmp;
10284 }
10285 return NULL_TREE;
10286
10287 case BIT_FIELD_REF:
10288 if (TREE_CODE (arg0) == VECTOR_CST
10289 && type == TREE_TYPE (TREE_TYPE (arg0))
10290 && host_integerp (arg1, 1)
10291 && host_integerp (op2, 1))
10292 {
10293 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10294 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10295
10296 if (width != 0
10297 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10298 && (idx % width) == 0
10299 && (idx = idx / width)
10300 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10301 {
10302 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10303 while (idx-- > 0)
10304 elements = TREE_CHAIN (elements);
10305 return TREE_VALUE (elements);
10306 }
10307 }
10308 return NULL_TREE;
10309
10310 default:
10311 return NULL_TREE;
10312 } /* switch (code) */
10313 }
10314
10315 /* Perform constant folding and related simplification of EXPR.
10316 The related simplifications include x*1 => x, x*0 => 0, etc.,
10317 and application of the associative law.
10318 NOP_EXPR conversions may be removed freely (as long as we
10319 are careful not to change the type of the overall expression).
10320 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10321 but we can constant-fold them if they have constant operands. */
10322
10323 #ifdef ENABLE_FOLD_CHECKING
10324 # define fold(x) fold_1 (x)
10325 static tree fold_1 (tree);
10326 static
10327 #endif
10328 tree
10329 fold (tree expr)
10330 {
10331 const tree t = expr;
10332 enum tree_code code = TREE_CODE (t);
10333 enum tree_code_class kind = TREE_CODE_CLASS (code);
10334 tree tem;
10335
10336 /* Return right away if a constant. */
10337 if (kind == tcc_constant)
10338 return t;
10339
10340 if (IS_EXPR_CODE_CLASS (kind))
10341 {
10342 tree type = TREE_TYPE (t);
10343 tree op0, op1, op2;
10344
10345 switch (TREE_CODE_LENGTH (code))
10346 {
10347 case 1:
10348 op0 = TREE_OPERAND (t, 0);
10349 tem = fold_unary (code, type, op0);
10350 return tem ? tem : expr;
10351 case 2:
10352 op0 = TREE_OPERAND (t, 0);
10353 op1 = TREE_OPERAND (t, 1);
10354 tem = fold_binary (code, type, op0, op1);
10355 return tem ? tem : expr;
10356 case 3:
10357 op0 = TREE_OPERAND (t, 0);
10358 op1 = TREE_OPERAND (t, 1);
10359 op2 = TREE_OPERAND (t, 2);
10360 tem = fold_ternary (code, type, op0, op1, op2);
10361 return tem ? tem : expr;
10362 default:
10363 break;
10364 }
10365 }
10366
10367 switch (code)
10368 {
10369 case CONST_DECL:
10370 return fold (DECL_INITIAL (t));
10371
10372 default:
10373 return t;
10374 } /* switch (code) */
10375 }
10376
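/* Usage sketch (illustrative): a caller typically writes

       tree t = fold (build2 (PLUS_EXPR, type, x, build_int_cst (type, 0)));

   The dispatch above hands the PLUS_EXPR to fold_binary, which may
   simplify X + 0 to X; if no simplification applies, the original
   tree is returned unchanged.  */
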
10377 #ifdef ENABLE_FOLD_CHECKING
10378 #undef fold
10379
10380 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10381 static void fold_check_failed (tree, tree);
10382 void print_fold_checksum (tree);
10383
10384 /* When --enable-checking=fold is in effect, compute a digest of EXPR
10385 before and after the actual fold call, to verify that fold did not
10386 accidentally change the original EXPR. */
10387
10388 tree
10389 fold (tree expr)
10390 {
10391 tree ret;
10392 struct md5_ctx ctx;
10393 unsigned char checksum_before[16], checksum_after[16];
10394 htab_t ht;
10395
10396 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10397 md5_init_ctx (&ctx);
10398 fold_checksum_tree (expr, &ctx, ht);
10399 md5_finish_ctx (&ctx, checksum_before);
10400 htab_empty (ht);
10401
10402 ret = fold_1 (expr);
10403
10404 md5_init_ctx (&ctx);
10405 fold_checksum_tree (expr, &ctx, ht);
10406 md5_finish_ctx (&ctx, checksum_after);
10407 htab_delete (ht);
10408
10409 if (memcmp (checksum_before, checksum_after, 16))
10410 fold_check_failed (expr, ret);
10411
10412 return ret;
10413 }
10414
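/* Illustrative, standalone analogue (not part of the original source):
   the digest-before/digest-after pattern used above, shown with a
   simple FNV-1a hash over a byte buffer instead of MD5 over trees.  */
#if 0
#include <assert.h>
#include <stddef.h>
#include <stdint.h>

static uint64_t
fnv1a (const void *buf, size_t len)
{
  const unsigned char *p = (const unsigned char *) buf;
  uint64_t h = 0xcbf29ce484222325ULL;
  while (len--)
    h = (h ^ *p++) * 0x100000001b3ULL;
  return h;
}

/* Run TRANSFORM on INPUT and assert that it did not mutate INPUT,
   mirroring what fold's checking mode does with MD5 checksums.  */
static char *
checked_transform (char *input, size_t len, char *(*transform) (char *))
{
  uint64_t before = fnv1a (input, len);
  char *result = transform (input);
  assert (fnv1a (input, len) == before);
  return result;
}
#endif
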
10415 void
10416 print_fold_checksum (tree expr)
10417 {
10418 struct md5_ctx ctx;
10419 unsigned char checksum[16], cnt;
10420 htab_t ht;
10421
10422 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10423 md5_init_ctx (&ctx);
10424 fold_checksum_tree (expr, &ctx, ht);
10425 md5_finish_ctx (&ctx, checksum);
10426 htab_delete (ht);
10427 for (cnt = 0; cnt < 16; ++cnt)
10428 fprintf (stderr, "%02x", checksum[cnt]);
10429 putc ('\n', stderr);
10430 }
10431
10432 static void
10433 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10434 {
10435 internal_error ("fold check: original tree changed by fold");
10436 }
10437
10438 static void
10439 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10440 {
10441 void **slot;
10442 enum tree_code code;
10443 char buf[sizeof (struct tree_decl)];
10444 int i, len;
10445
10446 recursive_label:
10447
10448 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10449 <= sizeof (struct tree_decl))
10450 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10451 if (expr == NULL)
10452 return;
10453 slot = htab_find_slot (ht, expr, INSERT);
10454 if (*slot != NULL)
10455 return;
10456 *slot = expr;
10457 code = TREE_CODE (expr);
10458 if (TREE_CODE_CLASS (code) == tcc_declaration
10459 && DECL_ASSEMBLER_NAME_SET_P (expr))
10460 {
10461 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10462 memcpy (buf, expr, tree_size (expr));
10463 expr = (tree) buf;
10464 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10465 }
10466 else if (TREE_CODE_CLASS (code) == tcc_type
10467 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10468 || TYPE_CACHED_VALUES_P (expr)
10469 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10470 {
10471 /* Allow these fields to be modified. */
10472 memcpy (buf, expr, tree_size (expr));
10473 expr = (tree) buf;
10474 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10475 TYPE_POINTER_TO (expr) = NULL;
10476 TYPE_REFERENCE_TO (expr) = NULL;
10477 if (TYPE_CACHED_VALUES_P (expr))
10478 {
10479 TYPE_CACHED_VALUES_P (expr) = 0;
10480 TYPE_CACHED_VALUES (expr) = NULL;
10481 }
10482 }
10483 md5_process_bytes (expr, tree_size (expr), ctx);
10484 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10485 if (TREE_CODE_CLASS (code) != tcc_type
10486 && TREE_CODE_CLASS (code) != tcc_declaration
10487 && code != TREE_LIST)
10488 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10489 switch (TREE_CODE_CLASS (code))
10490 {
10491 case tcc_constant:
10492 switch (code)
10493 {
10494 case STRING_CST:
10495 md5_process_bytes (TREE_STRING_POINTER (expr),
10496 TREE_STRING_LENGTH (expr), ctx);
10497 break;
10498 case COMPLEX_CST:
10499 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10500 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10501 break;
10502 case VECTOR_CST:
10503 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10504 break;
10505 default:
10506 break;
10507 }
10508 break;
10509 case tcc_exceptional:
10510 switch (code)
10511 {
10512 case TREE_LIST:
10513 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10514 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10515 expr = TREE_CHAIN (expr);
10516 goto recursive_label;
10517 break;
10518 case TREE_VEC:
10519 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10520 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10521 break;
10522 default:
10523 break;
10524 }
10525 break;
10526 case tcc_expression:
10527 case tcc_reference:
10528 case tcc_comparison:
10529 case tcc_unary:
10530 case tcc_binary:
10531 case tcc_statement:
10532 len = TREE_CODE_LENGTH (code);
10533 for (i = 0; i < len; ++i)
10534 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10535 break;
10536 case tcc_declaration:
10537 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10538 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10539 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10540 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10541 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10542 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10543 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10544 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10545 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10546 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10547 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10548 break;
10549 case tcc_type:
10550 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10551 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10552 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10553 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10554 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10555 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10556 if (INTEGRAL_TYPE_P (expr)
10557 || SCALAR_FLOAT_TYPE_P (expr))
10558 {
10559 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10560 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10561 }
10562 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10563 if (TREE_CODE (expr) == RECORD_TYPE
10564 || TREE_CODE (expr) == UNION_TYPE
10565 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10566 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10567 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10568 break;
10569 default:
10570 break;
10571 }
10572 }
10573
10574 #endif
10575
10576 /* Fold a unary tree expression with code CODE of type TYPE with an
10577 operand OP0. Return a folded expression if successful. Otherwise,
10578 return a tree expression with code CODE of type TYPE with an
10579 operand OP0. */
10580
10581 tree
10582 fold_build1 (enum tree_code code, tree type, tree op0)
10583 {
10584 tree tem = fold_unary (code, type, op0);
10585 if (tem)
10586 return tem;
10587
10588 return build1 (code, type, op0);
10589 }
10590
10591 /* Fold a binary tree expression with code CODE of type TYPE with
10592 operands OP0 and OP1. Return a folded expression if successful.
10593 Otherwise, return a tree expression with code CODE of type TYPE
10594 with operands OP0 and OP1. */
10595
10596 tree
10597 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10598 {
10599 tree tem = fold_binary (code, type, op0, op1);
10600 if (tem)
10601 return tem;
10602
10603 return build2 (code, type, op0, op1);
10604 }
10605
10606 /* Fold a ternary tree expression with code CODE of type TYPE with
10607 operands OP0, OP1, and OP2. Return a folded expression if
10608 successful. Otherwise, return a tree expression with code CODE of
10609 type TYPE with operands OP0, OP1, and OP2. */
10610
10611 tree
10612 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10613 {
10614 tree tem = fold_ternary (code, type, op0, op1, op2);
10615 if (tem)
10616 return tem;
10617
10618 return build3 (code, type, op0, op1, op2);
10619 }
10620
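/* Usage sketch (illustrative): the wrappers above let callers replace
   the common fold (buildN (...)) idiom with a single call, e.g.

       tree sum = fold_build2 (PLUS_EXPR, type, x,
                               build_int_cst (type, 0));

   If fold_binary simplifies X + 0 to X, no PLUS_EXPR node is
   allocated at all; otherwise one is built as before.  */
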
10621 /* Perform constant folding and related simplification of initializer
10622 expression EXPR. This behaves identically to "fold" but ignores
10623 potential run-time traps and exceptions that fold must preserve. */
10624
10625 tree
10626 fold_initializer (tree expr)
10627 {
10628 int saved_signaling_nans = flag_signaling_nans;
10629 int saved_trapping_math = flag_trapping_math;
10630 int saved_rounding_math = flag_rounding_math;
10631 int saved_trapv = flag_trapv;
10632 tree result;
10633
10634 flag_signaling_nans = 0;
10635 flag_trapping_math = 0;
10636 flag_rounding_math = 0;
10637 flag_trapv = 0;
10638
10639 result = fold (expr);
10640
10641 flag_signaling_nans = saved_signaling_nans;
10642 flag_trapping_math = saved_trapping_math;
10643 flag_rounding_math = saved_rounding_math;
10644 flag_trapv = saved_trapv;
10645
10646 return result;
10647 }
10648
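/* Usage sketch (illustrative): when folding a static initializer such
   as "double d = 1.0 / 3.0;" with -frounding-math in effect, fold
   itself must leave the division alone, but fold_initializer may
   evaluate it, since no run-time rounding mode applies to a static
   initializer.  */
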
10649 /* Determine if the first argument is a multiple of the second argument.
10650 Return 0 if it is not, or if we cannot easily determine it to be.
10651
10652 An example of the sort of thing we care about (at this point; this routine
10653 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10654 fold cases do now) is discovering that
10655
10656 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10657
10658 is a multiple of
10659
10660 SAVE_EXPR (J * 8)
10661
10662 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10663
10664 This code also handles discovering that
10665
10666 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10667
10668 is a multiple of 8 so we don't have to worry about dealing with a
10669 possible remainder.
10670
10671 Note that we *look* inside a SAVE_EXPR only to determine how it was
10672 calculated; it is not safe for fold to do much of anything else with the
10673 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10674 at run time. For example, the latter example above *cannot* be implemented
10675 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10676 evaluation time of the original SAVE_EXPR is not necessarily the same at
10677 the time the new expression is evaluated. The only optimization of this
10678 sort that would be valid is changing
10679
10680 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10681
10682 divided by 8 to
10683
10684 SAVE_EXPR (I) * SAVE_EXPR (J)
10685
10686 (where the same SAVE_EXPR (J) is used in the original and the
10687 transformed version). */
10688
10689 static int
10690 multiple_of_p (tree type, tree top, tree bottom)
10691 {
10692 if (operand_equal_p (top, bottom, 0))
10693 return 1;
10694
10695 if (TREE_CODE (type) != INTEGER_TYPE)
10696 return 0;
10697
10698 switch (TREE_CODE (top))
10699 {
10700 case BIT_AND_EXPR:
10701 /* Bitwise and provides a power of two multiple. If the mask is
10702 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10703 if (!integer_pow2p (bottom))
10704 return 0;
10705 /* FALLTHRU */
10706
10707 case MULT_EXPR:
10708 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10709 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10710
10711 case PLUS_EXPR:
10712 case MINUS_EXPR:
10713 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10714 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10715
10716 case LSHIFT_EXPR:
10717 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10718 {
10719 tree op1, t1;
10720
10721 op1 = TREE_OPERAND (top, 1);
10722 /* const_binop may not detect overflow correctly,
10723 so check for it explicitly here. */
10724 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10725 > TREE_INT_CST_LOW (op1)
10726 && TREE_INT_CST_HIGH (op1) == 0
10727 && 0 != (t1 = fold_convert (type,
10728 const_binop (LSHIFT_EXPR,
10729 size_one_node,
10730 op1, 0)))
10731 && ! TREE_OVERFLOW (t1))
10732 return multiple_of_p (type, t1, bottom);
10733 }
10734 return 0;
10735
10736 case NOP_EXPR:
10737 /* Can't handle conversions from non-integral or wider integral type. */
10738 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10739 || (TYPE_PRECISION (type)
10740 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10741 return 0;
10742
10743 /* .. fall through ... */
10744
10745 case SAVE_EXPR:
10746 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10747
10748 case INTEGER_CST:
10749 if (TREE_CODE (bottom) != INTEGER_CST
10750 || (TYPE_UNSIGNED (type)
10751 && (tree_int_cst_sgn (top) < 0
10752 || tree_int_cst_sgn (bottom) < 0)))
10753 return 0;
10754 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10755 top, bottom, 0));
10756
10757 default:
10758 return 0;
10759 }
10760 }
10761
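/* Usage sketch (illustrative; SIZE here is a hypothetical local
   variable, not something defined in this file): with SIZE the tree
   for J * 8,

       multiple_of_p (sizetype, size, build_int_cst (sizetype, 8))

   returns 1 through the MULT_EXPR case, because the constant operand
   8 is itself a multiple of BOTTOM.  */
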
10762 /* Return true if `t' is known to be non-negative. */
10763
10764 int
10765 tree_expr_nonnegative_p (tree t)
10766 {
10767 switch (TREE_CODE (t))
10768 {
10769 case ABS_EXPR:
10770 return 1;
10771
10772 case INTEGER_CST:
10773 return tree_int_cst_sgn (t) >= 0;
10774
10775 case REAL_CST:
10776 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10777
10778 case PLUS_EXPR:
10779 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10780 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10781 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10782
10783 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10784 both unsigned and at least 2 bits shorter than the result. */
10785 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10786 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10787 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10788 {
10789 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10790 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10791 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10792 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10793 {
10794 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10795 TYPE_PRECISION (inner2)) + 1;
10796 return prec < TYPE_PRECISION (TREE_TYPE (t));
10797 }
10798 }
10799 break;
10800
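/* Worked example for the test above: if both inner types are
   unsigned char (8 bits) and the result is a 32-bit int, then
   prec = MAX (8, 8) + 1 = 9 and each addend is below 2^8, so the
   sum is below 2^9; since 9 < 32 the addition cannot reach the
   sign bit, hence the result is non-negative.  */
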
10801 case MULT_EXPR:
10802 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10803 {
10804 /* x * x for floating point x is always non-negative. */
10805 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10806 return 1;
10807 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10808 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10809 }
10810
10811 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10812 both unsigned and their total bits is shorter than the result. */
10813 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10814 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10815 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10816 {
10817 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10818 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10819 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10820 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10821 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10822 < TYPE_PRECISION (TREE_TYPE (t));
10823 }
10824 return 0;
10825
10826 case TRUNC_DIV_EXPR:
10827 case CEIL_DIV_EXPR:
10828 case FLOOR_DIV_EXPR:
10829 case ROUND_DIV_EXPR:
10830 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10831 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10832
10833 case TRUNC_MOD_EXPR:
10834 case CEIL_MOD_EXPR:
10835 case FLOOR_MOD_EXPR:
10836 case ROUND_MOD_EXPR:
10837 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10838
10839 case RDIV_EXPR:
10840 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10841 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10842
10843 case BIT_AND_EXPR:
10844 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10845 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10846 case BIT_IOR_EXPR:
10847 case BIT_XOR_EXPR:
10848 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10849 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10850
10851 case NOP_EXPR:
10852 {
10853 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10854 tree outer_type = TREE_TYPE (t);
10855
10856 if (TREE_CODE (outer_type) == REAL_TYPE)
10857 {
10858 if (TREE_CODE (inner_type) == REAL_TYPE)
10859 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10860 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10861 {
10862 if (TYPE_UNSIGNED (inner_type))
10863 return 1;
10864 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10865 }
10866 }
10867 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10868 {
10869 if (TREE_CODE (inner_type) == REAL_TYPE)
10870 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10871 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10872 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10873 && TYPE_UNSIGNED (inner_type);
10874 }
10875 }
10876 break;
10877
10878 case COND_EXPR:
10879 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10880 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10881 case COMPOUND_EXPR:
10882 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10883 case MIN_EXPR:
10884 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10885 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10886 case MAX_EXPR:
10887 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10888 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10889 case MODIFY_EXPR:
10890 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10891 case BIND_EXPR:
10892 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10893 case SAVE_EXPR:
10894 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10895 case NON_LVALUE_EXPR:
10896 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10897 case FLOAT_EXPR:
10898 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10899
10900 case TARGET_EXPR:
10901 {
10902 tree temp = TARGET_EXPR_SLOT (t);
10903 t = TARGET_EXPR_INITIAL (t);
10904
10905 /* If the initializer is non-void, then it's a normal expression
10906 that will be assigned to the slot. */
10907 if (!VOID_TYPE_P (t))
10908 return tree_expr_nonnegative_p (t);
10909
10910 /* Otherwise, the initializer sets the slot in some way. One common
10911 way is an assignment statement at the end of the initializer. */
10912 while (1)
10913 {
10914 if (TREE_CODE (t) == BIND_EXPR)
10915 t = expr_last (BIND_EXPR_BODY (t));
10916 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10917 || TREE_CODE (t) == TRY_CATCH_EXPR)
10918 t = expr_last (TREE_OPERAND (t, 0));
10919 else if (TREE_CODE (t) == STATEMENT_LIST)
10920 t = expr_last (t);
10921 else
10922 break;
10923 }
10924 if (TREE_CODE (t) == MODIFY_EXPR
10925 && TREE_OPERAND (t, 0) == temp)
10926 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10927
10928 return 0;
10929 }
10930
10931 case CALL_EXPR:
10932 {
10933 tree fndecl = get_callee_fndecl (t);
10934 tree arglist = TREE_OPERAND (t, 1);
10935 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10936 switch (DECL_FUNCTION_CODE (fndecl))
10937 {
10938 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10939 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10940 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10941 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10942
10943 CASE_BUILTIN_F (BUILT_IN_ACOS)
10944 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10945 CASE_BUILTIN_F (BUILT_IN_CABS)
10946 CASE_BUILTIN_F (BUILT_IN_COSH)
10947 CASE_BUILTIN_F (BUILT_IN_ERFC)
10948 CASE_BUILTIN_F (BUILT_IN_EXP)
10949 CASE_BUILTIN_F (BUILT_IN_EXP10)
10950 CASE_BUILTIN_F (BUILT_IN_EXP2)
10951 CASE_BUILTIN_F (BUILT_IN_FABS)
10952 CASE_BUILTIN_F (BUILT_IN_FDIM)
10953 CASE_BUILTIN_F (BUILT_IN_FREXP)
10954 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10955 CASE_BUILTIN_F (BUILT_IN_POW10)
10956 CASE_BUILTIN_I (BUILT_IN_FFS)
10957 CASE_BUILTIN_I (BUILT_IN_PARITY)
10958 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10959 /* Always true. */
10960 return 1;
10961
10962 CASE_BUILTIN_F (BUILT_IN_SQRT)
10963 /* sqrt(-0.0) is -0.0. */
10964 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10965 return 1;
10966 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10967
10968 CASE_BUILTIN_F (BUILT_IN_ASINH)
10969 CASE_BUILTIN_F (BUILT_IN_ATAN)
10970 CASE_BUILTIN_F (BUILT_IN_ATANH)
10971 CASE_BUILTIN_F (BUILT_IN_CBRT)
10972 CASE_BUILTIN_F (BUILT_IN_CEIL)
10973 CASE_BUILTIN_F (BUILT_IN_ERF)
10974 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10975 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10976 CASE_BUILTIN_F (BUILT_IN_FMOD)
10977 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10978 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10979 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10980 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10981 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10982 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10983 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10984 CASE_BUILTIN_F (BUILT_IN_LRINT)
10985 CASE_BUILTIN_F (BUILT_IN_LROUND)
10986 CASE_BUILTIN_F (BUILT_IN_MODF)
10987 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10988 CASE_BUILTIN_F (BUILT_IN_POW)
10989 CASE_BUILTIN_F (BUILT_IN_RINT)
10990 CASE_BUILTIN_F (BUILT_IN_ROUND)
10991 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10992 CASE_BUILTIN_F (BUILT_IN_SINH)
10993 CASE_BUILTIN_F (BUILT_IN_TANH)
10994 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10995 /* True if the 1st argument is nonnegative. */
10996 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10997
10998 CASE_BUILTIN_F (BUILT_IN_FMAX)
10999 /* True if the 1st OR 2nd arguments are nonnegative. */
11000 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11001 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11002
11003 CASE_BUILTIN_F (BUILT_IN_FMIN)
11004 /* True if the 1st AND 2nd arguments are nonnegative. */
11005 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11006 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11007
11008 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11009 /* True if the 2nd argument is nonnegative. */
11010 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11011
11012 default:
11013 break;
11014 #undef CASE_BUILTIN_F
11015 #undef CASE_BUILTIN_I
11016 }
11017 }
11018
11019 /* ... fall through ... */
11020
11021 default:
11022 if (truth_value_p (TREE_CODE (t)))
11023 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11024 return 1;
11025 }
11026
11027 /* We don't know the sign of `t', so be conservative and return false. */
11028 return 0;
11029 }
11030
11031 /* Return true when T is an address and is known to be nonzero.
11032 For floating point we further ensure that T is not denormal.
11033 Similar logic is present in nonzero_address_p in rtlanal.c. */
11034
11035 static bool
11036 tree_expr_nonzero_p (tree t)
11037 {
11038 tree type = TREE_TYPE (t);
11039
11040 /* Doing something useful for floating point would need more work. */
11041 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11042 return false;
11043
11044 switch (TREE_CODE (t))
11045 {
11046 case ABS_EXPR:
11047 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11048 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11049 break; /* Don't fall through and misread this node as an INTEGER_CST. */
11050 case INTEGER_CST:
11051 /* We used to test for !integer_zerop here. This does not work correctly
11052 if TREE_CONSTANT_OVERFLOW (t). */
11053 return (TREE_INT_CST_LOW (t) != 0
11054 || TREE_INT_CST_HIGH (t) != 0);
11055
11056 case PLUS_EXPR:
11057 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11058 {
11059 /* In the presence of negative values it is hard
11060 to say anything definite. */
11061 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11062 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11063 return false;
11064 /* One of the operands must be positive and the other non-negative. */
11065 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11066 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11067 }
11068 break;
11069
11070 case MULT_EXPR:
11071 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11072 {
11073 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11074 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11075 }
11076 break;
11077
11078 case NOP_EXPR:
11079 {
11080 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11081 tree outer_type = TREE_TYPE (t);
11082
11083 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11084 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11085 }
11086 break;
11087
11088 case ADDR_EXPR:
11089 {
11090 tree base = get_base_address (TREE_OPERAND (t, 0));
11091
11092 if (!base)
11093 return false;
11094
11095 /* Weak declarations may link to NULL. */
11096 if (DECL_P (base))
11097 return !DECL_WEAK (base);
11098
11099 /* Constants are never weak. */
11100 if (CONSTANT_CLASS_P (base))
11101 return true;
11102
11103 return false;
11104 }
11105
11106 case COND_EXPR:
11107 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11108 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11109
11110 case MIN_EXPR:
11111 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11112 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11113
11114 case MAX_EXPR:
11115 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11116 {
11117 /* When both operands are nonzero, then MAX must be too. */
11118 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11119 return true;
11120
11121 /* MAX where operand 0 is positive is positive. */
11122 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11123 }
11124 /* MAX where operand 1 is positive is positive. */
11125 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11126 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11127 return true;
11128 break;
11129
11130 case COMPOUND_EXPR:
11131 case MODIFY_EXPR:
11132 case BIND_EXPR:
11133 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11134
11135 case SAVE_EXPR:
11136 case NON_LVALUE_EXPR:
11137 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11138
11139 case BIT_IOR_EXPR:
11140 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11141 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11142
11143 default:
11144 break;
11145 }
11146 return false;
11147 }
11148
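/* For example, the ADDR_EXPR case above treats &some_local as
   nonzero (the address of a non-weak decl), while the address of a
   declaration marked weak may resolve to NULL at link time and so
   is not known to be nonzero.  */
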
11149 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11150 attempt to fold the expression to a constant without modifying TYPE,
11151 OP0 or OP1.
11152
11153 If the expression could be simplified to a constant, then return
11154 the constant. If the expression would not be simplified to a
11155 constant, then return NULL_TREE. */
11156
11157 tree
11158 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11159 {
11160 tree tem = fold_binary (code, type, op0, op1);
11161 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11162 }
11163
11164 /* Given the components of a unary expression CODE, TYPE and OP0,
11165 attempt to fold the expression to a constant without modifying
11166 TYPE or OP0.
11167
11168 If the expression could be simplified to a constant, then return
11169 the constant. If the expression would not be simplified to a
11170 constant, then return NULL_TREE. */
11171
11172 tree
11173 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11174 {
11175 tree tem = fold_unary (code, type, op0);
11176 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11177 }
11178
11179 /* If EXP represents referencing an element in a constant string
11180 (either via pointer arithmetic or array indexing), return the
11181 tree representing the value accessed, otherwise return NULL. */
11182
11183 tree
11184 fold_read_from_constant_string (tree exp)
11185 {
11186 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11187 {
11188 tree exp1 = TREE_OPERAND (exp, 0);
11189 tree index;
11190 tree string;
11191
11192 if (TREE_CODE (exp) == INDIRECT_REF)
11193 string = string_constant (exp1, &index);
11194 else
11195 {
11196 tree low_bound = array_ref_low_bound (exp);
11197 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11198
11199 /* Optimize the special case of a zero lower bound.
11200
11201 We convert the low_bound to sizetype to avoid some problems
11202 with constant folding. (E.g. suppose the lower bound is 1,
11203 and its mode is QI. Without the conversion, (ARRAY
11204 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11205 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11206 if (! integer_zerop (low_bound))
11207 index = size_diffop (index, fold_convert (sizetype, low_bound));
11208
11209 string = exp1;
11210 }
11211
11212 if (string
11213 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11214 && TREE_CODE (string) == STRING_CST
11215 && TREE_CODE (index) == INTEGER_CST
11216 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11217 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11218 == MODE_INT)
11219 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11220 return fold_convert (TREE_TYPE (exp),
11221 build_int_cst (NULL_TREE,
11222 (TREE_STRING_POINTER (string)
11223 [TREE_INT_CST_LOW (index)])));
11224 }
11225 return NULL;
11226 }
11227
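/* Usage sketch (illustrative): for the ARRAY_REF "abc"[1], the index
   is the INTEGER_CST 1, which is within the string length, and the
   element mode is a one-byte integer mode, so the function returns
   the character constant 'b' converted to the type of the
   reference.  */
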
11228 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11229 an integer constant or real constant.
11230
11231 TYPE is the type of the result. */
11232
11233 static tree
11234 fold_negate_const (tree arg0, tree type)
11235 {
11236 tree t = NULL_TREE;
11237
11238 switch (TREE_CODE (arg0))
11239 {
11240 case INTEGER_CST:
11241 {
11242 unsigned HOST_WIDE_INT low;
11243 HOST_WIDE_INT high;
11244 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11245 TREE_INT_CST_HIGH (arg0),
11246 &low, &high);
11247 t = build_int_cst_wide (type, low, high);
11248 t = force_fit_type (t, 1,
11249 (overflow | TREE_OVERFLOW (arg0))
11250 && !TYPE_UNSIGNED (type),
11251 TREE_CONSTANT_OVERFLOW (arg0));
11252 break;
11253 }
11254
11255 case REAL_CST:
11256 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11257 break;
11258
11259 default:
11260 gcc_unreachable ();
11261 }
11262
11263 return t;
11264 }
11265
11266 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11267 an integer constant or real constant.
11268
11269 TYPE is the type of the result. */
11270
11271 tree
11272 fold_abs_const (tree arg0, tree type)
11273 {
11274 tree t = NULL_TREE;
11275
11276 switch (TREE_CODE (arg0))
11277 {
11278 case INTEGER_CST:
11279 /* If the value is unsigned, then the absolute value is
11280 the same as the ordinary value. */
11281 if (TYPE_UNSIGNED (type))
11282 t = arg0;
11283 /* Similarly, if the value is non-negative. */
11284 else if (INT_CST_LT (integer_minus_one_node, arg0))
11285 t = arg0;
11286 /* If the value is negative, then the absolute value is
11287 its negation. */
11288 else
11289 {
11290 unsigned HOST_WIDE_INT low;
11291 HOST_WIDE_INT high;
11292 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11293 TREE_INT_CST_HIGH (arg0),
11294 &low, &high);
11295 t = build_int_cst_wide (type, low, high);
11296 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11297 TREE_CONSTANT_OVERFLOW (arg0));
11298 }
11299 break;
11300
11301 case REAL_CST:
11302 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11303 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11304 else
11305 t = arg0;
11306 break;
11307
11308 default:
11309 gcc_unreachable ();
11310 }
11311
11312 return t;
11313 }
11314
11315 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11316 constant. TYPE is the type of the result. */
11317
11318 static tree
11319 fold_not_const (tree arg0, tree type)
11320 {
11321 tree t = NULL_TREE;
11322
11323 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11324
11325 t = build_int_cst_wide (type,
11326 ~ TREE_INT_CST_LOW (arg0),
11327 ~ TREE_INT_CST_HIGH (arg0));
11328 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11329 TREE_CONSTANT_OVERFLOW (arg0));
11330
11331 return t;
11332 }
11333
11334 /* Given CODE, a relational operator, the target type, TYPE and two
11335 constant operands OP0 and OP1, return the result of the
11336 relational operation. If the result is not a compile time
11337 constant, then return NULL_TREE. */
11338
11339 static tree
11340 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11341 {
11342 int result, invert;
11343
11344 /* From here on, the only cases we handle are when the result is
11345 known to be a constant. */
11346
11347 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11348 {
11349 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11350 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11351
11352 /* Handle the cases where either operand is a NaN. */
11353 if (real_isnan (c0) || real_isnan (c1))
11354 {
11355 switch (code)
11356 {
11357 case EQ_EXPR:
11358 case ORDERED_EXPR:
11359 result = 0;
11360 break;
11361
11362 case NE_EXPR:
11363 case UNORDERED_EXPR:
11364 case UNLT_EXPR:
11365 case UNLE_EXPR:
11366 case UNGT_EXPR:
11367 case UNGE_EXPR:
11368 case UNEQ_EXPR:
11369 result = 1;
11370 break;
11371
11372 case LT_EXPR:
11373 case LE_EXPR:
11374 case GT_EXPR:
11375 case GE_EXPR:
11376 case LTGT_EXPR:
11377 if (flag_trapping_math)
11378 return NULL_TREE;
11379 result = 0;
11380 break;
11381
11382 default:
11383 gcc_unreachable ();
11384 }
11385
11386 return constant_boolean_node (result, type);
11387 }
11388
11389 return constant_boolean_node (real_compare (code, c0, c1), type);
11390 }
11391
11392 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11393
11394 To compute GT, swap the arguments and do LT.
11395 To compute GE, do LT and invert the result.
11396 To compute LE, swap the arguments, do LT and invert the result.
11397 To compute NE, do EQ and invert the result.
11398
11399 Therefore, the code below must handle only EQ and LT. */
11400
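/* For example, 3 >= 5 is computed as the inversion of 3 < 5: the LT
   result is 1, so GE yields 0.  Likewise 3 <= 5 swaps the operands,
   computes 5 < 3 = 0, and inverts to get 1.  */
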
11401 if (code == LE_EXPR || code == GT_EXPR)
11402 {
11403 tree tem = op0;
11404 op0 = op1;
11405 op1 = tem;
11406 code = swap_tree_comparison (code);
11407 }
11408
11409 /* Note that it is safe to invert for real values here because we
11410 have already handled the one case where it matters. */
11411
11412 invert = 0;
11413 if (code == NE_EXPR || code == GE_EXPR)
11414 {
11415 invert = 1;
11416 code = invert_tree_comparison (code, false);
11417 }
11418
11419 /* Compute a result for LT or EQ if the arguments permit;
11420 otherwise return NULL_TREE. */
11421 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11422 {
11423 if (code == EQ_EXPR)
11424 result = tree_int_cst_equal (op0, op1);
11425 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11426 result = INT_CST_LT_UNSIGNED (op0, op1);
11427 else
11428 result = INT_CST_LT (op0, op1);
11429 }
11430 else
11431 return NULL_TREE;
11432
11433 if (invert)
11434 result ^= 1;
11435 return constant_boolean_node (result, type);
11436 }
11437
11438 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11439 Don't build a CLEANUP_POINT_EXPR if EXPR itself has no side
11440 effects. */
11441
11442 tree
11443 fold_build_cleanup_point_expr (tree type, tree expr)
11444 {
11445 /* If the expression does not have side effects then we don't have to wrap
11446 it with a cleanup point expression. */
11447 if (!TREE_SIDE_EFFECTS (expr))
11448 return expr;
11449
11450 /* If the expression is a return, check whether the expression inside the
11451 return, or the right-hand side of the modify expression inside it, has
11452 side effects. If either has none, we don't need to wrap the expression
11453 in a cleanup point expression. Note we don't check the left-hand side
11454 of the modify because it should always be the return decl. */
11455 if (TREE_CODE (expr) == RETURN_EXPR)
11456 {
11457 tree op = TREE_OPERAND (expr, 0);
11458 if (!op || !TREE_SIDE_EFFECTS (op))
11459 return expr;
11460 op = TREE_OPERAND (op, 1);
11461 if (!TREE_SIDE_EFFECTS (op))
11462 return expr;
11463 }
11464
11465 return build1 (CLEANUP_POINT_EXPR, type, expr);
11466 }
11467
11468 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11469 avoid confusing the gimplify process. */
11470
11471 tree
11472 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11473 {
11474 /* The size of the object is not relevant when talking about its address. */
11475 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11476 t = TREE_OPERAND (t, 0);
11477
11478 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11479 if (TREE_CODE (t) == INDIRECT_REF
11480 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11481 {
11482 t = TREE_OPERAND (t, 0);
11483 if (TREE_TYPE (t) != ptrtype)
11484 t = build1 (NOP_EXPR, ptrtype, t);
11485 }
11486 else
11487 {
11488 tree base = t;
11489
11490 while (handled_component_p (base))
11491 base = TREE_OPERAND (base, 0);
11492 if (DECL_P (base))
11493 TREE_ADDRESSABLE (base) = 1;
11494
11495 t = build1 (ADDR_EXPR, ptrtype, t);
11496 }
11497
11498 return t;
11499 }
11500
11501 tree
11502 build_fold_addr_expr (tree t)
11503 {
11504 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11505 }
11506
11507 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11508 of an indirection through OP0, or NULL_TREE if no simplification is
11509 possible. */
11510
11511 tree
11512 fold_indirect_ref_1 (tree type, tree op0)
11513 {
11514 tree sub = op0;
11515 tree subtype;
11516
11517 STRIP_NOPS (sub);
11518 subtype = TREE_TYPE (sub);
11519 if (!POINTER_TYPE_P (subtype))
11520 return NULL_TREE;
11521
11522 if (TREE_CODE (sub) == ADDR_EXPR)
11523 {
11524 tree op = TREE_OPERAND (sub, 0);
11525 tree optype = TREE_TYPE (op);
11526 /* *&p => p */
11527 if (type == optype)
11528 return op;
11529 /* *(foo *)&fooarray => fooarray[0] */
11530 else if (TREE_CODE (optype) == ARRAY_TYPE
11531 && type == TREE_TYPE (optype))
11532 {
11533 tree type_domain = TYPE_DOMAIN (optype);
11534 tree min_val = size_zero_node;
11535 if (type_domain && TYPE_MIN_VALUE (type_domain))
11536 min_val = TYPE_MIN_VALUE (type_domain);
11537 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11538 }
11539 }
11540
11541 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11542 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11543 && type == TREE_TYPE (TREE_TYPE (subtype)))
11544 {
11545 tree type_domain;
11546 tree min_val = size_zero_node;
11547 sub = build_fold_indirect_ref (sub);
11548 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11549 if (type_domain && TYPE_MIN_VALUE (type_domain))
11550 min_val = TYPE_MIN_VALUE (type_domain);
11551 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11552 }
11553
11554 return NULL_TREE;
11555 }
11556
11557 /* Builds an expression for an indirection through T, simplifying some
11558 cases. */
11559
11560 tree
11561 build_fold_indirect_ref (tree t)
11562 {
11563 tree type = TREE_TYPE (TREE_TYPE (t));
11564 tree sub = fold_indirect_ref_1 (type, t);
11565
11566 if (sub)
11567 return sub;
11568 else
11569 return build1 (INDIRECT_REF, type, t);
11570 }
11571
11572 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11573
11574 tree
11575 fold_indirect_ref (tree t)
11576 {
11577 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11578
11579 if (sub)
11580 return sub;
11581 else
11582 return t;
11583 }
11584
11585 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11586 whose result is ignored. The type of the returned tree need not be
11587 the same as the original expression. */
11588
11589 tree
11590 fold_ignored_result (tree t)
11591 {
11592 if (!TREE_SIDE_EFFECTS (t))
11593 return integer_zero_node;
11594
11595 for (;;)
11596 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11597 {
11598 case tcc_unary:
11599 t = TREE_OPERAND (t, 0);
11600 break;
11601
11602 case tcc_binary:
11603 case tcc_comparison:
11604 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11605 t = TREE_OPERAND (t, 0);
11606 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11607 t = TREE_OPERAND (t, 1);
11608 else
11609 return t;
11610 break;
11611
11612 case tcc_expression:
11613 switch (TREE_CODE (t))
11614 {
11615 case COMPOUND_EXPR:
11616 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11617 return t;
11618 t = TREE_OPERAND (t, 0);
11619 break;
11620
11621 case COND_EXPR:
11622 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11623 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11624 return t;
11625 t = TREE_OPERAND (t, 0);
11626 break;
11627
11628 default:
11629 return t;
11630 }
11631 break;
11632
11633 default:
11634 return t;
11635 }
11636 }
11637
11638 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11639 This can only be applied to objects of a sizetype. */
11640
11641 tree
11642 round_up (tree value, int divisor)
11643 {
11644 tree div = NULL_TREE;
11645
11646 gcc_assert (divisor > 0);
11647 if (divisor == 1)
11648 return value;
11649
11650 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11651 have to do anything. Only do this when we are not given a constant,
11652 because in that case this check is more expensive than just
11653 doing the rounding. */
11654 if (TREE_CODE (value) != INTEGER_CST)
11655 {
11656 div = build_int_cst (TREE_TYPE (value), divisor);
11657
11658 if (multiple_of_p (TREE_TYPE (value), value, div))
11659 return value;
11660 }
11661
11662 /* If divisor is a power of two, simplify this to bit manipulation. */
11663 if (divisor == (divisor & -divisor))
11664 {
11665 tree t;
11666
11667 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11668 value = size_binop (PLUS_EXPR, value, t);
11669 t = build_int_cst (TREE_TYPE (value), -divisor);
11670 value = size_binop (BIT_AND_EXPR, value, t);
11671 }
11672 else
11673 {
11674 if (!div)
11675 div = build_int_cst (TREE_TYPE (value), divisor);
11676 value = size_binop (CEIL_DIV_EXPR, value, div);
11677 value = size_binop (MULT_EXPR, value, div);
11678 }
11679
11680 return value;
11681 }
11682
11683 /* Likewise, but round down. */
11684
11685 tree
11686 round_down (tree value, int divisor)
11687 {
11688 tree div = NULL_TREE;
11689
11690 gcc_assert (divisor > 0);
11691 if (divisor == 1)
11692 return value;
11693
11694 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11695 have to do anything. Only do this when we are not given a constant,
11696 because in that case this check is more expensive than just
11697 doing the rounding. */
11698 if (TREE_CODE (value) != INTEGER_CST)
11699 {
11700 div = build_int_cst (TREE_TYPE (value), divisor);
11701
11702 if (multiple_of_p (TREE_TYPE (value), value, div))
11703 return value;
11704 }
11705
11706 /* If divisor is a power of two, simplify this to bit manipulation. */
11707 if (divisor == (divisor & -divisor))
11708 {
11709 tree t;
11710
11711 t = build_int_cst (TREE_TYPE (value), -divisor);
11712 value = size_binop (BIT_AND_EXPR, value, t);
11713 }
11714 else
11715 {
11716 if (!div)
11717 div = build_int_cst (TREE_TYPE (value), divisor);
11718 value = size_binop (FLOOR_DIV_EXPR, value, div);
11719 value = size_binop (MULT_EXPR, value, div);
11720 }
11721
11722 return value;
11723 }
11724
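/* Illustrative, standalone analogue (not part of the original source):
   for a power-of-two DIVISOR the code above reduces to the classic
   bit tricks (value + divisor - 1) & -divisor and value & -divisor.  */
#if 0
#include <assert.h>

static unsigned long
round_up_pow2 (unsigned long value, unsigned long divisor)
{
  /* DIVISOR must be a nonzero power of two.  */
  assert (divisor != 0 && (divisor & (divisor - 1)) == 0);
  return (value + divisor - 1) & -divisor;
}

static unsigned long
round_down_pow2 (unsigned long value, unsigned long divisor)
{
  assert (divisor != 0 && (divisor & (divisor - 1)) == 0);
  return value & -divisor;
}

/* round_up_pow2 (13, 8) == 16, round_down_pow2 (13, 8) == 8.  */
#endif
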
11725 /* Returns a pointer to the base of the object addressed by EXP and
11726 extracts the information about the offset of the access, storing it
11727 to *PBITPOS and *POFFSET. */
11728
11729 static tree
11730 split_address_to_core_and_offset (tree exp,
11731 HOST_WIDE_INT *pbitpos, tree *poffset)
11732 {
11733 tree core;
11734 enum machine_mode mode;
11735 int unsignedp, volatilep;
11736 HOST_WIDE_INT bitsize;
11737
11738 if (TREE_CODE (exp) == ADDR_EXPR)
11739 {
11740 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11741 poffset, &mode, &unsignedp, &volatilep,
11742 false);
11743
11744 if (TREE_CODE (core) == INDIRECT_REF)
11745 core = TREE_OPERAND (core, 0);
11746 }
11747 else
11748 {
11749 core = exp;
11750 *pbitpos = 0;
11751 *poffset = NULL_TREE;
11752 }
11753
11754 return core;
11755 }
11756
11757 /* Returns true if the addresses of E1 and E2 differ by a constant, false
11758 otherwise. If they do, the byte difference E1 - E2 is stored in *DIFF. */
11759
11760 bool
11761 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11762 {
11763 tree core1, core2;
11764 HOST_WIDE_INT bitpos1, bitpos2;
11765 tree toffset1, toffset2, tdiff, type;
11766
11767 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11768 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11769
11770 if (bitpos1 % BITS_PER_UNIT != 0
11771 || bitpos2 % BITS_PER_UNIT != 0
11772 || !operand_equal_p (core1, core2, 0))
11773 return false;
11774
11775 if (toffset1 && toffset2)
11776 {
11777 type = TREE_TYPE (toffset1);
11778 if (type != TREE_TYPE (toffset2))
11779 toffset2 = fold_convert (type, toffset2);
11780
11781 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11782 if (!host_integerp (tdiff, 0))
11783 return false;
11784
11785 *diff = tree_low_cst (tdiff, 0);
11786 }
11787 else if (toffset1 || toffset2)
11788 {
11789 /* If only one of the offsets is non-constant, the difference cannot
11790 be a constant. */
11791 return false;
11792 }
11793 else
11794 *diff = 0;
11795
11796 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11797 return true;
11798 }
11799
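/* Usage sketch (illustrative): for "int a[10];" the addresses &a[3]
   and &a[1] share the core A, both bit positions are byte-aligned,
   and ptr_difference_const stores 8 in *DIFF (two elements of four
   bytes each, assuming a 32-bit int).  */
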
11800 /* Simplify the floating point expression EXP when the sign of the
11801 result is not significant. Return NULL_TREE if no simplification
11802 is possible. */
11803
11804 tree
11805 fold_strip_sign_ops (tree exp)
11806 {
11807 tree arg0, arg1;
11808
11809 switch (TREE_CODE (exp))
11810 {
11811 case ABS_EXPR:
11812 case NEGATE_EXPR:
11813 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11814 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11815
11816 case MULT_EXPR:
11817 case RDIV_EXPR:
11818 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11819 return NULL_TREE;
11820 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11821 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11822 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11823 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11824 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11825 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11826 break;
11827
11828 default:
11829 break;
11830 }
11831 return NULL_TREE;
11832 }
11833