/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
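
/* Example, assuming a host where HOST_WIDE_INT is 32 bits: for
   a = 0x7fffffff and b = 1 the wrapped sum is 0x80000000.  Then
   ~(a ^ b) = 0x80000001 (a and b agree in sign) and a ^ sum =
   0xffffffff (a and sum disagree), so their AND is negative and
   overflow is reported.  For a = -1, b = 1, sum = 0, ~(a ^ b) =
   0x00000001 has a clear sign bit, so no overflow is reported.  */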
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
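
/* Worked example, assuming 32-bit HOST_WIDE_INT so that BASE = 1 << 16:
   encoding low = 0x12345678, hi = 0x0000abcd yields the digits
   words[0] = 0x5678, words[1] = 0x1234, words[2] = 0xabcd and
   words[3] = 0x0000; decode recombines each pair as
   words[0] + words[1] * BASE = 0x12345678 and
   words[2] + words[3] * BASE = 0x0000abcd.  */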
\f
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
     CONST_OVERFLOWED is nonzero,
     or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
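
/* For instance, with a hypothetical 8-bit signed type and an incoming
   low part of 0xff: the bits above bit 7 are cleared first; then,
   since bit 7 (the sign bit) is set and the type is signed, low is
   sign extended and high set to -1, giving the value -1.  In an
   8-bit unsigned type the same bit pattern is left as 255.  */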
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
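
/* The carry out of the low word is recovered without a wider type:
   unsigned addition wraps modulo 2^HOST_BITS_PER_WIDE_INT, so the sum
   l is smaller than the operand l1 exactly when a carry occurred.
   E.g. with 32-bit words, l1 = 0xffffffff and l2 = 1 give l = 0, and
   0 < 0xffffffff adds the carry of 1 into the high word.  */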

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
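
/* This follows the two's complement identity -x = ~x + 1 applied to
   the two-word value: when the low word is nonzero, negating it
   absorbs the "+ 1" without carrying out, so the high word is just
   ~h1 and no signed overflow is possible.  When the low word is
   zero, the negation is -h1 in the high word alone; the
   (*hv & h1) < 0 test catches the one overflowing case, negating
   the most negative value.  */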
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
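
/* The digits were multiplied as unsigned quantities, so when an
   operand is negative the top half is off by one multiple of the
   other operand; adding in that operand's negation corrects this.
   After the fixups, the signed product fits in a doubleword exactly
   when the upper doubleword is a plain sign extension of the result:
   all ones for a negative result, all zeros otherwise, which is what
   the final test checks.  */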
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
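
/* Note the double shift `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1'
   above (and its mirror image in rshift_double below): when COUNT is 0
   a single shift by HOST_BITS_PER_WIDE_INT would be undefined behavior
   in C, but splitting it keeps each shift count in range while still
   contributing l1 >> (HOST_BITS_PER_WIDE_INT - count), i.e. zero bits
   from l1 when COUNT is 0, as required.  */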

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
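
/* As an example of the rounding variants: dividing -7 by 2 gives a
   truncated quotient of -3 with remainder -1; FLOOR_DIV_EXPR adjusts
   that to -4 with remainder 1, CEIL_DIV_EXPR keeps -3, and
   ROUND_DIV_EXPR, since 2 * |rem| >= |den|, moves away from zero to
   -4 with remainder 1.  */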

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
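
/* In a hypothetical 8-bit signed type the only value this rejects is
   -128: its bit pattern 0x80 equals 1 << (prec - 1), and negating it
   would wrap back to -128.  Every other value, e.g. -127 or 42,
   negates safely.  */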

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
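
/* For example, splitting IN = a + 3 with CODE = PLUS_EXPR stores 3 in
   *LITP and returns the variable part a; splitting IN = b - 5 stores
   5 in *MINUS_LITP instead, since the literal was subtracted.  */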

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
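      /* ... fall through ...  */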
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
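      /* ... fall through ...  */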
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for a floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
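
/* The RDIV_EXPR case above is the textbook complex quotient: with
   z1 = r1 + i1*I and z2 = r2 + i2*I,
   z1/z2 = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*I) / (r2*r2 + i2*i2),
   computed there as T1, T2 and MAGSQUARED respectively.  */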

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
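
/* E.g. for two sizetype constants 2 and 5 (sizetype is unsigned), the
   difference is formed as -(5 - 2) in the signed ssizetype, so the
   result -3 is produced without relying on unsigned wraparound.  */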
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                              type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
1987 {
1988 case INTEGER_TYPE: case CHAR_TYPE:
1989 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1990 case POINTER_TYPE: case REFERENCE_TYPE:
1991 case REAL_TYPE:
1992 return build2 (COMPLEX_EXPR, type,
1993 fold_convert (TREE_TYPE (type), arg),
1994 fold_convert (TREE_TYPE (type), integer_zero_node));
1995 case COMPLEX_TYPE:
1996 {
1997 tree rpart, ipart;
1998
1999 if (TREE_CODE (arg) == COMPLEX_EXPR)
2000 {
2001 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2002 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2003 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2004 }
2005
2006 arg = save_expr (arg);
2007 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2008 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2009 rpart = fold_convert (TREE_TYPE (type), rpart);
2010 ipart = fold_convert (TREE_TYPE (type), ipart);
2011 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2012 }
2013
2014 default:
2015 gcc_unreachable ();
2016 }
2017
2018 case VECTOR_TYPE:
2019 if (integer_zerop (arg))
2020 return build_zero_vector (type);
2021 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2022 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2023 || TREE_CODE (orig) == VECTOR_TYPE);
2024 return fold_build1 (NOP_EXPR, type, arg);
2025
2026 case VOID_TYPE:
2027 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2028
2029 default:
2030 gcc_unreachable ();
2031 }
2032 }
2033 \f
2034 /* Return false if expr can be assumed not to be an lvalue, true
2035 otherwise. */
2036
2037 static bool
2038 maybe_lvalue_p (tree x)
2039 {
2040 /* We only need to wrap lvalue tree codes. */
2041 switch (TREE_CODE (x))
2042 {
2043 case VAR_DECL:
2044 case PARM_DECL:
2045 case RESULT_DECL:
2046 case LABEL_DECL:
2047 case FUNCTION_DECL:
2048 case SSA_NAME:
2049
2050 case COMPONENT_REF:
2051 case INDIRECT_REF:
2052 case ALIGN_INDIRECT_REF:
2053 case MISALIGNED_INDIRECT_REF:
2054 case ARRAY_REF:
2055 case ARRAY_RANGE_REF:
2056 case BIT_FIELD_REF:
2057 case OBJ_TYPE_REF:
2058
2059 case REALPART_EXPR:
2060 case IMAGPART_EXPR:
2061 case PREINCREMENT_EXPR:
2062 case PREDECREMENT_EXPR:
2063 case SAVE_EXPR:
2064 case TRY_CATCH_EXPR:
2065 case WITH_CLEANUP_EXPR:
2066 case COMPOUND_EXPR:
2067 case MODIFY_EXPR:
2068 case TARGET_EXPR:
2069 case COND_EXPR:
2070 case BIND_EXPR:
2071 case MIN_EXPR:
2072 case MAX_EXPR:
2073 break;
2074
2075 default:
2076 /* Assume the worst for front-end tree codes. */
2077 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2078 break;
2079 return false;
2080 }
2081
2082 return true;
2083 }
2084
2085 /* Return an expr equal to X but certainly not valid as an lvalue. */
2086
2087 tree
2088 non_lvalue (tree x)
2089 {
2090 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2091 us. */
2092 if (in_gimple_form)
2093 return x;
2094
2095 if (! maybe_lvalue_p (x))
2096 return x;
2097 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2098 }
2099
2100 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2101 Zero means allow extended lvalues. */
2102
2103 int pedantic_lvalues;
2104
2105 /* When pedantic, return an expr equal to X but certainly not valid as a
2106 pedantic lvalue. Otherwise, return X. */
2107
2108 static tree
2109 pedantic_non_lvalue (tree x)
2110 {
2111 if (pedantic_lvalues)
2112 return non_lvalue (x);
2113 else
2114 return x;
2115 }
2116 \f
2117 /* Given a tree comparison code, return the code that is the logical inverse
2118 of the given code. It is not safe to do this for floating-point
2119 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2120 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2121
2122 static enum tree_code
2123 invert_tree_comparison (enum tree_code code, bool honor_nans)
2124 {
2125 if (honor_nans && flag_trapping_math)
2126 return ERROR_MARK;
2127
2128 switch (code)
2129 {
2130 case EQ_EXPR:
2131 return NE_EXPR;
2132 case NE_EXPR:
2133 return EQ_EXPR;
2134 case GT_EXPR:
2135 return honor_nans ? UNLE_EXPR : LE_EXPR;
2136 case GE_EXPR:
2137 return honor_nans ? UNLT_EXPR : LT_EXPR;
2138 case LT_EXPR:
2139 return honor_nans ? UNGE_EXPR : GE_EXPR;
2140 case LE_EXPR:
2141 return honor_nans ? UNGT_EXPR : GT_EXPR;
2142 case LTGT_EXPR:
2143 return UNEQ_EXPR;
2144 case UNEQ_EXPR:
2145 return LTGT_EXPR;
2146 case UNGT_EXPR:
2147 return LE_EXPR;
2148 case UNGE_EXPR:
2149 return LT_EXPR;
2150 case UNLT_EXPR:
2151 return GE_EXPR;
2152 case UNLE_EXPR:
2153 return GT_EXPR;
2154 case ORDERED_EXPR:
2155 return UNORDERED_EXPR;
2156 case UNORDERED_EXPR:
2157 return ORDERED_EXPR;
2158 default:
2159 gcc_unreachable ();
2160 }
2161 }
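
/* For illustration (a minimal standard-C sketch, not part of the
   original source; the name is hypothetical): with NaNs honored, the
   logical inverse of "x < y" is UNGE, i.e. "!(x < y)", which is true
   when x >= y or when either operand is NaN; plain GE would wrongly
   be false for NaNs.  */
static int
unge_sketch (double x, double y)
{
  return !(x < y);	/* x >= y, or unordered.  */
}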
2162
2163 /* Similar, but return the comparison that results if the operands are
2164 swapped. This is safe for floating-point. */
2165
2166 enum tree_code
2167 swap_tree_comparison (enum tree_code code)
2168 {
2169 switch (code)
2170 {
2171 case EQ_EXPR:
2172 case NE_EXPR:
2173 return code;
2174 case GT_EXPR:
2175 return LT_EXPR;
2176 case GE_EXPR:
2177 return LE_EXPR;
2178 case LT_EXPR:
2179 return GT_EXPR;
2180 case LE_EXPR:
2181 return GE_EXPR;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 }
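
/* For illustration (standard-C sketch, hypothetical name): swapping
   the operands is safe even with NaNs, since "x > y" and "y < x" are
   both false when either operand is NaN.  */
static int
swapped_gt_sketch (double x, double y)
{
  return y < x;		/* same truth value as x > y, NaNs included.  */
}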
2186
2187
2188 /* Convert a comparison tree code from an enum tree_code representation
2189 into a compcode bit-based encoding. This function is the inverse of
2190 compcode_to_comparison. */
2191
2192 static enum comparison_code
2193 comparison_to_compcode (enum tree_code code)
2194 {
2195 switch (code)
2196 {
2197 case LT_EXPR:
2198 return COMPCODE_LT;
2199 case EQ_EXPR:
2200 return COMPCODE_EQ;
2201 case LE_EXPR:
2202 return COMPCODE_LE;
2203 case GT_EXPR:
2204 return COMPCODE_GT;
2205 case NE_EXPR:
2206 return COMPCODE_NE;
2207 case GE_EXPR:
2208 return COMPCODE_GE;
2209 case ORDERED_EXPR:
2210 return COMPCODE_ORD;
2211 case UNORDERED_EXPR:
2212 return COMPCODE_UNORD;
2213 case UNLT_EXPR:
2214 return COMPCODE_UNLT;
2215 case UNEQ_EXPR:
2216 return COMPCODE_UNEQ;
2217 case UNLE_EXPR:
2218 return COMPCODE_UNLE;
2219 case UNGT_EXPR:
2220 return COMPCODE_UNGT;
2221 case LTGT_EXPR:
2222 return COMPCODE_LTGT;
2223 case UNGE_EXPR:
2224 return COMPCODE_UNGE;
2225 default:
2226 gcc_unreachable ();
2227 }
2228 }
2229
2230 /* Convert a compcode bit-based encoding of a comparison operator back
2231 to GCC's enum tree_code representation. This function is the
2232 inverse of comparison_to_compcode. */
2233
2234 static enum tree_code
2235 compcode_to_comparison (enum comparison_code code)
2236 {
2237 switch (code)
2238 {
2239 case COMPCODE_LT:
2240 return LT_EXPR;
2241 case COMPCODE_EQ:
2242 return EQ_EXPR;
2243 case COMPCODE_LE:
2244 return LE_EXPR;
2245 case COMPCODE_GT:
2246 return GT_EXPR;
2247 case COMPCODE_NE:
2248 return NE_EXPR;
2249 case COMPCODE_GE:
2250 return GE_EXPR;
2251 case COMPCODE_ORD:
2252 return ORDERED_EXPR;
2253 case COMPCODE_UNORD:
2254 return UNORDERED_EXPR;
2255 case COMPCODE_UNLT:
2256 return UNLT_EXPR;
2257 case COMPCODE_UNEQ:
2258 return UNEQ_EXPR;
2259 case COMPCODE_UNLE:
2260 return UNLE_EXPR;
2261 case COMPCODE_UNGT:
2262 return UNGT_EXPR;
2263 case COMPCODE_LTGT:
2264 return LTGT_EXPR;
2265 case COMPCODE_UNGE:
2266 return UNGE_EXPR;
2267 default:
2268 gcc_unreachable ();
2269 }
2270 }
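
/* For illustration (a standard-C sketch of the encoding above, not
   part of the original source): LT, EQ and GT each occupy one bit
   and UNORD is the fourth bit, so compound predicates are plain
   bitwise combinations, e.g. COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE
   (1 | 2 == 3) and COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ
   (3 & 6 == 2).  */
static int
compcode_or_sketch (int lcompcode, int rcompcode)
{
  return lcompcode | rcompcode;	/* e.g. (x < y) || (x == y) -> x <= y.  */
}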
2271
2272 /* Return a tree for the comparison which is the combination of
2273 doing the AND or OR (depending on CODE) of the two operations LCODE
2274 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2275 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2276 if this makes the transformation invalid. */
2277
2278 tree
2279 combine_comparisons (enum tree_code code, enum tree_code lcode,
2280 enum tree_code rcode, tree truth_type,
2281 tree ll_arg, tree lr_arg)
2282 {
2283 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2284 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2285 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2286 enum comparison_code compcode;
2287
2288 switch (code)
2289 {
2290 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2291 compcode = lcompcode & rcompcode;
2292 break;
2293
2294 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2295 compcode = lcompcode | rcompcode;
2296 break;
2297
2298 default:
2299 return NULL_TREE;
2300 }
2301
2302 if (!honor_nans)
2303 {
2304 /* Eliminate unordered comparisons, as well as LTGT and ORD
2305 which are not used unless the mode has NaNs. */
2306 compcode &= ~COMPCODE_UNORD;
2307 if (compcode == COMPCODE_LTGT)
2308 compcode = COMPCODE_NE;
2309 else if (compcode == COMPCODE_ORD)
2310 compcode = COMPCODE_TRUE;
2311 }
2312 else if (flag_trapping_math)
2313 {
2314 /* Check that the original operation and the optimized ones will trap
2315 under the same condition. */
2316 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2317 && (lcompcode != COMPCODE_EQ)
2318 && (lcompcode != COMPCODE_ORD);
2319 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2320 && (rcompcode != COMPCODE_EQ)
2321 && (rcompcode != COMPCODE_ORD);
2322 bool trap = (compcode & COMPCODE_UNORD) == 0
2323 && (compcode != COMPCODE_EQ)
2324 && (compcode != COMPCODE_ORD);
2325
2326 /* In a short-circuited boolean expression the LHS might be
2327 such that the RHS, if evaluated, will never trap. For
2328 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2329 if neither x nor y is NaN. (This is a mixed blessing: for
2330 example, the expression above will never trap, hence
2331 optimizing it to x < y would be invalid). */
2332 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2333 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2334 rtrap = false;
2335
2336 /* If the comparison was short-circuited, and only the RHS
2337 trapped, we may now generate a spurious trap. */
2338 if (rtrap && !ltrap
2339 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2340 return NULL_TREE;
2341
2342 /* If we changed the conditions that cause a trap, we lose. */
2343 if ((ltrap || rtrap) != trap)
2344 return NULL_TREE;
2345 }
2346
2347 if (compcode == COMPCODE_TRUE)
2348 return constant_boolean_node (true, truth_type);
2349 else if (compcode == COMPCODE_FALSE)
2350 return constant_boolean_node (false, truth_type);
2351 else
2352 return fold_build2 (compcode_to_comparison (compcode),
2353 truth_type, ll_arg, lr_arg);
2354 }
2355
2356 /* Return nonzero if CODE is a tree code that represents a truth value. */
2357
2358 static int
2359 truth_value_p (enum tree_code code)
2360 {
2361 return (TREE_CODE_CLASS (code) == tcc_comparison
2362 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2363 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2364 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2365 }
2366 \f
2367 /* Return nonzero if two operands (typically of the same tree node)
2368 are necessarily equal. If either argument has side-effects this
2369 function returns zero. FLAGS modifies behavior as follows:
2370
2371 If OEP_ONLY_CONST is set, only return nonzero for constants.
2372 This function tests whether the operands are indistinguishable;
2373 it does not test whether they are equal using C's == operation.
2374 The distinction is important for IEEE floating point, because
2375 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2376 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2377
2378 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2379 even though it may hold multiple values during a function.
2380 This is because a GCC tree node guarantees that nothing else is
2381 executed between the evaluation of its "operands" (which may often
2382 be evaluated in arbitrary order). Hence if the operands themselves
2383 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2384 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2385 unset means assuming isochronic (or instantaneous) tree equivalence.
2386 Unless comparing arbitrary expression trees, such as from different
2387 statements, this flag can usually be left unset.
2388
2389 If OEP_PURE_SAME is set, then pure functions with identical arguments
2390 are considered the same. It is used when the caller has other ways
2391 to ensure that global memory is unchanged in between. */
2392
2393 int
2394 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2395 {
2396 /* If either is ERROR_MARK, they aren't equal. */
2397 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2398 return 0;
2399
2400 /* If both types don't have the same signedness, then we can't consider
2401 them equal. We must check this before the STRIP_NOPS calls
2402 because they may change the signedness of the arguments. */
2403 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2404 return 0;
2405
2406 STRIP_NOPS (arg0);
2407 STRIP_NOPS (arg1);
2408
2409 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2410 /* This is needed for conversions and for COMPONENT_REF.
2411 Might as well play it safe and always test this. */
2412 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2413 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2414 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2415 return 0;
2416
2417 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2418 We don't care about side effects in that case because the SAVE_EXPR
2419 takes care of that for us. In all other cases, two expressions are
2420 equal if they have no side effects. If we have two identical
2421 expressions with side effects that should be treated the same due
2422 to the only side effects being identical SAVE_EXPR's, that will
2423 be detected in the recursive calls below. */
2424 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2425 && (TREE_CODE (arg0) == SAVE_EXPR
2426 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2427 return 1;
2428
2429 /* Next handle constant cases, those for which we can return 1 even
2430 if ONLY_CONST is set. */
2431 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2432 switch (TREE_CODE (arg0))
2433 {
2434 case INTEGER_CST:
2435 return (! TREE_CONSTANT_OVERFLOW (arg0)
2436 && ! TREE_CONSTANT_OVERFLOW (arg1)
2437 && tree_int_cst_equal (arg0, arg1));
2438
2439 case REAL_CST:
2440 return (! TREE_CONSTANT_OVERFLOW (arg0)
2441 && ! TREE_CONSTANT_OVERFLOW (arg1)
2442 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2443 TREE_REAL_CST (arg1)));
2444
2445 case VECTOR_CST:
2446 {
2447 tree v1, v2;
2448
2449 if (TREE_CONSTANT_OVERFLOW (arg0)
2450 || TREE_CONSTANT_OVERFLOW (arg1))
2451 return 0;
2452
2453 v1 = TREE_VECTOR_CST_ELTS (arg0);
2454 v2 = TREE_VECTOR_CST_ELTS (arg1);
2455 while (v1 && v2)
2456 {
2457 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2458 flags))
2459 return 0;
2460 v1 = TREE_CHAIN (v1);
2461 v2 = TREE_CHAIN (v2);
2462 }
2463
2464 return 1;
2465 }
2466
2467 case COMPLEX_CST:
2468 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2469 flags)
2470 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2471 flags));
2472
2473 case STRING_CST:
2474 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2475 && ! memcmp (TREE_STRING_POINTER (arg0),
2476 TREE_STRING_POINTER (arg1),
2477 TREE_STRING_LENGTH (arg0)));
2478
2479 case ADDR_EXPR:
2480 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2481 0);
2482 default:
2483 break;
2484 }
2485
2486 if (flags & OEP_ONLY_CONST)
2487 return 0;
2488
2489 /* Define macros to test an operand from arg0 and arg1 for equality and a
2490 variant that allows null and views null as being different from any
2491 non-null value. In the latter case, if either is null, then both
2492 must be; otherwise, do the normal comparison. */
2493 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2494 TREE_OPERAND (arg1, N), flags)
2495
2496 #define OP_SAME_WITH_NULL(N) \
2497 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2498 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2499
2500 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2501 {
2502 case tcc_unary:
2503 /* Two conversions are equal only if signedness and modes match. */
2504 switch (TREE_CODE (arg0))
2505 {
2506 case NOP_EXPR:
2507 case CONVERT_EXPR:
2508 case FIX_CEIL_EXPR:
2509 case FIX_TRUNC_EXPR:
2510 case FIX_FLOOR_EXPR:
2511 case FIX_ROUND_EXPR:
2512 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2513 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2514 return 0;
2515 break;
2516 default:
2517 break;
2518 }
2519
2520 return OP_SAME (0);
2521
2522
2523 case tcc_comparison:
2524 case tcc_binary:
2525 if (OP_SAME (0) && OP_SAME (1))
2526 return 1;
2527
2528 /* For commutative ops, allow the other order. */
2529 return (commutative_tree_code (TREE_CODE (arg0))
2530 && operand_equal_p (TREE_OPERAND (arg0, 0),
2531 TREE_OPERAND (arg1, 1), flags)
2532 && operand_equal_p (TREE_OPERAND (arg0, 1),
2533 TREE_OPERAND (arg1, 0), flags));
2534
2535 case tcc_reference:
2536 /* If either of the pointer (or reference) expressions we are
2537 dereferencing contain a side effect, these cannot be equal. */
2538 if (TREE_SIDE_EFFECTS (arg0)
2539 || TREE_SIDE_EFFECTS (arg1))
2540 return 0;
2541
2542 switch (TREE_CODE (arg0))
2543 {
2544 case INDIRECT_REF:
2545 case ALIGN_INDIRECT_REF:
2546 case MISALIGNED_INDIRECT_REF:
2547 case REALPART_EXPR:
2548 case IMAGPART_EXPR:
2549 return OP_SAME (0);
2550
2551 case ARRAY_REF:
2552 case ARRAY_RANGE_REF:
2553 /* Operands 2 and 3 may be null. */
2554 return (OP_SAME (0)
2555 && OP_SAME (1)
2556 && OP_SAME_WITH_NULL (2)
2557 && OP_SAME_WITH_NULL (3));
2558
2559 case COMPONENT_REF:
2560 /* Handle operand 2 the same as for ARRAY_REF. */
2561 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2562
2563 case BIT_FIELD_REF:
2564 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2565
2566 default:
2567 return 0;
2568 }
2569
2570 case tcc_expression:
2571 switch (TREE_CODE (arg0))
2572 {
2573 case ADDR_EXPR:
2574 case TRUTH_NOT_EXPR:
2575 return OP_SAME (0);
2576
2577 case TRUTH_ANDIF_EXPR:
2578 case TRUTH_ORIF_EXPR:
2579 return OP_SAME (0) && OP_SAME (1);
2580
2581 case TRUTH_AND_EXPR:
2582 case TRUTH_OR_EXPR:
2583 case TRUTH_XOR_EXPR:
2584 if (OP_SAME (0) && OP_SAME (1))
2585 return 1;
2586
2587 /* Otherwise take into account this is a commutative operation. */
2588 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2589 TREE_OPERAND (arg1, 1), flags)
2590 && operand_equal_p (TREE_OPERAND (arg0, 1),
2591 TREE_OPERAND (arg1, 0), flags));
2592
2593 case CALL_EXPR:
2594 /* If the CALL_EXPRs call different functions, then they
2595 clearly can not be equal. */
2596 if (!OP_SAME (0))
2597 return 0;
2598
2599 {
2600 unsigned int cef = call_expr_flags (arg0);
2601 if (flags & OEP_PURE_SAME)
2602 cef &= ECF_CONST | ECF_PURE;
2603 else
2604 cef &= ECF_CONST;
2605 if (!cef)
2606 return 0;
2607 }
2608
2609 /* Now see if all the arguments are the same. operand_equal_p
2610 does not handle TREE_LIST, so we walk the operands here
2611 feeding them to operand_equal_p. */
2612 arg0 = TREE_OPERAND (arg0, 1);
2613 arg1 = TREE_OPERAND (arg1, 1);
2614 while (arg0 && arg1)
2615 {
2616 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2617 flags))
2618 return 0;
2619
2620 arg0 = TREE_CHAIN (arg0);
2621 arg1 = TREE_CHAIN (arg1);
2622 }
2623
2624 /* If we get here and both argument lists are exhausted
2625 then the CALL_EXPRs are equal. */
2626 return ! (arg0 || arg1);
2627
2628 default:
2629 return 0;
2630 }
2631
2632 case tcc_declaration:
2633 /* Consider __builtin_sqrt equal to sqrt. */
2634 return (TREE_CODE (arg0) == FUNCTION_DECL
2635 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2636 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2637 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2638
2639 default:
2640 return 0;
2641 }
2642
2643 #undef OP_SAME
2644 #undef OP_SAME_WITH_NULL
2645 }
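
/* For illustration (standard-C sketch, hypothetical name): why -0.0
   and 0.0 are distinguishable operands even though -0.0 == 0.0
   compares true, as noted in the commentary above.  */
static double
signed_zero_sketch (void)
{
  return 1.0 / -0.0;	/* -inf, whereas 1.0 / 0.0 is +inf.  */
}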
2646 \f
2647 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2648 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2649
2650 When in doubt, return 0. */
2651
2652 static int
2653 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2654 {
2655 int unsignedp1, unsignedpo;
2656 tree primarg0, primarg1, primother;
2657 unsigned int correct_width;
2658
2659 if (operand_equal_p (arg0, arg1, 0))
2660 return 1;
2661
2662 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2663 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2664 return 0;
2665
2666 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2667 and see if the inner values are the same. This removes any
2668 signedness comparison, which doesn't matter here. */
2669 primarg0 = arg0, primarg1 = arg1;
2670 STRIP_NOPS (primarg0);
2671 STRIP_NOPS (primarg1);
2672 if (operand_equal_p (primarg0, primarg1, 0))
2673 return 1;
2674
2675 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2676 actual comparison operand, ARG0.
2677
2678 First throw away any conversions to wider types
2679 already present in the operands. */
2680
2681 primarg1 = get_narrower (arg1, &unsignedp1);
2682 primother = get_narrower (other, &unsignedpo);
2683
2684 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2685 if (unsignedp1 == unsignedpo
2686 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2687 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2688 {
2689 tree type = TREE_TYPE (arg0);
2690
2691 /* Make sure shorter operand is extended the right way
2692 to match the longer operand. */
2693 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2694 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2695
2696 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2697 return 1;
2698 }
2699
2700 return 0;
2701 }
2702 \f
2703 /* See if ARG is an expression that is either a comparison or is performing
2704 arithmetic on comparisons. The comparisons must only be comparing
2705 two different values, which will be stored in *CVAL1 and *CVAL2; if
2706 they are nonzero it means that some operands have already been found.
2707 No variables may be used anywhere else in the expression except in the
2708 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2709 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2710
2711 If this is true, return 1. Otherwise, return zero. */
2712
2713 static int
2714 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2715 {
2716 enum tree_code code = TREE_CODE (arg);
2717 enum tree_code_class class = TREE_CODE_CLASS (code);
2718
2719 /* We can handle some of the tcc_expression cases here. */
2720 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2721 class = tcc_unary;
2722 else if (class == tcc_expression
2723 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2724 || code == COMPOUND_EXPR))
2725 class = tcc_binary;
2726
2727 else if (class == tcc_expression && code == SAVE_EXPR
2728 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2729 {
2730 /* If we've already found a CVAL1 or CVAL2, this expression is
2731 too complex to handle. */
2732 if (*cval1 || *cval2)
2733 return 0;
2734
2735 class = tcc_unary;
2736 *save_p = 1;
2737 }
2738
2739 switch (class)
2740 {
2741 case tcc_unary:
2742 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2743
2744 case tcc_binary:
2745 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2746 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2747 cval1, cval2, save_p));
2748
2749 case tcc_constant:
2750 return 1;
2751
2752 case tcc_expression:
2753 if (code == COND_EXPR)
2754 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2755 cval1, cval2, save_p)
2756 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2757 cval1, cval2, save_p)
2758 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2759 cval1, cval2, save_p));
2760 return 0;
2761
2762 case tcc_comparison:
2763 /* First see if we can handle the first operand, then the second. For
2764 the second operand, we know *CVAL1 can't be zero. It must be that
2765 one side of the comparison is each of the values; test for the
2766 case where this isn't true by failing if the two operands
2767 are the same. */
2768
2769 if (operand_equal_p (TREE_OPERAND (arg, 0),
2770 TREE_OPERAND (arg, 1), 0))
2771 return 0;
2772
2773 if (*cval1 == 0)
2774 *cval1 = TREE_OPERAND (arg, 0);
2775 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2776 ;
2777 else if (*cval2 == 0)
2778 *cval2 = TREE_OPERAND (arg, 0);
2779 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2780 ;
2781 else
2782 return 0;
2783
2784 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2785 ;
2786 else if (*cval2 == 0)
2787 *cval2 = TREE_OPERAND (arg, 1);
2788 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2789 ;
2790 else
2791 return 0;
2792
2793 return 1;
2794
2795 default:
2796 return 0;
2797 }
2798 }
2799 \f
2800 /* ARG is a tree that is known to contain just arithmetic operations and
2801 comparisons. Evaluate the operations in the tree substituting NEW0 for
2802 any occurrence of OLD0 as an operand of a comparison and likewise for
2803 NEW1 and OLD1. */
2804
2805 static tree
2806 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2807 {
2808 tree type = TREE_TYPE (arg);
2809 enum tree_code code = TREE_CODE (arg);
2810 enum tree_code_class class = TREE_CODE_CLASS (code);
2811
2812 /* We can handle some of the tcc_expression cases here. */
2813 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2814 class = tcc_unary;
2815 else if (class == tcc_expression
2816 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2817 class = tcc_binary;
2818
2819 switch (class)
2820 {
2821 case tcc_unary:
2822 return fold_build1 (code, type,
2823 eval_subst (TREE_OPERAND (arg, 0),
2824 old0, new0, old1, new1));
2825
2826 case tcc_binary:
2827 return fold_build2 (code, type,
2828 eval_subst (TREE_OPERAND (arg, 0),
2829 old0, new0, old1, new1),
2830 eval_subst (TREE_OPERAND (arg, 1),
2831 old0, new0, old1, new1));
2832
2833 case tcc_expression:
2834 switch (code)
2835 {
2836 case SAVE_EXPR:
2837 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2838
2839 case COMPOUND_EXPR:
2840 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2841
2842 case COND_EXPR:
2843 return fold_build3 (code, type,
2844 eval_subst (TREE_OPERAND (arg, 0),
2845 old0, new0, old1, new1),
2846 eval_subst (TREE_OPERAND (arg, 1),
2847 old0, new0, old1, new1),
2848 eval_subst (TREE_OPERAND (arg, 2),
2849 old0, new0, old1, new1));
2850 default:
2851 break;
2852 }
2853 /* Fall through - ??? */
2854
2855 case tcc_comparison:
2856 {
2857 tree arg0 = TREE_OPERAND (arg, 0);
2858 tree arg1 = TREE_OPERAND (arg, 1);
2859
2860 /* We need to check both for exact equality and tree equality. The
2861 former will be true if the operand has a side-effect. In that
2862 case, we know the operand occurred exactly once. */
2863
2864 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2865 arg0 = new0;
2866 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2867 arg0 = new1;
2868
2869 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2870 arg1 = new0;
2871 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2872 arg1 = new1;
2873
2874 return fold_build2 (code, type, arg0, arg1);
2875 }
2876
2877 default:
2878 return arg;
2879 }
2880 }
2881 \f
2882 /* Return a tree for the case when the result of an expression is RESULT
2883 converted to TYPE and OMITTED was previously an operand of the expression
2884 but is now not needed (e.g., we folded OMITTED * 0).
2885
2886 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2887 the conversion of RESULT to TYPE. */
2888
2889 tree
2890 omit_one_operand (tree type, tree result, tree omitted)
2891 {
2892 tree t = fold_convert (type, result);
2893
2894 if (TREE_SIDE_EFFECTS (omitted))
2895 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2896
2897 return non_lvalue (t);
2898 }
2899
2900 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2901
2902 static tree
2903 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2904 {
2905 tree t = fold_convert (type, result);
2906
2907 if (TREE_SIDE_EFFECTS (omitted))
2908 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2909
2910 return pedantic_non_lvalue (t);
2911 }
2912
2913 /* Return a tree for the case when the result of an expression is RESULT
2914 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2915 of the expression but are now not needed.
2916
2917 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2918 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2919 evaluated before OMITTED2. Otherwise, if neither has side effects,
2920 just do the conversion of RESULT to TYPE. */
2921
2922 tree
2923 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2924 {
2925 tree t = fold_convert (type, result);
2926
2927 if (TREE_SIDE_EFFECTS (omitted2))
2928 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2929 if (TREE_SIDE_EFFECTS (omitted1))
2930 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2931
2932 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2933 }
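
/* For illustration (standard-C sketch, hypothetical name): omitting
   an operand that has side effects mirrors the C comma operator.
   Folding "f () * 0" produces "(f (), 0)" so the call is still
   evaluated.  */
static int
omit_with_side_effects_sketch (int (*f) (void))
{
  return (f (), 0);	/* keep the call; the result is the folded 0.  */
}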
2934
2935 \f
2936 /* Return a simplified tree node for the truth-negation of ARG. This
2937 never alters ARG itself. We assume that ARG is an operation that
2938 returns a truth value (0 or 1).
2939
2940 FIXME: one would think we would fold the result, but it causes
2941 problems with the dominator optimizer. */
2942 tree
2943 invert_truthvalue (tree arg)
2944 {
2945 tree type = TREE_TYPE (arg);
2946 enum tree_code code = TREE_CODE (arg);
2947
2948 if (code == ERROR_MARK)
2949 return arg;
2950
2951 /* If this is a comparison, we can simply invert it, except for
2952 floating-point non-equality comparisons, in which case we just
2953 enclose a TRUTH_NOT_EXPR around what we have. */
2954
2955 if (TREE_CODE_CLASS (code) == tcc_comparison)
2956 {
2957 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2958 if (FLOAT_TYPE_P (op_type)
2959 && flag_trapping_math
2960 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2961 && code != NE_EXPR && code != EQ_EXPR)
2962 return build1 (TRUTH_NOT_EXPR, type, arg);
2963 else
2964 {
2965 code = invert_tree_comparison (code,
2966 HONOR_NANS (TYPE_MODE (op_type)));
2967 if (code == ERROR_MARK)
2968 return build1 (TRUTH_NOT_EXPR, type, arg);
2969 else
2970 return build2 (code, type,
2971 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2972 }
2973 }
2974
2975 switch (code)
2976 {
2977 case INTEGER_CST:
2978 return constant_boolean_node (integer_zerop (arg), type);
2979
2980 case TRUTH_AND_EXPR:
2981 return build2 (TRUTH_OR_EXPR, type,
2982 invert_truthvalue (TREE_OPERAND (arg, 0)),
2983 invert_truthvalue (TREE_OPERAND (arg, 1)));
2984
2985 case TRUTH_OR_EXPR:
2986 return build2 (TRUTH_AND_EXPR, type,
2987 invert_truthvalue (TREE_OPERAND (arg, 0)),
2988 invert_truthvalue (TREE_OPERAND (arg, 1)));
2989
2990 case TRUTH_XOR_EXPR:
2991 /* Here we can invert either operand. We invert the first operand
2992 unless the second operand is a TRUTH_NOT_EXPR in which case our
2993 result is the XOR of the first operand with the inside of the
2994 negation of the second operand. */
2995
2996 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2997 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2998 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2999 else
3000 return build2 (TRUTH_XOR_EXPR, type,
3001 invert_truthvalue (TREE_OPERAND (arg, 0)),
3002 TREE_OPERAND (arg, 1));
3003
3004 case TRUTH_ANDIF_EXPR:
3005 return build2 (TRUTH_ORIF_EXPR, type,
3006 invert_truthvalue (TREE_OPERAND (arg, 0)),
3007 invert_truthvalue (TREE_OPERAND (arg, 1)));
3008
3009 case TRUTH_ORIF_EXPR:
3010 return build2 (TRUTH_ANDIF_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)),
3012 invert_truthvalue (TREE_OPERAND (arg, 1)));
3013
3014 case TRUTH_NOT_EXPR:
3015 return TREE_OPERAND (arg, 0);
3016
3017 case COND_EXPR:
3018 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3019 invert_truthvalue (TREE_OPERAND (arg, 1)),
3020 invert_truthvalue (TREE_OPERAND (arg, 2)));
3021
3022 case COMPOUND_EXPR:
3023 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3024 invert_truthvalue (TREE_OPERAND (arg, 1)));
3025
3026 case NON_LVALUE_EXPR:
3027 return invert_truthvalue (TREE_OPERAND (arg, 0));
3028
3029 case NOP_EXPR:
3030 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3031 break;
3032
3033 case CONVERT_EXPR:
3034 case FLOAT_EXPR:
3035 return build1 (TREE_CODE (arg), type,
3036 invert_truthvalue (TREE_OPERAND (arg, 0)));
3037
3038 case BIT_AND_EXPR:
3039 if (!integer_onep (TREE_OPERAND (arg, 1)))
3040 break;
3041 return build2 (EQ_EXPR, type, arg,
3042 fold_convert (type, integer_zero_node));
3043
3044 case SAVE_EXPR:
3045 return build1 (TRUTH_NOT_EXPR, type, arg);
3046
3047 case CLEANUP_POINT_EXPR:
3048 return build1 (CLEANUP_POINT_EXPR, type,
3049 invert_truthvalue (TREE_OPERAND (arg, 0)));
3050
3051 default:
3052 break;
3053 }
3054 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3055 return build1 (TRUTH_NOT_EXPR, type, arg);
3056 }
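
/* For illustration (standard-C sketch, hypothetical name): the
   TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's
   laws.  */
static int
invert_and_sketch (int a, int b)
{
  return !a || !b;	/* equivalent to !(a && b).  */
}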
3057
3058 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3059 operands are another bit-wise operation with a common input. If so,
3060 distribute the bit operations to save an operation and possibly two if
3061 constants are involved. For example, convert
3062 (A | B) & (A | C) into A | (B & C)
3063 Further simplification will occur if B and C are constants.
3064
3065 If this optimization cannot be done, 0 will be returned. */
3066
3067 static tree
3068 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3069 {
3070 tree common;
3071 tree left, right;
3072
3073 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3074 || TREE_CODE (arg0) == code
3075 || (TREE_CODE (arg0) != BIT_AND_EXPR
3076 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3077 return 0;
3078
3079 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3080 {
3081 common = TREE_OPERAND (arg0, 0);
3082 left = TREE_OPERAND (arg0, 1);
3083 right = TREE_OPERAND (arg1, 1);
3084 }
3085 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3086 {
3087 common = TREE_OPERAND (arg0, 0);
3088 left = TREE_OPERAND (arg0, 1);
3089 right = TREE_OPERAND (arg1, 0);
3090 }
3091 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3092 {
3093 common = TREE_OPERAND (arg0, 1);
3094 left = TREE_OPERAND (arg0, 0);
3095 right = TREE_OPERAND (arg1, 1);
3096 }
3097 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3098 {
3099 common = TREE_OPERAND (arg0, 1);
3100 left = TREE_OPERAND (arg0, 0);
3101 right = TREE_OPERAND (arg1, 0);
3102 }
3103 else
3104 return 0;
3105
3106 return fold_build2 (TREE_CODE (arg0), type, common,
3107 fold_build2 (code, type, left, right));
3108 }
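
/* For illustration (standard-C sketch, hypothetical name): the
   distribution performed above, e.g. (A | B) & (A | C) == A | (B & C),
   saves one bitwise operation.  */
static unsigned
distribute_sketch (unsigned a, unsigned b, unsigned c)
{
  return a | (b & c);	/* same value as (a | b) & (a | c).  */
}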
3109
3110 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3111 with code CODE. This optimization is unsafe. */
3112 static tree
3113 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3114 {
3115 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3116 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3117
3118 /* (A / C) +- (B / C) -> (A +- B) / C. */
3119 if (mul0 == mul1
3120 && operand_equal_p (TREE_OPERAND (arg0, 1),
3121 TREE_OPERAND (arg1, 1), 0))
3122 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3123 fold_build2 (code, type,
3124 TREE_OPERAND (arg0, 0),
3125 TREE_OPERAND (arg1, 0)),
3126 TREE_OPERAND (arg0, 1));
3127
3128 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3129 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3130 TREE_OPERAND (arg1, 0), 0)
3131 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3132 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3133 {
3134 REAL_VALUE_TYPE r0, r1;
3135 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3136 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3137 if (!mul0)
3138 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3139 if (!mul1)
3140 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3141 real_arithmetic (&r0, code, &r0, &r1);
3142 return fold_build2 (MULT_EXPR, type,
3143 TREE_OPERAND (arg0, 0),
3144 build_real (type, r0));
3145 }
3146
3147 return NULL_TREE;
3148 }
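
/* For illustration (standard-C sketch, hypothetical name): the
   rewrite A/C + B/C -> (A + B)/C is numerically unsafe, as the note
   above says, because the two divisions each round once while the
   combined form rounds differently.  */
static double
distribute_rdiv_sketch (double a, double b, double c)
{
  return (a + b) / c;	/* replaces a / c + b / c, up to rounding.  */
}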
3149 \f
3150 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3151 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3152
3153 static tree
3154 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3155 int unsignedp)
3156 {
3157 tree result;
3158
3159 if (bitpos == 0)
3160 {
3161 tree size = TYPE_SIZE (TREE_TYPE (inner));
3162 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3163 || POINTER_TYPE_P (TREE_TYPE (inner)))
3164 && host_integerp (size, 0)
3165 && tree_low_cst (size, 0) == bitsize)
3166 return fold_convert (type, inner);
3167 }
3168
3169 result = build3 (BIT_FIELD_REF, type, inner,
3170 size_int (bitsize), bitsize_int (bitpos));
3171
3172 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3173
3174 return result;
3175 }
3176
3177 /* Optimize a bit-field compare.
3178
3179 There are two cases: First is a compare against a constant and the
3180 second is a comparison of two items where the fields are at the same
3181 bit position relative to the start of a chunk (byte, halfword, word)
3182 large enough to contain it. In these cases we can avoid the shift
3183 implicit in bitfield extractions.
3184
3185 For constants, we emit a compare of the shifted constant with the
3186 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3187 compared. For two fields at the same position, we do the ANDs with the
3188 similar mask and compare the result of the ANDs.
3189
3190 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3191 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3192 are the left and right operands of the comparison, respectively.
3193
3194 If the optimization described above can be done, we return the resulting
3195 tree. Otherwise we return zero. */
3196
3197 static tree
3198 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3199 tree lhs, tree rhs)
3200 {
3201 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3202 tree type = TREE_TYPE (lhs);
3203 tree signed_type, unsigned_type;
3204 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3205 enum machine_mode lmode, rmode, nmode;
3206 int lunsignedp, runsignedp;
3207 int lvolatilep = 0, rvolatilep = 0;
3208 tree linner, rinner = NULL_TREE;
3209 tree mask;
3210 tree offset;
3211
3212 /* Get all the information about the extractions being done. If the bit size
3213 is the same as the size of the underlying object, we aren't doing an
3214 extraction at all and so can do nothing. We also don't want to
3215 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3216 then will no longer be able to replace it. */
3217 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3218 &lunsignedp, &lvolatilep, false);
3219 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3220 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3221 return 0;
3222
3223 if (!const_p)
3224 {
3225 /* If this is not a constant, we can only do something if bit positions,
3226 sizes, and signedness are the same. */
3227 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3228 &runsignedp, &rvolatilep, false);
3229
3230 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3231 || lunsignedp != runsignedp || offset != 0
3232 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3233 return 0;
3234 }
3235
3236 /* See if we can find a mode to refer to this field. We should be able to,
3237 but fail if we can't. */
3238 nmode = get_best_mode (lbitsize, lbitpos,
3239 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3240 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3241 TYPE_ALIGN (TREE_TYPE (rinner))),
3242 word_mode, lvolatilep || rvolatilep);
3243 if (nmode == VOIDmode)
3244 return 0;
3245
3246 /* Set signed and unsigned types of the precision of this mode for the
3247 shifts below. */
3248 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3249 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3250
3251 /* Compute the bit position and size for the new reference and our offset
3252 within it. If the new reference is the same size as the original, we
3253 won't optimize anything, so return zero. */
3254 nbitsize = GET_MODE_BITSIZE (nmode);
3255 nbitpos = lbitpos & ~ (nbitsize - 1);
3256 lbitpos -= nbitpos;
3257 if (nbitsize == lbitsize)
3258 return 0;
3259
3260 if (BYTES_BIG_ENDIAN)
3261 lbitpos = nbitsize - lbitsize - lbitpos;
3262
3263 /* Make the mask to be used against the extracted field. */
3264 mask = build_int_cst (unsigned_type, -1);
3265 mask = force_fit_type (mask, 0, false, false);
3266 mask = fold_convert (unsigned_type, mask);
3267 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3268 mask = const_binop (RSHIFT_EXPR, mask,
3269 size_int (nbitsize - lbitsize - lbitpos), 0);
3270
3271 if (! const_p)
3272 /* If not comparing with constant, just rework the comparison
3273 and return. */
3274 return build2 (code, compare_type,
3275 build2 (BIT_AND_EXPR, unsigned_type,
3276 make_bit_field_ref (linner, unsigned_type,
3277 nbitsize, nbitpos, 1),
3278 mask),
3279 build2 (BIT_AND_EXPR, unsigned_type,
3280 make_bit_field_ref (rinner, unsigned_type,
3281 nbitsize, nbitpos, 1),
3282 mask));
3283
3284 /* Otherwise, we are handling the constant case. See if the constant is too
3285 big for the field. Warn and return a tree for 0 (false) if so. We do
3286 this not only for its own sake, but to avoid having to test for this
3287 error case below. If we didn't, we might generate wrong code.
3288
3289 For unsigned fields, the constant shifted right by the field length should
3290 be all zero. For signed fields, the high-order bits should agree with
3291 the sign bit. */
3292
3293 if (lunsignedp)
3294 {
3295 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3296 fold_convert (unsigned_type, rhs),
3297 size_int (lbitsize), 0)))
3298 {
3299 warning (0, "comparison is always %d due to width of bit-field",
3300 code == NE_EXPR);
3301 return constant_boolean_node (code == NE_EXPR, compare_type);
3302 }
3303 }
3304 else
3305 {
3306 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3307 size_int (lbitsize - 1), 0);
3308 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3309 {
3310 warning (0, "comparison is always %d due to width of bit-field",
3311 code == NE_EXPR);
3312 return constant_boolean_node (code == NE_EXPR, compare_type);
3313 }
3314 }
3315
3316 /* Single-bit compares should always be against zero. */
3317 if (lbitsize == 1 && ! integer_zerop (rhs))
3318 {
3319 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3320 rhs = fold_convert (type, integer_zero_node);
3321 }
3322
3323 /* Make a new bitfield reference, shift the constant over the
3324 appropriate number of bits and mask it with the computed mask
3325 (in case this was a signed field). If we changed it, make a new one. */
3326 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3327 if (lvolatilep)
3328 {
3329 TREE_SIDE_EFFECTS (lhs) = 1;
3330 TREE_THIS_VOLATILE (lhs) = 1;
3331 }
3332
3333 rhs = fold (const_binop (BIT_AND_EXPR,
3334 const_binop (LSHIFT_EXPR,
3335 fold_convert (unsigned_type, rhs),
3336 size_int (lbitpos), 0),
3337 mask, 0));
3338
3339 return build2 (code, compare_type,
3340 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3341 rhs);
3342 }
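
/* For illustration (standard-C sketch, hypothetical name; assumes a
   32-bit word with 0 < BITSIZE and BITPOS + BITSIZE <= 32): the mask
   built above is BITSIZE one-bits placed at BITPOS, made by shifting
   an all-ones value left and then logically right.  */
static unsigned
bitfield_mask_sketch (int bitsize, int bitpos)
{
  unsigned mask = ~0u;
  mask <<= 32 - bitsize;		/* keep BITSIZE high one-bits.  */
  mask >>= 32 - bitsize - bitpos;	/* move them down to BITPOS.  */
  return mask;
}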
3343 \f
3344 /* Subroutine for fold_truthop: decode a field reference.
3345
3346 If EXP is a comparison reference, we return the innermost reference.
3347
3348 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3349 set to the starting bit number.
3350
3351 If the innermost field can be completely contained in a mode-sized
3352 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3353
3354 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3355 otherwise it is not changed.
3356
3357 *PUNSIGNEDP is set to the signedness of the field.
3358
3359 *PMASK is set to the mask used. This is either contained in a
3360 BIT_AND_EXPR or derived from the width of the field.
3361
3362 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3363
3364 Return 0 if this is not a component reference or is one that we can't
3365 do anything with. */
3366
3367 static tree
3368 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3369 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3370 int *punsignedp, int *pvolatilep,
3371 tree *pmask, tree *pand_mask)
3372 {
3373 tree outer_type = 0;
3374 tree and_mask = 0;
3375 tree mask, inner, offset;
3376 tree unsigned_type;
3377 unsigned int precision;
3378
3379 /* All the optimizations using this function assume integer fields.
3380 There are problems with FP fields since the type_for_size call
3381 below can fail for, e.g., XFmode. */
3382 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3383 return 0;
3384
3385 /* We are interested in the bare arrangement of bits, so strip everything
3386 that doesn't affect the machine mode. However, record the type of the
3387 outermost expression if it may matter below. */
3388 if (TREE_CODE (exp) == NOP_EXPR
3389 || TREE_CODE (exp) == CONVERT_EXPR
3390 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3391 outer_type = TREE_TYPE (exp);
3392 STRIP_NOPS (exp);
3393
3394 if (TREE_CODE (exp) == BIT_AND_EXPR)
3395 {
3396 and_mask = TREE_OPERAND (exp, 1);
3397 exp = TREE_OPERAND (exp, 0);
3398 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3399 if (TREE_CODE (and_mask) != INTEGER_CST)
3400 return 0;
3401 }
3402
3403 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3404 punsignedp, pvolatilep, false);
3405 if ((inner == exp && and_mask == 0)
3406 || *pbitsize < 0 || offset != 0
3407 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3408 return 0;
3409
3410 /* If the number of bits in the reference is the same as the bitsize of
3411 the outer type, then the outer type gives the signedness. Otherwise
3412 (in case of a small bitfield) the signedness is unchanged. */
3413 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3414 *punsignedp = TYPE_UNSIGNED (outer_type);
3415
3416 /* Compute the mask to access the bitfield. */
3417 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3418 precision = TYPE_PRECISION (unsigned_type);
3419
3420 mask = build_int_cst (unsigned_type, -1);
3421 mask = force_fit_type (mask, 0, false, false);
3422
3423 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3424 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3425
3426 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3427 if (and_mask != 0)
3428 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3429 fold_convert (unsigned_type, and_mask), mask);
3430
3431 *pmask = mask;
3432 *pand_mask = and_mask;
3433 return inner;
3434 }
3435
3436 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3437 bit positions. */
3438
3439 static int
3440 all_ones_mask_p (tree mask, int size)
3441 {
3442 tree type = TREE_TYPE (mask);
3443 unsigned int precision = TYPE_PRECISION (type);
3444 tree tmask;
3445
3446 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3447 tmask = force_fit_type (tmask, 0, false, false);
3448
3449 return
3450 tree_int_cst_equal (mask,
3451 const_binop (RSHIFT_EXPR,
3452 const_binop (LSHIFT_EXPR, tmask,
3453 size_int (precision - size),
3454 0),
3455 size_int (precision - size), 0));
3456 }
3457
3458 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3459 represents the sign bit of EXP's type. If EXP represents a sign
3460 or zero extension, also test VAL against the unextended type.
3461 The return value is the (sub)expression whose sign bit is VAL,
3462 or NULL_TREE otherwise. */
3463
3464 static tree
3465 sign_bit_p (tree exp, tree val)
3466 {
3467 unsigned HOST_WIDE_INT mask_lo, lo;
3468 HOST_WIDE_INT mask_hi, hi;
3469 int width;
3470 tree t;
3471
3472 /* Tree EXP must have an integral type. */
3473 t = TREE_TYPE (exp);
3474 if (! INTEGRAL_TYPE_P (t))
3475 return NULL_TREE;
3476
3477 /* Tree VAL must be an integer constant. */
3478 if (TREE_CODE (val) != INTEGER_CST
3479 || TREE_CONSTANT_OVERFLOW (val))
3480 return NULL_TREE;
3481
3482 width = TYPE_PRECISION (t);
3483 if (width > HOST_BITS_PER_WIDE_INT)
3484 {
3485 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3486 lo = 0;
3487
3488 mask_hi = ((unsigned HOST_WIDE_INT) -1
3489 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3490 mask_lo = -1;
3491 }
3492 else
3493 {
3494 hi = 0;
3495 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3496
3497 mask_hi = 0;
3498 mask_lo = ((unsigned HOST_WIDE_INT) -1
3499 >> (HOST_BITS_PER_WIDE_INT - width));
3500 }
3501
3502 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3503 treat VAL as if it were unsigned. */
3504 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3505 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3506 return exp;
3507
3508 /* Handle extension from a narrower type. */
3509 if (TREE_CODE (exp) == NOP_EXPR
3510 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3511 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3512
3513 return NULL_TREE;
3514 }
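
/* For illustration (standard-C sketch, hypothetical name; assumes
   1 <= WIDTH <= 32): the sign bit of a WIDTH-bit type tested above
   is the single bit 1 << (WIDTH - 1), e.g. 0x80000000 for
   WIDTH == 32.  */
static unsigned
sign_bit_sketch (int width)
{
  return 1u << (width - 1);
}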
3515
3516 /* Subroutine for fold_truthop: determine if an operand is simple enough
3517 to be evaluated unconditionally. */
3518
3519 static int
3520 simple_operand_p (tree exp)
3521 {
3522 /* Strip any conversions that don't change the machine mode. */
3523 STRIP_NOPS (exp);
3524
3525 return (CONSTANT_CLASS_P (exp)
3526 || TREE_CODE (exp) == SSA_NAME
3527 || (DECL_P (exp)
3528 && ! TREE_ADDRESSABLE (exp)
3529 && ! TREE_THIS_VOLATILE (exp)
3530 && ! DECL_NONLOCAL (exp)
3531 /* Don't regard global variables as simple. They may be
3532 allocated in ways unknown to the compiler (shared memory,
3533 #pragma weak, etc). */
3534 && ! TREE_PUBLIC (exp)
3535 && ! DECL_EXTERNAL (exp)
3536 /* Loading a static variable is unduly expensive, but global
3537 registers aren't expensive. */
3538 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3539 }
3540 \f
3541 /* The following functions are subroutines to fold_range_test and allow it to
3542 try to change a logical combination of comparisons into a range test.
3543
3544 For example, both
3545 X == 2 || X == 3 || X == 4 || X == 5
3546 and
3547 X >= 2 && X <= 5
3548 are converted to
3549 (unsigned) (X - 2) <= 3
3550
3551 We describe each set of comparisons as being either inside or outside
3552 a range, using a variable named like IN_P, and then describe the
3553 range with a lower and upper bound. If one of the bounds is omitted,
3554 it represents either the highest or lowest value of the type.
3555
3556 In the comments below, we represent a range by two numbers in brackets
3557 preceded by a "+" to designate being inside that range, or a "-" to
3558 designate being outside that range, so the condition can be inverted by
3559 flipping the prefix. An omitted bound is represented by a "-". For
3560 example, "- [-, 10]" means being outside the range starting at the lowest
3561 possible value and ending at 10, in other words, being greater than 10.
3562 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3563 always false.
3564
3565 We set up things so that the missing bounds are handled in a consistent
3566 manner so neither a missing bound nor "true" and "false" need to be
3567 handled using a special case. */
3568
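/* For illustration (standard-C sketch, hypothetical name): the
   canonical range-test rewrite described above, for unsigned X.  */
static int
in_2_to_5_sketch (unsigned x)
{
  return x - 2 <= 3;	/* same as x >= 2 && x <= 5, via wraparound.  */
}
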
3569 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3570 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3571 and UPPER1_P are nonzero if the respective argument is an upper bound
3572 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3573 must be specified for a comparison. ARG1 will be converted to ARG0's
3574 type if both are specified. */
3575
3576 static tree
3577 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3578 tree arg1, int upper1_p)
3579 {
3580 tree tem;
3581 int result;
3582 int sgn0, sgn1;
3583
3584 /* If neither arg represents infinity, do the normal operation.
3585 Else, if not a comparison, return infinity. Else handle the special
3586 comparison rules. Note that most of the cases below won't occur, but
3587 are handled for consistency. */
3588
3589 if (arg0 != 0 && arg1 != 0)
3590 {
3591 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3592 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3593 STRIP_NOPS (tem);
3594 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3595 }
3596
3597 if (TREE_CODE_CLASS (code) != tcc_comparison)
3598 return 0;
3599
3600 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3601 for neither. In real maths, we cannot assume open ended ranges are
3602 the same. But, this is computer arithmetic, where numbers are finite.
3603 We can therefore represent a missing bound by a value Z, Z being
3604 greater than any representable number. This permits
3605 us to treat unbounded ranges as equal. */
3606 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3607 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3608 switch (code)
3609 {
3610 case EQ_EXPR:
3611 result = sgn0 == sgn1;
3612 break;
3613 case NE_EXPR:
3614 result = sgn0 != sgn1;
3615 break;
3616 case LT_EXPR:
3617 result = sgn0 < sgn1;
3618 break;
3619 case LE_EXPR:
3620 result = sgn0 <= sgn1;
3621 break;
3622 case GT_EXPR:
3623 result = sgn0 > sgn1;
3624 break;
3625 case GE_EXPR:
3626 result = sgn0 >= sgn1;
3627 break;
3628 default:
3629 gcc_unreachable ();
3630 }
3631
3632 return constant_boolean_node (result, type);
3633 }
3634 \f
3635 /* Given EXP, a logical expression, set the range it is testing into
3636 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3637 actually being tested. *PLOW and *PHIGH will be made of the same type
3638 as the returned expression. If EXP is not a comparison, we will most
3639 likely not be returning a useful value and range. */
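/* For example, given EXP = X > 4, we return X with *PIN_P == 0 and the
   bounds {-, 4}, i.e. the condition "- [-, 4]" in the notation above.  */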
3640
3641 static tree
3642 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3643 {
3644 enum tree_code code;
3645 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3646 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3647 int in_p, n_in_p;
3648 tree low, high, n_low, n_high;
3649
3650 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3651 and see if we can refine the range. Some of the cases below may not
3652 happen, but it doesn't seem worth worrying about this. We "continue"
3653 the outer loop when we've changed something; otherwise we "break"
3654 the switch, which will "break" the while. */
3655
3656 in_p = 0;
3657 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3658
3659 while (1)
3660 {
3661 code = TREE_CODE (exp);
3662 exp_type = TREE_TYPE (exp);
3663
3664 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3665 {
3666 if (TREE_CODE_LENGTH (code) > 0)
3667 arg0 = TREE_OPERAND (exp, 0);
3668 if (TREE_CODE_CLASS (code) == tcc_comparison
3669 || TREE_CODE_CLASS (code) == tcc_unary
3670 || TREE_CODE_CLASS (code) == tcc_binary)
3671 arg0_type = TREE_TYPE (arg0);
3672 if (TREE_CODE_CLASS (code) == tcc_binary
3673 || TREE_CODE_CLASS (code) == tcc_comparison
3674 || (TREE_CODE_CLASS (code) == tcc_expression
3675 && TREE_CODE_LENGTH (code) > 1))
3676 arg1 = TREE_OPERAND (exp, 1);
3677 }
3678
3679 switch (code)
3680 {
3681 case TRUTH_NOT_EXPR:
3682 in_p = ! in_p, exp = arg0;
3683 continue;
3684
3685 case EQ_EXPR: case NE_EXPR:
3686 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3687 /* We can only do something if the range is testing for zero
3688 and if the second operand is an integer constant. Note that
3689 	     saying something is "in" the range we make is done by
3690 	     complementing IN_P, since as initialized it expresses the case of
3691 	     being not equal to zero; "out" is leaving it alone.  */
3692 if (low == 0 || high == 0
3693 || ! integer_zerop (low) || ! integer_zerop (high)
3694 || TREE_CODE (arg1) != INTEGER_CST)
3695 break;
3696
3697 switch (code)
3698 {
3699 case NE_EXPR: /* - [c, c] */
3700 low = high = arg1;
3701 break;
3702 case EQ_EXPR: /* + [c, c] */
3703 in_p = ! in_p, low = high = arg1;
3704 break;
3705 case GT_EXPR: /* - [-, c] */
3706 low = 0, high = arg1;
3707 break;
3708 case GE_EXPR: /* + [c, -] */
3709 in_p = ! in_p, low = arg1, high = 0;
3710 break;
3711 case LT_EXPR: /* - [c, -] */
3712 low = arg1, high = 0;
3713 break;
3714 case LE_EXPR: /* + [-, c] */
3715 in_p = ! in_p, low = 0, high = arg1;
3716 break;
3717 default:
3718 gcc_unreachable ();
3719 }
3720
3721 /* If this is an unsigned comparison, we also know that EXP is
3722 greater than or equal to zero. We base the range tests we make
3723 on that fact, so we record it here so we can parse existing
3724 range tests. We test arg0_type since often the return type
3725 of, e.g. EQ_EXPR, is boolean. */
3726 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3727 {
3728 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3729 in_p, low, high, 1,
3730 fold_convert (arg0_type, integer_zero_node),
3731 NULL_TREE))
3732 break;
3733
3734 in_p = n_in_p, low = n_low, high = n_high;
3735
3736 /* If the high bound is missing, but we have a nonzero low
3737 bound, reverse the range so it goes from zero to the low bound
3738 minus 1. */
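/* E.g. for unsigned X, "X >= 5", i.e. + [5, -], becomes - [0, 4].  */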
3739 if (high == 0 && low && ! integer_zerop (low))
3740 {
3741 in_p = ! in_p;
3742 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3743 integer_one_node, 0);
3744 low = fold_convert (arg0_type, integer_zero_node);
3745 }
3746 }
3747
3748 exp = arg0;
3749 continue;
3750
3751 case NEGATE_EXPR:
3752 /* (-x) IN [a,b] -> x in [-b, -a] */
3753 n_low = range_binop (MINUS_EXPR, exp_type,
3754 fold_convert (exp_type, integer_zero_node),
3755 0, high, 1);
3756 n_high = range_binop (MINUS_EXPR, exp_type,
3757 fold_convert (exp_type, integer_zero_node),
3758 0, low, 0);
3759 low = n_low, high = n_high;
3760 exp = arg0;
3761 continue;
3762
3763 case BIT_NOT_EXPR:
3764 /* ~ X -> -X - 1 */
3765 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3766 fold_convert (exp_type, integer_one_node));
3767 continue;
3768
3769 case PLUS_EXPR: case MINUS_EXPR:
3770 if (TREE_CODE (arg1) != INTEGER_CST)
3771 break;
3772
3773 /* If EXP is signed, any overflow in the computation is undefined,
3774 so we don't worry about it so long as our computations on
3775 the bounds don't overflow. For unsigned, overflow is defined
3776 and this is exactly the right thing. */
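/* E.g. "X + 3" in + [5, 10] becomes "X" in + [2, 7].  */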
3777 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3778 arg0_type, low, 0, arg1, 0);
3779 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3780 arg0_type, high, 1, arg1, 0);
3781 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3782 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3783 break;
3784
3785 /* Check for an unsigned range which has wrapped around the maximum
3786 value thus making n_high < n_low, and normalize it. */
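/* E.g. for 8-bit unsigned X, "X + 1" in + [0, 4] gives the wrapped
   bounds [255, 3], which normalize below to - [4, 254].  */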
3787 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3788 {
3789 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3790 integer_one_node, 0);
3791 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3792 integer_one_node, 0);
3793
3794 /* If the range is of the form +/- [ x+1, x ], we won't
3795 be able to normalize it. But then, it represents the
3796 whole range or the empty set, so make it
3797 +/- [ -, - ]. */
3798 if (tree_int_cst_equal (n_low, low)
3799 && tree_int_cst_equal (n_high, high))
3800 low = high = 0;
3801 else
3802 in_p = ! in_p;
3803 }
3804 else
3805 low = n_low, high = n_high;
3806
3807 exp = arg0;
3808 continue;
3809
3810 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3811 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3812 break;
3813
3814 if (! INTEGRAL_TYPE_P (arg0_type)
3815 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3816 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3817 break;
3818
3819 n_low = low, n_high = high;
3820
3821 if (n_low != 0)
3822 n_low = fold_convert (arg0_type, n_low);
3823
3824 if (n_high != 0)
3825 n_high = fold_convert (arg0_type, n_high);
3826
3827
3828 	  /* If we're converting arg0 from an unsigned type to exp's
3829 	     signed type, we will be doing the comparison as unsigned.
3830 The tests above have already verified that LOW and HIGH
3831 are both positive.
3832
3833 So we have to ensure that we will handle large unsigned
3834 values the same way that the current signed bounds treat
3835 negative values. */
3836
3837 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3838 {
3839 tree high_positive;
3840 tree equiv_type = lang_hooks.types.type_for_mode
3841 (TYPE_MODE (arg0_type), 1);
3842
3843 /* A range without an upper bound is, naturally, unbounded.
3844 Since convert would have cropped a very large value, use
3845 the max value for the destination type. */
3846 high_positive
3847 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3848 : TYPE_MAX_VALUE (arg0_type);
3849
3850 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3851 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3852 fold_convert (arg0_type,
3853 high_positive),
3854 fold_convert (arg0_type,
3855 integer_one_node));
3856
3857 /* If the low bound is specified, "and" the range with the
3858 range for which the original unsigned value will be
3859 positive. */
3860 if (low != 0)
3861 {
3862 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3863 1, n_low, n_high, 1,
3864 fold_convert (arg0_type,
3865 integer_zero_node),
3866 high_positive))
3867 break;
3868
3869 in_p = (n_in_p == in_p);
3870 }
3871 else
3872 {
3873 /* Otherwise, "or" the range with the range of the input
3874 that will be interpreted as negative. */
3875 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3876 0, n_low, n_high, 1,
3877 fold_convert (arg0_type,
3878 integer_zero_node),
3879 high_positive))
3880 break;
3881
3882 in_p = (in_p != n_in_p);
3883 }
3884 }
3885
3886 exp = arg0;
3887 low = n_low, high = n_high;
3888 continue;
3889
3890 default:
3891 break;
3892 }
3893
3894 break;
3895 }
3896
3897 /* If EXP is a constant, we can evaluate whether this is true or false. */
3898 if (TREE_CODE (exp) == INTEGER_CST)
3899 {
3900 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3901 exp, 0, low, 0))
3902 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3903 exp, 1, high, 1)));
3904 low = high = 0;
3905 exp = 0;
3906 }
3907
3908 *pin_p = in_p, *plow = low, *phigh = high;
3909 return exp;
3910 }
3911 \f
3912 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3913 type, TYPE, return an expression to test if EXP is in (or out of, depending
3914 on IN_P) the range. Return 0 if the test couldn't be created. */
3915
3916 static tree
3917 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3918 {
3919 tree etype = TREE_TYPE (exp);
3920 tree value;
3921
3922 if (! in_p)
3923 {
3924 value = build_range_check (type, exp, 1, low, high);
3925 if (value != 0)
3926 return invert_truthvalue (value);
3927
3928 return 0;
3929 }
3930
3931 if (low == 0 && high == 0)
3932 return fold_convert (type, integer_one_node);
3933
3934 if (low == 0)
3935 return fold_build2 (LE_EXPR, type, exp, high);
3936
3937 if (high == 0)
3938 return fold_build2 (GE_EXPR, type, exp, low);
3939
3940 if (operand_equal_p (low, high, 0))
3941 return fold_build2 (EQ_EXPR, type, exp, low);
3942
3943 if (integer_zerop (low))
3944 {
3945 if (! TYPE_UNSIGNED (etype))
3946 {
3947 etype = lang_hooks.types.unsigned_type (etype);
3948 high = fold_convert (etype, high);
3949 exp = fold_convert (etype, exp);
3950 }
3951 return build_range_check (type, exp, 1, 0, high);
3952 }
3953
3954 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3955 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3956 {
3957 unsigned HOST_WIDE_INT lo;
3958 HOST_WIDE_INT hi;
3959 int prec;
3960
3961 prec = TYPE_PRECISION (etype);
3962 if (prec <= HOST_BITS_PER_WIDE_INT)
3963 {
3964 hi = 0;
3965 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3966 }
3967 else
3968 {
3969 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3970 lo = (unsigned HOST_WIDE_INT) -1;
3971 }
3972
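/* HI:LO is now the largest value with the sign bit clear, e.g. 127
   when PREC == 8; see whether HIGH is exactly that value.  */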
3973 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3974 {
3975 if (TYPE_UNSIGNED (etype))
3976 {
3977 etype = lang_hooks.types.signed_type (etype);
3978 exp = fold_convert (etype, exp);
3979 }
3980 return fold_build2 (GT_EXPR, type, exp,
3981 fold_convert (etype, integer_zero_node));
3982 }
3983 }
3984
3985 value = const_binop (MINUS_EXPR, high, low, 0);
3986 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3987 {
3988 tree utype, minv, maxv;
3989
3990 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3991 for the type in question, as we rely on this here. */
3992 switch (TREE_CODE (etype))
3993 {
3994 case INTEGER_TYPE:
3995 case ENUMERAL_TYPE:
3996 case CHAR_TYPE:
3997 utype = lang_hooks.types.unsigned_type (etype);
3998 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3999 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4000 integer_one_node, 1);
4001 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4002 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4003 minv, 1, maxv, 1)))
4004 {
4005 etype = utype;
4006 high = fold_convert (etype, high);
4007 low = fold_convert (etype, low);
4008 exp = fold_convert (etype, exp);
4009 value = const_binop (MINUS_EXPR, high, low, 0);
4010 }
4011 break;
4012 default:
4013 break;
4014 }
4015 }
4016
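/* Test EXP - LOW against [0, HIGH - LOW]: this is how
   2 <= X && X <= 5 becomes (unsigned) (X - 2) <= 3, as in the comment
   introducing these range functions.  */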
4017 if (value != 0 && ! TREE_OVERFLOW (value))
4018 return build_range_check (type,
4019 fold_build2 (MINUS_EXPR, etype, exp, low),
4020 1, fold_convert (etype, integer_zero_node),
4021 value);
4022
4023 return 0;
4024 }
4025 \f
4026 /* Given two ranges, see if we can merge them into one. Return 1 if we
4027 can, 0 if we can't. Set the output range into the specified parameters. */
4028
4029 static int
4030 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4031 tree high0, int in1_p, tree low1, tree high1)
4032 {
4033 int no_overlap;
4034 int subset;
4035 int temp;
4036 tree tem;
4037 int in_p;
4038 tree low, high;
4039 int lowequal = ((low0 == 0 && low1 == 0)
4040 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4041 low0, 0, low1, 0)));
4042 int highequal = ((high0 == 0 && high1 == 0)
4043 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4044 high0, 1, high1, 1)));
4045
4046 /* Make range 0 be the range that starts first, or ends last if they
4047      start at the same value.  Swap them if that is not the case.  */
4048 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4049 low0, 0, low1, 0))
4050 || (lowequal
4051 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4052 high1, 1, high0, 1))))
4053 {
4054 temp = in0_p, in0_p = in1_p, in1_p = temp;
4055 tem = low0, low0 = low1, low1 = tem;
4056 tem = high0, high0 = high1, high1 = tem;
4057 }
4058
4059 /* Now flag two cases, whether the ranges are disjoint or whether the
4060 second range is totally subsumed in the first. Note that the tests
4061 below are simplified by the ones above. */
4062 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4063 high0, 1, low1, 0));
4064 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4065 high1, 1, high0, 1));
4066
4067 /* We now have four cases, depending on whether we are including or
4068 excluding the two ranges. */
4069 if (in0_p && in1_p)
4070 {
4071 /* If they don't overlap, the result is false. If the second range
4072 is a subset it is the result. Otherwise, the range is from the start
4073 of the second to the end of the first. */
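/* For example, intersecting + [2, 5] with + [4, 9] gives + [4, 5].  */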
4074 if (no_overlap)
4075 in_p = 0, low = high = 0;
4076 else if (subset)
4077 in_p = 1, low = low1, high = high1;
4078 else
4079 in_p = 1, low = low1, high = high0;
4080 }
4081
4082 else if (in0_p && ! in1_p)
4083 {
4084 /* If they don't overlap, the result is the first range. If they are
4085 equal, the result is false. If the second range is a subset of the
4086 first, and the ranges begin at the same place, we go from just after
4087 the end of the first range to the end of the second. If the second
4088 range is not a subset of the first, or if it is a subset and both
4089 ranges end at the same place, the range starts at the start of the
4090 first range and ends just before the second range.
4091 Otherwise, we can't describe this as a single range. */
4092 if (no_overlap)
4093 in_p = 1, low = low0, high = high0;
4094 else if (lowequal && highequal)
4095 in_p = 0, low = high = 0;
4096 else if (subset && lowequal)
4097 {
4098 in_p = 1, high = high0;
4099 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4100 integer_one_node, 0);
4101 }
4102 else if (! subset || highequal)
4103 {
4104 in_p = 1, low = low0;
4105 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4106 integer_one_node, 0);
4107 }
4108 else
4109 return 0;
4110 }
4111
4112 else if (! in0_p && in1_p)
4113 {
4114 /* If they don't overlap, the result is the second range. If the second
4115 is a subset of the first, the result is false. Otherwise,
4116 the range starts just after the first range and ends at the
4117 end of the second. */
4118 if (no_overlap)
4119 in_p = 1, low = low1, high = high1;
4120 else if (subset || highequal)
4121 in_p = 0, low = high = 0;
4122 else
4123 {
4124 in_p = 1, high = high1;
4125 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4126 integer_one_node, 0);
4127 }
4128 }
4129
4130 else
4131 {
4132 /* The case where we are excluding both ranges. Here the complex case
4133 is if they don't overlap. In that case, the only time we have a
4134 range is if they are adjacent. If the second is a subset of the
4135 first, the result is the first. Otherwise, the range to exclude
4136 starts at the beginning of the first range and ends at the end of the
4137 second. */
4138 if (no_overlap)
4139 {
4140 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4141 range_binop (PLUS_EXPR, NULL_TREE,
4142 high0, 1,
4143 integer_one_node, 1),
4144 1, low1, 0)))
4145 in_p = 0, low = low0, high = high1;
4146 else
4147 {
4148 /* Canonicalize - [min, x] into - [-, x]. */
4149 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4150 switch (TREE_CODE (TREE_TYPE (low0)))
4151 {
4152 case ENUMERAL_TYPE:
4153 if (TYPE_PRECISION (TREE_TYPE (low0))
4154 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4155 break;
4156 /* FALLTHROUGH */
4157 case INTEGER_TYPE:
4158 case CHAR_TYPE:
4159 if (tree_int_cst_equal (low0,
4160 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4161 low0 = 0;
4162 break;
4163 case POINTER_TYPE:
4164 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4165 && integer_zerop (low0))
4166 low0 = 0;
4167 break;
4168 default:
4169 break;
4170 }
4171
4172 /* Canonicalize - [x, max] into - [x, -]. */
4173 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4174 switch (TREE_CODE (TREE_TYPE (high1)))
4175 {
4176 case ENUMERAL_TYPE:
4177 if (TYPE_PRECISION (TREE_TYPE (high1))
4178 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4179 break;
4180 /* FALLTHROUGH */
4181 case INTEGER_TYPE:
4182 case CHAR_TYPE:
4183 if (tree_int_cst_equal (high1,
4184 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4185 high1 = 0;
4186 break;
4187 case POINTER_TYPE:
4188 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4189 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4190 high1, 1,
4191 integer_one_node, 1)))
4192 high1 = 0;
4193 break;
4194 default:
4195 break;
4196 }
4197
4198 	      /* The ranges might also be adjacent across the maximum and
4199 minimum values of the given type. For
4200 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4201 return + [x + 1, y - 1]. */
4202 if (low0 == 0 && high1 == 0)
4203 {
4204 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4205 integer_one_node, 1);
4206 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4207 integer_one_node, 0);
4208 if (low == 0 || high == 0)
4209 return 0;
4210
4211 in_p = 1;
4212 }
4213 else
4214 return 0;
4215 }
4216 }
4217 else if (subset)
4218 in_p = 0, low = low0, high = high0;
4219 else
4220 in_p = 0, low = low0, high = high1;
4221 }
4222
4223 *pin_p = in_p, *plow = low, *phigh = high;
4224 return 1;
4225 }
4226 \f
4227
4228 /* Subroutine of fold, looking inside expressions of the form
4229 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4230    of the COND_EXPR.  This function is also used to optimize
4231 A op B ? C : A, by reversing the comparison first.
4232
4233 Return a folded expression whose code is not a COND_EXPR
4234 anymore, or NULL_TREE if no folding opportunity is found. */
4235
4236 static tree
4237 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4238 {
4239 enum tree_code comp_code = TREE_CODE (arg0);
4240 tree arg00 = TREE_OPERAND (arg0, 0);
4241 tree arg01 = TREE_OPERAND (arg0, 1);
4242 tree arg1_type = TREE_TYPE (arg1);
4243 tree tem;
4244
4245 STRIP_NOPS (arg1);
4246 STRIP_NOPS (arg2);
4247
4248 /* If we have A op 0 ? A : -A, consider applying the following
4249 transformations:
4250
4251 A == 0? A : -A same as -A
4252 A != 0? A : -A same as A
4253 A >= 0? A : -A same as abs (A)
4254 A > 0? A : -A same as abs (A)
4255 A <= 0? A : -A same as -abs (A)
4256 A < 0? A : -A same as -abs (A)
4257
4258 None of these transformations work for modes with signed
4259 zeros. If A is +/-0, the first two transformations will
4260 change the sign of the result (from +0 to -0, or vice
4261 versa). The last four will fix the sign of the result,
4262 even though the original expressions could be positive or
4263 negative, depending on the sign of A.
4264
4265 Note that all these transformations are correct if A is
4266 NaN, since the two alternatives (A and -A) are also NaNs. */
4267 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4268 ? real_zerop (arg01)
4269 : integer_zerop (arg01))
4270 && ((TREE_CODE (arg2) == NEGATE_EXPR
4271 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4272 /* In the case that A is of the form X-Y, '-A' (arg2) may
4273 	       have already been folded to Y-X; check for that.  */
4274 || (TREE_CODE (arg1) == MINUS_EXPR
4275 && TREE_CODE (arg2) == MINUS_EXPR
4276 && operand_equal_p (TREE_OPERAND (arg1, 0),
4277 TREE_OPERAND (arg2, 1), 0)
4278 && operand_equal_p (TREE_OPERAND (arg1, 1),
4279 TREE_OPERAND (arg2, 0), 0))))
4280 switch (comp_code)
4281 {
4282 case EQ_EXPR:
4283 case UNEQ_EXPR:
4284 tem = fold_convert (arg1_type, arg1);
4285 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4286 case NE_EXPR:
4287 case LTGT_EXPR:
4288 return pedantic_non_lvalue (fold_convert (type, arg1));
4289 case UNGE_EXPR:
4290 case UNGT_EXPR:
4291 if (flag_trapping_math)
4292 break;
4293 /* Fall through. */
4294 case GE_EXPR:
4295 case GT_EXPR:
4296 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4297 arg1 = fold_convert (lang_hooks.types.signed_type
4298 (TREE_TYPE (arg1)), arg1);
4299 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4300 return pedantic_non_lvalue (fold_convert (type, tem));
4301 case UNLE_EXPR:
4302 case UNLT_EXPR:
4303 if (flag_trapping_math)
4304 break;
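	/* Fall through.  */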
4305 case LE_EXPR:
4306 case LT_EXPR:
4307 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4308 arg1 = fold_convert (lang_hooks.types.signed_type
4309 (TREE_TYPE (arg1)), arg1);
4310 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4311 return negate_expr (fold_convert (type, tem));
4312 default:
4313 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4314 break;
4315 }
4316
4317 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4318 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4319 both transformations are correct when A is NaN: A != 0
4320 is then true, and A == 0 is false. */
4321
4322 if (integer_zerop (arg01) && integer_zerop (arg2))
4323 {
4324 if (comp_code == NE_EXPR)
4325 return pedantic_non_lvalue (fold_convert (type, arg1));
4326 else if (comp_code == EQ_EXPR)
4327 return fold_convert (type, integer_zero_node);
4328 }
4329
4330 /* Try some transformations of A op B ? A : B.
4331
4332 A == B? A : B same as B
4333 A != B? A : B same as A
4334 A >= B? A : B same as max (A, B)
4335 A > B? A : B same as max (B, A)
4336 A <= B? A : B same as min (A, B)
4337 A < B? A : B same as min (B, A)
4338
4339 As above, these transformations don't work in the presence
4340 of signed zeros. For example, if A and B are zeros of
4341 opposite sign, the first two transformations will change
4342 the sign of the result. In the last four, the original
4343 expressions give different results for (A=+0, B=-0) and
4344 (A=-0, B=+0), but the transformed expressions do not.
4345
4346 The first two transformations are correct if either A or B
4347 is a NaN. In the first transformation, the condition will
4348 be false, and B will indeed be chosen. In the case of the
4349 second transformation, the condition A != B will be true,
4350 and A will be chosen.
4351
4352 The conversions to max() and min() are not correct if B is
4353 a number and A is not. The conditions in the original
4354 expressions will be false, so all four give B. The min()
4355 and max() versions would give a NaN instead. */
4356 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4357 /* Avoid these transformations if the COND_EXPR may be used
4358 as an lvalue in the C++ front-end. PR c++/19199. */
4359 && (in_gimple_form
4360 || strcmp (lang_hooks.name, "GNU C++") != 0
4361 || ! maybe_lvalue_p (arg1)
4362 || ! maybe_lvalue_p (arg2)))
4363 {
4364 tree comp_op0 = arg00;
4365 tree comp_op1 = arg01;
4366 tree comp_type = TREE_TYPE (comp_op0);
4367
4368 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4369 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4370 {
4371 comp_type = type;
4372 comp_op0 = arg1;
4373 comp_op1 = arg2;
4374 }
4375
4376 switch (comp_code)
4377 {
4378 case EQ_EXPR:
4379 return pedantic_non_lvalue (fold_convert (type, arg2));
4380 case NE_EXPR:
4381 return pedantic_non_lvalue (fold_convert (type, arg1));
4382 case LE_EXPR:
4383 case LT_EXPR:
4384 case UNLE_EXPR:
4385 case UNLT_EXPR:
4386 /* In C++ a ?: expression can be an lvalue, so put the
4387 	     operand which will be used if they are equal first,
4388 so that we can convert this back to the
4389 corresponding COND_EXPR. */
4390 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4391 {
4392 comp_op0 = fold_convert (comp_type, comp_op0);
4393 comp_op1 = fold_convert (comp_type, comp_op1);
4394 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4395 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4396 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4397 return pedantic_non_lvalue (fold_convert (type, tem));
4398 }
4399 break;
4400 case GE_EXPR:
4401 case GT_EXPR:
4402 case UNGE_EXPR:
4403 case UNGT_EXPR:
4404 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4405 {
4406 comp_op0 = fold_convert (comp_type, comp_op0);
4407 comp_op1 = fold_convert (comp_type, comp_op1);
4408 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4409 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4410 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4411 return pedantic_non_lvalue (fold_convert (type, tem));
4412 }
4413 break;
4414 case UNEQ_EXPR:
4415 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4416 return pedantic_non_lvalue (fold_convert (type, arg2));
4417 break;
4418 case LTGT_EXPR:
4419 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4420 return pedantic_non_lvalue (fold_convert (type, arg1));
4421 break;
4422 default:
4423 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4424 break;
4425 }
4426 }
4427
4428 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4429 we might still be able to simplify this. For example,
4430 if C1 is one less or one more than C2, this might have started
4431 out as a MIN or MAX and been transformed by this function.
4432 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4433
4434 if (INTEGRAL_TYPE_P (type)
4435 && TREE_CODE (arg01) == INTEGER_CST
4436 && TREE_CODE (arg2) == INTEGER_CST)
4437 switch (comp_code)
4438 {
4439 case EQ_EXPR:
4440 /* We can replace A with C1 in this case. */
4441 arg1 = fold_convert (type, arg01);
4442 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4443
4444 case LT_EXPR:
4445 /* If C1 is C2 + 1, this is min(A, C2). */
4446 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4447 OEP_ONLY_CONST)
4448 && operand_equal_p (arg01,
4449 const_binop (PLUS_EXPR, arg2,
4450 integer_one_node, 0),
4451 OEP_ONLY_CONST))
4452 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4453 type, arg1, arg2));
4454 break;
4455
4456 case LE_EXPR:
4457 /* If C1 is C2 - 1, this is min(A, C2). */
4458 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4459 OEP_ONLY_CONST)
4460 && operand_equal_p (arg01,
4461 const_binop (MINUS_EXPR, arg2,
4462 integer_one_node, 0),
4463 OEP_ONLY_CONST))
4464 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4465 type, arg1, arg2));
4466 break;
4467
4468 case GT_EXPR:
4469 /* If C1 is C2 - 1, this is max(A, C2). */
4470 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4471 OEP_ONLY_CONST)
4472 && operand_equal_p (arg01,
4473 const_binop (MINUS_EXPR, arg2,
4474 integer_one_node, 0),
4475 OEP_ONLY_CONST))
4476 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4477 type, arg1, arg2));
4478 break;
4479
4480 case GE_EXPR:
4481 /* If C1 is C2 + 1, this is max(A, C2). */
4482 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4483 OEP_ONLY_CONST)
4484 && operand_equal_p (arg01,
4485 const_binop (PLUS_EXPR, arg2,
4486 integer_one_node, 0),
4487 OEP_ONLY_CONST))
4488 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4489 type, arg1, arg2));
4490 break;
4491 case NE_EXPR:
4492 break;
4493 default:
4494 gcc_unreachable ();
4495 }
4496
4497 return NULL_TREE;
4498 }
4499
4500
4501 \f
4502 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4503 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4504 #endif
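
/* When LOGICAL_OP_NON_SHORT_CIRCUIT is nonzero, branches are considered
   expensive enough that a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR may be
   rewritten as a TRUTH_AND_EXPR or TRUTH_OR_EXPR evaluating both operands
   unconditionally; see fold_range_test and fold_truthop below.  */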
4505
4506 /* EXP is some logical combination of boolean tests. See if we can
4507 merge it into some range test. Return the new tree if so. */
4508
4509 static tree
4510 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4511 {
4512 int or_op = (code == TRUTH_ORIF_EXPR
4513 || code == TRUTH_OR_EXPR);
4514 int in0_p, in1_p, in_p;
4515 tree low0, low1, low, high0, high1, high;
4516 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4517 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4518 tree tem;
4519
4520 /* If this is an OR operation, invert both sides; we will invert
4521 again at the end. */
4522 if (or_op)
4523 in0_p = ! in0_p, in1_p = ! in1_p;
4524
4525 /* If both expressions are the same, if we can merge the ranges, and we
4526      can build the range test, return it, inverted if need be.  If one of the
4527 ranges is always true or always false, consider it to be the same
4528 expression as the other. */
4529 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4530 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4531 in1_p, low1, high1)
4532 && 0 != (tem = (build_range_check (type,
4533 lhs != 0 ? lhs
4534 : rhs != 0 ? rhs : integer_zero_node,
4535 in_p, low, high))))
4536 return or_op ? invert_truthvalue (tem) : tem;
4537
4538   /* On machines where branches are expensive, if this is a
4539 short-circuited branch and the underlying object on both sides
4540 is the same, make a non-short-circuit operation. */
4541 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4542 && lhs != 0 && rhs != 0
4543 && (code == TRUTH_ANDIF_EXPR
4544 || code == TRUTH_ORIF_EXPR)
4545 && operand_equal_p (lhs, rhs, 0))
4546 {
4547 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4548 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4549 which cases we can't do this. */
4550 if (simple_operand_p (lhs))
4551 return build2 (code == TRUTH_ANDIF_EXPR
4552 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4553 type, op0, op1);
4554
4555 else if (lang_hooks.decls.global_bindings_p () == 0
4556 && ! CONTAINS_PLACEHOLDER_P (lhs))
4557 {
4558 tree common = save_expr (lhs);
4559
4560 if (0 != (lhs = build_range_check (type, common,
4561 or_op ? ! in0_p : in0_p,
4562 low0, high0))
4563 && (0 != (rhs = build_range_check (type, common,
4564 or_op ? ! in1_p : in1_p,
4565 low1, high1))))
4566 return build2 (code == TRUTH_ANDIF_EXPR
4567 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4568 type, lhs, rhs);
4569 }
4570 }
4571
4572 return 0;
4573 }
4574 \f
4575 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4576 bit value. Arrange things so the extra bits will be set to zero if and
4577    only if C is sign-extended to its full width.  If MASK is nonzero,
4578 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4579
4580 static tree
4581 unextend (tree c, int p, int unsignedp, tree mask)
4582 {
4583 tree type = TREE_TYPE (c);
4584 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4585 tree temp;
4586
4587 if (p == modesize || unsignedp)
4588 return c;
4589
4590 /* We work by getting just the sign bit into the low-order bit, then
4591 into the high-order bit, then sign-extend. We then XOR that value
4592 with C. */
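  /* For example, with MODESIZE == 8 and P == 4, the sign-extended
     C == 0xfa gives TEMP == 0xf0 and C ^ TEMP == 0x0a, leaving the extra
     bits clear, while the unextended C == 0x0a gives 0xfa, setting them.  */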
4593 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4594 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4595
4596 /* We must use a signed type in order to get an arithmetic right shift.
4597 However, we must also avoid introducing accidental overflows, so that
4598 a subsequent call to integer_zerop will work. Hence we must
4599 do the type conversion here. At this point, the constant is either
4600 zero or one, and the conversion to a signed type can never overflow.
4601 We could get an overflow if this conversion is done anywhere else. */
4602 if (TYPE_UNSIGNED (type))
4603 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4604
4605 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4606 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4607 if (mask != 0)
4608 temp = const_binop (BIT_AND_EXPR, temp,
4609 fold_convert (TREE_TYPE (c), mask), 0);
4610 /* If necessary, convert the type back to match the type of C. */
4611 if (TYPE_UNSIGNED (type))
4612 temp = fold_convert (type, temp);
4613
4614 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4615 }
4616 \f
4617 /* Find ways of folding logical expressions of LHS and RHS:
4618 Try to merge two comparisons to the same innermost item.
4619 Look for range tests like "ch >= '0' && ch <= '9'".
4620 Look for combinations of simple terms on machines with expensive branches
4621 and evaluate the RHS unconditionally.
4622
4623 For example, if we have p->a == 2 && p->b == 4 and we can make an
4624 object large enough to span both A and B, we can do this with a comparison
4625    against the object ANDed with a mask.
4626
4627 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4628 operations to do this with one comparison.
4629
4630    We check for both normal comparisons and the BIT_AND_EXPRs made by this
4631    function and the one above.
4632
4633 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4634 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4635
4636 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4637 two operands.
4638
4639 We return the simplified tree or 0 if no optimization is possible. */
4640
4641 static tree
4642 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4643 {
4644 /* If this is the "or" of two comparisons, we can do something if
4645 the comparisons are NE_EXPR. If this is the "and", we can do something
4646 if the comparisons are EQ_EXPR. I.e.,
4647 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4648
4649 WANTED_CODE is this operation code. For single bit fields, we can
4650 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4651 comparison for one-bit fields. */
4652
4653 enum tree_code wanted_code;
4654 enum tree_code lcode, rcode;
4655 tree ll_arg, lr_arg, rl_arg, rr_arg;
4656 tree ll_inner, lr_inner, rl_inner, rr_inner;
4657 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4658 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4659 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4660 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4661 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4662 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4663 enum machine_mode lnmode, rnmode;
4664 tree ll_mask, lr_mask, rl_mask, rr_mask;
4665 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4666 tree l_const, r_const;
4667 tree lntype, rntype, result;
4668 int first_bit, end_bit;
4669 int volatilep;
4670
4671 /* Start by getting the comparison codes. Fail if anything is volatile.
4672 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4673 it were surrounded with a NE_EXPR. */
4674
4675 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4676 return 0;
4677
4678 lcode = TREE_CODE (lhs);
4679 rcode = TREE_CODE (rhs);
4680
4681 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4682 {
4683 lhs = build2 (NE_EXPR, truth_type, lhs,
4684 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4685 lcode = NE_EXPR;
4686 }
4687
4688 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4689 {
4690 rhs = build2 (NE_EXPR, truth_type, rhs,
4691 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4692 rcode = NE_EXPR;
4693 }
4694
4695 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4696 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4697 return 0;
4698
4699 ll_arg = TREE_OPERAND (lhs, 0);
4700 lr_arg = TREE_OPERAND (lhs, 1);
4701 rl_arg = TREE_OPERAND (rhs, 0);
4702 rr_arg = TREE_OPERAND (rhs, 1);
4703
4704 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4705 if (simple_operand_p (ll_arg)
4706 && simple_operand_p (lr_arg))
4707 {
4708 tree result;
4709 if (operand_equal_p (ll_arg, rl_arg, 0)
4710 && operand_equal_p (lr_arg, rr_arg, 0))
4711 {
4712 result = combine_comparisons (code, lcode, rcode,
4713 truth_type, ll_arg, lr_arg);
4714 if (result)
4715 return result;
4716 }
4717 else if (operand_equal_p (ll_arg, rr_arg, 0)
4718 && operand_equal_p (lr_arg, rl_arg, 0))
4719 {
4720 result = combine_comparisons (code, lcode,
4721 swap_tree_comparison (rcode),
4722 truth_type, ll_arg, lr_arg);
4723 if (result)
4724 return result;
4725 }
4726 }
4727
4728 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4729 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4730
4731 /* If the RHS can be evaluated unconditionally and its operands are
4732 simple, it wins to evaluate the RHS unconditionally on machines
4733 with expensive branches. In this case, this isn't a comparison
4734 that can be merged. Avoid doing this if the RHS is a floating-point
4735 comparison since those can trap. */
4736
4737 if (BRANCH_COST >= 2
4738 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4739 && simple_operand_p (rl_arg)
4740 && simple_operand_p (rr_arg))
4741 {
4742 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4743 if (code == TRUTH_OR_EXPR
4744 && lcode == NE_EXPR && integer_zerop (lr_arg)
4745 && rcode == NE_EXPR && integer_zerop (rr_arg)
4746 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4747 return build2 (NE_EXPR, truth_type,
4748 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4749 ll_arg, rl_arg),
4750 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4751
4752 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4753 if (code == TRUTH_AND_EXPR
4754 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4755 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4756 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4757 return build2 (EQ_EXPR, truth_type,
4758 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4759 ll_arg, rl_arg),
4760 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4761
4762 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4763 return build2 (code, truth_type, lhs, rhs);
4764 }
4765
4766 /* See if the comparisons can be merged. Then get all the parameters for
4767 each side. */
4768
4769 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4770 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4771 return 0;
4772
4773 volatilep = 0;
4774 ll_inner = decode_field_reference (ll_arg,
4775 &ll_bitsize, &ll_bitpos, &ll_mode,
4776 &ll_unsignedp, &volatilep, &ll_mask,
4777 &ll_and_mask);
4778 lr_inner = decode_field_reference (lr_arg,
4779 &lr_bitsize, &lr_bitpos, &lr_mode,
4780 &lr_unsignedp, &volatilep, &lr_mask,
4781 &lr_and_mask);
4782 rl_inner = decode_field_reference (rl_arg,
4783 &rl_bitsize, &rl_bitpos, &rl_mode,
4784 &rl_unsignedp, &volatilep, &rl_mask,
4785 &rl_and_mask);
4786 rr_inner = decode_field_reference (rr_arg,
4787 &rr_bitsize, &rr_bitpos, &rr_mode,
4788 &rr_unsignedp, &volatilep, &rr_mask,
4789 &rr_and_mask);
4790
4791   /* The inner operation on the lhs of each comparison must be the
4792      same if we are to be able to do anything.
4793 Then see if we have constants. If not, the same must be true for
4794 the rhs's. */
4795 if (volatilep || ll_inner == 0 || rl_inner == 0
4796 || ! operand_equal_p (ll_inner, rl_inner, 0))
4797 return 0;
4798
4799 if (TREE_CODE (lr_arg) == INTEGER_CST
4800 && TREE_CODE (rr_arg) == INTEGER_CST)
4801 l_const = lr_arg, r_const = rr_arg;
4802 else if (lr_inner == 0 || rr_inner == 0
4803 || ! operand_equal_p (lr_inner, rr_inner, 0))
4804 return 0;
4805 else
4806 l_const = r_const = 0;
4807
4808 /* If either comparison code is not correct for our logical operation,
4809 fail. However, we can convert a one-bit comparison against zero into
4810 the opposite comparison against that bit being set in the field. */
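  /* E.g. for a one-bit field B, "B != 0" is equivalent to "B == 1", so an
     "and" of such tests can still be merged below.  */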
4811
4812 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4813 if (lcode != wanted_code)
4814 {
4815 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4816 {
4817 /* Make the left operand unsigned, since we are only interested
4818 in the value of one bit. Otherwise we are doing the wrong
4819 thing below. */
4820 ll_unsignedp = 1;
4821 l_const = ll_mask;
4822 }
4823 else
4824 return 0;
4825 }
4826
4827 /* This is analogous to the code for l_const above. */
4828 if (rcode != wanted_code)
4829 {
4830 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4831 {
4832 rl_unsignedp = 1;
4833 r_const = rl_mask;
4834 }
4835 else
4836 return 0;
4837 }
4838
4839 /* After this point all optimizations will generate bit-field
4840 references, which we might not want. */
4841 if (! lang_hooks.can_use_bit_fields_p ())
4842 return 0;
4843
4844 /* See if we can find a mode that contains both fields being compared on
4845 the left. If we can't, fail. Otherwise, update all constants and masks
4846 to be relative to a field of that size. */
4847 first_bit = MIN (ll_bitpos, rl_bitpos);
4848 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4849 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4850 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4851 volatilep);
4852 if (lnmode == VOIDmode)
4853 return 0;
4854
4855 lnbitsize = GET_MODE_BITSIZE (lnmode);
4856 lnbitpos = first_bit & ~ (lnbitsize - 1);
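  /* LNBITSIZE, a mode bitsize, is normally a power of two, so this rounds
     FIRST_BIT down to a multiple of LNBITSIZE.  */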
4857 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4858 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4859
4860 if (BYTES_BIG_ENDIAN)
4861 {
4862 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4863 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4864 }
4865
4866 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4867 size_int (xll_bitpos), 0);
4868 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4869 size_int (xrl_bitpos), 0);
4870
4871 if (l_const)
4872 {
4873 l_const = fold_convert (lntype, l_const);
4874 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4875 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4876 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4877 fold_build1 (BIT_NOT_EXPR,
4878 lntype, ll_mask),
4879 0)))
4880 {
4881 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4882
4883 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4884 }
4885 }
4886 if (r_const)
4887 {
4888 r_const = fold_convert (lntype, r_const);
4889 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4890 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4891 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4892 fold_build1 (BIT_NOT_EXPR,
4893 lntype, rl_mask),
4894 0)))
4895 {
4896 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4897
4898 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4899 }
4900 }
4901
4902   /* If the right sides are not constant, do the same for them.  Also,
4903 disallow this optimization if a size or signedness mismatch occurs
4904 between the left and right sides. */
4905 if (l_const == 0)
4906 {
4907 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4908 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4909 /* Make sure the two fields on the right
4910 correspond to the left without being swapped. */
4911 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4912 return 0;
4913
4914 first_bit = MIN (lr_bitpos, rr_bitpos);
4915 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4916 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4917 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4918 volatilep);
4919 if (rnmode == VOIDmode)
4920 return 0;
4921
4922 rnbitsize = GET_MODE_BITSIZE (rnmode);
4923 rnbitpos = first_bit & ~ (rnbitsize - 1);
4924 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4925 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4926
4927 if (BYTES_BIG_ENDIAN)
4928 {
4929 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4930 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4931 }
4932
4933 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4934 size_int (xlr_bitpos), 0);
4935 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4936 size_int (xrr_bitpos), 0);
4937
4938 /* Make a mask that corresponds to both fields being compared.
4939 Do this for both items being compared. If the operands are the
4940 same size and the bits being compared are in the same position
4941 then we can do this by masking both and comparing the masked
4942 results. */
4943 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4944 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4945 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4946 {
4947 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4948 ll_unsignedp || rl_unsignedp);
4949 if (! all_ones_mask_p (ll_mask, lnbitsize))
4950 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4951
4952 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4953 lr_unsignedp || rr_unsignedp);
4954 if (! all_ones_mask_p (lr_mask, rnbitsize))
4955 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4956
4957 return build2 (wanted_code, truth_type, lhs, rhs);
4958 }
4959
4960 /* There is still another way we can do something: If both pairs of
4961 fields being compared are adjacent, we may be able to make a wider
4962 field containing them both.
4963
4964 Note that we still must mask the lhs/rhs expressions. Furthermore,
4965 the mask must be shifted to account for the shift done by
4966 make_bit_field_ref. */
4967 if ((ll_bitsize + ll_bitpos == rl_bitpos
4968 && lr_bitsize + lr_bitpos == rr_bitpos)
4969 || (ll_bitpos == rl_bitpos + rl_bitsize
4970 && lr_bitpos == rr_bitpos + rr_bitsize))
4971 {
4972 tree type;
4973
4974 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4975 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4976 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4977 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4978
4979 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4980 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4981 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4982 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4983
4984 /* Convert to the smaller type before masking out unwanted bits. */
4985 type = lntype;
4986 if (lntype != rntype)
4987 {
4988 if (lnbitsize > rnbitsize)
4989 {
4990 lhs = fold_convert (rntype, lhs);
4991 ll_mask = fold_convert (rntype, ll_mask);
4992 type = rntype;
4993 }
4994 else if (lnbitsize < rnbitsize)
4995 {
4996 rhs = fold_convert (lntype, rhs);
4997 lr_mask = fold_convert (lntype, lr_mask);
4998 type = lntype;
4999 }
5000 }
5001
5002 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5003 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5004
5005 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5006 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5007
5008 return build2 (wanted_code, truth_type, lhs, rhs);
5009 }
5010
5011 return 0;
5012 }
5013
5014 /* Handle the case of comparisons with constants. If there is something in
5015 common between the masks, those bits of the constants must be the same.
5016    If not, the condition is constant (always false for the "and" case,
5017    always true for the "or" case).  Test for this to avoid generating incorrect code below.  */
5018 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5019 if (! integer_zerop (result)
5020 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5021 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5022 {
5023 if (wanted_code == NE_EXPR)
5024 {
5025 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5026 return constant_boolean_node (true, truth_type);
5027 }
5028 else
5029 {
5030 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5031 return constant_boolean_node (false, truth_type);
5032 }
5033 }
5034
5035 /* Construct the expression we will return. First get the component
5036 reference we will make. Unless the mask is all ones the width of
5037 that field, perform the mask operation. Then compare with the
5038 merged constant. */
5039 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5040 ll_unsignedp || rl_unsignedp);
5041
5042 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5043 if (! all_ones_mask_p (ll_mask, lnbitsize))
5044 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5045
5046 return build2 (wanted_code, truth_type, result,
5047 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5048 }
5049 \f
5050 /* Optimize a comparison (CODE, with result type TYPE) of a MIN_EXPR or
5051    MAX_EXPR, OP0, against a constant, OP1.  */
5052
5053 static tree
5054 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5055 {
5056 tree arg0 = op0;
5057 enum tree_code op_code;
5058 tree comp_const = op1;
5059 tree minmax_const;
5060 int consts_equal, consts_lt;
5061 tree inner;
5062
5063 STRIP_SIGN_NOPS (arg0);
5064
5065 op_code = TREE_CODE (arg0);
5066 minmax_const = TREE_OPERAND (arg0, 1);
5067 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5068 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5069 inner = TREE_OPERAND (arg0, 0);
5070
5071   /* If something does not permit us to optimize, return NULL_TREE.  */
5072 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5073 || TREE_CODE (comp_const) != INTEGER_CST
5074 || TREE_CONSTANT_OVERFLOW (comp_const)
5075 || TREE_CODE (minmax_const) != INTEGER_CST
5076 || TREE_CONSTANT_OVERFLOW (minmax_const))
5077 return NULL_TREE;
5078
5079 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5080 and GT_EXPR, doing the rest with recursive calls using logical
5081 simplifications. */
5082 switch (code)
5083 {
5084 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5085 {
5086 /* FIXME: We should be able to invert code without building a
5087 scratch tree node, but doing so would require us to
5088 duplicate a part of invert_truthvalue here. */
5089 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5090 tem = optimize_minmax_comparison (TREE_CODE (tem),
5091 TREE_TYPE (tem),
5092 TREE_OPERAND (tem, 0),
5093 TREE_OPERAND (tem, 1));
5094 return invert_truthvalue (tem);
5095 }
5096
5097 case GE_EXPR:
5098 return
5099 fold_build2 (TRUTH_ORIF_EXPR, type,
5100 optimize_minmax_comparison
5101 (EQ_EXPR, type, arg0, comp_const),
5102 optimize_minmax_comparison
5103 (GT_EXPR, type, arg0, comp_const));
5104
5105 case EQ_EXPR:
5106 if (op_code == MAX_EXPR && consts_equal)
5107 /* MAX (X, 0) == 0 -> X <= 0 */
5108 return fold_build2 (LE_EXPR, type, inner, comp_const);
5109
5110 else if (op_code == MAX_EXPR && consts_lt)
5111 /* MAX (X, 0) == 5 -> X == 5 */
5112 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5113
5114 else if (op_code == MAX_EXPR)
5115 /* MAX (X, 0) == -1 -> false */
5116 return omit_one_operand (type, integer_zero_node, inner);
5117
5118 else if (consts_equal)
5119 /* MIN (X, 0) == 0 -> X >= 0 */
5120 return fold_build2 (GE_EXPR, type, inner, comp_const);
5121
5122 else if (consts_lt)
5123 /* MIN (X, 0) == 5 -> false */
5124 return omit_one_operand (type, integer_zero_node, inner);
5125
5126 else
5127 /* MIN (X, 0) == -1 -> X == -1 */
5128 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5129
5130 case GT_EXPR:
5131 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5132 /* MAX (X, 0) > 0 -> X > 0
5133 MAX (X, 0) > 5 -> X > 5 */
5134 return fold_build2 (GT_EXPR, type, inner, comp_const);
5135
5136 else if (op_code == MAX_EXPR)
5137 /* MAX (X, 0) > -1 -> true */
5138 return omit_one_operand (type, integer_one_node, inner);
5139
5140 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5141 /* MIN (X, 0) > 0 -> false
5142 MIN (X, 0) > 5 -> false */
5143 return omit_one_operand (type, integer_zero_node, inner);
5144
5145 else
5146 /* MIN (X, 0) > -1 -> X > -1 */
5147 return fold_build2 (GT_EXPR, type, inner, comp_const);
5148
5149 default:
5150 return NULL_TREE;
5151 }
5152 }
5153 \f
5154 /* T is an integer expression that is being multiplied or divided by, or
5155    reduced modulo, a constant C (CODE says which operation, and what kind
5156    of divide or modulus).  See if we can eliminate that operation by folding it with
5157 other operations already in T. WIDE_TYPE, if non-null, is a type that
5158 should be used for the computation if wider than our type.
5159
5160 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5161 (X * 2) + (Y * 4). We must, however, be assured that either the original
5162 expression would not overflow or that overflow is undefined for the type
5163 in the language in question.
5164
5165 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5166 the machine has a multiply-accumulate insn or that this is part of an
5167 addressing calculation.
5168
5169 If we return a non-null expression, it is an equivalent form of the
5170 original computation, but need not be in the original type. */
5171
5172 static tree
5173 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5174 {
5175 /* To avoid exponential search depth, refuse to allow recursion past
5176 three levels. Beyond that (1) it's highly unlikely that we'll find
5177 something interesting and (2) we've probably processed it before
5178 when we built the inner expression. */
5179
5180 static int depth;
5181 tree ret;
5182
5183 if (depth > 3)
5184 return NULL;
5185
5186 depth++;
5187 ret = extract_muldiv_1 (t, c, code, wide_type);
5188 depth--;
5189
5190 return ret;
5191 }
5192
5193 static tree
5194 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5195 {
5196 tree type = TREE_TYPE (t);
5197 enum tree_code tcode = TREE_CODE (t);
5198 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5199 > GET_MODE_SIZE (TYPE_MODE (type)))
5200 ? wide_type : type);
5201 tree t1, t2;
5202 int same_p = tcode == code;
5203 tree op0 = NULL_TREE, op1 = NULL_TREE;
5204
5205 /* Don't deal with constants of zero here; they confuse the code below. */
5206 if (integer_zerop (c))
5207 return NULL_TREE;
5208
5209 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5210 op0 = TREE_OPERAND (t, 0);
5211
5212 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5213 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5214
5215 /* Note that we need not handle conditional operations here since fold
5216 already handles those cases. So just do arithmetic here. */
5217 switch (tcode)
5218 {
5219 case INTEGER_CST:
5220 /* For a constant, we can always simplify if we are a multiply
5221 or (for divide and modulus) if it is a multiple of our constant. */
5222 if (code == MULT_EXPR
5223 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5224 return const_binop (code, fold_convert (ctype, t),
5225 fold_convert (ctype, c), 0);
5226 break;
5227
5228 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5229 /* If op0 is an expression ... */
5230 if ((COMPARISON_CLASS_P (op0)
5231 || UNARY_CLASS_P (op0)
5232 || BINARY_CLASS_P (op0)
5233 || EXPRESSION_CLASS_P (op0))
5234 /* ... and is unsigned, and its type is smaller than ctype,
5235 then we cannot pass through as widening. */
5236 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5237 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5238 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5239 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5240 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5241 /* ... or this is a truncation (t is narrower than op0),
5242 then we cannot pass through this narrowing. */
5243 || (GET_MODE_SIZE (TYPE_MODE (type))
5244 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5245 /* ... or signedness changes for division or modulus,
5246 then we cannot pass through this conversion. */
5247 || (code != MULT_EXPR
5248 && (TYPE_UNSIGNED (ctype)
5249 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5250 break;
5251
5252 /* Pass the constant down and see if we can make a simplification. If
5253 we can, replace this expression with the inner simplification for
5254 possible later conversion to our or some other type. */
5255 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5256 && TREE_CODE (t2) == INTEGER_CST
5257 && ! TREE_CONSTANT_OVERFLOW (t2)
5258 && (0 != (t1 = extract_muldiv (op0, t2, code,
5259 code == MULT_EXPR
5260 ? ctype : NULL_TREE))))
5261 return t1;
5262 break;
5263
5264 case ABS_EXPR:
5265 /* If widening the type changes it from signed to unsigned, then we
5266 must avoid building ABS_EXPR itself as unsigned. */
5267 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5268 {
5269 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5270 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5271 {
5272 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5273 return fold_convert (ctype, t1);
5274 }
5275 break;
5276 }
5277 /* FALLTHROUGH */
5278 case NEGATE_EXPR:
5279 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5280 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5281 break;
5282
5283 case MIN_EXPR: case MAX_EXPR:
5284 /* If widening the type changes the signedness, then we can't perform
5285 this optimization as that changes the result. */
5286 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5287 break;
5288
5289 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5290 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5291 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5292 {
5293 if (tree_int_cst_sgn (c) < 0)
5294 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5295
5296 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5297 fold_convert (ctype, t2));
5298 }
5299 break;
5300
5301 case LSHIFT_EXPR: case RSHIFT_EXPR:
5302 /* If the second operand is constant, this is a multiplication
5303 or floor division by a power of two, so we can treat it that
5304 way unless the multiplier or divisor overflows. Signed
5305 left-shift overflow is implementation-defined rather than
5306 undefined in C90, so do not convert signed left shift into
5307 multiplication. */
5308 if (TREE_CODE (op1) == INTEGER_CST
5309 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5310 /* const_binop may not detect overflow correctly,
5311 so check for it explicitly here. */
5312 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5313 && TREE_INT_CST_HIGH (op1) == 0
5314 && 0 != (t1 = fold_convert (ctype,
5315 const_binop (LSHIFT_EXPR,
5316 size_one_node,
5317 op1, 0)))
5318 && ! TREE_OVERFLOW (t1))
5319 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5320 ? MULT_EXPR : FLOOR_DIV_EXPR,
5321 ctype, fold_convert (ctype, op0), t1),
5322 c, code, wide_type);
5323 break;
5324
5325 case PLUS_EXPR: case MINUS_EXPR:
5326 /* See if we can eliminate the operation on both sides. If we can, we
5327 can return a new PLUS or MINUS. If we can't, the only remaining
5328 cases where we can do anything are if the second operand is a
5329 constant. */
5330 t1 = extract_muldiv (op0, c, code, wide_type);
5331 t2 = extract_muldiv (op1, c, code, wide_type);
5332 if (t1 != 0 && t2 != 0
5333 && (code == MULT_EXPR
5334 /* If not multiplication, we can only do this if both operands
5335 are divisible by c. */
5336 || (multiple_of_p (ctype, op0, c)
5337 && multiple_of_p (ctype, op1, c))))
5338 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5339 fold_convert (ctype, t2));
5340
5341 /* If this was a subtraction, negate OP1 and set it to be an addition.
5342 This simplifies the logic below. */
5343 if (tcode == MINUS_EXPR)
5344 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5345
5346 if (TREE_CODE (op1) != INTEGER_CST)
5347 break;
5348
5349 /* If either OP1 or C is negative, this optimization is not safe for
5350 some of the division and remainder types, while for others we need
5351 to change the code. */
5352 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5353 {
5354 if (code == CEIL_DIV_EXPR)
5355 code = FLOOR_DIV_EXPR;
5356 else if (code == FLOOR_DIV_EXPR)
5357 code = CEIL_DIV_EXPR;
5358 else if (code != MULT_EXPR
5359 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5360 break;
5361 }
5362
5363 /* If it's a multiply or a division/modulus operation of a multiple
5364 of our constant, do the operation and verify it doesn't overflow. */
5365 if (code == MULT_EXPR
5366 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5367 {
5368 op1 = const_binop (code, fold_convert (ctype, op1),
5369 fold_convert (ctype, c), 0);
5370 /* We allow the constant to overflow with wrapping semantics. */
5371 if (op1 == 0
5372 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5373 break;
5374 }
5375 else
5376 break;
5377
5378 /* If we have an unsigned type that is not a sizetype, we cannot widen
5379 the operation since it will change the result if the original
5380 computation overflowed. */
5381 if (TYPE_UNSIGNED (ctype)
5382 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5383 && ctype != type)
5384 break;
5385
5386 /* If we were able to eliminate our operation from the first side,
5387 apply our operation to the second side and reform the PLUS. */
5388 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5389 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5390
5391 /* The last case is if we are a multiply. In that case, we can
5392 apply the distributive law to commute the multiply and addition
5393 if the multiplication of the constants doesn't overflow. */
5394 if (code == MULT_EXPR)
5395 return fold_build2 (tcode, ctype,
5396 fold_build2 (code, ctype,
5397 fold_convert (ctype, op0),
5398 fold_convert (ctype, c)),
5399 op1);
5400
5401 break;
5402
5403 case MULT_EXPR:
5404 /* We have a special case here if we are doing something like
5405 (C * 8) % 4 since we know that's zero. */
5406 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5407 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5408 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5409 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5410 return omit_one_operand (type, integer_zero_node, op0);
5411
5412 /* ... fall through ... */
5413
5414 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5415 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5416 /* If we can extract our operation from the LHS, do so and return a
5417 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5418 do something only if the second operand is a constant. */
5419 if (same_p
5420 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5421 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5422 fold_convert (ctype, op1));
5423 else if (tcode == MULT_EXPR && code == MULT_EXPR
5424 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5425 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5426 fold_convert (ctype, t1));
5427 else if (TREE_CODE (op1) != INTEGER_CST)
5428 return 0;
5429
5430 /* If these are the same operation types, we can associate them
5431 assuming no overflow. */
5432 if (tcode == code
5433 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5434 fold_convert (ctype, c), 0))
5435 && ! TREE_OVERFLOW (t1))
5436 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5437
5438 /* If these operations "cancel" each other, we have the main
5439 optimizations of this pass, which occur when either constant is a
5440 multiple of the other, in which case we replace this with an
5441 operation of either CODE or TCODE.
5442
5443 If we have an unsigned type that is not a sizetype, we cannot do
5444 this since it will change the result if the original computation
5445 overflowed. */
5446 if ((! TYPE_UNSIGNED (ctype)
5447 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5448 && ! flag_wrapv
5449 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5450 || (tcode == MULT_EXPR
5451 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5452 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5453 {
5454 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5455 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5456 fold_convert (ctype,
5457 const_binop (TRUNC_DIV_EXPR,
5458 op1, c, 0)));
5459 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5460 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5461 fold_convert (ctype,
5462 const_binop (TRUNC_DIV_EXPR,
5463 c, op1, 0)));
5464 }
5465 break;
5466
5467 default:
5468 break;
5469 }
5470
5471 return 0;
5472 }
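
/* A standalone sketch (hypothetical test program, not part of this file)
   of the source-level identities extract_muldiv implements.  Signed
   arithmetic is assumed not to overflow (signed overflow is undefined,
   which is what licenses the cancellation); for wrapping unsigned types
   the first identity fails in general, which is why the code above
   refuses unsigned types that are not sizetypes.  */
#if 0
#include <assert.h>
int
main (void)
{
  int x = 37;
  /* Cancellation: the division undoes part of the multiplication.  */
  assert ((x * 12) / 4 == x * 3);
  /* Distribution over PLUS_EXPR when the addend is a constant.  */
  assert ((x + 5) * 4 == x * 4 + 20);
  return 0;
}
#endif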
5473 \f
5474 /* Return a node which has the indicated constant VALUE (either 0 or
5475 1), and is of the indicated TYPE. */
5476
5477 tree
5478 constant_boolean_node (int value, tree type)
5479 {
5480 if (type == integer_type_node)
5481 return value ? integer_one_node : integer_zero_node;
5482 else if (type == boolean_type_node)
5483 return value ? boolean_true_node : boolean_false_node;
5484 else
5485 return build_int_cst (type, value);
5486 }
5487
5488
5489 /* Return true if expr looks like an ARRAY_REF and set base and
5490 offset to the appropriate trees. If there is no offset,
5491 offset is set to NULL_TREE. Base will be canonicalized to
5492 something you can get the element type from using
5493 TREE_TYPE (TREE_TYPE (base)). */
5494
5495 static bool
5496 extract_array_ref (tree expr, tree *base, tree *offset)
5497 {
5498 /* One canonical form is a PLUS_EXPR with the first
5499 argument being an ADDR_EXPR with a possible NOP_EXPR
5500 attached. */
5501 if (TREE_CODE (expr) == PLUS_EXPR)
5502 {
5503 tree op0 = TREE_OPERAND (expr, 0);
5504 tree inner_base, dummy1;
5505 /* Strip NOP_EXPRs here because the C front ends and/or
5506 folders may present us with (int *)&x.a + 4B. */
5507 STRIP_NOPS (op0);
5508 if (extract_array_ref (op0, &inner_base, &dummy1))
5509 {
5510 *base = inner_base;
5511 if (dummy1 == NULL_TREE)
5512 *offset = TREE_OPERAND (expr, 1);
5513 else
5514 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5515 dummy1, TREE_OPERAND (expr, 1));
5516 return true;
5517 }
5518 }
5519 /* Another canonical form is an ADDR_EXPR of an ARRAY_REF,
5520 which we transform into an ADDR_EXPR with appropriate
5521 offset. For other arguments to the ADDR_EXPR we assume
5522 zero offset and as such do not care about the ADDR_EXPR
5523 type and strip possible nops from it. */
5524 else if (TREE_CODE (expr) == ADDR_EXPR)
5525 {
5526 tree op0 = TREE_OPERAND (expr, 0);
5527 if (TREE_CODE (op0) == ARRAY_REF)
5528 {
5529 *base = TREE_OPERAND (op0, 0);
5530 *offset = TREE_OPERAND (op0, 1);
5531 }
5532 else
5533 {
5534 /* Handle array-to-pointer decay as &a. */
5535 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5536 *base = TREE_OPERAND (expr, 0);
5537 else
5538 *base = expr;
5539 *offset = NULL_TREE;
5540 }
5541 return true;
5542 }
5543 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5544 else if (SSA_VAR_P (expr)
5545 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5546 {
5547 *base = expr;
5548 *offset = NULL_TREE;
5549 return true;
5550 }
5551
5552 return false;
5553 }
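
/* The canonical forms recognized above, as they arise from C source
   (hypothetical sketch; the 8B offset assumes a 4-byte int).  */
#if 0
int a[10];
int *f (int i) { return &a[i]; }   /* ADDR_EXPR of an ARRAY_REF.  */
int *g (void)  { return a; }       /* Array-to-pointer decay: base &a.  */
int *h (void)  { return a + 2; }   /* PLUS_EXPR form: &a[0] + 8B.  */
#endif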
5554
5555
5556 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5557 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5558 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5559 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5560 COND is the first argument to CODE; otherwise (as in the example
5561 given here), it is the second argument. TYPE is the type of the
5562 original expression. Return NULL_TREE if no simplification is
5563 possible. */
5564
5565 static tree
5566 fold_binary_op_with_conditional_arg (enum tree_code code,
5567 tree type, tree op0, tree op1,
5568 tree cond, tree arg, int cond_first_p)
5569 {
5570 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5571 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5572 tree test, true_value, false_value;
5573 tree lhs = NULL_TREE;
5574 tree rhs = NULL_TREE;
5575
5576 /* This transformation is only worthwhile if we don't have to wrap
5577 arg in a SAVE_EXPR, and the operation can be simplified on at least
5578 one of the branches once it's pushed inside the COND_EXPR. */
5579 if (!TREE_CONSTANT (arg))
5580 return NULL_TREE;
5581
5582 if (TREE_CODE (cond) == COND_EXPR)
5583 {
5584 test = TREE_OPERAND (cond, 0);
5585 true_value = TREE_OPERAND (cond, 1);
5586 false_value = TREE_OPERAND (cond, 2);
5587 /* If this operand throws an exception, then it does not make
5588 sense to try to perform a logical or arithmetic operation
5589 involving it. */
5590 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5591 lhs = true_value;
5592 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5593 rhs = false_value;
5594 }
5595 else
5596 {
5597 tree testtype = TREE_TYPE (cond);
5598 test = cond;
5599 true_value = constant_boolean_node (true, testtype);
5600 false_value = constant_boolean_node (false, testtype);
5601 }
5602
5603 arg = fold_convert (arg_type, arg);
5604 if (lhs == 0)
5605 {
5606 true_value = fold_convert (cond_type, true_value);
5607 if (cond_first_p)
5608 lhs = fold_build2 (code, type, true_value, arg);
5609 else
5610 lhs = fold_build2 (code, type, arg, true_value);
5611 }
5612 if (rhs == 0)
5613 {
5614 false_value = fold_convert (cond_type, false_value);
5615 if (cond_first_p)
5616 rhs = fold_build2 (code, type, false_value, arg);
5617 else
5618 rhs = fold_build2 (code, type, arg, false_value);
5619 }
5620
5621 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5622 return fold_convert (type, test);
5623 }
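
/* A minimal check (hypothetical program) of the identity implemented
   above: the constant operand A is pushed into both arms of the
   conditional, where it can combine further.  */
#if 0
#include <assert.h>
int
main (void)
{
  int a = 10, b = 1, x = 3, y = 4;
  assert (a + (b ? x : y) == (b ? a + x : a + y));
  /* A comparison is handled as a 1/0 conditional.  */
  assert (a + (x < y) == ((x < y) ? a + 1 : a + 0));
  return 0;
}
#endif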
5624
5625 \f
5626 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5627
5628 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5629 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5630 ADDEND is the same as X.
5631
5632 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5633 and finite. The problematic cases are when X is zero, and its mode
5634 has signed zeros. In the case of rounding towards -infinity,
5635 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5636 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5637
5638 static bool
5639 fold_real_zero_addition_p (tree type, tree addend, int negate)
5640 {
5641 if (!real_zerop (addend))
5642 return false;
5643
5644 /* Don't allow the fold with -fsignaling-nans. */
5645 if (HONOR_SNANS (TYPE_MODE (type)))
5646 return false;
5647
5648 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5649 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5650 return true;
5651
5652 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5653 if (TREE_CODE (addend) == REAL_CST
5654 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5655 negate = !negate;
5656
5657 /* The mode has signed zeros, and we have to honor their sign.
5658 In this situation, there is only one case we can return true for.
5659 X - 0 is the same as X unless rounding towards -infinity is
5660 supported. */
5661 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5662 }
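
/* Why the sign of zero blocks the fold (hypothetical demonstration,
   assuming IEEE arithmetic in the default round-to-nearest mode).  */
#if 0
#include <assert.h>
#include <math.h>
int
main (void)
{
  double x = -0.0;
  /* -0.0 + 0.0 is +0.0, so folding x + 0.0 to x would flip the sign
     of a negative zero.  */
  assert (signbit (x) && !signbit (x + 0.0));
  /* x - 0.0 preserves the sign of zero here, so only the NEGATE case
     can return true when sign-dependent rounding is not in effect.  */
  assert (signbit (x - 0.0));
  return 0;
}
#endif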
5663
5664 /* Subroutine of fold() that checks comparisons of built-in math
5665 functions against real constants.
5666
5667 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5668 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5669 is the type of the result and ARG0 and ARG1 are the operands of the
5670 comparison. ARG1 must be a TREE_REAL_CST.
5671
5672 The function returns the constant folded tree if a simplification
5673 can be made, and NULL_TREE otherwise. */
5674
5675 static tree
5676 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5677 tree type, tree arg0, tree arg1)
5678 {
5679 REAL_VALUE_TYPE c;
5680
5681 if (BUILTIN_SQRT_P (fcode))
5682 {
5683 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5684 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5685
5686 c = TREE_REAL_CST (arg1);
5687 if (REAL_VALUE_NEGATIVE (c))
5688 {
5689 /* sqrt(x) < y is always false, if y is negative. */
5690 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5691 return omit_one_operand (type, integer_zero_node, arg);
5692
5693 /* sqrt(x) > y is always true, if y is negative and we
5694 don't care about NaNs, i.e. negative values of x. */
5695 if (code == NE_EXPR || !HONOR_NANS (mode))
5696 return omit_one_operand (type, integer_one_node, arg);
5697
5698 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5699 return fold_build2 (GE_EXPR, type, arg,
5700 build_real (TREE_TYPE (arg), dconst0));
5701 }
5702 else if (code == GT_EXPR || code == GE_EXPR)
5703 {
5704 REAL_VALUE_TYPE c2;
5705
5706 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5707 real_convert (&c2, mode, &c2);
5708
5709 if (REAL_VALUE_ISINF (c2))
5710 {
5711 /* sqrt(x) > y is x == +Inf, when y is very large. */
5712 if (HONOR_INFINITIES (mode))
5713 return fold_build2 (EQ_EXPR, type, arg,
5714 build_real (TREE_TYPE (arg), c2));
5715
5716 /* sqrt(x) > y is always false, when y is very large
5717 and we don't care about infinities. */
5718 return omit_one_operand (type, integer_zero_node, arg);
5719 }
5720
5721 /* sqrt(x) > c is the same as x > c*c. */
5722 return fold_build2 (code, type, arg,
5723 build_real (TREE_TYPE (arg), c2));
5724 }
5725 else if (code == LT_EXPR || code == LE_EXPR)
5726 {
5727 REAL_VALUE_TYPE c2;
5728
5729 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5730 real_convert (&c2, mode, &c2);
5731
5732 if (REAL_VALUE_ISINF (c2))
5733 {
5734 /* sqrt(x) < y is always true, when y is a very large
5735 value and we don't care about NaNs or Infinities. */
5736 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5737 return omit_one_operand (type, integer_one_node, arg);
5738
5739 /* sqrt(x) < y is x != +Inf when y is very large and we
5740 don't care about NaNs. */
5741 if (! HONOR_NANS (mode))
5742 return fold_build2 (NE_EXPR, type, arg,
5743 build_real (TREE_TYPE (arg), c2));
5744
5745 /* sqrt(x) < y is x >= 0 when y is very large and we
5746 don't care about Infinities. */
5747 if (! HONOR_INFINITIES (mode))
5748 return fold_build2 (GE_EXPR, type, arg,
5749 build_real (TREE_TYPE (arg), dconst0));
5750
5751 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5752 if (lang_hooks.decls.global_bindings_p () != 0
5753 || CONTAINS_PLACEHOLDER_P (arg))
5754 return NULL_TREE;
5755
5756 arg = save_expr (arg);
5757 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5758 fold_build2 (GE_EXPR, type, arg,
5759 build_real (TREE_TYPE (arg),
5760 dconst0)),
5761 fold_build2 (NE_EXPR, type, arg,
5762 build_real (TREE_TYPE (arg),
5763 c2)));
5764 }
5765
5766 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5767 if (! HONOR_NANS (mode))
5768 return fold_build2 (code, type, arg,
5769 build_real (TREE_TYPE (arg), c2));
5770
5771 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5772 if (lang_hooks.decls.global_bindings_p () == 0
5773 && ! CONTAINS_PLACEHOLDER_P (arg))
5774 {
5775 arg = save_expr (arg);
5776 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5777 fold_build2 (GE_EXPR, type, arg,
5778 build_real (TREE_TYPE (arg),
5779 dconst0)),
5780 fold_build2 (code, type, arg,
5781 build_real (TREE_TYPE (arg),
5782 c2)));
5783 }
5784 }
5785 }
5786
5787 return NULL_TREE;
5788 }
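
/* The central sqrt rewrite at the source level (hypothetical check;
   values chosen away from the rounding boundary, NaNs not involved).  */
#if 0
#include <assert.h>
#include <math.h>
int
main (void)
{
  double x = 10.0, c = 3.0;
  assert ((sqrt (x) > c) == (x > c * c));   /* sqrt(x) > c  ->  x > c*c  */
  assert (!(sqrt (x) < -1.0));              /* negative bound: false     */
  return 0;
}
#endif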
5789
5790 /* Subroutine of fold() that optimizes comparisons against Infinities,
5791 either +Inf or -Inf.
5792
5793 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5794 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5795 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5796
5797 The function returns the constant folded tree if a simplification
5798 can be made, and NULL_TREE otherwise. */
5799
5800 static tree
5801 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5802 {
5803 enum machine_mode mode;
5804 REAL_VALUE_TYPE max;
5805 tree temp;
5806 bool neg;
5807
5808 mode = TYPE_MODE (TREE_TYPE (arg0));
5809
5810 /* For negative infinity swap the sense of the comparison. */
5811 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5812 if (neg)
5813 code = swap_tree_comparison (code);
5814
5815 switch (code)
5816 {
5817 case GT_EXPR:
5818 /* x > +Inf is always false, if we ignore sNaNs. */
5819 if (HONOR_SNANS (mode))
5820 return NULL_TREE;
5821 return omit_one_operand (type, integer_zero_node, arg0);
5822
5823 case LE_EXPR:
5824 /* x <= +Inf is always true, if we don't care about NaNs. */
5825 if (! HONOR_NANS (mode))
5826 return omit_one_operand (type, integer_one_node, arg0);
5827
5828 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5829 if (lang_hooks.decls.global_bindings_p () == 0
5830 && ! CONTAINS_PLACEHOLDER_P (arg0))
5831 {
5832 arg0 = save_expr (arg0);
5833 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5834 }
5835 break;
5836
5837 case EQ_EXPR:
5838 case GE_EXPR:
5839 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5840 real_maxval (&max, neg, mode);
5841 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5842 arg0, build_real (TREE_TYPE (arg0), max));
5843
5844 case LT_EXPR:
5845 /* x < +Inf is always equal to x <= DBL_MAX. */
5846 real_maxval (&max, neg, mode);
5847 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5848 arg0, build_real (TREE_TYPE (arg0), max));
5849
5850 case NE_EXPR:
5851 /* x != +Inf is always equal to !(x > DBL_MAX). */
5852 real_maxval (&max, neg, mode);
5853 if (! HONOR_NANS (mode))
5854 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5855 arg0, build_real (TREE_TYPE (arg0), max));
5856
5857 /* The transformation below creates non-gimple code and thus is
5858 not appropriate if we are in gimple form. */
5859 if (in_gimple_form)
5860 return NULL_TREE;
5861
5862 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5863 arg0, build_real (TREE_TYPE (arg0), max));
5864 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5865
5866 default:
5867 break;
5868 }
5869
5870 return NULL_TREE;
5871 }
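
/* The Inf comparisons rewritten as DBL_MAX comparisons (hypothetical
   check; HUGE_VAL is +Inf on IEEE targets).  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>
int
main (void)
{
  double x = 1.0e300;
  assert ((x < HUGE_VAL) == (x <= DBL_MAX));   /* x < +Inf  ->  x <= DBL_MAX */
  assert ((x >= HUGE_VAL) == (x > DBL_MAX));   /* x >= +Inf ->  x > DBL_MAX  */
  return 0;
}
#endif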
5872
5873 /* Subroutine of fold() that optimizes comparisons of a division by
5874 a nonzero integer constant against an integer constant, i.e.
5875 X/C1 op C2.
5876
5877 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5878 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5879 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5880
5881 The function returns the constant folded tree if a simplification
5882 can be made, and NULL_TREE otherwise. */
5883
5884 static tree
5885 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5886 {
5887 tree prod, tmp, hi, lo;
5888 tree arg00 = TREE_OPERAND (arg0, 0);
5889 tree arg01 = TREE_OPERAND (arg0, 1);
5890 unsigned HOST_WIDE_INT lpart;
5891 HOST_WIDE_INT hpart;
5892 int overflow;
5893
5894 /* We have to do this the hard way to detect unsigned overflow.
5895 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5896 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5897 TREE_INT_CST_HIGH (arg01),
5898 TREE_INT_CST_LOW (arg1),
5899 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5900 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5901 prod = force_fit_type (prod, -1, overflow, false);
5902
5903 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5904 {
5905 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5906 lo = prod;
5907
5908 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5909 overflow = add_double (TREE_INT_CST_LOW (prod),
5910 TREE_INT_CST_HIGH (prod),
5911 TREE_INT_CST_LOW (tmp),
5912 TREE_INT_CST_HIGH (tmp),
5913 &lpart, &hpart);
5914 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5915 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5916 TREE_CONSTANT_OVERFLOW (prod));
5917 }
5918 else if (tree_int_cst_sgn (arg01) >= 0)
5919 {
5920 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5921 switch (tree_int_cst_sgn (arg1))
5922 {
5923 case -1:
5924 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5925 hi = prod;
5926 break;
5927
5928 case 0:
5929 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5930 hi = tmp;
5931 break;
5932
5933 case 1:
5934 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5935 lo = prod;
5936 break;
5937
5938 default:
5939 gcc_unreachable ();
5940 }
5941 }
5942 else
5943 {
5944 /* A negative divisor reverses the relational operators. */
5945 code = swap_tree_comparison (code);
5946
5947 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5948 switch (tree_int_cst_sgn (arg1))
5949 {
5950 case -1:
5951 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5952 lo = prod;
5953 break;
5954
5955 case 0:
5956 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5957 lo = tmp;
5958 break;
5959
5960 case 1:
5961 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5962 hi = prod;
5963 break;
5964
5965 default:
5966 gcc_unreachable ();
5967 }
5968 }
5969
5970 switch (code)
5971 {
5972 case EQ_EXPR:
5973 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5974 return omit_one_operand (type, integer_zero_node, arg00);
5975 if (TREE_OVERFLOW (hi))
5976 return fold_build2 (GE_EXPR, type, arg00, lo);
5977 if (TREE_OVERFLOW (lo))
5978 return fold_build2 (LE_EXPR, type, arg00, hi);
5979 return build_range_check (type, arg00, 1, lo, hi);
5980
5981 case NE_EXPR:
5982 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5983 return omit_one_operand (type, integer_one_node, arg00);
5984 if (TREE_OVERFLOW (hi))
5985 return fold_build2 (LT_EXPR, type, arg00, lo);
5986 if (TREE_OVERFLOW (lo))
5987 return fold_build2 (GT_EXPR, type, arg00, hi);
5988 return build_range_check (type, arg00, 0, lo, hi);
5989
5990 case LT_EXPR:
5991 if (TREE_OVERFLOW (lo))
5992 return omit_one_operand (type, integer_zero_node, arg00);
5993 return fold_build2 (LT_EXPR, type, arg00, lo);
5994
5995 case LE_EXPR:
5996 if (TREE_OVERFLOW (hi))
5997 return omit_one_operand (type, integer_one_node, arg00);
5998 return fold_build2 (LE_EXPR, type, arg00, hi);
5999
6000 case GT_EXPR:
6001 if (TREE_OVERFLOW (hi))
6002 return omit_one_operand (type, integer_zero_node, arg00);
6003 return fold_build2 (GT_EXPR, type, arg00, hi);
6004
6005 case GE_EXPR:
6006 if (TREE_OVERFLOW (lo))
6007 return omit_one_operand (type, integer_one_node, arg00);
6008 return fold_build2 (GE_EXPR, type, arg00, lo);
6009
6010 default:
6011 break;
6012 }
6013
6014 return NULL_TREE;
6015 }
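
/* The range check fold_div_compare derives, spelled out (hypothetical
   exhaustive check over a small range).  */
#if 0
#include <assert.h>
int
main (void)
{
  unsigned int x;
  /* x / 3 == 5 holds exactly for 15 <= x <= 17.  */
  for (x = 0; x < 100; x++)
    assert ((x / 3 == 5) == (x >= 15 && x <= 17));
  return 0;
}
#endif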
6016
6017
6018 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6019 equality/inequality test, then return a simplified form of the test
6020 using a sign test. Otherwise return NULL. TYPE is the desired
6021 result type. */
6022
6023 static tree
6024 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6025 tree result_type)
6026 {
6027 /* If this is testing a single bit, we can optimize the test. */
6028 if ((code == NE_EXPR || code == EQ_EXPR)
6029 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6030 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6031 {
6032 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6033 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6034 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6035
6036 if (arg00 != NULL_TREE
6037 /* This is only a win if casting to a signed type is cheap,
6038 i.e. when arg00's type is not a partial mode. */
6039 && TYPE_PRECISION (TREE_TYPE (arg00))
6040 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6041 {
6042 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6043 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6044 result_type, fold_convert (stype, arg00),
6045 fold_convert (stype, integer_zero_node));
6046 }
6047 }
6048
6049 return NULL_TREE;
6050 }
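
/* The sign-bit test becoming a signed comparison (hypothetical check,
   assuming a 32-bit two's-complement int).  */
#if 0
#include <assert.h>
int
main (void)
{
  unsigned int x = 0xFFFFFFFBu;   /* the bit pattern of -5 */
  assert (((x & 0x80000000u) != 0) == ((int) x < 0));
  x = 5;
  assert (((x & 0x80000000u) != 0) == ((int) x < 0));
  return 0;
}
#endif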
6051
6052 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6053 equality/inequality test, then return a simplified form of
6054 the test using shifts and logical operations. Otherwise return
6055 NULL. TYPE is the desired result type. */
6056
6057 tree
6058 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6059 tree result_type)
6060 {
6061 /* If this is testing a single bit, we can optimize the test. */
6062 if ((code == NE_EXPR || code == EQ_EXPR)
6063 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6064 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6065 {
6066 tree inner = TREE_OPERAND (arg0, 0);
6067 tree type = TREE_TYPE (arg0);
6068 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6069 enum machine_mode operand_mode = TYPE_MODE (type);
6070 int ops_unsigned;
6071 tree signed_type, unsigned_type, intermediate_type;
6072 tree tem;
6073
6074 /* First, see if we can fold the single bit test into a sign-bit
6075 test. */
6076 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6077 result_type);
6078 if (tem)
6079 return tem;
6080
6081 /* Otherwise we have (A & C) != 0 where C is a single bit,
6082 convert that into ((A >> C2) & 1), where C2 = log2(C).
6083 Similarly for (A & C) == 0. */
6084
6085 /* If INNER is a right shift of a constant and it plus BITNUM does
6086 not overflow, adjust BITNUM and INNER. */
6087 if (TREE_CODE (inner) == RSHIFT_EXPR
6088 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6089 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6090 && bitnum < TYPE_PRECISION (type)
6091 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6092 bitnum - TYPE_PRECISION (type)))
6093 {
6094 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6095 inner = TREE_OPERAND (inner, 0);
6096 }
6097
6098 /* If we are going to be able to omit the AND below, we must do our
6099 operations as unsigned. If we must use the AND, we have a choice.
6100 Normally unsigned is faster, but for some machines signed is. */
6101 #ifdef LOAD_EXTEND_OP
6102 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6103 && !flag_syntax_only) ? 0 : 1;
6104 #else
6105 ops_unsigned = 1;
6106 #endif
6107
6108 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6109 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6110 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6111 inner = fold_convert (intermediate_type, inner);
6112
6113 if (bitnum != 0)
6114 inner = build2 (RSHIFT_EXPR, intermediate_type,
6115 inner, size_int (bitnum));
6116
6117 if (code == EQ_EXPR)
6118 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6119 inner, integer_one_node);
6120
6121 /* Put the AND last so it can combine with more things. */
6122 inner = build2 (BIT_AND_EXPR, intermediate_type,
6123 inner, integer_one_node);
6124
6125 /* Make sure to return the proper type. */
6126 inner = fold_convert (result_type, inner);
6127
6128 return inner;
6129 }
6130 return NULL_TREE;
6131 }
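
/* The shift-and-mask form of a single-bit test (hypothetical check).  */
#if 0
#include <assert.h>
int
main (void)
{
  unsigned int x = 0x2C;                           /* bits 2, 3, 5 set */
  assert (((x & 8) != 0) == ((x >> 3) & 1));       /* NE case          */
  assert (((x & 8) == 0) == (((x >> 3) ^ 1) & 1)); /* EQ case: XOR 1   */
  return 0;
}
#endif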
6132
6133 /* Check whether we are allowed to reorder operands arg0 and arg1,
6134 such that the evaluation of arg1 occurs before arg0. */
6135
6136 static bool
6137 reorder_operands_p (tree arg0, tree arg1)
6138 {
6139 if (! flag_evaluation_order)
6140 return true;
6141 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6142 return true;
6143 return ! TREE_SIDE_EFFECTS (arg0)
6144 && ! TREE_SIDE_EFFECTS (arg1);
6145 }
6146
6147 /* Test whether it is preferable to swap two operands, ARG0 and
6148 ARG1, for example because ARG0 is an integer constant and ARG1
6149 isn't. If REORDER is true, only recommend swapping if we can
6150 evaluate the operands in reverse order. */
6151
6152 bool
6153 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6154 {
6155 STRIP_SIGN_NOPS (arg0);
6156 STRIP_SIGN_NOPS (arg1);
6157
6158 if (TREE_CODE (arg1) == INTEGER_CST)
6159 return 0;
6160 if (TREE_CODE (arg0) == INTEGER_CST)
6161 return 1;
6162
6163 if (TREE_CODE (arg1) == REAL_CST)
6164 return 0;
6165 if (TREE_CODE (arg0) == REAL_CST)
6166 return 1;
6167
6168 if (TREE_CODE (arg1) == COMPLEX_CST)
6169 return 0;
6170 if (TREE_CODE (arg0) == COMPLEX_CST)
6171 return 1;
6172
6173 if (TREE_CONSTANT (arg1))
6174 return 0;
6175 if (TREE_CONSTANT (arg0))
6176 return 1;
6177
6178 if (optimize_size)
6179 return 0;
6180
6181 if (reorder && flag_evaluation_order
6182 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6183 return 0;
6184
6185 if (DECL_P (arg1))
6186 return 0;
6187 if (DECL_P (arg0))
6188 return 1;
6189
6190 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6191 for commutative and comparison operators. Ensuring a canonical
6192 form allows the optimizers to find additional redundancies without
6193 having to explicitly check for both orderings. */
6194 if (TREE_CODE (arg0) == SSA_NAME
6195 && TREE_CODE (arg1) == SSA_NAME
6196 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6197 return 1;
6198
6199 return 0;
6200 }
6201
6202 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6203 ARG0 is extended to a wider type. */
6204
6205 static tree
6206 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6207 {
6208 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6209 tree arg1_unw;
6210 tree shorter_type, outer_type;
6211 tree min, max;
6212 bool above, below;
6213
6214 if (arg0_unw == arg0)
6215 return NULL_TREE;
6216 shorter_type = TREE_TYPE (arg0_unw);
6217
6218 #ifdef HAVE_canonicalize_funcptr_for_compare
6219 /* Disable this optimization if we're casting a function pointer
6220 type on targets that require function pointer canonicalization. */
6221 if (HAVE_canonicalize_funcptr_for_compare
6222 && TREE_CODE (shorter_type) == POINTER_TYPE
6223 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6224 return NULL_TREE;
6225 #endif
6226
6227 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6228 return NULL_TREE;
6229
6230 arg1_unw = get_unwidened (arg1, shorter_type);
6231 if (!arg1_unw)
6232 return NULL_TREE;
6233
6234 /* If possible, express the comparison in the shorter mode. */
6235 if ((code == EQ_EXPR || code == NE_EXPR
6236 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6237 && (TREE_TYPE (arg1_unw) == shorter_type
6238 || (TREE_CODE (arg1_unw) == INTEGER_CST
6239 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6240 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6241 && int_fits_type_p (arg1_unw, shorter_type))))
6242 return fold_build2 (code, type, arg0_unw,
6243 fold_convert (shorter_type, arg1_unw));
6244
6245 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6246 return NULL_TREE;
6247
6248 /* If we are comparing with an integer that does not fit into the range
6249 of the shorter type, the result is known. */
6250 outer_type = TREE_TYPE (arg1_unw);
6251 min = lower_bound_in_type (outer_type, shorter_type);
6252 max = upper_bound_in_type (outer_type, shorter_type);
6253
6254 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6255 max, arg1_unw));
6256 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6257 arg1_unw, min));
6258
6259 switch (code)
6260 {
6261 case EQ_EXPR:
6262 if (above || below)
6263 return omit_one_operand (type, integer_zero_node, arg0);
6264 break;
6265
6266 case NE_EXPR:
6267 if (above || below)
6268 return omit_one_operand (type, integer_one_node, arg0);
6269 break;
6270
6271 case LT_EXPR:
6272 case LE_EXPR:
6273 if (above)
6274 return omit_one_operand (type, integer_one_node, arg0);
6275 else if (below)
6276 return omit_one_operand (type, integer_zero_node, arg0);
6277 break;

6278 case GT_EXPR:
6279 case GE_EXPR:
6280 if (above)
6281 return omit_one_operand (type, integer_zero_node, arg0);
6282 else if (below)
6283 return omit_one_operand (type, integer_one_node, arg0);
6284 break;

6285 default:
6286 break;
6287 }
6288
6289 return NULL_TREE;
6290 }
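
/* A widened comparison decided from the range of the narrow type
   (hypothetical check).  */
#if 0
#include <assert.h>
int
main (void)
{
  signed char c = 100;
  /* c is promoted to int, but 300 exceeds the range of signed char,
     so the result is known at fold time.  */
  assert ((c < 300) == 1);
  /* An in-range constant can instead be compared in the narrow type.  */
  assert ((c == 42) == 0);
  return 0;
}
#endif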
6291
6292 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6293 ARG0 just the signedness is changed. */
6294
6295 static tree
6296 fold_sign_changed_comparison (enum tree_code code, tree type,
6297 tree arg0, tree arg1)
6298 {
6299 tree arg0_inner, tmp;
6300 tree inner_type, outer_type;
6301
6302 if (TREE_CODE (arg0) != NOP_EXPR
6303 && TREE_CODE (arg0) != CONVERT_EXPR)
6304 return NULL_TREE;
6305
6306 outer_type = TREE_TYPE (arg0);
6307 arg0_inner = TREE_OPERAND (arg0, 0);
6308 inner_type = TREE_TYPE (arg0_inner);
6309
6310 #ifdef HAVE_canonicalize_funcptr_for_compare
6311 /* Disable this optimization if we're casting a function pointer
6312 type on targets that require function pointer canonicalization. */
6313 if (HAVE_canonicalize_funcptr_for_compare
6314 && TREE_CODE (inner_type) == POINTER_TYPE
6315 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6316 return NULL_TREE;
6317 #endif
6318
6319 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6320 return NULL_TREE;
6321
6322 if (TREE_CODE (arg1) != INTEGER_CST
6323 && !((TREE_CODE (arg1) == NOP_EXPR
6324 || TREE_CODE (arg1) == CONVERT_EXPR)
6325 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6326 return NULL_TREE;
6327
6328 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6329 && code != NE_EXPR
6330 && code != EQ_EXPR)
6331 return NULL_TREE;
6332
6333 if (TREE_CODE (arg1) == INTEGER_CST)
6334 {
6335 tmp = build_int_cst_wide (inner_type,
6336 TREE_INT_CST_LOW (arg1),
6337 TREE_INT_CST_HIGH (arg1));
6338 arg1 = force_fit_type (tmp, 0,
6339 TREE_OVERFLOW (arg1),
6340 TREE_CONSTANT_OVERFLOW (arg1));
6341 }
6342 else
6343 arg1 = fold_convert (inner_type, arg1);
6344
6345 return fold_build2 (code, type, arg0_inner, arg1);
6346 }
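
/* A comparison where only the signedness of the operand changes
   (hypothetical check; int -> unsigned of the same precision keeps the
   bits, so the cast can be dropped and the constant converted instead).  */
#if 0
#include <assert.h>
int
main (void)
{
  int x = -1;
  /* 32-bit int assumed.  */
  assert (((unsigned int) x == 0xFFFFFFFFu) == (x == -1));
  return 0;
}
#endif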
6347
6348 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6349 the step of the array. Reconstructs s and delta in the case of s * delta
6350 being an integer constant (and thus already folded).
6351 ADDR is the address. OP1 is the multiplicative expression.
6352 If the function succeeds, the new address expression is returned. Otherwise
6353 NULL_TREE is returned. */
6354
6355 static tree
6356 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6357 {
6358 tree s, delta, step;
6359 tree ref = TREE_OPERAND (addr, 0), pref;
6360 tree ret, pos;
6361 tree itype;
6362
6363 /* Canonicalize op1 into a possibly non-constant delta
6364 and an INTEGER_CST s. */
6365 if (TREE_CODE (op1) == MULT_EXPR)
6366 {
6367 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6368
6369 STRIP_NOPS (arg0);
6370 STRIP_NOPS (arg1);
6371
6372 if (TREE_CODE (arg0) == INTEGER_CST)
6373 {
6374 s = arg0;
6375 delta = arg1;
6376 }
6377 else if (TREE_CODE (arg1) == INTEGER_CST)
6378 {
6379 s = arg1;
6380 delta = arg0;
6381 }
6382 else
6383 return NULL_TREE;
6384 }
6385 else if (TREE_CODE (op1) == INTEGER_CST)
6386 {
6387 delta = op1;
6388 s = NULL_TREE;
6389 }
6390 else
6391 {
6392 /* Pretend the expression is delta * 1. */
6393 delta = op1;
6394 s = integer_one_node;
6395 }
6396
6397 for (;; ref = TREE_OPERAND (ref, 0))
6398 {
6399 if (TREE_CODE (ref) == ARRAY_REF)
6400 {
6401 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6402 if (! itype)
6403 continue;
6404
6405 step = array_ref_element_size (ref);
6406 if (TREE_CODE (step) != INTEGER_CST)
6407 continue;
6408
6409 if (s)
6410 {
6411 if (! tree_int_cst_equal (step, s))
6412 continue;
6413 }
6414 else
6415 {
6416 /* Check whether delta is a multiple of step. */
6417 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6418 if (! tmp)
6419 continue;
6420 delta = tmp;
6421 }
6422
6423 break;
6424 }
6425
6426 if (!handled_component_p (ref))
6427 return NULL_TREE;
6428 }
6429
6430 /* We found a suitable array reference, so copy everything up to it
6431 and replace the index. */
6432
6433 pref = TREE_OPERAND (addr, 0);
6434 ret = copy_node (pref);
6435 pos = ret;
6436
6437 while (pref != ref)
6438 {
6439 pref = TREE_OPERAND (pref, 0);
6440 TREE_OPERAND (pos, 0) = copy_node (pref);
6441 pos = TREE_OPERAND (pos, 0);
6442 }
6443
6444 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6445 fold_convert (itype,
6446 TREE_OPERAND (pos, 1)),
6447 fold_convert (itype, delta));
6448
6449 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6450 }
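
/* The pointer identity behind the index rewrite (hypothetical check;
   the step s is sizeof (a[0])).  */
#if 0
#include <assert.h>
int
main (void)
{
  int a[10];
  int i = 2, d = 3;
  /* &a[i] plus s * d bytes is &a[i + d].  */
  assert ((char *) &a[i] + sizeof (a[0]) * d == (char *) &a[i + d]);
  return 0;
}
#endif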
6451
6452
6453 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6454 means A >= Y && A != MAX, but in this case we know that
6455 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6456
6457 static tree
6458 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6459 {
6460 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6461
6462 if (TREE_CODE (bound) == LT_EXPR)
6463 a = TREE_OPERAND (bound, 0);
6464 else if (TREE_CODE (bound) == GT_EXPR)
6465 a = TREE_OPERAND (bound, 1);
6466 else
6467 return NULL_TREE;
6468
6469 typea = TREE_TYPE (a);
6470 if (!INTEGRAL_TYPE_P (typea)
6471 && !POINTER_TYPE_P (typea))
6472 return NULL_TREE;
6473
6474 if (TREE_CODE (ineq) == LT_EXPR)
6475 {
6476 a1 = TREE_OPERAND (ineq, 1);
6477 y = TREE_OPERAND (ineq, 0);
6478 }
6479 else if (TREE_CODE (ineq) == GT_EXPR)
6480 {
6481 a1 = TREE_OPERAND (ineq, 0);
6482 y = TREE_OPERAND (ineq, 1);
6483 }
6484 else
6485 return NULL_TREE;
6486
6487 if (TREE_TYPE (a1) != typea)
6488 return NULL_TREE;
6489
6490 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6491 if (!integer_onep (diff))
6492 return NULL_TREE;
6493
6494 return fold_build2 (GE_EXPR, type, a, y);
6495 }
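
/* The non-sharp inequality rewrite (hypothetical exhaustive check:
   given a < x, the + 1 cannot wrap, so a + 1 > y is just a >= y).  */
#if 0
#include <assert.h>
int
main (void)
{
  unsigned int a, x = 50, y = 10;
  for (a = 0; a < 100; a++)
    assert (((a < x) && (a + 1 > y)) == ((a < x) && (a >= y)));
  return 0;
}
#endif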
6496
6497 /* Fold complex addition when both components are accessible by parts.
6498 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6499 or MINUS_EXPR for subtraction. */
6500
6501 static tree
6502 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6503 {
6504 tree ar, ai, br, bi, rr, ri, inner_type;
6505
6506 if (TREE_CODE (ac) == COMPLEX_EXPR)
6507 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6508 else if (TREE_CODE (ac) == COMPLEX_CST)
6509 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6510 else
6511 return NULL;
6512
6513 if (TREE_CODE (bc) == COMPLEX_EXPR)
6514 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6515 else if (TREE_CODE (bc) == COMPLEX_CST)
6516 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6517 else
6518 return NULL;
6519
6520 inner_type = TREE_TYPE (type);
6521
6522 rr = fold_build2 (code, inner_type, ar, br);
6523 ri = fold_build2 (code, inner_type, ai, bi);
6524
6525 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6526 }
6527
6528 /* Perform some simplifications of complex multiplication when one or more
6529 of the components are constants or zeros. Return non-null if successful. */
6530
6531 tree
6532 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6533 {
6534 tree rr, ri, inner_type, zero;
6535 bool ar0, ai0, br0, bi0, bi1;
6536
6537 inner_type = TREE_TYPE (type);
6538 zero = NULL;
6539
6540 if (SCALAR_FLOAT_TYPE_P (inner_type))
6541 {
6542 ar0 = ai0 = br0 = bi0 = bi1 = false;
6543
6544 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6545
6546 if (TREE_CODE (ar) == REAL_CST
6547 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6548 ar0 = true, zero = ar;
6549
6550 if (TREE_CODE (ai) == REAL_CST
6551 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6552 ai0 = true, zero = ai;
6553
6554 if (TREE_CODE (br) == REAL_CST
6555 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6556 br0 = true, zero = br;
6557
6558 if (TREE_CODE (bi) == REAL_CST)
6559 {
6560 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6561 bi0 = true, zero = bi;
6562 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6563 bi1 = true;
6564 }
6565 }
6566 else
6567 {
6568 ar0 = integer_zerop (ar);
6569 if (ar0)
6570 zero = ar;
6571 ai0 = integer_zerop (ai);
6572 if (ai0)
6573 zero = ai;
6574 br0 = integer_zerop (br);
6575 if (br0)
6576 zero = br;
6577 bi0 = integer_zerop (bi);
6578 if (bi0)
6579 {
6580 zero = bi;
6581 bi1 = false;
6582 }
6583 else
6584 bi1 = integer_onep (bi);
6585 }
6586
6587 /* We won't optimize anything below unless something is zero. */
6588 if (zero == NULL)
6589 return NULL;
6590
6591 if (ai0 && br0 && bi1)
6592 {
6593 rr = zero;
6594 ri = ar;
6595 }
6596 else if (ai0 && bi0)
6597 {
6598 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6599 ri = zero;
6600 }
6601 else if (ai0 && br0)
6602 {
6603 rr = zero;
6604 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6605 }
6606 else if (ar0 && bi0)
6607 {
6608 rr = zero;
6609 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6610 }
6611 else if (ar0 && br0)
6612 {
6613 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6614 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6615 ri = zero;
6616 }
6617 else if (bi0)
6618 {
6619 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6620 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6621 }
6622 else if (ai0)
6623 {
6624 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6625 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6626 }
6627 else if (br0)
6628 {
6629 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6630 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6631 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6632 }
6633 else if (ar0)
6634 {
6635 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6636 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6637 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6638 }
6639 else
6640 return NULL;
6641
6642 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6643 }
6644
6645 static tree
6646 fold_complex_mult (tree type, tree ac, tree bc)
6647 {
6648 tree ar, ai, br, bi;
6649
6650 if (TREE_CODE (ac) == COMPLEX_EXPR)
6651 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6652 else if (TREE_CODE (ac) == COMPLEX_CST)
6653 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6654 else
6655 return NULL;
6656
6657 if (TREE_CODE (bc) == COMPLEX_EXPR)
6658 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6659 else if (TREE_CODE (bc) == COMPLEX_CST)
6660 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6661 else
6662 return NULL;
6663
6664 return fold_complex_mult_parts (type, ar, ai, br, bi);
6665 }
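
/* Two of the zero-component shortcuts at the source level
   (hypothetical C99 check).  */
#if 0
#include <assert.h>
#include <complex.h>
int
main (void)
{
  double complex z = 3.0 + 4.0 * I;
  double complex w = z * 2.0;   /* bi0 case: two real multiplies            */
  double complex v = z * I;     /* br0 case: rr = -(ai*bi), ri = ar*bi      */
  assert (creal (w) == 6.0 && cimag (w) == 8.0);
  assert (creal (v) == -4.0 && cimag (v) == 3.0);
  return 0;
}
#endif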
6666
6667 /* Perform some simplifications of complex division when one or more of
6668 the components are constants or zeros. Return non-null if successful. */
6669
6670 tree
6671 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6672 enum tree_code code)
6673 {
6674 tree rr, ri, inner_type, zero;
6675 bool ar0, ai0, br0, bi0, bi1;
6676
6677 inner_type = TREE_TYPE (type);
6678 zero = NULL;
6679
6680 if (SCALAR_FLOAT_TYPE_P (inner_type))
6681 {
6682 ar0 = ai0 = br0 = bi0 = bi1 = false;
6683
6684 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6685
6686 if (TREE_CODE (ar) == REAL_CST
6687 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6688 ar0 = true, zero = ar;
6689
6690 if (TREE_CODE (ai) == REAL_CST
6691 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6692 ai0 = true, zero = ai;
6693
6694 if (TREE_CODE (br) == REAL_CST
6695 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6696 br0 = true, zero = br;
6697
6698 if (TREE_CODE (bi) == REAL_CST)
6699 {
6700 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6701 bi0 = true, zero = bi;
6702 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6703 bi1 = true;
6704 }
6705 }
6706 else
6707 {
6708 ar0 = integer_zerop (ar);
6709 if (ar0)
6710 zero = ar;
6711 ai0 = integer_zerop (ai);
6712 if (ai0)
6713 zero = ai;
6714 br0 = integer_zerop (br);
6715 if (br0)
6716 zero = br;
6717 bi0 = integer_zerop (bi);
6718 if (bi0)
6719 {
6720 zero = bi;
6721 bi1 = false;
6722 }
6723 else
6724 bi1 = integer_onep (bi);
6725 }
6726
6727 /* We won't optimize anything below unless something is zero. */
6728 if (zero == NULL)
6729 return NULL;
6730
6731 if (ai0 && bi0)
6732 {
6733 rr = fold_build2 (code, inner_type, ar, br);
6734 ri = zero;
6735 }
6736 else if (ai0 && br0)
6737 {
6738 rr = zero;
6739 ri = fold_build2 (code, inner_type, ar, bi);
6740 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6741 }
6742 else if (ar0 && bi0)
6743 {
6744 rr = zero;
6745 ri = fold_build2 (code, inner_type, ai, br);
6746 }
6747 else if (ar0 && br0)
6748 {
6749 rr = fold_build2 (code, inner_type, ai, bi);
6750 ri = zero;
6751 }
6752 else if (bi0)
6753 {
6754 rr = fold_build2 (code, inner_type, ar, br);
6755 ri = fold_build2 (code, inner_type, ai, br);
6756 }
6757 else if (br0)
6758 {
6759 rr = fold_build2 (code, inner_type, ai, bi);
6760 ri = fold_build2 (code, inner_type, ar, bi);
6761 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6762 }
6763 else
6764 return NULL;
6765
6766 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6767 }
6768
6769 static tree
6770 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6771 {
6772 tree ar, ai, br, bi;
6773
6774 if (TREE_CODE (ac) == COMPLEX_EXPR)
6775 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6776 else if (TREE_CODE (ac) == COMPLEX_CST)
6777 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6778 else
6779 return NULL;
6780
6781 if (TREE_CODE (bc) == COMPLEX_EXPR)
6782 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6783 else if (TREE_CODE (bc) == COMPLEX_CST)
6784 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6785 else
6786 return NULL;
6787
6788 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6789 }
6790
6791 /* Fold a unary expression of code CODE and type TYPE with operand
6792 OP0. Return the folded expression if folding is successful.
6793 Otherwise, return NULL_TREE. */
6794
6795 tree
6796 fold_unary (enum tree_code code, tree type, tree op0)
6797 {
6798 tree tem;
6799 tree arg0;
6800 enum tree_code_class kind = TREE_CODE_CLASS (code);
6801
6802 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6803 && TREE_CODE_LENGTH (code) == 1);
6804
6805 arg0 = op0;
6806 if (arg0)
6807 {
6808 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6809 {
6810 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6811 STRIP_SIGN_NOPS (arg0);
6812 }
6813 else
6814 {
6815 /* Strip any conversions that don't change the mode. This
6816 is safe for every expression, except for a comparison
6817 expression because its signedness is derived from its
6818 operands.
6819
6820 Note that this is done as an internal manipulation within
6821 the constant folder, in order to find the simplest
6822 representation of the arguments so that their form can be
6823 studied. In any case, the appropriate type conversions
6824 should be put back in the tree that will get out of the
6825 constant folder. */
6826 STRIP_NOPS (arg0);
6827 }
6828 }
6829
6830 if (TREE_CODE_CLASS (code) == tcc_unary)
6831 {
6832 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6833 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6834 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6835 else if (TREE_CODE (arg0) == COND_EXPR)
6836 {
6837 tree arg01 = TREE_OPERAND (arg0, 1);
6838 tree arg02 = TREE_OPERAND (arg0, 2);
6839 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6840 arg01 = fold_build1 (code, type, arg01);
6841 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6842 arg02 = fold_build1 (code, type, arg02);
6843 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6844 arg01, arg02);
6845
6846 /* If this was a conversion, and all we did was to move it
6847 inside the COND_EXPR, bring it back out. But leave it if
6848 it is a conversion from integer to integer and the
6849 result precision is no wider than a word since such a
6850 conversion is cheap and may be optimized away by combine,
6851 while it couldn't if it were outside the COND_EXPR. Then return
6852 so we don't get into an infinite recursion loop taking the
6853 conversion out and then back in. */
6854
6855 if ((code == NOP_EXPR || code == CONVERT_EXPR
6856 || code == NON_LVALUE_EXPR)
6857 && TREE_CODE (tem) == COND_EXPR
6858 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6859 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6860 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
6861 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
6862 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6863 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6864 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6865 && (INTEGRAL_TYPE_P
6866 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6867 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6868 || flag_syntax_only))
6869 tem = build1 (code, type,
6870 build3 (COND_EXPR,
6871 TREE_TYPE (TREE_OPERAND
6872 (TREE_OPERAND (tem, 1), 0)),
6873 TREE_OPERAND (tem, 0),
6874 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6875 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6876 return tem;
6877 }
6878 else if (COMPARISON_CLASS_P (arg0))
6879 {
6880 if (TREE_CODE (type) == BOOLEAN_TYPE)
6881 {
6882 arg0 = copy_node (arg0);
6883 TREE_TYPE (arg0) = type;
6884 return arg0;
6885 }
6886 else if (TREE_CODE (type) != INTEGER_TYPE)
6887 return fold_build3 (COND_EXPR, type, arg0,
6888 fold_build1 (code, type,
6889 integer_one_node),
6890 fold_build1 (code, type,
6891 integer_zero_node));
6892 }
6893 }
6894
6895 switch (code)
6896 {
6897 case NOP_EXPR:
6898 case FLOAT_EXPR:
6899 case CONVERT_EXPR:
6900 case FIX_TRUNC_EXPR:
6901 case FIX_CEIL_EXPR:
6902 case FIX_FLOOR_EXPR:
6903 case FIX_ROUND_EXPR:
6904 if (TREE_TYPE (op0) == type)
6905 return op0;
6906
6907 /* Handle cases of two conversions in a row. */
6908 if (TREE_CODE (op0) == NOP_EXPR
6909 || TREE_CODE (op0) == CONVERT_EXPR)
6910 {
6911 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6912 tree inter_type = TREE_TYPE (op0);
6913 int inside_int = INTEGRAL_TYPE_P (inside_type);
6914 int inside_ptr = POINTER_TYPE_P (inside_type);
6915 int inside_float = FLOAT_TYPE_P (inside_type);
6916 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6917 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6918 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6919 int inter_int = INTEGRAL_TYPE_P (inter_type);
6920 int inter_ptr = POINTER_TYPE_P (inter_type);
6921 int inter_float = FLOAT_TYPE_P (inter_type);
6922 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6923 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6924 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6925 int final_int = INTEGRAL_TYPE_P (type);
6926 int final_ptr = POINTER_TYPE_P (type);
6927 int final_float = FLOAT_TYPE_P (type);
6928 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6929 unsigned int final_prec = TYPE_PRECISION (type);
6930 int final_unsignedp = TYPE_UNSIGNED (type);
6931
6932 /* In addition to the cases of two conversions in a row
6933 handled below, if we are converting something to its own
6934 type via an object of identical or wider precision, neither
6935 conversion is needed. */
6936 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6937 && ((inter_int && final_int) || (inter_float && final_float))
6938 && inter_prec >= final_prec)
6939 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6940
6941 /* Likewise, if the intermediate and final types are either both
6942 float or both integer, we don't need the middle conversion if
6943 it is wider than the final type and doesn't change the signedness
6944 (for integers). Avoid this if the final type is a pointer
6945 since then we sometimes need the inner conversion. Likewise if
6946 the outer has a precision not equal to the size of its mode. */
6947 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6948 || (inter_float && inside_float)
6949 || (inter_vec && inside_vec))
6950 && inter_prec >= inside_prec
6951 && (inter_float || inter_vec
6952 || inter_unsignedp == inside_unsignedp)
6953 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6954 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6955 && ! final_ptr
6956 && (! final_vec || inter_prec == inside_prec))
6957 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6958
6959 /* If we have a sign-extension of a zero-extended value, we can
6960 replace that by a single zero-extension. */
6961 if (inside_int && inter_int && final_int
6962 && inside_prec < inter_prec && inter_prec < final_prec
6963 && inside_unsignedp && !inter_unsignedp)
6964 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6965
6966 /* Two conversions in a row are not needed unless:
6967 - some conversion is floating-point (overstrict for now), or
6968 - some conversion is a vector (overstrict for now), or
6969 - the intermediate type is narrower than both initial and
6970 final, or
6971 - the intermediate type and innermost type differ in signedness,
6972 and the outermost type is wider than the intermediate, or
6973 - the initial type is a pointer type and the precisions of the
6974 intermediate and final types differ, or
6975 - the final type is a pointer type and the precisions of the
6976 initial and intermediate types differ. */
6977 if (! inside_float && ! inter_float && ! final_float
6978 && ! inside_vec && ! inter_vec && ! final_vec
6979 && (inter_prec > inside_prec || inter_prec > final_prec)
6980 && ! (inside_int && inter_int
6981 && inter_unsignedp != inside_unsignedp
6982 && inter_prec < final_prec)
6983 && ((inter_unsignedp && inter_prec > inside_prec)
6984 == (final_unsignedp && final_prec > inter_prec))
6985 && ! (inside_ptr && inter_prec != final_prec)
6986 && ! (final_ptr && inside_prec != inter_prec)
6987 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6988 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6989 && ! final_ptr)
6990 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6991 }
6992
6993 if (TREE_CODE (op0) == MODIFY_EXPR
6994 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6995 /* Detect assigning a bitfield. */
6996 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6997 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6998 {
6999 /* Don't leave an assignment inside a conversion
7000 unless assigning a bitfield. */
7001 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7002 /* First do the assignment, then return converted constant. */
7003 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7004 TREE_NO_WARNING (tem) = 1;
7005 TREE_USED (tem) = 1;
7006 return tem;
7007 }
7008
7009 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7010 constant (if x has signed type, the sign bit cannot be set
7011 in c). This folds the extension into the BIT_AND_EXPR. */
7012 if (INTEGRAL_TYPE_P (type)
7013 && TREE_CODE (type) != BOOLEAN_TYPE
7014 && TREE_CODE (op0) == BIT_AND_EXPR
7015 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7016 {
7017 tree and = op0;
7018 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7019 int change = 0;
7020
7021 if (TYPE_UNSIGNED (TREE_TYPE (and))
7022 || (TYPE_PRECISION (type)
7023 <= TYPE_PRECISION (TREE_TYPE (and))))
7024 change = 1;
7025 else if (TYPE_PRECISION (TREE_TYPE (and1))
7026 <= HOST_BITS_PER_WIDE_INT
7027 && host_integerp (and1, 1))
7028 {
7029 unsigned HOST_WIDE_INT cst;
7030
7031 cst = tree_low_cst (and1, 1);
7032 cst &= (HOST_WIDE_INT) -1
7033 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7034 change = (cst == 0);
7035 #ifdef LOAD_EXTEND_OP
7036 if (change
7037 && !flag_syntax_only
7038 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7039 == ZERO_EXTEND))
7040 {
7041 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7042 and0 = fold_convert (uns, and0);
7043 and1 = fold_convert (uns, and1);
7044 }
7045 #endif
7046 }
7047 if (change)
7048 {
7049 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7050 TREE_INT_CST_HIGH (and1));
7051 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7052 TREE_CONSTANT_OVERFLOW (and1));
7053 return fold_build2 (BIT_AND_EXPR, type,
7054 fold_convert (type, and0), tem);
7055 }
7056 }
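/* Illustrative sketch, not part of the original source: assuming
   32-bit int and 64-bit long, the fold above turns
     (long) (x & 0xff)  ==>  (long) x & 0xff   (x of type int)
   which is safe because the mask clears the sign bit, so extending
   before or after the AND gives the same value.  */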
7057
7058 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7059 T2 being pointers to types of the same size. */
7060 if (POINTER_TYPE_P (type)
7061 && BINARY_CLASS_P (arg0)
7062 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7063 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7064 {
7065 tree arg00 = TREE_OPERAND (arg0, 0);
7066 tree t0 = type;
7067 tree t1 = TREE_TYPE (arg00);
7068 tree tt0 = TREE_TYPE (t0);
7069 tree tt1 = TREE_TYPE (t1);
7070 tree s0 = TYPE_SIZE (tt0);
7071 tree s1 = TYPE_SIZE (tt1);
7072
7073 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7074 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7075 TREE_OPERAND (arg0, 1));
7076 }
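/* Illustrative sketch, not part of the original source: with T1 and
   T2 pointing to equal-sized types, a tree of the form
     (T1) ((T2) p + off)
   becomes
     (T1) p + off
   hoisting the outer conversion past the pointer arithmetic.  */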
7077
7078 tem = fold_convert_const (code, type, arg0);
7079 return tem ? tem : NULL_TREE;
7080
7081 case VIEW_CONVERT_EXPR:
7082 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7083 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7084 return NULL_TREE;
7085
7086 case NEGATE_EXPR:
7087 if (negate_expr_p (arg0))
7088 return fold_convert (type, negate_expr (arg0));
7089 /* Convert - (~A) to A + 1. */
7090 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
7091 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
7092 build_int_cst (type, 1));
7093 return NULL_TREE;
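/* Illustrative sketch, not part of the original source: the rule
   above rewrites  -(~a)  as  a + 1  for integral a, using the two's
   complement identity  ~a == -a - 1.  */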
7094
7095 case ABS_EXPR:
7096 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7097 return fold_abs_const (arg0, type);
7098 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7099 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7100 /* Convert fabs((double)float) into (double)fabsf(float). */
7101 else if (TREE_CODE (arg0) == NOP_EXPR
7102 && TREE_CODE (type) == REAL_TYPE)
7103 {
7104 tree targ0 = strip_float_extensions (arg0);
7105 if (targ0 != arg0)
7106 return fold_convert (type, fold_build1 (ABS_EXPR,
7107 TREE_TYPE (targ0),
7108 targ0));
7109 }
7110 else if (tree_expr_nonnegative_p (arg0))
7111 return arg0;
7112
7113 /* Strip sign ops from argument. */
7114 if (TREE_CODE (type) == REAL_TYPE)
7115 {
7116 tem = fold_strip_sign_ops (arg0);
7117 if (tem)
7118 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7119 }
7120 return NULL_TREE;
7121
7122 case CONJ_EXPR:
7123 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7124 return fold_convert (type, arg0);
7125 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7126 return build2 (COMPLEX_EXPR, type,
7127 TREE_OPERAND (arg0, 0),
7128 negate_expr (TREE_OPERAND (arg0, 1)));
7129 else if (TREE_CODE (arg0) == COMPLEX_CST)
7130 return build_complex (type, TREE_REALPART (arg0),
7131 negate_expr (TREE_IMAGPART (arg0)));
7132 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7133 return fold_build2 (TREE_CODE (arg0), type,
7134 fold_build1 (CONJ_EXPR, type,
7135 TREE_OPERAND (arg0, 0)),
7136 fold_build1 (CONJ_EXPR, type,
7137 TREE_OPERAND (arg0, 1)));
7138 else if (TREE_CODE (arg0) == CONJ_EXPR)
7139 return TREE_OPERAND (arg0, 0);
7140 return NULL_TREE;
7141
7142 case BIT_NOT_EXPR:
7143 if (TREE_CODE (arg0) == INTEGER_CST)
7144 return fold_not_const (arg0, type);
7145 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7146 return TREE_OPERAND (arg0, 0);
7147 /* Convert ~ (-A) to A - 1. */
7148 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7149 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7150 build_int_cst (type, 1));
7151 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7152 else if (INTEGRAL_TYPE_P (type)
7153 && ((TREE_CODE (arg0) == MINUS_EXPR
7154 && integer_onep (TREE_OPERAND (arg0, 1)))
7155 || (TREE_CODE (arg0) == PLUS_EXPR
7156 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7157 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7158 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7159 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7160 && (tem = fold_unary (BIT_NOT_EXPR, type,
7161 fold_convert (type,
7162 TREE_OPERAND (arg0, 0)))))
7163 return fold_build2 (BIT_XOR_EXPR, type, tem,
7164 fold_convert (type, TREE_OPERAND (arg0, 1)));
7165 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7166 && (tem = fold_unary (BIT_NOT_EXPR, type,
7167 fold_convert (type,
7168 TREE_OPERAND (arg0, 1)))))
7169 return fold_build2 (BIT_XOR_EXPR, type,
7170 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7171
7172 return NULL_TREE;
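/* Illustrative sketch, not part of the original source: the BIT_NOT
   rules above give, for integral a and b,
     ~(a - 1)    ==>  -a
     ~(~a ^ b)   ==>  a ^ b
   the first a consequence of  ~x == -x - 1,  the second because ~
   can be pushed onto either XOR operand and ~~a simplifies to a.  */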
7173
7174 case TRUTH_NOT_EXPR:
7175 /* The argument to invert_truthvalue must have Boolean type. */
7176 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7177 arg0 = fold_convert (boolean_type_node, arg0);
7178
7179 /* Note that the operand of this must be an int
7180 and its value must be 0 or 1.
7181 ("true" is a fixed value perhaps depending on the language,
7182 but we don't handle values other than 1 correctly yet.) */
7183 tem = invert_truthvalue (arg0);
7184 /* Avoid infinite recursion. */
7185 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7186 return NULL_TREE;
7187 return fold_convert (type, tem);
7188
7189 case REALPART_EXPR:
7190 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7191 return NULL_TREE;
7192 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7193 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7194 TREE_OPERAND (arg0, 1));
7195 else if (TREE_CODE (arg0) == COMPLEX_CST)
7196 return TREE_REALPART (arg0);
7197 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7198 return fold_build2 (TREE_CODE (arg0), type,
7199 fold_build1 (REALPART_EXPR, type,
7200 TREE_OPERAND (arg0, 0)),
7201 fold_build1 (REALPART_EXPR, type,
7202 TREE_OPERAND (arg0, 1)));
7203 return NULL_TREE;
7204
7205 case IMAGPART_EXPR:
7206 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7207 return fold_convert (type, integer_zero_node);
7208 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7209 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7210 TREE_OPERAND (arg0, 0));
7211 else if (TREE_CODE (arg0) == COMPLEX_CST)
7212 return TREE_IMAGPART (arg0);
7213 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7214 return fold_build2 (TREE_CODE (arg0), type,
7215 fold_build1 (IMAGPART_EXPR, type,
7216 TREE_OPERAND (arg0, 0)),
7217 fold_build1 (IMAGPART_EXPR, type,
7218 TREE_OPERAND (arg0, 1)));
7219 return NULL_TREE;
7220
7221 default:
7222 return NULL_TREE;
7223 } /* switch (code) */
7224 }
7225
7226 /* Fold a binary expression of code CODE and type TYPE with operands
7227 OP0 and OP1. Return the folded expression if folding is
7228 successful. Otherwise, return NULL_TREE. */
7229
7230 tree
7231 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7232 {
7233 tree t1 = NULL_TREE;
7234 tree tem;
7235 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7236 enum tree_code_class kind = TREE_CODE_CLASS (code);
7237
7238 /* WINS will be nonzero when the switch is done
7239 if all operands are constant. */
7240 int wins = 1;
7241
7242 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7243 && TREE_CODE_LENGTH (code) == 2);
7244
7245 arg0 = op0;
7246 arg1 = op1;
7247
7248 if (arg0)
7249 {
7250 tree subop;
7251
7252 /* Strip any conversions that don't change the mode. This is
7253 safe for every expression, except for a comparison expression
7254 because its signedness is derived from its operands. So, in
7255 the latter case, only strip conversions that don't change the
7256 signedness.
7257
7258 Note that this is done as an internal manipulation within the
7259 constant folder, in order to find the simplest representation
7260 of the arguments so that their form can be studied. In any
7261 cases, the appropriate type conversions should be put back in
7262 the tree that will get out of the constant folder. */
7263 if (kind == tcc_comparison)
7264 STRIP_SIGN_NOPS (arg0);
7265 else
7266 STRIP_NOPS (arg0);
7267
7268 if (TREE_CODE (arg0) == COMPLEX_CST)
7269 subop = TREE_REALPART (arg0);
7270 else
7271 subop = arg0;
7272
7273 if (TREE_CODE (subop) != INTEGER_CST
7274 && TREE_CODE (subop) != REAL_CST)
7275 /* Note that TREE_CONSTANT isn't enough:
7276 static var addresses are constant but we can't
7277 do arithmetic on them. */
7278 wins = 0;
7279 }
7280
7281 if (arg1)
7282 {
7283 tree subop;
7284
7285 /* Strip any conversions that don't change the mode. This is
7286 safe for every expression, except for a comparison expression
7287 because its signedness is derived from its operands. So, in
7288 the latter case, only strip conversions that don't change the
7289 signedness.
7290
7291 Note that this is done as an internal manipulation within the
7292 constant folder, in order to find the simplest representation
7293 of the arguments so that their form can be studied. In any
7294 cases, the appropriate type conversions should be put back in
7295 the tree that will get out of the constant folder. */
7296 if (kind == tcc_comparison)
7297 STRIP_SIGN_NOPS (arg1);
7298 else
7299 STRIP_NOPS (arg1);
7300
7301 if (TREE_CODE (arg1) == COMPLEX_CST)
7302 subop = TREE_REALPART (arg1);
7303 else
7304 subop = arg1;
7305
7306 if (TREE_CODE (subop) != INTEGER_CST
7307 && TREE_CODE (subop) != REAL_CST)
7308 /* Note that TREE_CONSTANT isn't enough:
7309 static var addresses are constant but we can't
7310 do arithmetic on them. */
7311 wins = 0;
7312 }
7313
7314 /* If this is a commutative operation, and ARG0 is a constant, move it
7315 to ARG1 to reduce the number of tests below. */
7316 if (commutative_tree_code (code)
7317 && tree_swap_operands_p (arg0, arg1, true))
7318 return fold_build2 (code, type, op1, op0);
7319
7320 /* Now WINS is set as described above,
7321 ARG0 is the first operand of EXPR,
7322 and ARG1 is the second operand (if it has more than one operand).
7323
7324 First check for cases where an arithmetic operation is applied to a
7325 compound, conditional, or comparison operation. Push the arithmetic
7326 operation inside the compound or conditional to see if any folding
7327 can then be done. Convert comparison to conditional for this purpose.
7328 This also optimizes non-constant cases that used to be done in
7329 expand_expr.
7330
7331 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7332 where one of the operands is a truth value and the other is a truth
7333 value or a BIT_AND_EXPR with the constant 1. In that case, the
7334 code below would make the expression more complex. Change it to a
7335 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7336 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7337
7338 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7339 || code == EQ_EXPR || code == NE_EXPR)
7340 && ((truth_value_p (TREE_CODE (arg0))
7341 && (truth_value_p (TREE_CODE (arg1))
7342 || (TREE_CODE (arg1) == BIT_AND_EXPR
7343 && integer_onep (TREE_OPERAND (arg1, 1)))))
7344 || (truth_value_p (TREE_CODE (arg1))
7345 && (truth_value_p (TREE_CODE (arg0))
7346 || (TREE_CODE (arg0) == BIT_AND_EXPR
7347 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7348 {
7349 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7350 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7351 : TRUTH_XOR_EXPR,
7352 boolean_type_node,
7353 fold_convert (boolean_type_node, arg0),
7354 fold_convert (boolean_type_node, arg1));
7355
7356 if (code == EQ_EXPR)
7357 tem = invert_truthvalue (tem);
7358
7359 return fold_convert (type, tem);
7360 }
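/* Illustrative sketch, not part of the original source: for truth
   operands the rewrite above turns, e.g.,
     (a < b) & (c < d)   into a TRUTH_AND_EXPR of the comparisons
     (a < b) == (c < d)  into the inversion of a TRUTH_XOR_EXPR
   forms the truth-value folders below handle without growing the
   expression.  */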
7361
7362 if (TREE_CODE_CLASS (code) == tcc_comparison
7363 && TREE_CODE (arg0) == COMPOUND_EXPR)
7364 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7365 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7366 else if (TREE_CODE_CLASS (code) == tcc_comparison
7367 && TREE_CODE (arg1) == COMPOUND_EXPR)
7368 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7369 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7370 else if (TREE_CODE_CLASS (code) == tcc_binary
7371 || TREE_CODE_CLASS (code) == tcc_comparison)
7372 {
7373 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7374 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7375 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7376 arg1));
7377 if (TREE_CODE (arg1) == COMPOUND_EXPR
7378 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7379 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7380 fold_build2 (code, type,
7381 arg0, TREE_OPERAND (arg1, 1)));
7382
7383 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7384 {
7385 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7386 arg0, arg1,
7387 /*cond_first_p=*/1);
7388 if (tem != NULL_TREE)
7389 return tem;
7390 }
7391
7392 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7393 {
7394 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7395 arg1, arg0,
7396 /*cond_first_p=*/0);
7397 if (tem != NULL_TREE)
7398 return tem;
7399 }
7400 }
7401
7402 switch (code)
7403 {
7404 case PLUS_EXPR:
7405 /* A + (-B) -> A - B */
7406 if (TREE_CODE (arg1) == NEGATE_EXPR)
7407 return fold_build2 (MINUS_EXPR, type,
7408 fold_convert (type, arg0),
7409 fold_convert (type, TREE_OPERAND (arg1, 0)));
7410 /* (-A) + B -> B - A */
7411 if (TREE_CODE (arg0) == NEGATE_EXPR
7412 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7413 return fold_build2 (MINUS_EXPR, type,
7414 fold_convert (type, arg1),
7415 fold_convert (type, TREE_OPERAND (arg0, 0)));
7416 /* Convert ~A + 1 to -A. */
7417 if (INTEGRAL_TYPE_P (type)
7418 && TREE_CODE (arg0) == BIT_NOT_EXPR
7419 && integer_onep (arg1))
7420 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7421
7422 if (TREE_CODE (type) == COMPLEX_TYPE)
7423 {
7424 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7425 if (tem)
7426 return tem;
7427 }
7428
7429 if (! FLOAT_TYPE_P (type))
7430 {
7431 if (integer_zerop (arg1))
7432 return non_lvalue (fold_convert (type, arg0));
7433
7434 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7435 with a constant, and the two constants have no bits in common,
7436 we should treat this as a BIT_IOR_EXPR since this may produce more
7437 simplifications. */
7438 if (TREE_CODE (arg0) == BIT_AND_EXPR
7439 && TREE_CODE (arg1) == BIT_AND_EXPR
7440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7441 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7442 && integer_zerop (const_binop (BIT_AND_EXPR,
7443 TREE_OPERAND (arg0, 1),
7444 TREE_OPERAND (arg1, 1), 0)))
7445 {
7446 code = BIT_IOR_EXPR;
7447 goto bit_ior;
7448 }
7449
7450 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7451 (plus (plus (mult) (mult)) (foo)) so that we can
7452 take advantage of the factoring cases below. */
7453 if (((TREE_CODE (arg0) == PLUS_EXPR
7454 || TREE_CODE (arg0) == MINUS_EXPR)
7455 && TREE_CODE (arg1) == MULT_EXPR)
7456 || ((TREE_CODE (arg1) == PLUS_EXPR
7457 || TREE_CODE (arg1) == MINUS_EXPR)
7458 && TREE_CODE (arg0) == MULT_EXPR))
7459 {
7460 tree parg0, parg1, parg, marg;
7461 enum tree_code pcode;
7462
7463 if (TREE_CODE (arg1) == MULT_EXPR)
7464 parg = arg0, marg = arg1;
7465 else
7466 parg = arg1, marg = arg0;
7467 pcode = TREE_CODE (parg);
7468 parg0 = TREE_OPERAND (parg, 0);
7469 parg1 = TREE_OPERAND (parg, 1);
7470 STRIP_NOPS (parg0);
7471 STRIP_NOPS (parg1);
7472
7473 if (TREE_CODE (parg0) == MULT_EXPR
7474 && TREE_CODE (parg1) != MULT_EXPR)
7475 return fold_build2 (pcode, type,
7476 fold_build2 (PLUS_EXPR, type,
7477 fold_convert (type, parg0),
7478 fold_convert (type, marg)),
7479 fold_convert (type, parg1));
7480 if (TREE_CODE (parg0) != MULT_EXPR
7481 && TREE_CODE (parg1) == MULT_EXPR)
7482 return fold_build2 (PLUS_EXPR, type,
7483 fold_convert (type, parg0),
7484 fold_build2 (pcode, type,
7485 fold_convert (type, marg),
7486 fold_convert (type,
7487 parg1)));
7488 }
7489
7490 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7491 {
7492 tree arg00, arg01, arg10, arg11;
7493 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7494
7495 /* (A * C) + (B * C) -> (A+B) * C.
7496 We are most concerned about the case where C is a constant,
7497 but other combinations show up during loop reduction. Since
7498 it is not difficult, try all four possibilities. */
7499
7500 arg00 = TREE_OPERAND (arg0, 0);
7501 arg01 = TREE_OPERAND (arg0, 1);
7502 arg10 = TREE_OPERAND (arg1, 0);
7503 arg11 = TREE_OPERAND (arg1, 1);
7504 same = NULL_TREE;
7505
7506 if (operand_equal_p (arg01, arg11, 0))
7507 same = arg01, alt0 = arg00, alt1 = arg10;
7508 else if (operand_equal_p (arg00, arg10, 0))
7509 same = arg00, alt0 = arg01, alt1 = arg11;
7510 else if (operand_equal_p (arg00, arg11, 0))
7511 same = arg00, alt0 = arg01, alt1 = arg10;
7512 else if (operand_equal_p (arg01, arg10, 0))
7513 same = arg01, alt0 = arg00, alt1 = arg11;
7514
7515 /* No identical multiplicands; see if we can find a common
7516 power-of-two factor in non-power-of-two multiplies. This
7517 can help in multi-dimensional array access. */
7518 else if (TREE_CODE (arg01) == INTEGER_CST
7519 && TREE_CODE (arg11) == INTEGER_CST
7520 && TREE_INT_CST_HIGH (arg01) == 0
7521 && TREE_INT_CST_HIGH (arg11) == 0)
7522 {
7523 HOST_WIDE_INT int01, int11, tmp;
7524 int01 = TREE_INT_CST_LOW (arg01);
7525 int11 = TREE_INT_CST_LOW (arg11);
7526
7527 /* Move min of absolute values to int11. */
7528 if ((int01 >= 0 ? int01 : -int01)
7529 < (int11 >= 0 ? int11 : -int11))
7530 {
7531 tmp = int01, int01 = int11, int11 = tmp;
7532 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7533 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7534 }
7535
7536 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7537 {
7538 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7539 build_int_cst (NULL_TREE,
7540 int01 / int11));
7541 alt1 = arg10;
7542 same = arg11;
7543 }
7544 }
7545
7546 if (same)
7547 return fold_build2 (MULT_EXPR, type,
7548 fold_build2 (PLUS_EXPR, type,
7549 fold_convert (type, alt0),
7550 fold_convert (type, alt1)),
7551 fold_convert (type, same));
7552 }
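/* Illustrative sketch, not part of the original source: the
   factoring above folds  i*n + j*n  to  (i + j) * n,  and the
   power-of-two case folds  i*12 + j*4  to  (i*3 + j) * 4,  useful
   when 4 is the element size of an array dimension.  */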
7553
7554 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7555 of the array. The loop optimizer sometimes produces this type of
7556 expression. */
7557 if (TREE_CODE (arg0) == ADDR_EXPR)
7558 {
7559 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7560 if (tem)
7561 return fold_convert (type, fold (tem));
7562 }
7563 else if (TREE_CODE (arg1) == ADDR_EXPR)
7564 {
7565 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7566 if (tem)
7567 return fold_convert (type, fold (tem));
7568 }
7569 }
7570 else
7571 {
7572 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7573 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7574 return non_lvalue (fold_convert (type, arg0));
7575
7576 /* Likewise if the operands are reversed. */
7577 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7578 return non_lvalue (fold_convert (type, arg1));
7579
7580 /* Convert X + -C into X - C. */
7581 if (TREE_CODE (arg1) == REAL_CST
7582 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7583 {
7584 tem = fold_negate_const (arg1, type);
7585 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7586 return fold_build2 (MINUS_EXPR, type,
7587 fold_convert (type, arg0),
7588 fold_convert (type, tem));
7589 }
7590
7591 if (flag_unsafe_math_optimizations
7592 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7593 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7594 && (tem = distribute_real_division (code, type, arg0, arg1)))
7595 return tem;
7596
7597 /* Convert x+x into x*2.0. */
7598 if (operand_equal_p (arg0, arg1, 0)
7599 && SCALAR_FLOAT_TYPE_P (type))
7600 return fold_build2 (MULT_EXPR, type, arg0,
7601 build_real (type, dconst2));
7602
7603 /* Convert x*c+x into x*(c+1). */
7604 if (flag_unsafe_math_optimizations
7605 && TREE_CODE (arg0) == MULT_EXPR
7606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7607 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7608 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7609 {
7610 REAL_VALUE_TYPE c;
7611
7612 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7613 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7614 return fold_build2 (MULT_EXPR, type, arg1,
7615 build_real (type, c));
7616 }
7617
7618 /* Convert x+x*c into x*(c+1). */
7619 if (flag_unsafe_math_optimizations
7620 && TREE_CODE (arg1) == MULT_EXPR
7621 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7622 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7623 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7624 {
7625 REAL_VALUE_TYPE c;
7626
7627 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7628 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7629 return fold_build2 (MULT_EXPR, type, arg0,
7630 build_real (type, c));
7631 }
7632
7633 /* Convert x*c1+x*c2 into x*(c1+c2). */
7634 if (flag_unsafe_math_optimizations
7635 && TREE_CODE (arg0) == MULT_EXPR
7636 && TREE_CODE (arg1) == MULT_EXPR
7637 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7638 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7639 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7640 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7641 && operand_equal_p (TREE_OPERAND (arg0, 0),
7642 TREE_OPERAND (arg1, 0), 0))
7643 {
7644 REAL_VALUE_TYPE c1, c2;
7645
7646 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7647 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7648 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7649 return fold_build2 (MULT_EXPR, type,
7650 TREE_OPERAND (arg0, 0),
7651 build_real (type, c1));
7652 }
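/* Illustrative sketch, not part of the original source: with
   -funsafe-math-optimizations the three rules above fold
     x*3.0 + x      ==>  x * 4.0
     x + x*3.0      ==>  x * 4.0
     x*2.0 + x*3.0  ==>  x * 5.0
   rewrites that need the flag because they can change rounding.  */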
7653 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7654 if (flag_unsafe_math_optimizations
7655 && TREE_CODE (arg1) == PLUS_EXPR
7656 && TREE_CODE (arg0) != MULT_EXPR)
7657 {
7658 tree tree10 = TREE_OPERAND (arg1, 0);
7659 tree tree11 = TREE_OPERAND (arg1, 1);
7660 if (TREE_CODE (tree11) == MULT_EXPR
7661 && TREE_CODE (tree10) == MULT_EXPR)
7662 {
7663 tree tree0;
7664 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7665 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7666 }
7667 }
7668 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7669 if (flag_unsafe_math_optimizations
7670 && TREE_CODE (arg0) == PLUS_EXPR
7671 && TREE_CODE (arg1) != MULT_EXPR)
7672 {
7673 tree tree00 = TREE_OPERAND (arg0, 0);
7674 tree tree01 = TREE_OPERAND (arg0, 1);
7675 if (TREE_CODE (tree01) == MULT_EXPR
7676 && TREE_CODE (tree00) == MULT_EXPR)
7677 {
7678 tree tree0;
7679 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7680 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7681 }
7682 }
7683 }
7684
7685 bit_rotate:
7686 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
7687 of A, is a rotate of A by C1 bits. */
7688 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
7689 of A, is a rotate of A by B bits. */
7690 {
7691 enum tree_code code0, code1;
7692 code0 = TREE_CODE (arg0);
7693 code1 = TREE_CODE (arg1);
7694 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7695 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7696 && operand_equal_p (TREE_OPERAND (arg0, 0),
7697 TREE_OPERAND (arg1, 0), 0)
7698 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7699 {
7700 tree tree01, tree11;
7701 enum tree_code code01, code11;
7702
7703 tree01 = TREE_OPERAND (arg0, 1);
7704 tree11 = TREE_OPERAND (arg1, 1);
7705 STRIP_NOPS (tree01);
7706 STRIP_NOPS (tree11);
7707 code01 = TREE_CODE (tree01);
7708 code11 = TREE_CODE (tree11);
7709 if (code01 == INTEGER_CST
7710 && code11 == INTEGER_CST
7711 && TREE_INT_CST_HIGH (tree01) == 0
7712 && TREE_INT_CST_HIGH (tree11) == 0
7713 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7714 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7715 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7716 code0 == LSHIFT_EXPR ? tree01 : tree11);
7717 else if (code11 == MINUS_EXPR)
7718 {
7719 tree tree110, tree111;
7720 tree110 = TREE_OPERAND (tree11, 0);
7721 tree111 = TREE_OPERAND (tree11, 1);
7722 STRIP_NOPS (tree110);
7723 STRIP_NOPS (tree111);
7724 if (TREE_CODE (tree110) == INTEGER_CST
7725 && 0 == compare_tree_int (tree110,
7726 TYPE_PRECISION
7727 (TREE_TYPE (TREE_OPERAND
7728 (arg0, 0))))
7729 && operand_equal_p (tree01, tree111, 0))
7730 return build2 ((code0 == LSHIFT_EXPR
7731 ? LROTATE_EXPR
7732 : RROTATE_EXPR),
7733 type, TREE_OPERAND (arg0, 0), tree01);
7734 }
7735 else if (code01 == MINUS_EXPR)
7736 {
7737 tree tree010, tree011;
7738 tree010 = TREE_OPERAND (tree01, 0);
7739 tree011 = TREE_OPERAND (tree01, 1);
7740 STRIP_NOPS (tree010);
7741 STRIP_NOPS (tree011);
7742 if (TREE_CODE (tree010) == INTEGER_CST
7743 && 0 == compare_tree_int (tree010,
7744 TYPE_PRECISION
7745 (TREE_TYPE (TREE_OPERAND
7746 (arg0, 0))))
7747 && operand_equal_p (tree11, tree011, 0))
7748 return build2 ((code0 != LSHIFT_EXPR
7749 ? LROTATE_EXPR
7750 : RROTATE_EXPR),
7751 type, TREE_OPERAND (arg0, 0), tree11);
7752 }
7753 }
7754 }
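/* Illustrative sketch, not part of the original source: for a 32-bit
   unsigned x, the code above recognizes the rotate idiom
     (x << n) + (x >> (32 - n))
   (also reached for | via the bit_ior case) and folds it to a single
   LROTATE_EXPR, i.e. a rotate-left of x by n bits.  */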
7755
7756 associate:
7757 /* In most languages, we can't reassociate operations on floats
7758 across parentheses. Rather than remember where the parentheses were, we
7759 don't associate floats at all, unless the user has specified
7760 -funsafe-math-optimizations. */
7761
7762 if (! wins
7763 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7764 {
7765 tree var0, con0, lit0, minus_lit0;
7766 tree var1, con1, lit1, minus_lit1;
7767
7768 /* Split both trees into variables, constants, and literals. Then
7769 associate each group together, the constants with literals,
7770 then the result with variables. This increases the chances of
7771 literals being recombined later and of generating relocatable
7772 expressions for the sum of a constant and literal. */
7773 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7774 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7775 code == MINUS_EXPR);
7776
7777 /* Only do something if we found more than two objects. Otherwise,
7778 nothing has changed and we risk infinite recursion. */
7779 if (2 < ((var0 != 0) + (var1 != 0)
7780 + (con0 != 0) + (con1 != 0)
7781 + (lit0 != 0) + (lit1 != 0)
7782 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7783 {
7784 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7785 if (code == MINUS_EXPR)
7786 code = PLUS_EXPR;
7787
7788 var0 = associate_trees (var0, var1, code, type);
7789 con0 = associate_trees (con0, con1, code, type);
7790 lit0 = associate_trees (lit0, lit1, code, type);
7791 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7792
7793 /* Preserve the MINUS_EXPR if the negative part of the literal is
7794 greater than the positive part. Otherwise, the multiplicative
7795 folding code (i.e. extract_muldiv) may be fooled when
7796 unsigned constants are subtracted, as in the following
7797 example: ((X*2 + 4) - 8U)/2. */
7798 if (minus_lit0 && lit0)
7799 {
7800 if (TREE_CODE (lit0) == INTEGER_CST
7801 && TREE_CODE (minus_lit0) == INTEGER_CST
7802 && tree_int_cst_lt (lit0, minus_lit0))
7803 {
7804 minus_lit0 = associate_trees (minus_lit0, lit0,
7805 MINUS_EXPR, type);
7806 lit0 = 0;
7807 }
7808 else
7809 {
7810 lit0 = associate_trees (lit0, minus_lit0,
7811 MINUS_EXPR, type);
7812 minus_lit0 = 0;
7813 }
7814 }
7815 if (minus_lit0)
7816 {
7817 if (con0 == 0)
7818 return fold_convert (type,
7819 associate_trees (var0, minus_lit0,
7820 MINUS_EXPR, type));
7821 else
7822 {
7823 con0 = associate_trees (con0, minus_lit0,
7824 MINUS_EXPR, type);
7825 return fold_convert (type,
7826 associate_trees (var0, con0,
7827 PLUS_EXPR, type));
7828 }
7829 }
7830
7831 con0 = associate_trees (con0, lit0, code, type);
7832 return fold_convert (type, associate_trees (var0, con0,
7833 code, type));
7834 }
7835 }
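/* Illustrative sketch, not part of the original source: for
   (x + 3) + (y + 5), split_tree yields variables x and y and
   literals 3 and 5; reassociation then produces  (x + y) + 8,
   combining the literals at compile time.  */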
7836
7837 binary:
7838 if (wins)
7839 t1 = const_binop (code, arg0, arg1, 0);
7840 if (t1 != NULL_TREE)
7841 {
7842 /* The return value should always have
7843 the same type as the original expression. */
7844 if (TREE_TYPE (t1) != type)
7845 t1 = fold_convert (type, t1);
7846
7847 return t1;
7848 }
7849 return NULL_TREE;
7850
7851 case MINUS_EXPR:
7852 /* A - (-B) -> A + B */
7853 if (TREE_CODE (arg1) == NEGATE_EXPR)
7854 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7855 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7856 if (TREE_CODE (arg0) == NEGATE_EXPR
7857 && (FLOAT_TYPE_P (type)
7858 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7859 && negate_expr_p (arg1)
7860 && reorder_operands_p (arg0, arg1))
7861 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7862 TREE_OPERAND (arg0, 0));
7863 /* Convert -A - 1 to ~A. */
7864 if (INTEGRAL_TYPE_P (type)
7865 && TREE_CODE (arg0) == NEGATE_EXPR
7866 && integer_onep (arg1))
7867 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7868
7869 /* Convert -1 - A to ~A. */
7870 if (INTEGRAL_TYPE_P (type)
7871 && integer_all_onesp (arg0))
7872 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7873
7874 if (TREE_CODE (type) == COMPLEX_TYPE)
7875 {
7876 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7877 if (tem)
7878 return tem;
7879 }
7880
7881 if (! FLOAT_TYPE_P (type))
7882 {
7883 if (! wins && integer_zerop (arg0))
7884 return negate_expr (fold_convert (type, arg1));
7885 if (integer_zerop (arg1))
7886 return non_lvalue (fold_convert (type, arg0));
7887
7888 /* Fold A - (A & B) into ~B & A. */
7889 if (!TREE_SIDE_EFFECTS (arg0)
7890 && TREE_CODE (arg1) == BIT_AND_EXPR)
7891 {
7892 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7893 return fold_build2 (BIT_AND_EXPR, type,
7894 fold_build1 (BIT_NOT_EXPR, type,
7895 TREE_OPERAND (arg1, 0)),
7896 arg0);
7897 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7898 return fold_build2 (BIT_AND_EXPR, type,
7899 fold_build1 (BIT_NOT_EXPR, type,
7900 TREE_OPERAND (arg1, 1)),
7901 arg0);
7902 }
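/* Illustrative sketch, not part of the original source: the fold
   above rewrites  a - (a & b)  as  ~b & a,  clearing in a exactly
   the bits that are set in b.  */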
7903
7904 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7905 any power of 2 minus 1. */
7906 if (TREE_CODE (arg0) == BIT_AND_EXPR
7907 && TREE_CODE (arg1) == BIT_AND_EXPR
7908 && operand_equal_p (TREE_OPERAND (arg0, 0),
7909 TREE_OPERAND (arg1, 0), 0))
7910 {
7911 tree mask0 = TREE_OPERAND (arg0, 1);
7912 tree mask1 = TREE_OPERAND (arg1, 1);
7913 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7914
7915 if (operand_equal_p (tem, mask1, 0))
7916 {
7917 tem = fold_build2 (BIT_XOR_EXPR, type,
7918 TREE_OPERAND (arg0, 0), mask1);
7919 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7920 }
7921 }
7922 }
7923
7924 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7925 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7926 return non_lvalue (fold_convert (type, arg0));
7927
7928 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7929 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7930 (-ARG1 + ARG0) reduces to -ARG1. */
7931 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7932 return negate_expr (fold_convert (type, arg1));
7933
7934 /* Fold &x - &x. This can happen from &x.foo - &x.
7935 This is unsafe for certain floats even in non-IEEE formats.
7936 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7937 Also note that operand_equal_p is always false if an operand
7938 is volatile. */
7939
7940 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7941 && operand_equal_p (arg0, arg1, 0))
7942 return fold_convert (type, integer_zero_node);
7943
7944 /* A - B -> A + (-B) if B is easily negatable. */
7945 if (!wins && negate_expr_p (arg1)
7946 && ((FLOAT_TYPE_P (type)
7947 /* Avoid this transformation if B is a positive REAL_CST. */
7948 && (TREE_CODE (arg1) != REAL_CST
7949 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7950 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7951 return fold_build2 (PLUS_EXPR, type,
7952 fold_convert (type, arg0),
7953 fold_convert (type, negate_expr (arg1)));
7954
7955 /* Try folding difference of addresses. */
7956 {
7957 HOST_WIDE_INT diff;
7958
7959 if ((TREE_CODE (arg0) == ADDR_EXPR
7960 || TREE_CODE (arg1) == ADDR_EXPR)
7961 && ptr_difference_const (arg0, arg1, &diff))
7962 return build_int_cst_type (type, diff);
7963 }
7964
7965 /* Fold &a[i] - &a[j] to (i - j) scaled by the element size. */
7966 if (TREE_CODE (arg0) == ADDR_EXPR
7967 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7968 && TREE_CODE (arg1) == ADDR_EXPR
7969 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7970 {
7971 tree aref0 = TREE_OPERAND (arg0, 0);
7972 tree aref1 = TREE_OPERAND (arg1, 0);
7973 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7974 TREE_OPERAND (aref1, 0), 0))
7975 {
7976 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7977 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7978 tree esz = array_ref_element_size (aref0);
7979 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7980 return fold_build2 (MULT_EXPR, type, diff,
7981 fold_convert (type, esz));
7982
7983 }
7984 }
7985
7986 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7987 of the array. The loop optimizer sometimes produces this type of
7988 expression. */
7989 if (TREE_CODE (arg0) == ADDR_EXPR)
7990 {
7991 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7992 if (tem)
7993 return fold_convert (type, fold (tem));
7994 }
7995
7996 if (flag_unsafe_math_optimizations
7997 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7998 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7999 && (tem = distribute_real_division (code, type, arg0, arg1)))
8000 return tem;
8001
8002 if (TREE_CODE (arg0) == MULT_EXPR
8003 && TREE_CODE (arg1) == MULT_EXPR
8004 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8005 {
8006 /* (A * C) - (B * C) -> (A-B) * C. */
8007 if (operand_equal_p (TREE_OPERAND (arg0, 1),
8008 TREE_OPERAND (arg1, 1), 0))
8009 return fold_build2 (MULT_EXPR, type,
8010 fold_build2 (MINUS_EXPR, type,
8011 TREE_OPERAND (arg0, 0),
8012 TREE_OPERAND (arg1, 0)),
8013 TREE_OPERAND (arg0, 1));
8014 /* (A * C1) - (A * C2) -> A * (C1-C2). */
8015 if (operand_equal_p (TREE_OPERAND (arg0, 0),
8016 TREE_OPERAND (arg1, 0), 0))
8017 return fold_build2 (MULT_EXPR, type,
8018 TREE_OPERAND (arg0, 0),
8019 fold_build2 (MINUS_EXPR, type,
8020 TREE_OPERAND (arg0, 1),
8021 TREE_OPERAND (arg1, 1)));
8022 }
8023
8024 goto associate;
8025
8026 case MULT_EXPR:
8027 /* (-A) * (-B) -> A * B */
8028 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8029 return fold_build2 (MULT_EXPR, type,
8030 TREE_OPERAND (arg0, 0),
8031 negate_expr (arg1));
8032 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8033 return fold_build2 (MULT_EXPR, type,
8034 negate_expr (arg0),
8035 TREE_OPERAND (arg1, 0));
8036
8037 if (TREE_CODE (type) == COMPLEX_TYPE)
8038 {
8039 tem = fold_complex_mult (type, arg0, arg1);
8040 if (tem)
8041 return tem;
8042 }
8043
8044 if (! FLOAT_TYPE_P (type))
8045 {
8046 if (integer_zerop (arg1))
8047 return omit_one_operand (type, arg1, arg0);
8048 if (integer_onep (arg1))
8049 return non_lvalue (fold_convert (type, arg0));
8050 /* Transform x * -1 into -x. */
8051 if (integer_all_onesp (arg1))
8052 return fold_convert (type, negate_expr (arg0));
8053
8054 /* (a * (1 << b)) is (a << b). */
8055 if (TREE_CODE (arg1) == LSHIFT_EXPR
8056 && integer_onep (TREE_OPERAND (arg1, 0)))
8057 return fold_build2 (LSHIFT_EXPR, type, arg0,
8058 TREE_OPERAND (arg1, 1));
8059 if (TREE_CODE (arg0) == LSHIFT_EXPR
8060 && integer_onep (TREE_OPERAND (arg0, 0)))
8061 return fold_build2 (LSHIFT_EXPR, type, arg1,
8062 TREE_OPERAND (arg0, 1));
8063
8064 if (TREE_CODE (arg1) == INTEGER_CST
8065 && 0 != (tem = extract_muldiv (op0,
8066 fold_convert (type, arg1),
8067 code, NULL_TREE)))
8068 return fold_convert (type, tem);
8069
8070 }
8071 else
8072 {
8073 /* Maybe fold x * 0 to 0. The expressions aren't the same
8074 when x is NaN, since x * 0 is also NaN. Nor are they the
8075 same in modes with signed zeros, since multiplying a
8076 negative value by 0 gives -0, not +0. */
8077 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8078 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8079 && real_zerop (arg1))
8080 return omit_one_operand (type, arg1, arg0);
8081 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8082 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8083 && real_onep (arg1))
8084 return non_lvalue (fold_convert (type, arg0));
8085
8086 /* Transform x * -1.0 into -x. */
8087 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8088 && real_minus_onep (arg1))
8089 return fold_convert (type, negate_expr (arg0));
8090
8091 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8092 if (flag_unsafe_math_optimizations
8093 && TREE_CODE (arg0) == RDIV_EXPR
8094 && TREE_CODE (arg1) == REAL_CST
8095 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8096 {
8097 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8098 arg1, 0);
8099 if (tem)
8100 return fold_build2 (RDIV_EXPR, type, tem,
8101 TREE_OPERAND (arg0, 1));
8102 }
8103
8104 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8105 if (operand_equal_p (arg0, arg1, 0))
8106 {
8107 tree tem = fold_strip_sign_ops (arg0);
8108 if (tem != NULL_TREE)
8109 {
8110 tem = fold_convert (type, tem);
8111 return fold_build2 (MULT_EXPR, type, tem, tem);
8112 }
8113 }
8114
8115 if (flag_unsafe_math_optimizations)
8116 {
8117 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8118 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8119
8120 /* Optimizations of root(...)*root(...). */
8121 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8122 {
8123 tree rootfn, arg, arglist;
8124 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8125 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8126
8127 /* Optimize sqrt(x)*sqrt(x) as x. */
8128 if (BUILTIN_SQRT_P (fcode0)
8129 && operand_equal_p (arg00, arg10, 0)
8130 && ! HONOR_SNANS (TYPE_MODE (type)))
8131 return arg00;
8132
8133 /* Optimize root(x)*root(y) as root(x*y). */
8134 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8135 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8136 arglist = build_tree_list (NULL_TREE, arg);
8137 return build_function_call_expr (rootfn, arglist);
8138 }
8139
8140 /* Optimize expN(x)*expN(y) as expN(x+y). */
8141 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8142 {
8143 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8144 tree arg = fold_build2 (PLUS_EXPR, type,
8145 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8146 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8147 tree arglist = build_tree_list (NULL_TREE, arg);
8148 return build_function_call_expr (expfn, arglist);
8149 }
8150
8151 /* Optimizations of pow(...)*pow(...). */
8152 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8153 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8154 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8155 {
8156 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8157 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8158 1)));
8159 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8160 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8161 1)));
8162
8163 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8164 if (operand_equal_p (arg01, arg11, 0))
8165 {
8166 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8167 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8168 tree arglist = tree_cons (NULL_TREE, arg,
8169 build_tree_list (NULL_TREE,
8170 arg01));
8171 return build_function_call_expr (powfn, arglist);
8172 }
8173
8174 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8175 if (operand_equal_p (arg00, arg10, 0))
8176 {
8177 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8178 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8179 tree arglist = tree_cons (NULL_TREE, arg00,
8180 build_tree_list (NULL_TREE,
8181 arg));
8182 return build_function_call_expr (powfn, arglist);
8183 }
8184 }
8185
8186 /* Optimize tan(x)*cos(x) as sin(x). */
8187 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8188 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8189 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8190 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8191 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8192 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8193 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8194 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8195 {
8196 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8197
8198 if (sinfn != NULL_TREE)
8199 return build_function_call_expr (sinfn,
8200 TREE_OPERAND (arg0, 1));
8201 }
8202
8203 /* Optimize x*pow(x,c) as pow(x,c+1). */
8204 if (fcode1 == BUILT_IN_POW
8205 || fcode1 == BUILT_IN_POWF
8206 || fcode1 == BUILT_IN_POWL)
8207 {
8208 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8209 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8210 1)));
8211 if (TREE_CODE (arg11) == REAL_CST
8212 && ! TREE_CONSTANT_OVERFLOW (arg11)
8213 && operand_equal_p (arg0, arg10, 0))
8214 {
8215 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8216 REAL_VALUE_TYPE c;
8217 tree arg, arglist;
8218
8219 c = TREE_REAL_CST (arg11);
8220 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8221 arg = build_real (type, c);
8222 arglist = build_tree_list (NULL_TREE, arg);
8223 arglist = tree_cons (NULL_TREE, arg0, arglist);
8224 return build_function_call_expr (powfn, arglist);
8225 }
8226 }
8227
8228 /* Optimize pow(x,c)*x as pow(x,c+1). */
8229 if (fcode0 == BUILT_IN_POW
8230 || fcode0 == BUILT_IN_POWF
8231 || fcode0 == BUILT_IN_POWL)
8232 {
8233 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8234 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8235 1)));
8236 if (TREE_CODE (arg01) == REAL_CST
8237 && ! TREE_CONSTANT_OVERFLOW (arg01)
8238 && operand_equal_p (arg1, arg00, 0))
8239 {
8240 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8241 REAL_VALUE_TYPE c;
8242 tree arg, arglist;
8243
8244 c = TREE_REAL_CST (arg01);
8245 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8246 arg = build_real (type, c);
8247 arglist = build_tree_list (NULL_TREE, arg);
8248 arglist = tree_cons (NULL_TREE, arg1, arglist);
8249 return build_function_call_expr (powfn, arglist);
8250 }
8251 }
8252
8253 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8254 if (! optimize_size
8255 && operand_equal_p (arg0, arg1, 0))
8256 {
8257 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8258
8259 if (powfn)
8260 {
8261 tree arg = build_real (type, dconst2);
8262 tree arglist = build_tree_list (NULL_TREE, arg);
8263 arglist = tree_cons (NULL_TREE, arg0, arglist);
8264 return build_function_call_expr (powfn, arglist);
8265 }
8266 }
8267 }
8268 }
8269 goto associate;
8270
8271 case BIT_IOR_EXPR:
8272 bit_ior:
8273 if (integer_all_onesp (arg1))
8274 return omit_one_operand (type, arg1, arg0);
8275 if (integer_zerop (arg1))
8276 return non_lvalue (fold_convert (type, arg0));
8277 if (operand_equal_p (arg0, arg1, 0))
8278 return non_lvalue (fold_convert (type, arg0));
8279
8280 /* ~X | X is -1. */
8281 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8282 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8283 {
8284 t1 = build_int_cst (type, -1);
8285 t1 = force_fit_type (t1, 0, false, false);
8286 return omit_one_operand (type, t1, arg1);
8287 }
8288
8289 /* X | ~X is -1. */
8290 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8291 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8292 {
8293 t1 = build_int_cst (type, -1);
8294 t1 = force_fit_type (t1, 0, false, false);
8295 return omit_one_operand (type, t1, arg0);
8296 }
8297
8298 t1 = distribute_bit_expr (code, type, arg0, arg1);
8299 if (t1 != NULL_TREE)
8300 return t1;
8301
8302 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8303
8304 This results in more efficient code for machines without a NAND
8305 instruction. Combine will canonicalize to the first form
8306 which will allow use of NAND instructions provided by the
8307 backend if they exist. */
8308 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8309 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8310 {
8311 return fold_build1 (BIT_NOT_EXPR, type,
8312 build2 (BIT_AND_EXPR, type,
8313 TREE_OPERAND (arg0, 0),
8314 TREE_OPERAND (arg1, 0)));
8315 }
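/* Illustrative sketch, not part of the original source: the De
   Morgan rewrite above turns  ~a | ~b  into  ~(a & b),  the form
   combine can later match to a machine NAND instruction.  */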
8316
8317 /* See if this can be simplified into a rotate first. If that
8318 is unsuccessful continue in the association code. */
8319 goto bit_rotate;
8320
8321 case BIT_XOR_EXPR:
8322 if (integer_zerop (arg1))
8323 return non_lvalue (fold_convert (type, arg0));
8324 if (integer_all_onesp (arg1))
8325 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8326 if (operand_equal_p (arg0, arg1, 0))
8327 return omit_one_operand (type, integer_zero_node, arg0);
8328
8329 /* ~X ^ X is -1. */
8330 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8331 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8332 {
8333 t1 = build_int_cst (type, -1);
8334 t1 = force_fit_type (t1, 0, false, false);
8335 return omit_one_operand (type, t1, arg1);
8336 }
8337
8338 /* X ^ ~X is -1. */
8339 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8340 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8341 {
8342 t1 = build_int_cst (type, -1);
8343 t1 = force_fit_type (t1, 0, false, false);
8344 return omit_one_operand (type, t1, arg0);
8345 }
8346
8347 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8348 with a constant, and the two constants have no bits in common,
8349 we should treat this as a BIT_IOR_EXPR since this may produce more
8350 simplifications. */
8351 if (TREE_CODE (arg0) == BIT_AND_EXPR
8352 && TREE_CODE (arg1) == BIT_AND_EXPR
8353 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8354 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8355 && integer_zerop (const_binop (BIT_AND_EXPR,
8356 TREE_OPERAND (arg0, 1),
8357 TREE_OPERAND (arg1, 1), 0)))
8358 {
8359 code = BIT_IOR_EXPR;
8360 goto bit_ior;
8361 }
8362
8363 /* Convert ~X ^ ~Y to X ^ Y. */
8364 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8365 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8366 return fold_build2 (code, type,
8367 fold_convert (type, TREE_OPERAND (arg0, 0)),
8368 fold_convert (type, TREE_OPERAND (arg1, 0)));
8369
8370 /* See if this can be simplified into a rotate first. If that
8371 is unsuccessful continue in the association code. */
8372 goto bit_rotate;
8373
8374 case BIT_AND_EXPR:
8375 if (integer_all_onesp (arg1))
8376 return non_lvalue (fold_convert (type, arg0));
8377 if (integer_zerop (arg1))
8378 return omit_one_operand (type, arg1, arg0);
8379 if (operand_equal_p (arg0, arg1, 0))
8380 return non_lvalue (fold_convert (type, arg0));
8381
8382 /* ~X & X is always zero. */
8383 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8384 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8385 return omit_one_operand (type, integer_zero_node, arg1);
8386
8387 /* X & ~X is always zero. */
8388 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8389 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8390 return omit_one_operand (type, integer_zero_node, arg0);
8391
8392 t1 = distribute_bit_expr (code, type, arg0, arg1);
8393 if (t1 != NULL_TREE)
8394 return t1;
8395 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8396 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8397 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8398 {
8399 unsigned int prec
8400 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8401
8402 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8403 && (~TREE_INT_CST_LOW (arg1)
8404 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8405 return fold_convert (type, TREE_OPERAND (arg0, 0));
8406 }
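/* Illustrative sketch, not part of the original source: for an
   unsigned char c, every bit of (int) c above bit 7 is already
   zero, so  ((int) c & 0377)  folds to plain  (int) c.  */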
8407
8408 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8409
8410 This results in more efficient code for machines without a NOR
8411 instruction. Combine will canonicalize to the first form
8412 which will allow use of NOR instructions provided by the
8413 backend if they exist. */
8414 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8415 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8416 {
8417 return fold_build1 (BIT_NOT_EXPR, type,
8418 build2 (BIT_IOR_EXPR, type,
8419 TREE_OPERAND (arg0, 0),
8420 TREE_OPERAND (arg1, 0)));
8421 }
8422
8423 goto associate;
8424
8425 case RDIV_EXPR:
8426 /* Don't touch a floating-point divide by zero unless the mode
8427 of the constant can represent infinity. */
8428 if (TREE_CODE (arg1) == REAL_CST
8429 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8430 && real_zerop (arg1))
8431 return NULL_TREE;
8432
8433 /* (-A) / (-B) -> A / B */
8434 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8435 return fold_build2 (RDIV_EXPR, type,
8436 TREE_OPERAND (arg0, 0),
8437 negate_expr (arg1));
8438 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8439 return fold_build2 (RDIV_EXPR, type,
8440 negate_expr (arg0),
8441 TREE_OPERAND (arg1, 0));
8442
8443 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8444 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8445 && real_onep (arg1))
8446 return non_lvalue (fold_convert (type, arg0));
8447
8448 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8449 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8450 && real_minus_onep (arg1))
8451 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8452
8453 /* If ARG1 is a constant, we can convert this to a multiply by the
8454 reciprocal. This does not have the same rounding properties,
8455 so only do this if -funsafe-math-optimizations is enabled. We can actually
8456 always safely do it if ARG1 is a power of two, but it's hard to
8457 tell if it is or not in a portable manner. */
8458 if (TREE_CODE (arg1) == REAL_CST)
8459 {
8460 if (flag_unsafe_math_optimizations
8461 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8462 arg1, 0)))
8463 return fold_build2 (MULT_EXPR, type, arg0, tem);
8464 /* Find the reciprocal if optimizing and the result is exact. */
8465 if (optimize)
8466 {
8467 REAL_VALUE_TYPE r;
8468 r = TREE_REAL_CST (arg1);
8469 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8470 {
8471 tem = build_real (type, r);
8472 return fold_build2 (MULT_EXPR, type,
8473 fold_convert (type, arg0), tem);
8474 }
8475 }
8476 }
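/* Illustrative sketch, not part of the original source: x / 2.0 has
   an exactly representable reciprocal, so it becomes  x * 0.5  even
   without -funsafe-math-optimizations; x / 3.0 is rewritten as a
   multiply only under the flag, since 1/3 is inexact in binary.  */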
8477 /* Convert A/B/C to A/(B*C). */
8478 if (flag_unsafe_math_optimizations
8479 && TREE_CODE (arg0) == RDIV_EXPR)
8480 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8481 fold_build2 (MULT_EXPR, type,
8482 TREE_OPERAND (arg0, 1), arg1));
8483
8484 /* Convert A/(B/C) to (A/B)*C. */
8485 if (flag_unsafe_math_optimizations
8486 && TREE_CODE (arg1) == RDIV_EXPR)
8487 return fold_build2 (MULT_EXPR, type,
8488 fold_build2 (RDIV_EXPR, type, arg0,
8489 TREE_OPERAND (arg1, 0)),
8490 TREE_OPERAND (arg1, 1));
8491
8492 /* Convert C1/(X*C2) into (C1/C2)/X. */
8493 if (flag_unsafe_math_optimizations
8494 && TREE_CODE (arg1) == MULT_EXPR
8495 && TREE_CODE (arg0) == REAL_CST
8496 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8497 {
8498 tree tem = const_binop (RDIV_EXPR, arg0,
8499 TREE_OPERAND (arg1, 1), 0);
8500 if (tem)
8501 return fold_build2 (RDIV_EXPR, type, tem,
8502 TREE_OPERAND (arg1, 0));
8503 }
8504
8505 if (TREE_CODE (type) == COMPLEX_TYPE)
8506 {
8507 tem = fold_complex_div (type, arg0, arg1, code);
8508 if (tem)
8509 return tem;
8510 }
8511
8512 if (flag_unsafe_math_optimizations)
8513 {
8514 enum built_in_function fcode = builtin_mathfn_code (arg1);
8515 /* Optimize x/expN(y) into x*expN(-y). */
8516 if (BUILTIN_EXPONENT_P (fcode))
8517 {
8518 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8519 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8520 tree arglist = build_tree_list (NULL_TREE,
8521 fold_convert (type, arg));
8522 arg1 = build_function_call_expr (expfn, arglist);
8523 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8524 }
8525
8526 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8527 if (fcode == BUILT_IN_POW
8528 || fcode == BUILT_IN_POWF
8529 || fcode == BUILT_IN_POWL)
8530 {
8531 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8532 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8533 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8534 tree neg11 = fold_convert (type, negate_expr (arg11));
8535 tree arglist = tree_cons (NULL_TREE, arg10,
8536 build_tree_list (NULL_TREE, neg11));
8537 arg1 = build_function_call_expr (powfn, arglist);
8538 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8539 }
8540 }
8541
8542 if (flag_unsafe_math_optimizations)
8543 {
8544 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8545 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8546
8547 /* Optimize sin(x)/cos(x) as tan(x). */
8548 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8549 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8550 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8551 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8552 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8553 {
8554 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8555
8556 if (tanfn != NULL_TREE)
8557 return build_function_call_expr (tanfn,
8558 TREE_OPERAND (arg0, 1));
8559 }
8560
8561 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8562 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8563 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8564 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8565 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8566 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8567 {
8568 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8569
8570 if (tanfn != NULL_TREE)
8571 {
8572 tree tmp = TREE_OPERAND (arg0, 1);
8573 tmp = build_function_call_expr (tanfn, tmp);
8574 return fold_build2 (RDIV_EXPR, type,
8575 build_real (type, dconst1), tmp);
8576 }
8577 }
8578
8579 /* Optimize pow(x,c)/x as pow(x,c-1). */
8580 if (fcode0 == BUILT_IN_POW
8581 || fcode0 == BUILT_IN_POWF
8582 || fcode0 == BUILT_IN_POWL)
8583 {
8584 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8585 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8586 if (TREE_CODE (arg01) == REAL_CST
8587 && ! TREE_CONSTANT_OVERFLOW (arg01)
8588 && operand_equal_p (arg1, arg00, 0))
8589 {
8590 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8591 REAL_VALUE_TYPE c;
8592 tree arg, arglist;
8593
8594 c = TREE_REAL_CST (arg01);
8595 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8596 arg = build_real (type, c);
8597 arglist = build_tree_list (NULL_TREE, arg);
8598 arglist = tree_cons (NULL_TREE, arg1, arglist);
8599 return build_function_call_expr (powfn, arglist);
8600 }
8601 }
8602 }
8603 goto binary;
8604
8605 case TRUNC_DIV_EXPR:
8606 case ROUND_DIV_EXPR:
8607 case FLOOR_DIV_EXPR:
8608 case CEIL_DIV_EXPR:
8609 case EXACT_DIV_EXPR:
8610 if (integer_onep (arg1))
8611 return non_lvalue (fold_convert (type, arg0));
8612 if (integer_zerop (arg1))
8613 return NULL_TREE;
8614 /* X / -1 is -X. */
8615 if (!TYPE_UNSIGNED (type)
8616 && TREE_CODE (arg1) == INTEGER_CST
8617 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8618 && TREE_INT_CST_HIGH (arg1) == -1)
8619 return fold_convert (type, negate_expr (arg0));
8620
8621 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8622 operation, EXACT_DIV_EXPR.
8623
8624 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8625 At one time others generated faster code; it's not clear if they do
8626 after the last round of changes to the DIV code in expmed.c. */
8627 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8628 && multiple_of_p (type, arg0, arg1))
8629 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8630
8631 if (TREE_CODE (arg1) == INTEGER_CST
8632 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8633 return fold_convert (type, tem);
8634
8635 if (TREE_CODE (type) == COMPLEX_TYPE)
8636 {
8637 tem = fold_complex_div (type, arg0, arg1, code);
8638 if (tem)
8639 return tem;
8640 }
8641 goto binary;
8642
8643 case CEIL_MOD_EXPR:
8644 case FLOOR_MOD_EXPR:
8645 case ROUND_MOD_EXPR:
8646 case TRUNC_MOD_EXPR:
8647 /* X % 1 is always zero, but be sure to preserve any side
8648 effects in X. */
8649 if (integer_onep (arg1))
8650 return omit_one_operand (type, integer_zero_node, arg0);
8651
8652 /* For X % 0, return X % 0 unchanged so that we get the
8653 proper warnings and errors. */
8654 if (integer_zerop (arg1))
8655 return NULL_TREE;
8656
8657 /* 0 % X is always zero, but be sure to preserve any side
8658 effects in X. Place this after checking for X == 0. */
8659 if (integer_zerop (arg0))
8660 return omit_one_operand (type, integer_zero_node, arg1);
8661
8662 /* X % -1 is zero. */
8663 if (!TYPE_UNSIGNED (type)
8664 && TREE_CODE (arg1) == INTEGER_CST
8665 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8666 && TREE_INT_CST_HIGH (arg1) == -1)
8667 return omit_one_operand (type, integer_zero_node, arg0);
8668
8669 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8670 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8671 if (code == TRUNC_MOD_EXPR
8672 && TYPE_UNSIGNED (type)
8673 && integer_pow2p (arg1))
8674 {
8675 unsigned HOST_WIDE_INT high, low;
8676 tree mask;
8677 int l;
8678
8679 l = tree_log2 (arg1);
8680 if (l >= HOST_BITS_PER_WIDE_INT)
8681 {
8682 high = ((unsigned HOST_WIDE_INT) 1
8683 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8684 low = -1;
8685 }
8686 else
8687 {
8688 high = 0;
8689 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8690 }
8691
8692 mask = build_int_cst_wide (type, low, high);
8693 return fold_build2 (BIT_AND_EXPR, type,
8694 fold_convert (type, arg0), mask);
8695 }
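/* Worked example: for unsigned X, "X % 8" has tree_log2 (arg1) == 3,
   so low == 7 and high == 0, and the result is "X & 7".  The first
   branch only matters for divisors wider than one HOST_WIDE_INT word.  */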
8696
8697 /* X % -C is the same as X % C. */
8698 if (code == TRUNC_MOD_EXPR
8699 && !TYPE_UNSIGNED (type)
8700 && TREE_CODE (arg1) == INTEGER_CST
8701 && !TREE_CONSTANT_OVERFLOW (arg1)
8702 && TREE_INT_CST_HIGH (arg1) < 0
8703 && !flag_trapv
8704 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8705 && !sign_bit_p (arg1, arg1))
8706 return fold_build2 (code, type, fold_convert (type, arg0),
8707 fold_convert (type, negate_expr (arg1)));
8708
8709 /* X % -Y is the same as X % Y. */
8710 if (code == TRUNC_MOD_EXPR
8711 && !TYPE_UNSIGNED (type)
8712 && TREE_CODE (arg1) == NEGATE_EXPR
8713 && !flag_trapv)
8714 return fold_build2 (code, type, fold_convert (type, arg0),
8715 fold_convert (type, TREE_OPERAND (arg1, 0)));
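/* E.g. "x % -5" and "x % -y" with signed operands fold to "x % 5" and
   "x % y": truncating division gives the same remainder either way.
   The folds are skipped under -ftrapv because dropping the negation
   would also drop its overflow trap.  */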
8716
8717 if (TREE_CODE (arg1) == INTEGER_CST
8718 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8719 return fold_convert (type, tem);
8720
8721 goto binary;
8722
8723 case LROTATE_EXPR:
8724 case RROTATE_EXPR:
8725 if (integer_all_onesp (arg0))
8726 return omit_one_operand (type, arg0, arg1);
8727 goto shift;
8728
8729 case RSHIFT_EXPR:
8730 /* Optimize -1 >> x for arithmetic right shifts. */
8731 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8732 return omit_one_operand (type, arg0, arg1);
8733 /* ... fall through ... */
8734
8735 case LSHIFT_EXPR:
8736 shift:
8737 if (integer_zerop (arg1))
8738 return non_lvalue (fold_convert (type, arg0));
8739 if (integer_zerop (arg0))
8740 return omit_one_operand (type, arg0, arg1);
8741
8742 /* Since a negative shift count is not well-defined,
8743 don't try to compute it in the compiler. */
8744 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8745 return NULL_TREE;
8746 /* Rewrite an LROTATE_EXPR by a constant into an
8747 RROTATE_EXPR by a new constant. */
8748 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8749 {
8750 tree tem = build_int_cst (NULL_TREE,
8751 GET_MODE_BITSIZE (TYPE_MODE (type)));
8752 tem = fold_convert (TREE_TYPE (arg1), tem);
8753 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8754 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8755 }
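/* Example: for a 32-bit type, a left-rotate by 8 is rewritten as a
   right-rotate by 32 - 8 == 24; the two are equivalent for any
   in-range rotate count.  */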
8756
8757 /* If we have a rotate of a bit operation with the rotate count and
8758 the second operand of the bit operation both constant,
8759 permute the two operations. */
8760 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8761 && (TREE_CODE (arg0) == BIT_AND_EXPR
8762 || TREE_CODE (arg0) == BIT_IOR_EXPR
8763 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8764 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8765 return fold_build2 (TREE_CODE (arg0), type,
8766 fold_build2 (code, type,
8767 TREE_OPERAND (arg0, 0), arg1),
8768 fold_build2 (code, type,
8769 TREE_OPERAND (arg0, 1), arg1));
8770
8771 /* Two consecutive rotates adding up to the width of the mode can
8772 be ignored. */
8773 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8774 && TREE_CODE (arg0) == RROTATE_EXPR
8775 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8776 && TREE_INT_CST_HIGH (arg1) == 0
8777 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8778 && ((TREE_INT_CST_LOW (arg1)
8779 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8780 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8781 return TREE_OPERAND (arg0, 0);
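/* E.g. in a 32-bit mode, rotating x right by 8 and then right by 24
   yields x unchanged, since the two counts sum to the mode width.  */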
8782
8783 goto binary;
8784
8785 case MIN_EXPR:
8786 if (operand_equal_p (arg0, arg1, 0))
8787 return omit_one_operand (type, arg0, arg1);
8788 if (INTEGRAL_TYPE_P (type)
8789 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8790 return omit_one_operand (type, arg1, arg0);
8791 goto associate;
8792
8793 case MAX_EXPR:
8794 if (operand_equal_p (arg0, arg1, 0))
8795 return omit_one_operand (type, arg0, arg1);
8796 if (INTEGRAL_TYPE_P (type)
8797 && TYPE_MAX_VALUE (type)
8798 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8799 return omit_one_operand (type, arg1, arg0);
8800 goto associate;
8801
8802 case TRUTH_ANDIF_EXPR:
8803 /* Note that the operands of this must be ints
8804 and their values must be 0 or 1.
8805 ("true" is a fixed value perhaps depending on the language.) */
8806 /* If first arg is constant zero, return it. */
8807 if (integer_zerop (arg0))
8808 return fold_convert (type, arg0);
8809 case TRUTH_AND_EXPR:
8810 /* If either arg is constant true, drop it. */
8811 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8812 return non_lvalue (fold_convert (type, arg1));
8813 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8814 /* Preserve sequence points. */
8815 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8816 return non_lvalue (fold_convert (type, arg0));
8817 /* If second arg is constant zero, result is zero, but first arg
8818 must be evaluated. */
8819 if (integer_zerop (arg1))
8820 return omit_one_operand (type, arg1, arg0);
8821 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8822 case will be handled here. */
8823 if (integer_zerop (arg0))
8824 return omit_one_operand (type, arg0, arg1);
8825
8826 /* !X && X is always false. */
8827 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8828 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8829 return omit_one_operand (type, integer_zero_node, arg1);
8830 /* X && !X is always false. */
8831 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8832 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8833 return omit_one_operand (type, integer_zero_node, arg0);
8834
8835 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8836 means A >= Y && A != MAX, but in this case we know that
8837 A < X <= MAX. */
8838
8839 if (!TREE_SIDE_EFFECTS (arg0)
8840 && !TREE_SIDE_EFFECTS (arg1))
8841 {
8842 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8843 if (tem)
8844 return fold_build2 (code, type, tem, arg1);
8845
8846 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8847 if (tem)
8848 return fold_build2 (code, type, arg0, tem);
8849 }
8850
8851 truth_andor:
8852 /* We only do these simplifications if we are optimizing. */
8853 if (!optimize)
8854 return NULL_TREE;
8855
8856 /* Check for things like (A || B) && (A || C). We can convert this
8857 to A || (B && C). Note that either operator can be any of the four
8858 truth and/or operations and the transformation will still be
8859 valid. Also note that we only care about order for the
8860 ANDIF and ORIF operators. If B contains side effects, this
8861 might change the truth-value of A. */
8862 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8863 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8864 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8865 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8866 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8867 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8868 {
8869 tree a00 = TREE_OPERAND (arg0, 0);
8870 tree a01 = TREE_OPERAND (arg0, 1);
8871 tree a10 = TREE_OPERAND (arg1, 0);
8872 tree a11 = TREE_OPERAND (arg1, 1);
8873 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8874 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8875 && (code == TRUTH_AND_EXPR
8876 || code == TRUTH_OR_EXPR));
8877
8878 if (operand_equal_p (a00, a10, 0))
8879 return fold_build2 (TREE_CODE (arg0), type, a00,
8880 fold_build2 (code, type, a01, a11));
8881 else if (commutative && operand_equal_p (a00, a11, 0))
8882 return fold_build2 (TREE_CODE (arg0), type, a00,
8883 fold_build2 (code, type, a01, a10));
8884 else if (commutative && operand_equal_p (a01, a10, 0))
8885 return fold_build2 (TREE_CODE (arg0), type, a01,
8886 fold_build2 (code, type, a00, a11));
8887
8888 /* This case is tricky because we must either have commutative
8889 operators or else A10 must not have side effects. */
8890
8891 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8892 && operand_equal_p (a01, a11, 0))
8893 return fold_build2 (TREE_CODE (arg0), type,
8894 fold_build2 (code, type, a00, a10),
8895 a01);
8896 }
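/* For instance, "(a || b) && (a || c)" becomes "a || (b && c)" via
   the first branch above (a00 == a10); the side-effect check on the
   second operand keeps the rewrite safe for the short-circuit forms.  */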
8897
8898 /* See if we can build a range comparison. */
8899 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8900 return tem;
8901
8902 /* Check for the possibility of merging component references. If our
8903 lhs is another similar operation, try to merge its rhs with our
8904 rhs. Then try to merge our lhs and rhs. */
8905 if (TREE_CODE (arg0) == code
8906 && 0 != (tem = fold_truthop (code, type,
8907 TREE_OPERAND (arg0, 1), arg1)))
8908 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8909
8910 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8911 return tem;
8912
8913 return NULL_TREE;
8914
8915 case TRUTH_ORIF_EXPR:
8916 /* Note that the operands of this must be ints
8917 and their values must be 0 or true.
8918 ("true" is a fixed value perhaps depending on the language.) */
8919 /* If first arg is constant true, return it. */
8920 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8921 return fold_convert (type, arg0);
8922 case TRUTH_OR_EXPR:
8923 /* If either arg is constant zero, drop it. */
8924 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8925 return non_lvalue (fold_convert (type, arg1));
8926 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8927 /* Preserve sequence points. */
8928 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8929 return non_lvalue (fold_convert (type, arg0));
8930 /* If second arg is constant true, result is true, but we must
8931 evaluate first arg. */
8932 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8933 return omit_one_operand (type, arg1, arg0);
8934 /* Likewise for first arg, but note this only occurs here for
8935 TRUTH_OR_EXPR. */
8936 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8937 return omit_one_operand (type, arg0, arg1);
8938
8939 /* !X || X is always true. */
8940 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8941 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8942 return omit_one_operand (type, integer_one_node, arg1);
8943 /* X || !X is always true. */
8944 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8945 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8946 return omit_one_operand (type, integer_one_node, arg0);
8947
8948 goto truth_andor;
8949
8950 case TRUTH_XOR_EXPR:
8951 /* If the second arg is constant zero, drop it. */
8952 if (integer_zerop (arg1))
8953 return non_lvalue (fold_convert (type, arg0));
8954 /* If the second arg is constant true, this is a logical inversion. */
8955 if (integer_onep (arg1))
8956 {
8957 /* Only call invert_truthvalue if operand is a truth value. */
8958 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8959 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8960 else
8961 tem = invert_truthvalue (arg0);
8962 return non_lvalue (fold_convert (type, tem));
8963 }
8964 /* Identical arguments cancel to zero. */
8965 if (operand_equal_p (arg0, arg1, 0))
8966 return omit_one_operand (type, integer_zero_node, arg0);
8967
8968 /* !X ^ X is always true. */
8969 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8970 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8971 return omit_one_operand (type, integer_one_node, arg1);
8972
8973 /* X ^ !X is always true. */
8974 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8975 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8976 return omit_one_operand (type, integer_one_node, arg0);
8977
8978 return NULL_TREE;
8979
8980 case EQ_EXPR:
8981 case NE_EXPR:
8982 case LT_EXPR:
8983 case GT_EXPR:
8984 case LE_EXPR:
8985 case GE_EXPR:
8986 /* If one arg is a real or integer constant, put it last. */
8987 if (tree_swap_operands_p (arg0, arg1, true))
8988 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8989
8990 /* bool_var != 0 becomes bool_var. */
8991 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8992 && code == NE_EXPR)
8993 return non_lvalue (fold_convert (type, arg0));
8994
8995 /* bool_var == 1 becomes bool_var. */
8996 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8997 && code == EQ_EXPR)
8998 return non_lvalue (fold_convert (type, arg0));
8999
9000 /* If this is an equality comparison of the address of a non-weak
9001 object against zero, then we know the result. */
9002 if ((code == EQ_EXPR || code == NE_EXPR)
9003 && TREE_CODE (arg0) == ADDR_EXPR
9004 && DECL_P (TREE_OPERAND (arg0, 0))
9005 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9006 && integer_zerop (arg1))
9007 return constant_boolean_node (code != EQ_EXPR, type);
9008
9009 /* If this is an equality comparison of the address of two non-weak,
9010 unaliased symbols neither of which are extern (since we do not
9011 have access to attributes for externs), then we know the result. */
9012 if ((code == EQ_EXPR || code == NE_EXPR)
9013 && TREE_CODE (arg0) == ADDR_EXPR
9014 && DECL_P (TREE_OPERAND (arg0, 0))
9015 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9016 && ! lookup_attribute ("alias",
9017 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
9018 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
9019 && TREE_CODE (arg1) == ADDR_EXPR
9020 && DECL_P (TREE_OPERAND (arg1, 0))
9021 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
9022 && ! lookup_attribute ("alias",
9023 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
9024 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
9025 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
9026 ? code == EQ_EXPR : code != EQ_EXPR,
9027 type);
9028
9029 /* If this is a comparison of two exprs that look like an
9030 ARRAY_REF of the same object, then we can fold this to a
9031 comparison of the two offsets. */
9032 if (TREE_CODE_CLASS (code) == tcc_comparison)
9033 {
9034 tree base0, offset0, base1, offset1;
9035
9036 if (extract_array_ref (arg0, &base0, &offset0)
9037 && extract_array_ref (arg1, &base1, &offset1)
9038 && operand_equal_p (base0, base1, 0))
9039 {
9040 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
9041 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
9042 offset0 = NULL_TREE;
9043 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
9044 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
9045 offset1 = NULL_TREE;
9046 if (offset0 == NULL_TREE
9047 && offset1 == NULL_TREE)
9048 {
9049 offset0 = integer_zero_node;
9050 offset1 = integer_zero_node;
9051 }
9052 else if (offset0 == NULL_TREE)
9053 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9054 else if (offset1 == NULL_TREE)
9055 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9056
9057 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
9058 return fold_build2 (code, type, offset0, offset1);
9059 }
9060 }
9061
9062 /* Transform comparisons of the form X +- C CMP X. */
9063 if ((code != EQ_EXPR && code != NE_EXPR)
9064 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9065 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9066 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9067 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9068 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9069 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9070 && !(flag_wrapv || flag_trapv))))
9071 {
9072 tree arg01 = TREE_OPERAND (arg0, 1);
9073 enum tree_code code0 = TREE_CODE (arg0);
9074 int is_positive;
9075
9076 if (TREE_CODE (arg01) == REAL_CST)
9077 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9078 else
9079 is_positive = tree_int_cst_sgn (arg01);
9080
9081 /* (X - c) > X becomes false. */
9082 if (code == GT_EXPR
9083 && ((code0 == MINUS_EXPR && is_positive >= 0)
9084 || (code0 == PLUS_EXPR && is_positive <= 0)))
9085 return constant_boolean_node (0, type);
9086
9087 /* Likewise (X + c) < X becomes false. */
9088 if (code == LT_EXPR
9089 && ((code0 == PLUS_EXPR && is_positive >= 0)
9090 || (code0 == MINUS_EXPR && is_positive <= 0)))
9091 return constant_boolean_node (0, type);
9092
9093 /* Convert (X - c) <= X to true. */
9094 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9095 && code == LE_EXPR
9096 && ((code0 == MINUS_EXPR && is_positive >= 0)
9097 || (code0 == PLUS_EXPR && is_positive <= 0)))
9098 return constant_boolean_node (1, type);
9099
9100 /* Convert (X + c) >= X to true. */
9101 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9102 && code == GE_EXPR
9103 && ((code0 == PLUS_EXPR && is_positive >= 0)
9104 || (code0 == MINUS_EXPR && is_positive <= 0)))
9105 return constant_boolean_node (1, type);
9106
9107 if (TREE_CODE (arg01) == INTEGER_CST)
9108 {
9109 /* Convert X + c > X and X - c < X to true for integers. */
9110 if (code == GT_EXPR
9111 && ((code0 == PLUS_EXPR && is_positive > 0)
9112 || (code0 == MINUS_EXPR && is_positive < 0)))
9113 return constant_boolean_node (1, type);
9114
9115 if (code == LT_EXPR
9116 && ((code0 == MINUS_EXPR && is_positive > 0)
9117 || (code0 == PLUS_EXPR && is_positive < 0)))
9118 return constant_boolean_node (1, type);
9119
9120 /* Convert X + c <= X and X - c >= X to false for integers. */
9121 if (code == LE_EXPR
9122 && ((code0 == PLUS_EXPR && is_positive > 0)
9123 || (code0 == MINUS_EXPR && is_positive < 0)))
9124 return constant_boolean_node (0, type);
9125
9126 if (code == GE_EXPR
9127 && ((code0 == MINUS_EXPR && is_positive > 0)
9128 || (code0 == PLUS_EXPR && is_positive < 0)))
9129 return constant_boolean_node (0, type);
9130 }
9131 }
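/* Concretely, for a signed integer x compiled without -fwrapv or
   -ftrapv, "x + 1 > x" folds to true and "x - 1 > x" folds to false,
   since signed overflow is assumed not to occur.  */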
9132
9133 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9134 {
9135 tree targ0 = strip_float_extensions (arg0);
9136 tree targ1 = strip_float_extensions (arg1);
9137 tree newtype = TREE_TYPE (targ0);
9138
9139 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9140 newtype = TREE_TYPE (targ1);
9141
9142 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9143 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9144 return fold_build2 (code, type, fold_convert (newtype, targ0),
9145 fold_convert (newtype, targ1));
9146
9147 /* (-a) CMP (-b) -> b CMP a */
9148 if (TREE_CODE (arg0) == NEGATE_EXPR
9149 && TREE_CODE (arg1) == NEGATE_EXPR)
9150 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9151 TREE_OPERAND (arg0, 0));
9152
9153 if (TREE_CODE (arg1) == REAL_CST)
9154 {
9155 REAL_VALUE_TYPE cst;
9156 cst = TREE_REAL_CST (arg1);
9157
9158 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9159 if (TREE_CODE (arg0) == NEGATE_EXPR)
9160 return
9161 fold_build2 (swap_tree_comparison (code), type,
9162 TREE_OPERAND (arg0, 0),
9163 build_real (TREE_TYPE (arg1),
9164 REAL_VALUE_NEGATE (cst)));
9165
9166 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9167 /* a CMP (-0) -> a CMP 0 */
9168 if (REAL_VALUE_MINUS_ZERO (cst))
9169 return fold_build2 (code, type, arg0,
9170 build_real (TREE_TYPE (arg1), dconst0));
9171
9172 /* x != NaN is always true, other ops are always false. */
9173 if (REAL_VALUE_ISNAN (cst)
9174 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9175 {
9176 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9177 return omit_one_operand (type, tem, arg0);
9178 }
9179
9180 /* Fold comparisons against infinity. */
9181 if (REAL_VALUE_ISINF (cst))
9182 {
9183 tem = fold_inf_compare (code, type, arg0, arg1);
9184 if (tem != NULL_TREE)
9185 return tem;
9186 }
9187 }
9188
9189 /* If this is a comparison of a real constant with a PLUS_EXPR
9190 or a MINUS_EXPR of a real constant, we can convert it into a
9191 comparison with a revised real constant, provided that no overflow
9192 occurs and unsafe_math_optimizations are enabled. */
9193 if (flag_unsafe_math_optimizations
9194 && TREE_CODE (arg1) == REAL_CST
9195 && (TREE_CODE (arg0) == PLUS_EXPR
9196 || TREE_CODE (arg0) == MINUS_EXPR)
9197 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9198 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9199 ? MINUS_EXPR : PLUS_EXPR,
9200 arg1, TREE_OPERAND (arg0, 1), 0))
9201 && ! TREE_CONSTANT_OVERFLOW (tem))
9202 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9203
9204 /* Likewise, we can simplify a comparison of a real constant with
9205 a MINUS_EXPR whose first operand is also a real constant, i.e.
9206 (c1 - x) < c2 becomes x > c1-c2. */
9207 if (flag_unsafe_math_optimizations
9208 && TREE_CODE (arg1) == REAL_CST
9209 && TREE_CODE (arg0) == MINUS_EXPR
9210 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9211 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9212 arg1, 0))
9213 && ! TREE_CONSTANT_OVERFLOW (tem))
9214 return fold_build2 (swap_tree_comparison (code), type,
9215 TREE_OPERAND (arg0, 1), tem);
9216
9217 /* Fold comparisons against built-in math functions. */
9218 if (TREE_CODE (arg1) == REAL_CST
9219 && flag_unsafe_math_optimizations
9220 && ! flag_errno_math)
9221 {
9222 enum built_in_function fcode = builtin_mathfn_code (arg0);
9223
9224 if (fcode != END_BUILTINS)
9225 {
9226 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9227 if (tem != NULL_TREE)
9228 return tem;
9229 }
9230 }
9231 }
9232
9233 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9234 if (TREE_CONSTANT (arg1)
9235 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9236 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9237 /* This optimization is invalid for ordered comparisons
9238 if CONST+INCR overflows or if foo+incr might overflow.
9239 This optimization is invalid for floating point due to rounding.
9240 For pointer types we assume overflow doesn't happen. */
9241 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9242 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9243 && (code == EQ_EXPR || code == NE_EXPR))))
9244 {
9245 tree varop, newconst;
9246
9247 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9248 {
9249 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9250 arg1, TREE_OPERAND (arg0, 1));
9251 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9252 TREE_OPERAND (arg0, 0),
9253 TREE_OPERAND (arg0, 1));
9254 }
9255 else
9256 {
9257 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9258 arg1, TREE_OPERAND (arg0, 1));
9259 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9260 TREE_OPERAND (arg0, 0),
9261 TREE_OPERAND (arg0, 1));
9262 }
9263
9264
9265 /* If VAROP is a reference to a bitfield, we must mask
9266 the constant by the width of the field. */
9267 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9268 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9269 && host_integerp (DECL_SIZE (TREE_OPERAND
9270 (TREE_OPERAND (varop, 0), 1)), 1))
9271 {
9272 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9273 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9274 tree folded_compare, shift;
9275
9276 /* First check whether the comparison would come out
9277 always the same. If we didn't check first, the masking
9278 would change the meaning of the comparison. */
9279 folded_compare = fold_build2 (code, type,
9280 TREE_OPERAND (varop, 0), arg1);
9281 if (integer_zerop (folded_compare)
9282 || integer_onep (folded_compare))
9283 return omit_one_operand (type, folded_compare, varop);
9284
9285 shift = build_int_cst (NULL_TREE,
9286 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9287 shift = fold_convert (TREE_TYPE (varop), shift);
9288 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9289 newconst, shift);
9290 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9291 newconst, shift);
9292 }
9293
9294 return fold_build2 (code, type, varop, newconst);
9295 }
9296
9297 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9298 This transformation affects the cases which are handled in later
9299 optimizations involving comparisons with non-negative constants. */
9300 if (TREE_CODE (arg1) == INTEGER_CST
9301 && TREE_CODE (arg0) != INTEGER_CST
9302 && tree_int_cst_sgn (arg1) > 0)
9303 {
9304 switch (code)
9305 {
9306 case GE_EXPR:
9307 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9308 return fold_build2 (GT_EXPR, type, arg0, arg1);
9309
9310 case LT_EXPR:
9311 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9312 return fold_build2 (LE_EXPR, type, arg0, arg1);
9313
9314 default:
9315 break;
9316 }
9317 }
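/* E.g. "x >= 5" becomes "x > 4" and "x < 5" becomes "x <= 4",
   canonicalizing toward the forms the later transformations expect.  */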
9318
9319 /* Comparisons with the highest or lowest possible integer of
9320 the specified size will have known values. */
9321 {
9322 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9323
9324 if (TREE_CODE (arg1) == INTEGER_CST
9325 && ! TREE_CONSTANT_OVERFLOW (arg1)
9326 && width <= 2 * HOST_BITS_PER_WIDE_INT
9327 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9328 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9329 {
9330 HOST_WIDE_INT signed_max_hi;
9331 unsigned HOST_WIDE_INT signed_max_lo;
9332 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9333
9334 if (width <= HOST_BITS_PER_WIDE_INT)
9335 {
9336 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9337 - 1;
9338 signed_max_hi = 0;
9339 max_hi = 0;
9340
9341 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9342 {
9343 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9344 min_lo = 0;
9345 min_hi = 0;
9346 }
9347 else
9348 {
9349 max_lo = signed_max_lo;
9350 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9351 min_hi = -1;
9352 }
9353 }
9354 else
9355 {
9356 width -= HOST_BITS_PER_WIDE_INT;
9357 signed_max_lo = -1;
9358 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9359 - 1;
9360 max_lo = -1;
9361 min_lo = 0;
9362
9363 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9364 {
9365 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9366 min_hi = 0;
9367 }
9368 else
9369 {
9370 max_hi = signed_max_hi;
9371 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9372 }
9373 }
9374
9375 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9376 && TREE_INT_CST_LOW (arg1) == max_lo)
9377 switch (code)
9378 {
9379 case GT_EXPR:
9380 return omit_one_operand (type, integer_zero_node, arg0);
9381
9382 case GE_EXPR:
9383 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9384
9385 case LE_EXPR:
9386 return omit_one_operand (type, integer_one_node, arg0);
9387
9388 case LT_EXPR:
9389 return fold_build2 (NE_EXPR, type, arg0, arg1);
9390
9391 /* The GE_EXPR and LT_EXPR cases above are not normally
9392 reached because of previous transformations. */
9393
9394 default:
9395 break;
9396 }
9397 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9398 == max_hi
9399 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9400 switch (code)
9401 {
9402 case GT_EXPR:
9403 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9404 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9405 case LE_EXPR:
9406 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9407 return fold_build2 (NE_EXPR, type, arg0, arg1);
9408 default:
9409 break;
9410 }
9411 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9412 == min_hi
9413 && TREE_INT_CST_LOW (arg1) == min_lo)
9414 switch (code)
9415 {
9416 case LT_EXPR:
9417 return omit_one_operand (type, integer_zero_node, arg0);
9418
9419 case LE_EXPR:
9420 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9421
9422 case GE_EXPR:
9423 return omit_one_operand (type, integer_one_node, arg0);
9424
9425 case GT_EXPR:
9426 return fold_build2 (NE_EXPR, type, arg0, arg1);
9427
9428 default:
9429 break;
9430 }
9431 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9432 == min_hi
9433 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9434 switch (code)
9435 {
9436 case GE_EXPR:
9437 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9438 return fold_build2 (NE_EXPR, type, arg0, arg1);
9439 case LT_EXPR:
9440 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9441 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9442 default:
9443 break;
9444 }
9445
9446 else if (!in_gimple_form
9447 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9448 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9449 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9450 /* signed_type does not work on pointer types. */
9451 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9452 {
9453 /* The following case also applies to X < signed_max+1
9454 and X >= signed_max+1 because of previous transformations. */
9455 if (code == LE_EXPR || code == GT_EXPR)
9456 {
9457 tree st0, st1;
9458 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9459 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9460 return fold
9461 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9462 type, fold_convert (st0, arg0),
9463 fold_convert (st1, integer_zero_node)));
9464 }
9465 }
9466 }
9467 }
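/* As an example of the limit cases above (assuming a 32-bit unsigned
   type): "x <= 0xffffffff" folds to true, "x > 0xffffffff" folds to
   false, and "x >= 0xffffffff" becomes "x == 0xffffffff".  */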
9468
9469 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9470 a MINUS_EXPR of a constant, we can convert it into a comparison with
9471 a revised constant as long as no overflow occurs. */
9472 if ((code == EQ_EXPR || code == NE_EXPR)
9473 && TREE_CODE (arg1) == INTEGER_CST
9474 && (TREE_CODE (arg0) == PLUS_EXPR
9475 || TREE_CODE (arg0) == MINUS_EXPR)
9476 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9477 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9478 ? MINUS_EXPR : PLUS_EXPR,
9479 arg1, TREE_OPERAND (arg0, 1), 0))
9480 && ! TREE_CONSTANT_OVERFLOW (tem))
9481 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9482
9483 /* Similarly for a NEGATE_EXPR. */
9484 else if ((code == EQ_EXPR || code == NE_EXPR)
9485 && TREE_CODE (arg0) == NEGATE_EXPR
9486 && TREE_CODE (arg1) == INTEGER_CST
9487 && 0 != (tem = negate_expr (arg1))
9488 && TREE_CODE (tem) == INTEGER_CST
9489 && ! TREE_CONSTANT_OVERFLOW (tem))
9490 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9491
9492 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9493 for !=. Don't do this for ordered comparisons due to overflow. */
9494 else if ((code == NE_EXPR || code == EQ_EXPR)
9495 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9496 return fold_build2 (code, type,
9497 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9498
9499 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9500 && (TREE_CODE (arg0) == NOP_EXPR
9501 || TREE_CODE (arg0) == CONVERT_EXPR))
9502 {
9503 /* If we are widening one operand of an integer comparison,
9504 see if the other operand is similarly being widened. Perhaps we
9505 can do the comparison in the narrower type. */
9506 tem = fold_widened_comparison (code, type, arg0, arg1);
9507 if (tem)
9508 return tem;
9509
9510 /* Or if we are changing signedness. */
9511 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9512 if (tem)
9513 return tem;
9514 }
9515
9516 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9517 constant, we can simplify it. */
9518 else if (TREE_CODE (arg1) == INTEGER_CST
9519 && (TREE_CODE (arg0) == MIN_EXPR
9520 || TREE_CODE (arg0) == MAX_EXPR)
9521 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9522 {
9523 tem = optimize_minmax_comparison (code, type, op0, op1);
9524 if (tem)
9525 return tem;
9526
9527 return NULL_TREE;
9528 }
9529
9530 /* If we are comparing an ABS_EXPR with a constant, we can
9531 convert all the cases into explicit comparisons, but they may
9532 well not be faster than doing the ABS and one comparison.
9533 But ABS (X) <= C is a range comparison, which becomes a subtraction
9534 and a comparison, and is probably faster. */
9535 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9536 && TREE_CODE (arg0) == ABS_EXPR
9537 && ! TREE_SIDE_EFFECTS (arg0)
9538 && (0 != (tem = negate_expr (arg1)))
9539 && TREE_CODE (tem) == INTEGER_CST
9540 && ! TREE_CONSTANT_OVERFLOW (tem))
9541 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9542 build2 (GE_EXPR, type,
9543 TREE_OPERAND (arg0, 0), tem),
9544 build2 (LE_EXPR, type,
9545 TREE_OPERAND (arg0, 0), arg1));
9546
9547 /* Convert ABS_EXPR<x> >= 0 to true. */
9548 else if (code == GE_EXPR
9549 && tree_expr_nonnegative_p (arg0)
9550 && (integer_zerop (arg1)
9551 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9552 && real_zerop (arg1))))
9553 return omit_one_operand (type, integer_one_node, arg0);
9554
9555 /* Convert ABS_EXPR<x> < 0 to false. */
9556 else if (code == LT_EXPR
9557 && tree_expr_nonnegative_p (arg0)
9558 && (integer_zerop (arg1) || real_zerop (arg1)))
9559 return omit_one_operand (type, integer_zero_node, arg0);
9560
9561 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9562 else if ((code == EQ_EXPR || code == NE_EXPR)
9563 && TREE_CODE (arg0) == ABS_EXPR
9564 && (integer_zerop (arg1) || real_zerop (arg1)))
9565 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9566
9567 /* If this is an EQ or NE comparison with zero and ARG0 is
9568 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9569 two operations, but the latter can be done in one less insn
9570 on machines that have only two-operand insns or on which a
9571 constant cannot be the first operand. */
9572 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9573 && TREE_CODE (arg0) == BIT_AND_EXPR)
9574 {
9575 tree arg00 = TREE_OPERAND (arg0, 0);
9576 tree arg01 = TREE_OPERAND (arg0, 1);
9577 if (TREE_CODE (arg00) == LSHIFT_EXPR
9578 && integer_onep (TREE_OPERAND (arg00, 0)))
9579 return
9580 fold_build2 (code, type,
9581 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9582 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9583 arg01, TREE_OPERAND (arg00, 1)),
9584 fold_convert (TREE_TYPE (arg0),
9585 integer_one_node)),
9586 arg1);
9587 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9588 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9589 return
9590 fold_build2 (code, type,
9591 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9592 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9593 arg00, TREE_OPERAND (arg01, 1)),
9594 fold_convert (TREE_TYPE (arg0),
9595 integer_one_node)),
9596 arg1);
9597 }
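/* E.g. "((1 << n) & bar) == 0" is rewritten as
   "((bar >> n) & 1) == 0", which avoids a separate constant load on
   two-operand machines.  */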
9598
9599 /* If this is an NE or EQ comparison of zero against the result of a
9600 signed MOD operation whose second operand is a power of 2, make
9601 the MOD operation unsigned since it is simpler and equivalent. */
9602 if ((code == NE_EXPR || code == EQ_EXPR)
9603 && integer_zerop (arg1)
9604 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9605 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9606 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9607 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9608 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9609 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9610 {
9611 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9612 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9613 fold_convert (newtype,
9614 TREE_OPERAND (arg0, 0)),
9615 fold_convert (newtype,
9616 TREE_OPERAND (arg0, 1)));
9617
9618 return fold_build2 (code, type, newmod,
9619 fold_convert (newtype, arg1));
9620 }
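/* For signed x, e.g. "(x % 4) == 0" becomes
   "((unsigned) x % 4U) == 0"; the two agree because a value is
   divisible by a power of two exactly when its low bits are zero,
   regardless of sign.  */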
9621
9622 /* If this is an NE comparison of zero with an AND of one, remove the
9623 comparison since the AND will give the correct value. */
9624 if (code == NE_EXPR && integer_zerop (arg1)
9625 && TREE_CODE (arg0) == BIT_AND_EXPR
9626 && integer_onep (TREE_OPERAND (arg0, 1)))
9627 return fold_convert (type, arg0);
9628
9629 /* If we have (A & C) == C where C is a power of 2, convert this into
9630 (A & C) != 0. Similarly for NE_EXPR. */
9631 if ((code == EQ_EXPR || code == NE_EXPR)
9632 && TREE_CODE (arg0) == BIT_AND_EXPR
9633 && integer_pow2p (TREE_OPERAND (arg0, 1))
9634 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9635 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9636 arg0, fold_convert (TREE_TYPE (arg0),
9637 integer_zero_node));
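/* E.g. "(a & 8) == 8" becomes "(a & 8) != 0"; with C a single bit,
   the masked value can only be 0 or C.  */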
9638
9639 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9640 bit, then fold the expression into A < 0 or A >= 0. */
9641 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9642 if (tem)
9643 return tem;
9644
9645 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9646 Similarly for NE_EXPR. */
9647 if ((code == EQ_EXPR || code == NE_EXPR)
9648 && TREE_CODE (arg0) == BIT_AND_EXPR
9649 && TREE_CODE (arg1) == INTEGER_CST
9650 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9651 {
9652 tree notc = fold_build1 (BIT_NOT_EXPR,
9653 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9654 TREE_OPERAND (arg0, 1));
9655 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9656 arg1, notc);
9657 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9658 if (integer_nonzerop (dandnotc))
9659 return omit_one_operand (type, rslt, arg0);
9660 }
9661
9662 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9663 Similarly for NE_EXPR. */
9664 if ((code == EQ_EXPR || code == NE_EXPR)
9665 && TREE_CODE (arg0) == BIT_IOR_EXPR
9666 && TREE_CODE (arg1) == INTEGER_CST
9667 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9668 {
9669 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9670 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9671 TREE_OPERAND (arg0, 1), notd);
9672 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9673 if (integer_nonzerop (candnotd))
9674 return omit_one_operand (type, rslt, arg0);
9675 }
9676
9677 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9678 and similarly for >= into !=. */
9679 if ((code == LT_EXPR || code == GE_EXPR)
9680 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9681 && TREE_CODE (arg1) == LSHIFT_EXPR
9682 && integer_onep (TREE_OPERAND (arg1, 0)))
9683 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9684 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9685 TREE_OPERAND (arg1, 1)),
9686 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9687
9688 else if ((code == LT_EXPR || code == GE_EXPR)
9689 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9690 && (TREE_CODE (arg1) == NOP_EXPR
9691 || TREE_CODE (arg1) == CONVERT_EXPR)
9692 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9693 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9694 return
9695 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9696 fold_convert (TREE_TYPE (arg0),
9697 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9698 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9699 1))),
9700 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9701
9702 /* Simplify comparison of something with itself. (For IEEE
9703 floating-point, we can only do some of these simplifications.) */
9704 if (operand_equal_p (arg0, arg1, 0))
9705 {
9706 switch (code)
9707 {
9708 case EQ_EXPR:
9709 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9710 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9711 return constant_boolean_node (1, type);
9712 break;
9713
9714 case GE_EXPR:
9715 case LE_EXPR:
9716 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9717 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9718 return constant_boolean_node (1, type);
9719 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9720
9721 case NE_EXPR:
9722 /* For NE, we can only do this simplification if the operands are
9723 integral or we don't honor IEEE floating-point NaNs. */
9724 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9725 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9726 break;
9727 /* ... fall through ... */
9728 case GT_EXPR:
9729 case LT_EXPR:
9730 return constant_boolean_node (0, type);
9731 default:
9732 gcc_unreachable ();
9733 }
9734 }
9735
9736 /* If we are comparing an expression that just has comparisons
9737 of two integer values, arithmetic expressions of those comparisons,
9738 and constants, we can simplify it. There are only three cases
9739 to check: the two values can either be equal, the first can be
9740 greater, or the second can be greater. Fold the expression for
9741 those three values. Since each value must be 0 or 1, we have
9742 eight possibilities, each of which corresponds to the constant 0
9743 or 1 or one of the six possible comparisons.
9744
9745 This handles common cases like (a > b) == 0 but also handles
9746 expressions like ((x > y) - (y > x)) > 0, which supposedly
9747 occur in macroized code. */
9748
9749 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9750 {
9751 tree cval1 = 0, cval2 = 0;
9752 int save_p = 0;
9753
9754 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9755 /* Don't handle degenerate cases here; they should already
9756 have been handled anyway. */
9757 && cval1 != 0 && cval2 != 0
9758 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9759 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9760 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9761 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9762 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9763 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9764 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9765 {
9766 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9767 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9768
9769 /* We can't just pass T to eval_subst in case cval1 or cval2
9770 was the same as ARG1. */
9771
9772 tree high_result
9773 = fold_build2 (code, type,
9774 eval_subst (arg0, cval1, maxval,
9775 cval2, minval),
9776 arg1);
9777 tree equal_result
9778 = fold_build2 (code, type,
9779 eval_subst (arg0, cval1, maxval,
9780 cval2, maxval),
9781 arg1);
9782 tree low_result
9783 = fold_build2 (code, type,
9784 eval_subst (arg0, cval1, minval,
9785 cval2, maxval),
9786 arg1);
9787
9788 /* All three of these results should be 0 or 1. Confirm they
9789 are. Then use those values to select the proper code
9790 to use. */
9791
9792 if ((integer_zerop (high_result)
9793 || integer_onep (high_result))
9794 && (integer_zerop (equal_result)
9795 || integer_onep (equal_result))
9796 && (integer_zerop (low_result)
9797 || integer_onep (low_result)))
9798 {
9799 /* Make a 3-bit mask with the high-order bit being the
9800 value for `>', the next for `=', and the low for `<'. */
9801 switch ((integer_onep (high_result) * 4)
9802 + (integer_onep (equal_result) * 2)
9803 + integer_onep (low_result))
9804 {
9805 case 0:
9806 /* Always false. */
9807 return omit_one_operand (type, integer_zero_node, arg0);
9808 case 1:
9809 code = LT_EXPR;
9810 break;
9811 case 2:
9812 code = EQ_EXPR;
9813 break;
9814 case 3:
9815 code = LE_EXPR;
9816 break;
9817 case 4:
9818 code = GT_EXPR;
9819 break;
9820 case 5:
9821 code = NE_EXPR;
9822 break;
9823 case 6:
9824 code = GE_EXPR;
9825 break;
9826 case 7:
9827 /* Always true. */
9828 return omit_one_operand (type, integer_one_node, arg0);
9829 }
9830
9831 if (save_p)
9832 return save_expr (build2 (code, type, cval1, cval2));
9833 else
9834 return fold_build2 (code, type, cval1, cval2);
9835 }
9836 }
9837 }
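/* Worked example: for "(a > b) == 0" the three substitutions give
   high_result == 0, equal_result == 1 and low_result == 1, so the
   mask is 3 and the whole expression folds to "a <= b".  */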
9838
9839 /* If this is a comparison of a field, we may be able to simplify it. */
9840 if (((TREE_CODE (arg0) == COMPONENT_REF
9841 && lang_hooks.can_use_bit_fields_p ())
9842 || TREE_CODE (arg0) == BIT_FIELD_REF)
9843 && (code == EQ_EXPR || code == NE_EXPR)
9844 /* Handle the constant case even without -O
9845 to make sure the warnings are given. */
9846 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9847 {
9848 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9849 if (t1)
9850 return t1;
9851 }
9852
9853 /* Fold a comparison of the address of COMPONENT_REFs with the same
9854 type and component to a comparison of the address of the base
9855 object. In short, &x->a OP &y->a to x OP y and
9856 &x->a OP &y.a to x OP &y */
9857 if (TREE_CODE (arg0) == ADDR_EXPR
9858 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9859 && TREE_CODE (arg1) == ADDR_EXPR
9860 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9861 {
9862 tree cref0 = TREE_OPERAND (arg0, 0);
9863 tree cref1 = TREE_OPERAND (arg1, 0);
9864 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9865 {
9866 tree op0 = TREE_OPERAND (cref0, 0);
9867 tree op1 = TREE_OPERAND (cref1, 0);
9868 return fold_build2 (code, type,
9869 build_fold_addr_expr (op0),
9870 build_fold_addr_expr (op1));
9871 }
9872 }
9873
9874 /* If this is a comparison of complex values and either or both sides
9875 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9876 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9877 This may prevent needless evaluations. */
9878 if ((code == EQ_EXPR || code == NE_EXPR)
9879 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9880 && (TREE_CODE (arg0) == COMPLEX_EXPR
9881 || TREE_CODE (arg1) == COMPLEX_EXPR
9882 || TREE_CODE (arg0) == COMPLEX_CST
9883 || TREE_CODE (arg1) == COMPLEX_CST))
9884 {
9885 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9886 tree real0, imag0, real1, imag1;
9887
9888 arg0 = save_expr (arg0);
9889 arg1 = save_expr (arg1);
9890 real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
9891 imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
9892 real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
9893 imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
9894
9895 return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9896 : TRUTH_ORIF_EXPR),
9897 type,
9898 fold_build2 (code, type, real0, real1),
9899 fold_build2 (code, type, imag0, imag1));
9900 }
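/* E.g. a complex equality "a == b" splits into
   "__real__ a == __real__ b && __imag__ a == __imag__ b", and
   "a != b" into the corresponding || form.  */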
9901
9902 /* Optimize comparisons of strlen vs zero to a compare of the
9903 first character of the string vs zero. To wit,
9904 strlen(ptr) == 0 => *ptr == 0
9905 strlen(ptr) != 0 => *ptr != 0
9906 Other cases should reduce to one of these two (or a constant)
9907 due to the return value of strlen being unsigned. */
9908 if ((code == EQ_EXPR || code == NE_EXPR)
9909 && integer_zerop (arg1)
9910 && TREE_CODE (arg0) == CALL_EXPR)
9911 {
9912 tree fndecl = get_callee_fndecl (arg0);
9913 tree arglist;
9914
9915 if (fndecl
9916 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9917 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9918 && (arglist = TREE_OPERAND (arg0, 1))
9919 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9920 && ! TREE_CHAIN (arglist))
9921 {
9922 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9923 return fold_build2 (code, type, iref,
9924 build_int_cst (TREE_TYPE (iref), 0));
9925 }
9926 }
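/* E.g. "strlen (p) == 0" becomes "*p == 0", turning a library call
   into a single byte load and compare.  */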
9927
9928 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9929 into a single range test. */
9930 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9931 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9932 && TREE_CODE (arg1) == INTEGER_CST
9933 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9934 && !integer_zerop (TREE_OPERAND (arg0, 1))
9935 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9936 && !TREE_OVERFLOW (arg1))
9937 {
9938 t1 = fold_div_compare (code, type, arg0, arg1);
9939 if (t1 != NULL_TREE)
9940 return t1;
9941 }
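/* E.g. for unsigned x, "x / 10 == 2" holds exactly when
   20 <= x && x <= 29, so fold_div_compare can emit a single range
   test instead of the division.  */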
9942
9943 if ((code == EQ_EXPR || code == NE_EXPR)
9944 && !TREE_SIDE_EFFECTS (arg0)
9945 && integer_zerop (arg1)
9946 && tree_expr_nonzero_p (arg0))
9947 return constant_boolean_node (code == NE_EXPR, type);
9948
9949 t1 = fold_relational_const (code, type, arg0, arg1);
9950 return t1 == NULL_TREE ? NULL_TREE : t1;
9951
9952 case UNORDERED_EXPR:
9953 case ORDERED_EXPR:
9954 case UNLT_EXPR:
9955 case UNLE_EXPR:
9956 case UNGT_EXPR:
9957 case UNGE_EXPR:
9958 case UNEQ_EXPR:
9959 case LTGT_EXPR:
9960 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9961 {
9962 t1 = fold_relational_const (code, type, arg0, arg1);
9963 if (t1 != NULL_TREE)
9964 return t1;
9965 }
9966
9967 /* If the first operand is NaN, the result is constant. */
9968 if (TREE_CODE (arg0) == REAL_CST
9969 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9970 && (code != LTGT_EXPR || ! flag_trapping_math))
9971 {
9972 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9973 ? integer_zero_node
9974 : integer_one_node;
9975 return omit_one_operand (type, t1, arg1);
9976 }
9977
9978 /* If the second operand is NaN, the result is constant. */
9979 if (TREE_CODE (arg1) == REAL_CST
9980 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9981 && (code != LTGT_EXPR || ! flag_trapping_math))
9982 {
9983 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9984 ? integer_zero_node
9985 : integer_one_node;
9986 return omit_one_operand (type, t1, arg0);
9987 }
9988
9989 /* Simplify unordered comparison of something with itself. */
9990 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9991 && operand_equal_p (arg0, arg1, 0))
9992 return constant_boolean_node (1, type);
9993
9994 if (code == LTGT_EXPR
9995 && !flag_trapping_math
9996 && operand_equal_p (arg0, arg1, 0))
9997 return constant_boolean_node (0, type);
9998
9999 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10000 {
10001 tree targ0 = strip_float_extensions (arg0);
10002 tree targ1 = strip_float_extensions (arg1);
10003 tree newtype = TREE_TYPE (targ0);
10004
10005 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10006 newtype = TREE_TYPE (targ1);
10007
10008 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10009 return fold_build2 (code, type, fold_convert (newtype, targ0),
10010 fold_convert (newtype, targ1));
10011 }
10012
10013 return NULL_TREE;
10014
10015 case COMPOUND_EXPR:
10016 /* When pedantic, a compound expression can be neither an lvalue
10017 nor an integer constant expression. */
10018 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10019 return NULL_TREE;
10020 /* Don't let (0, 0) be a null pointer constant. */
10021 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10022 : fold_convert (type, arg1);
10023 return pedantic_non_lvalue (tem);
10024
10025 case COMPLEX_EXPR:
10026 if (wins)
10027 return build_complex (type, arg0, arg1);
10028 return NULL_TREE;
10029
10030 case ASSERT_EXPR:
10031 /* An ASSERT_EXPR should never be passed to fold_binary. */
10032 gcc_unreachable ();
10033
10034 default:
10035 return NULL_TREE;
10036 } /* switch (code) */
10037 }
10038
10039 /* Callback for walk_tree, looking for LABEL_EXPR.
10040 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
10041 Do not check the sub-tree of GOTO_EXPR. */
10042
10043 static tree
10044 contains_label_1 (tree *tp,
10045 int *walk_subtrees,
10046 void *data ATTRIBUTE_UNUSED)
10047 {
10048 switch (TREE_CODE (*tp))
10049 {
10050 case LABEL_EXPR:
10051 return *tp;
10052 case GOTO_EXPR:
10053 *walk_subtrees = 0;
10054 /* no break */
10055 default:
10056 return NULL_TREE;
10057 }
10058 }
10059
10060 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
10061 accessible from outside the sub-tree. Returns true if such a label
10062 is found, false otherwise. */
10063
10064 static bool
10065 contains_label_p (tree st)
10066 {
10067 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
10068 }
10069
10070 /* Fold a ternary expression of code CODE and type TYPE with operands
10071 OP0, OP1, and OP2. Return the folded expression if folding is
10072 successful. Otherwise, return NULL_TREE. */
10073
10074 tree
10075 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10076 {
10077 tree tem;
10078 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10079 enum tree_code_class kind = TREE_CODE_CLASS (code);
10080
10081 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10082 && TREE_CODE_LENGTH (code) == 3);
10083
10084 /* Strip any conversions that don't change the mode. This is safe
10085 for every expression, except for a comparison expression because
10086 its signedness is derived from its operands. So, in the latter
10087 case, only strip conversions that don't change the signedness.
10088
10089 Note that this is done as an internal manipulation within the
10090 constant folder, in order to find the simplest representation of
10091 the arguments so that their form can be studied. In any case,
10092 the appropriate type conversions should be put back in the tree
10093 that will get out of the constant folder. */
10094 if (op0)
10095 {
10096 arg0 = op0;
10097 STRIP_NOPS (arg0);
10098 }
10099
10100 if (op1)
10101 {
10102 arg1 = op1;
10103 STRIP_NOPS (arg1);
10104 }
10105
10106 switch (code)
10107 {
10108 case COMPONENT_REF:
10109 if (TREE_CODE (arg0) == CONSTRUCTOR
10110 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10111 {
10112 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
10113 if (m)
10114 return TREE_VALUE (m);
10115 }
10116 return NULL_TREE;
10117
10118 case COND_EXPR:
10119 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10120 so all simple results must be passed through pedantic_non_lvalue. */
10121 if (TREE_CODE (arg0) == INTEGER_CST)
10122 {
10123 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10124 tem = integer_zerop (arg0) ? op2 : op1;
10125 /* Only optimize constant conditions when the selected branch
10126 has the same type as the COND_EXPR. This avoids optimizing
10127 away "c ? x : throw", where the throw has a void type.
10128 Avoid throwing away an operand that contains a label. */
10129 if ((!TREE_SIDE_EFFECTS (unused_op)
10130 || !contains_label_p (unused_op))
10131 && (! VOID_TYPE_P (TREE_TYPE (tem))
10132 || VOID_TYPE_P (type)))
10133 return pedantic_non_lvalue (tem);
10134 return NULL_TREE;
10135 }
10136 if (operand_equal_p (arg1, op2, 0))
10137 return pedantic_omit_one_operand (type, arg1, arg0);
10138
10139 /* If we have A op B ? A : C, we may be able to convert this to a
10140 simpler expression, depending on the operation and the values
10141 of B and C. Signed zeros prevent all of these transformations,
10142 for reasons given above each one.
10143
10144 Also try swapping the arguments and inverting the conditional. */
10145 if (COMPARISON_CLASS_P (arg0)
10146 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10147 arg1, TREE_OPERAND (arg0, 1))
10148 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10149 {
10150 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10151 if (tem)
10152 return tem;
10153 }
10154
10155 if (COMPARISON_CLASS_P (arg0)
10156 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10157 op2,
10158 TREE_OPERAND (arg0, 1))
10159 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10160 {
10161 tem = invert_truthvalue (arg0);
10162 if (COMPARISON_CLASS_P (tem))
10163 {
10164 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10165 if (tem)
10166 return tem;
10167 }
10168 }
10169
10170 /* If the second operand is simpler than the third, swap them
10171 since that produces better jump optimization results. */
10172 if (tree_swap_operands_p (op1, op2, false))
10173 {
10174 /* See if this can be inverted. If it can't, possibly because
10175 it was a floating-point inequality comparison, don't do
10176 anything. */
10177 tem = invert_truthvalue (arg0);
10178
10179 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10180 return fold_build3 (code, type, tem, op2, op1);
10181 }
10182
10183 /* Convert A ? 1 : 0 to simply A. */
10184 if (integer_onep (op1)
10185 && integer_zerop (op2)
10186 /* If we try to convert OP0 to our type, the
10187 call to fold will try to move the conversion inside
10188 a COND, which will recurse. In that case, the COND_EXPR
10189 is probably the best choice, so leave it alone. */
10190 && type == TREE_TYPE (arg0))
10191 return pedantic_non_lvalue (arg0);
10192
10193 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10194 over COND_EXPR in cases such as floating point comparisons. */
10195 if (integer_zerop (op1)
10196 && integer_onep (op2)
10197 && truth_value_p (TREE_CODE (arg0)))
10198 return pedantic_non_lvalue (fold_convert (type,
10199 invert_truthvalue (arg0)));
10200
10201 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10202 if (TREE_CODE (arg0) == LT_EXPR
10203 && integer_zerop (TREE_OPERAND (arg0, 1))
10204 && integer_zerop (op2)
10205 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10206 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10207 TREE_TYPE (tem), tem, arg1));
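/* E.g. assuming a 32-bit int, "a < 0 ? 0x80000000 : 0" folds to
   "a & 0x80000000", since the sign bit is set exactly when a < 0.  */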
10208
10209 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10210 already handled above. */
10211 if (TREE_CODE (arg0) == BIT_AND_EXPR
10212 && integer_onep (TREE_OPERAND (arg0, 1))
10213 && integer_zerop (op2)
10214 && integer_pow2p (arg1))
10215 {
10216 tree tem = TREE_OPERAND (arg0, 0);
10217 STRIP_NOPS (tem);
10218 if (TREE_CODE (tem) == RSHIFT_EXPR
10219 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10220 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10221 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10222 return fold_build2 (BIT_AND_EXPR, type,
10223 TREE_OPERAND (tem, 0), arg1);
10224 }
10225
10226 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10227 is probably obsolete because the first operand should be a
10228 truth value (that's why we have the two cases above), but let's
10229 leave it in until we can confirm this for all front-ends. */
10230 if (integer_zerop (op2)
10231 && TREE_CODE (arg0) == NE_EXPR
10232 && integer_zerop (TREE_OPERAND (arg0, 1))
10233 && integer_pow2p (arg1)
10234 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10235 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10236 arg1, OEP_ONLY_CONST))
10237 return pedantic_non_lvalue (fold_convert (type,
10238 TREE_OPERAND (arg0, 0)));
10239
10240 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10241 if (integer_zerop (op2)
10242 && truth_value_p (TREE_CODE (arg0))
10243 && truth_value_p (TREE_CODE (arg1)))
10244 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10245
10246 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10247 if (integer_onep (op2)
10248 && truth_value_p (TREE_CODE (arg0))
10249 && truth_value_p (TREE_CODE (arg1)))
10250 {
10251 /* Only perform transformation if ARG0 is easily inverted. */
10252 tem = invert_truthvalue (arg0);
10253 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10254 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10255 }
10256
10257 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10258 if (integer_zerop (arg1)
10259 && truth_value_p (TREE_CODE (arg0))
10260 && truth_value_p (TREE_CODE (op2)))
10261 {
10262 /* Only perform transformation if ARG0 is easily inverted. */
10263 tem = invert_truthvalue (arg0);
10264 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10265 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10266 }
10267
10268 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10269 if (integer_onep (arg1)
10270 && truth_value_p (TREE_CODE (arg0))
10271 && truth_value_p (TREE_CODE (op2)))
10272 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10273
10274 return NULL_TREE;
10275
10276 case CALL_EXPR:
10277 /* Check for a built-in function. */
10278 if (TREE_CODE (op0) == ADDR_EXPR
10279 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10280 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10281 {
10282 tree fndecl = TREE_OPERAND (op0, 0);
10283 tree arglist = op1;
10284 tree tmp = fold_builtin (fndecl, arglist, false);
10285 if (tmp)
10286 return tmp;
10287 }
10288 return NULL_TREE;
10289
10290 case BIT_FIELD_REF:
10291 if (TREE_CODE (arg0) == VECTOR_CST
10292 && type == TREE_TYPE (TREE_TYPE (arg0))
10293 && host_integerp (arg1, 1)
10294 && host_integerp (op2, 1))
10295 {
10296 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10297 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10298
10299 if (width != 0
10300 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10301 && (idx % width) == 0
10302 && (idx = idx / width)
10303 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10304 {
10305 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10306 while (idx-- > 0)
10307 elements = TREE_CHAIN (elements);
10308 return TREE_VALUE (elements);
10309 }
10310 }
10311 return NULL_TREE;
10312
10313 default:
10314 return NULL_TREE;
10315 } /* switch (code) */
10316 }
10317
10318 /* Perform constant folding and related simplification of EXPR.
10319 The related simplifications include x*1 => x, x*0 => 0, etc.,
10320 and application of the associative law.
10321 NOP_EXPR conversions may be removed freely (as long as we
10322 are careful not to change the type of the overall expression).
10323 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10324 but we can constant-fold them if they have constant operands. */
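/* For illustration (a sketch, not an exhaustive list): handed the tree
   for X * 1, fold returns the tree for X; handed the constant
   expression 2 + 3, it returns the INTEGER_CST 5.  */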
10325
10326 #ifdef ENABLE_FOLD_CHECKING
10327 # define fold(x) fold_1 (x)
10328 static tree fold_1 (tree);
10329 static
10330 #endif
10331 tree
10332 fold (tree expr)
10333 {
10334 const tree t = expr;
10335 enum tree_code code = TREE_CODE (t);
10336 enum tree_code_class kind = TREE_CODE_CLASS (code);
10337 tree tem;
10338
10339 /* Return right away if a constant. */
10340 if (kind == tcc_constant)
10341 return t;
10342
10343 if (IS_EXPR_CODE_CLASS (kind))
10344 {
10345 tree type = TREE_TYPE (t);
10346 tree op0, op1, op2;
10347
10348 switch (TREE_CODE_LENGTH (code))
10349 {
10350 case 1:
10351 op0 = TREE_OPERAND (t, 0);
10352 tem = fold_unary (code, type, op0);
10353 return tem ? tem : expr;
10354 case 2:
10355 op0 = TREE_OPERAND (t, 0);
10356 op1 = TREE_OPERAND (t, 1);
10357 tem = fold_binary (code, type, op0, op1);
10358 return tem ? tem : expr;
10359 case 3:
10360 op0 = TREE_OPERAND (t, 0);
10361 op1 = TREE_OPERAND (t, 1);
10362 op2 = TREE_OPERAND (t, 2);
10363 tem = fold_ternary (code, type, op0, op1, op2);
10364 return tem ? tem : expr;
10365 default:
10366 break;
10367 }
10368 }
10369
10370 switch (code)
10371 {
10372 case CONST_DECL:
10373 return fold (DECL_INITIAL (t));
10374
10375 default:
10376 return t;
10377 } /* switch (code) */
10378 }
10379
10380 #ifdef ENABLE_FOLD_CHECKING
10381 #undef fold
10382
10383 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10384 static void fold_check_failed (tree, tree);
10385 void print_fold_checksum (tree);
10386
10387 /* When --enable-checking=fold, compute a digest of EXPR before
10388 and after the actual fold call, to verify that fold did not
10389 accidentally change the original EXPR. */
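/* A sketch of the mechanism: hash EXPR into CHECKSUM_BEFORE, run the
   real fold_1, hash EXPR again into CHECKSUM_AFTER, and call
   fold_check_failed if the two digests differ -- fold must simplify by
   building new trees, never by mutating its argument in place.  */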
10390
10391 tree
10392 fold (tree expr)
10393 {
10394 tree ret;
10395 struct md5_ctx ctx;
10396 unsigned char checksum_before[16], checksum_after[16];
10397 htab_t ht;
10398
10399 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10400 md5_init_ctx (&ctx);
10401 fold_checksum_tree (expr, &ctx, ht);
10402 md5_finish_ctx (&ctx, checksum_before);
10403 htab_empty (ht);
10404
10405 ret = fold_1 (expr);
10406
10407 md5_init_ctx (&ctx);
10408 fold_checksum_tree (expr, &ctx, ht);
10409 md5_finish_ctx (&ctx, checksum_after);
10410 htab_delete (ht);
10411
10412 if (memcmp (checksum_before, checksum_after, 16))
10413 fold_check_failed (expr, ret);
10414
10415 return ret;
10416 }
10417
10418 void
10419 print_fold_checksum (tree expr)
10420 {
10421 struct md5_ctx ctx;
10422 unsigned char checksum[16], cnt;
10423 htab_t ht;
10424
10425 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10426 md5_init_ctx (&ctx);
10427 fold_checksum_tree (expr, &ctx, ht);
10428 md5_finish_ctx (&ctx, checksum);
10429 htab_delete (ht);
10430 for (cnt = 0; cnt < 16; ++cnt)
10431 fprintf (stderr, "%02x", checksum[cnt]);
10432 putc ('\n', stderr);
10433 }
10434
10435 static void
10436 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10437 {
10438 internal_error ("fold check: original tree changed by fold");
10439 }
10440
10441 static void
10442 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10443 {
10444 void **slot;
10445 enum tree_code code;
10446 char buf[sizeof (struct tree_decl)];
10447 int i, len;
10448
10449 recursive_label:
10450
10451 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10452 <= sizeof (struct tree_decl))
10453 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10454 if (expr == NULL)
10455 return;
10456 slot = htab_find_slot (ht, expr, INSERT);
10457 if (*slot != NULL)
10458 return;
10459 *slot = expr;
10460 code = TREE_CODE (expr);
10461 if (TREE_CODE_CLASS (code) == tcc_declaration
10462 && DECL_ASSEMBLER_NAME_SET_P (expr))
10463 {
10464 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10465 memcpy (buf, expr, tree_size (expr));
10466 expr = (tree) buf;
10467 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10468 }
10469 else if (TREE_CODE_CLASS (code) == tcc_type
10470 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10471 || TYPE_CACHED_VALUES_P (expr)
10472 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10473 {
10474 /* Allow these fields to be modified. */
10475 memcpy (buf, expr, tree_size (expr));
10476 expr = (tree) buf;
10477 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10478 TYPE_POINTER_TO (expr) = NULL;
10479 TYPE_REFERENCE_TO (expr) = NULL;
10480 if (TYPE_CACHED_VALUES_P (expr))
10481 {
10482 TYPE_CACHED_VALUES_P (expr) = 0;
10483 TYPE_CACHED_VALUES (expr) = NULL;
10484 }
10485 }
10486 md5_process_bytes (expr, tree_size (expr), ctx);
10487 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10488 if (TREE_CODE_CLASS (code) != tcc_type
10489 && TREE_CODE_CLASS (code) != tcc_declaration
10490 && code != TREE_LIST)
10491 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10492 switch (TREE_CODE_CLASS (code))
10493 {
10494 case tcc_constant:
10495 switch (code)
10496 {
10497 case STRING_CST:
10498 md5_process_bytes (TREE_STRING_POINTER (expr),
10499 TREE_STRING_LENGTH (expr), ctx);
10500 break;
10501 case COMPLEX_CST:
10502 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10503 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10504 break;
10505 case VECTOR_CST:
10506 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10507 break;
10508 default:
10509 break;
10510 }
10511 break;
10512 case tcc_exceptional:
10513 switch (code)
10514 {
10515 case TREE_LIST:
10516 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10517 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10518 expr = TREE_CHAIN (expr);
10519 goto recursive_label;
10520 break;
10521 case TREE_VEC:
10522 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10523 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10524 break;
10525 default:
10526 break;
10527 }
10528 break;
10529 case tcc_expression:
10530 case tcc_reference:
10531 case tcc_comparison:
10532 case tcc_unary:
10533 case tcc_binary:
10534 case tcc_statement:
10535 len = TREE_CODE_LENGTH (code);
10536 for (i = 0; i < len; ++i)
10537 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10538 break;
10539 case tcc_declaration:
10540 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10541 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10542 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10543 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10544 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10545 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10546 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10547 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10548 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10549 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10550 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10551 break;
10552 case tcc_type:
10553 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10554 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10555 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10556 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10557 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10558 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10559 if (INTEGRAL_TYPE_P (expr)
10560 || SCALAR_FLOAT_TYPE_P (expr))
10561 {
10562 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10563 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10564 }
10565 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10566 if (TREE_CODE (expr) == RECORD_TYPE
10567 || TREE_CODE (expr) == UNION_TYPE
10568 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10569 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10570 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10571 break;
10572 default:
10573 break;
10574 }
10575 }
10576
10577 #endif
10578
10579 /* Fold a unary tree expression with code CODE of type TYPE with an
10580 operand OP0. Return a folded expression if successful. Otherwise,
10581 return a tree expression with code CODE of type TYPE with an
10582 operand OP0. */
10583
10584 tree
10585 fold_build1 (enum tree_code code, tree type, tree op0)
10586 {
10587 tree tem = fold_unary (code, type, op0);
10588 if (tem)
10589 return tem;
10590
10591 return build1 (code, type, op0);
10592 }
10593
10594 /* Fold a binary tree expression with code CODE of type TYPE with
10595 operands OP0 and OP1. Return a folded expression if successful.
10596 Otherwise, return a tree expression with code CODE of type TYPE
10597 with operands OP0 and OP1. */
10598
10599 tree
10600 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10601 {
10602 tree tem = fold_binary (code, type, op0, op1);
10603 if (tem)
10604 return tem;
10605
10606 return build2 (code, type, op0, op1);
10607 }
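/* An illustrative usage sketch (compiled out; A names a hypothetical
   integer-typed operand): because fold_binary simplifies X + 0 to X,
   this call hands back A itself instead of allocating a fresh
   PLUS_EXPR node.  */
#if 0
static tree
example_fold_build2 (tree a)
{
  tree zero = build_int_cst (TREE_TYPE (a), 0);
  return fold_build2 (PLUS_EXPR, TREE_TYPE (a), a, zero);
}
#endif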
10608
10609 /* Fold a ternary tree expression with code CODE of type TYPE with
10610 operands OP0, OP1, and OP2. Return a folded expression if
10611 successful. Otherwise, return a tree expression with code CODE of
10612 type TYPE with operands OP0, OP1, and OP2. */
10613
10614 tree
10615 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10616 {
10617 tree tem = fold_ternary (code, type, op0, op1, op2);
10618 if (tem)
10619 return tem;
10620
10621 return build3 (code, type, op0, op1, op2);
10622 }
10623
10624 /* Perform constant folding and related simplification of initializer
10625 expression EXPR. This behaves identically to "fold" but ignores
10626 potential run-time traps and exceptions that fold must preserve. */
10627
10628 tree
10629 fold_initializer (tree expr)
10630 {
10631 int saved_signaling_nans = flag_signaling_nans;
10632 int saved_trapping_math = flag_trapping_math;
10633 int saved_rounding_math = flag_rounding_math;
10634 int saved_trapv = flag_trapv;
10635 tree result;
10636
10637 flag_signaling_nans = 0;
10638 flag_trapping_math = 0;
10639 flag_rounding_math = 0;
10640 flag_trapv = 0;
10641
10642 result = fold (expr);
10643
10644 flag_signaling_nans = saved_signaling_nans;
10645 flag_trapping_math = saved_trapping_math;
10646 flag_rounding_math = saved_rounding_math;
10647 flag_trapv = saved_trapv;
10648
10649 return result;
10650 }
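/* For illustration: a static initializer must become a compile-time
   constant even under -ftrapping-math or -ftrapv, so the flags that
   would otherwise make fold preserve the run-time operation are
   cleared around the fold call and restored afterwards.  */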
10651
10652 /* Determine if the first argument is a multiple of the second argument.
10653 Return 0 if it is not, or if we cannot easily determine it to be.
10654
10655 An example of the sort of thing we care about (at this point; this routine
10656 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10657 fold cases do now) is discovering that
10658
10659 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10660
10661 is a multiple of
10662
10663 SAVE_EXPR (J * 8)
10664
10665 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10666
10667 This code also handles discovering that
10668
10669 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10670
10671 is a multiple of 8 so we don't have to worry about dealing with a
10672 possible remainder.
10673
10674 Note that we *look* inside a SAVE_EXPR only to determine how it was
10675 calculated; it is not safe for fold to do much of anything else with the
10676 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10677 at run time. For example, the latter example above *cannot* be implemented
10678 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10679 evaluation time of the original SAVE_EXPR is not necessarily the same at
10680 the time the new expression is evaluated. The only optimization of this
10681 sort that would be valid is changing
10682
10683 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10684
10685 divided by 8 to
10686
10687 SAVE_EXPR (I) * SAVE_EXPR (J)
10688
10689 (where the same SAVE_EXPR (J) is used in the original and the
10690 transformed version). */
10691
10692 static int
10693 multiple_of_p (tree type, tree top, tree bottom)
10694 {
10695 if (operand_equal_p (top, bottom, 0))
10696 return 1;
10697
10698 if (TREE_CODE (type) != INTEGER_TYPE)
10699 return 0;
10700
10701 switch (TREE_CODE (top))
10702 {
10703 case BIT_AND_EXPR:
10704 /* Bitwise and provides a power of two multiple. If the mask is
10705 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10706 if (!integer_pow2p (bottom))
10707 return 0;
10708 /* FALLTHRU */
10709
10710 case MULT_EXPR:
10711 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10712 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10713
10714 case PLUS_EXPR:
10715 case MINUS_EXPR:
10716 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10717 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10718
10719 case LSHIFT_EXPR:
10720 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10721 {
10722 tree op1, t1;
10723
10724 op1 = TREE_OPERAND (top, 1);
10725 /* const_binop may not detect overflow correctly,
10726 so check for it explicitly here. */
10727 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10728 > TREE_INT_CST_LOW (op1)
10729 && TREE_INT_CST_HIGH (op1) == 0
10730 && 0 != (t1 = fold_convert (type,
10731 const_binop (LSHIFT_EXPR,
10732 size_one_node,
10733 op1, 0)))
10734 && ! TREE_OVERFLOW (t1))
10735 return multiple_of_p (type, t1, bottom);
10736 }
10737 return 0;
10738
10739 case NOP_EXPR:
10740 /* Can't handle conversions from non-integral or wider integral type. */
10741 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10742 || (TYPE_PRECISION (type)
10743 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10744 return 0;
10745
10746 /* ... fall through ... */
10747
10748 case SAVE_EXPR:
10749 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10750
10751 case INTEGER_CST:
10752 if (TREE_CODE (bottom) != INTEGER_CST
10753 || (TYPE_UNSIGNED (type)
10754 && (tree_int_cst_sgn (top) < 0
10755 || tree_int_cst_sgn (bottom) < 0)))
10756 return 0;
10757 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10758 top, bottom, 0));
10759
10760 default:
10761 return 0;
10762 }
10763 }
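/* An illustrative sketch (compiled out; I names a hypothetical
   sizetype operand): TOP == I * 8 + 16 is recognized as a multiple of
   8, since the MULT_EXPR case accepts the constant factor 8 and the
   INTEGER_CST case checks 16 % 8 == 0 via const_binop.  */
#if 0
static int
example_multiple_of_p (tree i)
{
  tree top = build2 (PLUS_EXPR, sizetype,
		     build2 (MULT_EXPR, sizetype, i, size_int (8)),
		     size_int (16));
  return multiple_of_p (sizetype, top, size_int (8));
}
#endif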
10764
10765 /* Return true if `t' is known to be non-negative. */
10766
10767 int
10768 tree_expr_nonnegative_p (tree t)
10769 {
10770 switch (TREE_CODE (t))
10771 {
10772 case ABS_EXPR:
10773 return 1;
10774
10775 case INTEGER_CST:
10776 return tree_int_cst_sgn (t) >= 0;
10777
10778 case REAL_CST:
10779 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10780
10781 case PLUS_EXPR:
10782 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10783 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10784 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10785
10786 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10787 both unsigned and at least 2 bits shorter than the result. */
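      /* (E.g. two unsigned chars widened to a 32-bit int: each operand
	 is at most 255, so the sum is at most 510, which fits in 9 bits
	 and can never reach the sign bit.)  */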
10788 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10789 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10790 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10791 {
10792 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10793 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10794 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10795 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10796 {
10797 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10798 TYPE_PRECISION (inner2)) + 1;
10799 return prec < TYPE_PRECISION (TREE_TYPE (t));
10800 }
10801 }
10802 break;
10803
10804 case MULT_EXPR:
10805 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10806 {
10807 /* x * x for floating point x is always non-negative. */
10808 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10809 return 1;
10810 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10811 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10812 }
10813
10814 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10815 both unsigned and the sum of their precisions is smaller than the precision of the result. */
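      /* (E.g. 255 * 255 == 65025, which fits in 16 bits, so two
	 unsigned chars multiplied in a 32-bit int cannot set the
	 sign bit.)  */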
10816 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10817 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10818 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10819 {
10820 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10821 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10822 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10823 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10824 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10825 < TYPE_PRECISION (TREE_TYPE (t));
10826 }
10827 return 0;
10828
10829 case TRUNC_DIV_EXPR:
10830 case CEIL_DIV_EXPR:
10831 case FLOOR_DIV_EXPR:
10832 case ROUND_DIV_EXPR:
10833 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10834 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10835
10836 case TRUNC_MOD_EXPR:
10837 case CEIL_MOD_EXPR:
10838 case FLOOR_MOD_EXPR:
10839 case ROUND_MOD_EXPR:
10840 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10841
10842 case RDIV_EXPR:
10843 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10844 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10845
10846 case BIT_AND_EXPR:
10847 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10848 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10849 case BIT_IOR_EXPR:
10850 case BIT_XOR_EXPR:
10851 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10852 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10853
10854 case NOP_EXPR:
10855 {
10856 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10857 tree outer_type = TREE_TYPE (t);
10858
10859 if (TREE_CODE (outer_type) == REAL_TYPE)
10860 {
10861 if (TREE_CODE (inner_type) == REAL_TYPE)
10862 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10863 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10864 {
10865 if (TYPE_UNSIGNED (inner_type))
10866 return 1;
10867 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10868 }
10869 }
10870 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10871 {
10872 if (TREE_CODE (inner_type) == REAL_TYPE)
10873 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10874 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10875 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10876 && TYPE_UNSIGNED (inner_type);
10877 }
10878 }
10879 break;
10880
10881 case COND_EXPR:
10882 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10883 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10884 case COMPOUND_EXPR:
10885 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10886 case MIN_EXPR:
10887 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10888 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10889 case MAX_EXPR:
10890 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10891 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10892 case MODIFY_EXPR:
10893 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10894 case BIND_EXPR:
10895 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10896 case SAVE_EXPR:
10897 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10898 case NON_LVALUE_EXPR:
10899 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10900 case FLOAT_EXPR:
10901 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10902
10903 case TARGET_EXPR:
10904 {
10905 tree temp = TARGET_EXPR_SLOT (t);
10906 t = TARGET_EXPR_INITIAL (t);
10907
10908 /* If the initializer is non-void, then it's a normal expression
10909 that will be assigned to the slot. */
10910 if (!VOID_TYPE_P (t))
10911 return tree_expr_nonnegative_p (t);
10912
10913 /* Otherwise, the initializer sets the slot in some way. One common
10914 way is an assignment statement at the end of the initializer. */
10915 while (1)
10916 {
10917 if (TREE_CODE (t) == BIND_EXPR)
10918 t = expr_last (BIND_EXPR_BODY (t));
10919 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10920 || TREE_CODE (t) == TRY_CATCH_EXPR)
10921 t = expr_last (TREE_OPERAND (t, 0));
10922 else if (TREE_CODE (t) == STATEMENT_LIST)
10923 t = expr_last (t);
10924 else
10925 break;
10926 }
10927 if (TREE_CODE (t) == MODIFY_EXPR
10928 && TREE_OPERAND (t, 0) == temp)
10929 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10930
10931 return 0;
10932 }
10933
10934 case CALL_EXPR:
10935 {
10936 tree fndecl = get_callee_fndecl (t);
10937 tree arglist = TREE_OPERAND (t, 1);
10938 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10939 switch (DECL_FUNCTION_CODE (fndecl))
10940 {
10941 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10942 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10943 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10944 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10945
10946 CASE_BUILTIN_F (BUILT_IN_ACOS)
10947 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10948 CASE_BUILTIN_F (BUILT_IN_CABS)
10949 CASE_BUILTIN_F (BUILT_IN_COSH)
10950 CASE_BUILTIN_F (BUILT_IN_ERFC)
10951 CASE_BUILTIN_F (BUILT_IN_EXP)
10952 CASE_BUILTIN_F (BUILT_IN_EXP10)
10953 CASE_BUILTIN_F (BUILT_IN_EXP2)
10954 CASE_BUILTIN_F (BUILT_IN_FABS)
10955 CASE_BUILTIN_F (BUILT_IN_FDIM)
10956 CASE_BUILTIN_F (BUILT_IN_FREXP)
10957 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10958 CASE_BUILTIN_F (BUILT_IN_POW10)
10959 CASE_BUILTIN_I (BUILT_IN_FFS)
10960 CASE_BUILTIN_I (BUILT_IN_PARITY)
10961 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10962 /* Always true. */
10963 return 1;
10964
10965 CASE_BUILTIN_F (BUILT_IN_SQRT)
10966 /* sqrt(-0.0) is -0.0. */
10967 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10968 return 1;
10969 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10970
10971 CASE_BUILTIN_F (BUILT_IN_ASINH)
10972 CASE_BUILTIN_F (BUILT_IN_ATAN)
10973 CASE_BUILTIN_F (BUILT_IN_ATANH)
10974 CASE_BUILTIN_F (BUILT_IN_CBRT)
10975 CASE_BUILTIN_F (BUILT_IN_CEIL)
10976 CASE_BUILTIN_F (BUILT_IN_ERF)
10977 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10978 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10979 CASE_BUILTIN_F (BUILT_IN_FMOD)
10980 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10981 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10982 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10983 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10984 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10985 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10986 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10987 CASE_BUILTIN_F (BUILT_IN_LRINT)
10988 CASE_BUILTIN_F (BUILT_IN_LROUND)
10989 CASE_BUILTIN_F (BUILT_IN_MODF)
10990 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10991 CASE_BUILTIN_F (BUILT_IN_POW)
10992 CASE_BUILTIN_F (BUILT_IN_RINT)
10993 CASE_BUILTIN_F (BUILT_IN_ROUND)
10994 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10995 CASE_BUILTIN_F (BUILT_IN_SINH)
10996 CASE_BUILTIN_F (BUILT_IN_TANH)
10997 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10998 /* True if the 1st argument is nonnegative. */
10999 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11000
11001 CASE_BUILTIN_F (BUILT_IN_FMAX)
11002 /* True if the 1st OR 2nd arguments are nonnegative. */
11003 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11004 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11005
11006 CASE_BUILTIN_F (BUILT_IN_FMIN)
11007 /* True if the 1st AND 2nd arguments are nonnegative. */
11008 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11009 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11010
11011 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11012 /* True if the 2nd argument is nonnegative. */
11013 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11014
11015 default:
11016 break;
11017 #undef CASE_BUILTIN_F
11018 #undef CASE_BUILTIN_I
11019 }
11020 }
11021
11022 /* ... fall through ... */
11023
11024 default:
11025 if (truth_value_p (TREE_CODE (t)))
11026 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11027 return 1;
11028 }
11029
11030 /* We don't know the sign of `t', so be conservative and return false. */
11031 return 0;
11032 }
11033
11034 /* Return true when T is an address and is known to be nonzero.
11035 For floating point we further ensure that T is not denormal.
11036 Similar logic is present in nonzero_address in rtlanal.c. */
11037
11038 static bool
11039 tree_expr_nonzero_p (tree t)
11040 {
11041 tree type = TREE_TYPE (t);
11042
11043 /* Doing something useful for floating point would need more work. */
11044 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11045 return false;
11046
11047 switch (TREE_CODE (t))
11048 {
11049 case ABS_EXPR:
11050 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11051 	return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      /* Don't fall through: the INTEGER_CST case below would read
	 TREE_INT_CST_LOW of a node that is not an INTEGER_CST.  */
      break;
11052
11053 case INTEGER_CST:
11054 /* We used to test for !integer_zerop here. This does not work correctly
11055 if TREE_CONSTANT_OVERFLOW (t). */
11056 return (TREE_INT_CST_LOW (t) != 0
11057 || TREE_INT_CST_HIGH (t) != 0);
11058
11059 case PLUS_EXPR:
11060 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11061 {
11062 /* In the presence of negative values it is hard
11063 to say anything definite. */
11064 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11065 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11066 return false;
11067 /* One of the operands must be positive and the other non-negative. */
11068 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11069 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11070 }
11071 break;
11072
11073 case MULT_EXPR:
11074 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11075 {
11076 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11077 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11078 }
11079 break;
11080
11081 case NOP_EXPR:
11082 {
11083 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11084 tree outer_type = TREE_TYPE (t);
11085
11086 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11087 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11088 }
11089 break;
11090
11091 case ADDR_EXPR:
11092 {
11093 tree base = get_base_address (TREE_OPERAND (t, 0));
11094
11095 if (!base)
11096 return false;
11097
11098 /* Weak declarations may link to NULL. */
11099 if (DECL_P (base))
11100 return !DECL_WEAK (base);
11101
11102 /* Constants are never weak. */
11103 if (CONSTANT_CLASS_P (base))
11104 return true;
11105
11106 return false;
11107 }
11108
11109 case COND_EXPR:
11110 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11111 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11112
11113 case MIN_EXPR:
11114 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11115 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11116
11117 case MAX_EXPR:
11118 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11119 {
11120 /* When both operands are nonzero, then MAX must be too. */
11121 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11122 return true;
11123
11124 /* MAX where operand 0 is positive is positive. */
11125 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11126 }
11127 /* MAX where operand 1 is positive is positive. */
11128 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11129 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11130 return true;
11131 break;
11132
11133 case COMPOUND_EXPR:
11134 case MODIFY_EXPR:
11135 case BIND_EXPR:
11136 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11137
11138 case SAVE_EXPR:
11139 case NON_LVALUE_EXPR:
11140 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11141
11142 case BIT_IOR_EXPR:
11143 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11144 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11145
11146 default:
11147 break;
11148 }
11149 return false;
11150 }
11151
11152 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11153 attempt to fold the expression to a constant without modifying TYPE,
11154 OP0 or OP1.
11155
11156 If the expression could be simplified to a constant, then return
11157 the constant. If the expression would not be simplified to a
11158 constant, then return NULL_TREE. */
11159
11160 tree
11161 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11162 {
11163 tree tem = fold_binary (code, type, op0, op1);
11164 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11165 }
11166
11167 /* Given the components of a unary expression CODE, TYPE and OP0,
11168 attempt to fold the expression to a constant without modifying
11169 TYPE or OP0.
11170
11171 If the expression could be simplified to a constant, then return
11172 the constant. If the expression would not be simplified to a
11173 constant, then return NULL_TREE. */
11174
11175 tree
11176 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11177 {
11178 tree tem = fold_unary (code, type, op0);
11179 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11180 }
11181
11182 /* If EXP represents referencing an element in a constant string
11183 (either via pointer arithmetic or array indexing), return the
11184 tree representing the value accessed, otherwise return NULL. */
11185
11186 tree
11187 fold_read_from_constant_string (tree exp)
11188 {
11189 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11190 {
11191 tree exp1 = TREE_OPERAND (exp, 0);
11192 tree index;
11193 tree string;
11194
11195 if (TREE_CODE (exp) == INDIRECT_REF)
11196 string = string_constant (exp1, &index);
11197 else
11198 {
11199 tree low_bound = array_ref_low_bound (exp);
11200 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11201
11202 /* Optimize the special-case of a zero lower bound.
11203
11204 We convert the low_bound to sizetype to avoid some problems
11205 with constant folding. (E.g. suppose the lower bound is 1,
11206 and its mode is QI. Without the conversion, (ARRAY
11207 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11208 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11209 if (! integer_zerop (low_bound))
11210 index = size_diffop (index, fold_convert (sizetype, low_bound));
11211
11212 string = exp1;
11213 }
11214
11215 if (string
11216 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11217 && TREE_CODE (string) == STRING_CST
11218 && TREE_CODE (index) == INTEGER_CST
11219 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11220 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11221 == MODE_INT)
11222 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11223 return fold_convert (TREE_TYPE (exp),
11224 build_int_cst (NULL_TREE,
11225 (TREE_STRING_POINTER (string)
11226 [TREE_INT_CST_LOW (index)])));
11227 }
11228 return NULL;
11229 }
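/* An illustrative sketch: for EXP representing "abc"[1], STRING is the
   STRING_CST "abc" and INDEX is 1, so the function returns the value
   'b' as a constant of TREE_TYPE (exp).  */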
11230
11231 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11232 an integer constant or real constant.
11233
11234 TYPE is the type of the result. */
11235
11236 static tree
11237 fold_negate_const (tree arg0, tree type)
11238 {
11239 tree t = NULL_TREE;
11240
11241 switch (TREE_CODE (arg0))
11242 {
11243 case INTEGER_CST:
11244 {
11245 unsigned HOST_WIDE_INT low;
11246 HOST_WIDE_INT high;
11247 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11248 TREE_INT_CST_HIGH (arg0),
11249 &low, &high);
11250 t = build_int_cst_wide (type, low, high);
11251 t = force_fit_type (t, 1,
11252 (overflow | TREE_OVERFLOW (arg0))
11253 && !TYPE_UNSIGNED (type),
11254 TREE_CONSTANT_OVERFLOW (arg0));
11255 break;
11256 }
11257
11258 case REAL_CST:
11259 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11260 break;
11261
11262 default:
11263 gcc_unreachable ();
11264 }
11265
11266 return t;
11267 }
11268
11269 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11270 an integer constant or real constant.
11271
11272 TYPE is the type of the result. */
11273
11274 tree
11275 fold_abs_const (tree arg0, tree type)
11276 {
11277 tree t = NULL_TREE;
11278
11279 switch (TREE_CODE (arg0))
11280 {
11281 case INTEGER_CST:
11282 /* If the value is unsigned, then the absolute value is
11283 the same as the ordinary value. */
11284 if (TYPE_UNSIGNED (type))
11285 t = arg0;
11286 /* Similarly, if the value is non-negative. */
11287 else if (INT_CST_LT (integer_minus_one_node, arg0))
11288 t = arg0;
11289 /* If the value is negative, then the absolute value is
11290 its negation. */
11291 else
11292 {
11293 unsigned HOST_WIDE_INT low;
11294 HOST_WIDE_INT high;
11295 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11296 TREE_INT_CST_HIGH (arg0),
11297 &low, &high);
11298 t = build_int_cst_wide (type, low, high);
11299 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11300 TREE_CONSTANT_OVERFLOW (arg0));
11301 }
11302 break;
11303
11304 case REAL_CST:
11305 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11306 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11307 else
11308 t = arg0;
11309 break;
11310
11311 default:
11312 gcc_unreachable ();
11313 }
11314
11315 return t;
11316 }
11317
11318 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11319 constant. TYPE is the type of the result. */
11320
11321 static tree
11322 fold_not_const (tree arg0, tree type)
11323 {
11324 tree t = NULL_TREE;
11325
11326 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11327
11328 t = build_int_cst_wide (type,
11329 ~ TREE_INT_CST_LOW (arg0),
11330 ~ TREE_INT_CST_HIGH (arg0));
11331 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11332 TREE_CONSTANT_OVERFLOW (arg0));
11333
11334 return t;
11335 }
11336
11337 /* Given CODE, a relational operator, the target type, TYPE and two
11338 constant operands OP0 and OP1, return the result of the
11339 relational operation. If the result is not a compile time
11340 constant, then return NULL_TREE. */
11341
11342 static tree
11343 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11344 {
11345 int result, invert;
11346
11347 /* From here on, the only cases we handle are when the result is
11348 known to be a constant. */
11349
11350 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11351 {
11352 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11353 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11354
11355 /* Handle the cases where either operand is a NaN. */
11356 if (real_isnan (c0) || real_isnan (c1))
11357 {
11358 switch (code)
11359 {
11360 case EQ_EXPR:
11361 case ORDERED_EXPR:
11362 result = 0;
11363 break;
11364
11365 case NE_EXPR:
11366 case UNORDERED_EXPR:
11367 case UNLT_EXPR:
11368 case UNLE_EXPR:
11369 case UNGT_EXPR:
11370 case UNGE_EXPR:
11371 case UNEQ_EXPR:
11372 result = 1;
11373 break;
11374
11375 case LT_EXPR:
11376 case LE_EXPR:
11377 case GT_EXPR:
11378 case GE_EXPR:
11379 case LTGT_EXPR:
11380 if (flag_trapping_math)
11381 return NULL_TREE;
11382 result = 0;
11383 break;
11384
11385 default:
11386 gcc_unreachable ();
11387 }
11388
11389 return constant_boolean_node (result, type);
11390 }
11391
11392 return constant_boolean_node (real_compare (code, c0, c1), type);
11393 }
11394
11395 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11396
11397 To compute GT, swap the arguments and do LT.
11398 To compute GE, do LT and invert the result.
11399 To compute LE, swap the arguments, do LT and invert the result.
11400 To compute NE, do EQ and invert the result.
11401
11402 Therefore, the code below must handle only EQ and LT. */
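   /* E.g. GE_EXPR is evaluated below by setting INVERT, computing
      LT_EXPR, and XORing the final result with 1.  */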
11403
11404 if (code == LE_EXPR || code == GT_EXPR)
11405 {
11406 tree tem = op0;
11407 op0 = op1;
11408 op1 = tem;
11409 code = swap_tree_comparison (code);
11410 }
11411
11412 /* Note that it is safe to invert for real values here because we
11413 have already handled the one case where it matters. */
11414
11415 invert = 0;
11416 if (code == NE_EXPR || code == GE_EXPR)
11417 {
11418 invert = 1;
11419 code = invert_tree_comparison (code, false);
11420 }
11421
11422 /* Compute a result for LT or EQ if args permit;
11423 otherwise return NULL_TREE. */
11424 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11425 {
11426 if (code == EQ_EXPR)
11427 result = tree_int_cst_equal (op0, op1);
11428 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11429 result = INT_CST_LT_UNSIGNED (op0, op1);
11430 else
11431 result = INT_CST_LT (op0, op1);
11432 }
11433 else
11434 return NULL_TREE;
11435
11436 if (invert)
11437 result ^= 1;
11438 return constant_boolean_node (result, type);
11439 }
11440
11441 /* Build a CLEANUP_POINT_EXPR of type TYPE containing EXPR. Don't
11442 build a cleanup point expression for EXPR if it doesn't have side
11443 effects. */
11444
11445 tree
11446 fold_build_cleanup_point_expr (tree type, tree expr)
11447 {
11448 /* If the expression does not have side effects then we don't have to wrap
11449 it with a cleanup point expression. */
11450 if (!TREE_SIDE_EFFECTS (expr))
11451 return expr;
11452
11453 /* If the expression is a return, check whether the expression inside
11454 the return, or the right-hand side of the MODIFY_EXPR inside the
11455 return, has no side effects. If either lacks side effects, we don't
11456 need to wrap the expression in a cleanup point expression. Note we
11457 don't check the left-hand side of the modify because it should always be a return decl. */
11458 if (TREE_CODE (expr) == RETURN_EXPR)
11459 {
11460 tree op = TREE_OPERAND (expr, 0);
11461 if (!op || !TREE_SIDE_EFFECTS (op))
11462 return expr;
11463 op = TREE_OPERAND (op, 1);
11464 if (!TREE_SIDE_EFFECTS (op))
11465 return expr;
11466 }
11467
11468 return build1 (CLEANUP_POINT_EXPR, type, expr);
11469 }
11470
11471 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11472 avoid confusing the gimplify process. */
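/* E.g. building the address of *p yields p itself (wrapped in a
   NOP_EXPR cast when its type differs from PTRTYPE) instead of
   ADDR_EXPR <INDIRECT_REF <p>>.  */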
11473
11474 tree
11475 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11476 {
11477 /* The size of the object is not relevant when talking about its address. */
11478 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11479 t = TREE_OPERAND (t, 0);
11480
11481 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
11482 if (TREE_CODE (t) == INDIRECT_REF
11483 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11484 {
11485 t = TREE_OPERAND (t, 0);
11486 if (TREE_TYPE (t) != ptrtype)
11487 t = build1 (NOP_EXPR, ptrtype, t);
11488 }
11489 else
11490 {
11491 tree base = t;
11492
11493 while (handled_component_p (base))
11494 base = TREE_OPERAND (base, 0);
11495 if (DECL_P (base))
11496 TREE_ADDRESSABLE (base) = 1;
11497
11498 t = build1 (ADDR_EXPR, ptrtype, t);
11499 }
11500
11501 return t;
11502 }
11503
11504 tree
11505 build_fold_addr_expr (tree t)
11506 {
11507 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11508 }
11509
11510 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11511 of an indirection through OP0, or NULL_TREE if no simplification is
11512 possible. */
11513
11514 tree
11515 fold_indirect_ref_1 (tree type, tree op0)
11516 {
11517 tree sub = op0;
11518 tree subtype;
11519
11520 STRIP_NOPS (sub);
11521 subtype = TREE_TYPE (sub);
11522 if (!POINTER_TYPE_P (subtype))
11523 return NULL_TREE;
11524
11525 if (TREE_CODE (sub) == ADDR_EXPR)
11526 {
11527 tree op = TREE_OPERAND (sub, 0);
11528 tree optype = TREE_TYPE (op);
11529 /* *&p => p */
11530 if (type == optype)
11531 return op;
11532 /* *(foo *)&fooarray => fooarray[0] */
11533 else if (TREE_CODE (optype) == ARRAY_TYPE
11534 && type == TREE_TYPE (optype))
11535 {
11536 tree type_domain = TYPE_DOMAIN (optype);
11537 tree min_val = size_zero_node;
11538 if (type_domain && TYPE_MIN_VALUE (type_domain))
11539 min_val = TYPE_MIN_VALUE (type_domain);
11540 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11541 }
11542 }
11543
11544 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11545 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11546 && type == TREE_TYPE (TREE_TYPE (subtype)))
11547 {
11548 tree type_domain;
11549 tree min_val = size_zero_node;
11550 sub = build_fold_indirect_ref (sub);
11551 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11552 if (type_domain && TYPE_MIN_VALUE (type_domain))
11553 min_val = TYPE_MIN_VALUE (type_domain);
11554 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11555 }
11556
11557 return NULL_TREE;
11558 }
11559
11560 /* Builds an expression for an indirection through T, simplifying some
11561 cases. */
11562
11563 tree
11564 build_fold_indirect_ref (tree t)
11565 {
11566 tree type = TREE_TYPE (TREE_TYPE (t));
11567 tree sub = fold_indirect_ref_1 (type, t);
11568
11569 if (sub)
11570 return sub;
11571 else
11572 return build1 (INDIRECT_REF, type, t);
11573 }
11574
11575 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11576
11577 tree
11578 fold_indirect_ref (tree t)
11579 {
11580 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11581
11582 if (sub)
11583 return sub;
11584 else
11585 return t;
11586 }
11587
11588 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11589 whose result is ignored. The type of the returned tree need not be
11590 the same as the original expression. */
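/* For illustration: given the unused value (y = z, x + 1), the
   side-effect-free "x + 1" is stripped and "y = z" is returned; an
   expression with no side effects at all collapses to
   integer_zero_node.  */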
11591
11592 tree
11593 fold_ignored_result (tree t)
11594 {
11595 if (!TREE_SIDE_EFFECTS (t))
11596 return integer_zero_node;
11597
11598 for (;;)
11599 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11600 {
11601 case tcc_unary:
11602 t = TREE_OPERAND (t, 0);
11603 break;
11604
11605 case tcc_binary:
11606 case tcc_comparison:
11607 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11608 t = TREE_OPERAND (t, 0);
11609 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11610 t = TREE_OPERAND (t, 1);
11611 else
11612 return t;
11613 break;
11614
11615 case tcc_expression:
11616 switch (TREE_CODE (t))
11617 {
11618 case COMPOUND_EXPR:
11619 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11620 return t;
11621 t = TREE_OPERAND (t, 0);
11622 break;
11623
11624 case COND_EXPR:
11625 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11626 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11627 return t;
11628 t = TREE_OPERAND (t, 0);
11629 break;
11630
11631 default:
11632 return t;
11633 }
11634 break;
11635
11636 default:
11637 return t;
11638 }
11639 }
11640
11641 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11642 This can only be applied to objects of a sizetype. */
11643
11644 tree
11645 round_up (tree value, int divisor)
11646 {
11647 tree div = NULL_TREE;
11648
11649 gcc_assert (divisor > 0);
11650 if (divisor == 1)
11651 return value;
11652
11653 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11654 have to do anything. Only do this when we are not given a
11655 constant, because in that case the check is more expensive than
11656 just doing the rounding. */
11657 if (TREE_CODE (value) != INTEGER_CST)
11658 {
11659 div = build_int_cst (TREE_TYPE (value), divisor);
11660
11661 if (multiple_of_p (TREE_TYPE (value), value, div))
11662 return value;
11663 }
11664
11665 /* If divisor is a power of two, simplify this to bit manipulation. */
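  /* (A sketch of the identity: round_up (value, 8) computes
     (value + 7) & -8, because -8 in two's complement is the mask ~7
     that clears the low three bits.)  */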
11666 if (divisor == (divisor & -divisor))
11667 {
11668 tree t;
11669
11670 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11671 value = size_binop (PLUS_EXPR, value, t);
11672 t = build_int_cst (TREE_TYPE (value), -divisor);
11673 value = size_binop (BIT_AND_EXPR, value, t);
11674 }
11675 else
11676 {
11677 if (!div)
11678 div = build_int_cst (TREE_TYPE (value), divisor);
11679 value = size_binop (CEIL_DIV_EXPR, value, div);
11680 value = size_binop (MULT_EXPR, value, div);
11681 }
11682
11683 return value;
11684 }
11685
11686 /* Likewise, but round down. */
11687
11688 tree
11689 round_down (tree value, int divisor)
11690 {
11691 tree div = NULL_TREE;
11692
11693 gcc_assert (divisor > 0);
11694 if (divisor == 1)
11695 return value;
11696
11697 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11698 have to do anything. Only do this when we are not given a
11699 constant, because in that case the check is more expensive than
11700 just doing the rounding. */
11701 if (TREE_CODE (value) != INTEGER_CST)
11702 {
11703 div = build_int_cst (TREE_TYPE (value), divisor);
11704
11705 if (multiple_of_p (TREE_TYPE (value), value, div))
11706 return value;
11707 }
11708
11709 /* If divisor is a power of two, simplify this to bit manipulation. */
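  /* (Correspondingly, round_down (value, 8) is simply value & -8.)  */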
11710 if (divisor == (divisor & -divisor))
11711 {
11712 tree t;
11713
11714 t = build_int_cst (TREE_TYPE (value), -divisor);
11715 value = size_binop (BIT_AND_EXPR, value, t);
11716 }
11717 else
11718 {
11719 if (!div)
11720 div = build_int_cst (TREE_TYPE (value), divisor);
11721 value = size_binop (FLOOR_DIV_EXPR, value, div);
11722 value = size_binop (MULT_EXPR, value, div);
11723 }
11724
11725 return value;
11726 }
11727
11728 /* Returns the pointer to the base of the object addressed by EXP and
11729 extracts the information about the offset of the access, storing it
11730 in PBITPOS and POFFSET. */
11731
11732 static tree
11733 split_address_to_core_and_offset (tree exp,
11734 HOST_WIDE_INT *pbitpos, tree *poffset)
11735 {
11736 tree core;
11737 enum machine_mode mode;
11738 int unsignedp, volatilep;
11739 HOST_WIDE_INT bitsize;
11740
11741 if (TREE_CODE (exp) == ADDR_EXPR)
11742 {
11743 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11744 poffset, &mode, &unsignedp, &volatilep,
11745 false);
11746
11747 if (TREE_CODE (core) == INDIRECT_REF)
11748 core = TREE_OPERAND (core, 0);
11749 }
11750 else
11751 {
11752 core = exp;
11753 *pbitpos = 0;
11754 *poffset = NULL_TREE;
11755 }
11756
11757 return core;
11758 }
11759
11760 /* Returns true if addresses of E1 and E2 differ by a constant, false
11761 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11762
11763 bool
11764 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11765 {
11766 tree core1, core2;
11767 HOST_WIDE_INT bitpos1, bitpos2;
11768 tree toffset1, toffset2, tdiff, type;
11769
11770 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11771 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11772
11773 if (bitpos1 % BITS_PER_UNIT != 0
11774 || bitpos2 % BITS_PER_UNIT != 0
11775 || !operand_equal_p (core1, core2, 0))
11776 return false;
11777
11778 if (toffset1 && toffset2)
11779 {
11780 type = TREE_TYPE (toffset1);
11781 if (type != TREE_TYPE (toffset2))
11782 toffset2 = fold_convert (type, toffset2);
11783
11784 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11785 if (!host_integerp (tdiff, 0))
11786 return false;
11787
11788 *diff = tree_low_cst (tdiff, 0);
11789 }
11790 else if (toffset1 || toffset2)
11791 {
11792 /* If only one of the offsets is non-constant, the difference cannot
11793 be a constant. */
11794 return false;
11795 }
11796 else
11797 *diff = 0;
11798
11799 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11800 return true;
11801 }
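/* An illustrative sketch: with int a[8], E1 == &a[4] and E2 == &a[1]
   share the core A and have constant offsets, so *DIFF is set to
   3 * sizeof (int) == 12 on a target with 4-byte int.  */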
11802
11803 /* Simplify the floating point expression EXP when the sign of the
11804 result is not significant. Return NULL_TREE if no simplification
11805 is possible. */
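/* E.g. NEGATE_EXPR <x> strips to x, and (-x) * y strips to x * y;
   a caller may apply this when only the magnitude of the result
   matters, such as for the argument of fabs.  */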
11806
11807 tree
11808 fold_strip_sign_ops (tree exp)
11809 {
11810 tree arg0, arg1;
11811
11812 switch (TREE_CODE (exp))
11813 {
11814 case ABS_EXPR:
11815 case NEGATE_EXPR:
11816 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11817 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11818
11819 case MULT_EXPR:
11820 case RDIV_EXPR:
11821 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11822 return NULL_TREE;
11823 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11824 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11825 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11826 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11827 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11828 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11829 break;
11830
11831 default:
11832 break;
11833 }
11834 return NULL_TREE;
11835 }
11836