/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
45
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
61
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
66 COMPCODE_FALSE = 0,
67 COMPCODE_LT = 1,
68 COMPCODE_EQ = 2,
69 COMPCODE_LE = 3,
70 COMPCODE_GT = 4,
71 COMPCODE_LTGT = 5,
72 COMPCODE_GE = 6,
73 COMPCODE_ORD = 7,
74 COMPCODE_UNORD = 8,
75 COMPCODE_UNLT = 9,
76 COMPCODE_UNEQ = 10,
77 COMPCODE_UNLE = 11,
78 COMPCODE_UNGT = 12,
79 COMPCODE_NE = 13,
80 COMPCODE_UNGE = 14,
81 COMPCODE_TRUE = 15
82 };
83
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
106 tree *, tree *);
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
114 tree);
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
124 tree, tree,
125 tree, tree, int);
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
128 tree, tree, tree);
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
135
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
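
/* Illustrative sketch, not part of the original source: a minimal,
   self-contained demonstration of the OVERFLOW_SUM_SIGN test on plain
   host integers.  The helper name is hypothetical, and the sum is
   computed through unsigned arithmetic so the wraparound itself is
   well defined.  */
#if 0
static int
overflow_sum_sign_demo (void)
{
  long a = LONG_MAX, b = 1;
  /* Wrapping 2's complement addition, done in unsigned arithmetic.  */
  long sum = (long) ((unsigned long) a + (unsigned long) b);
  /* A and B share a sign but SUM's sign differs, so this yields 1.  */
  return (~(a ^ b) & (a ^ sum)) < 0;
}
#endif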
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
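
/* Illustrative sketch, not part of the original source: a round trip
   through encode/decode, assuming HOST_BITS_PER_WIDE_INT == 64 so that
   each 64-bit piece splits into two 32-bit digits.  */
#if 0
static void
encode_decode_demo (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, 0x123456789abcdef0, 0x1);
  /* words[0] == 0x9abcdef0 (LOWPART of low),
     words[1] == 0x12345678 (HIGHPART of low),
     words[2] == 0x1, words[3] == 0x0.  */
  decode (words, &low, &hi);
  /* low == 0x123456789abcdef0 and hi == 0x1 again, since the value of
     each piece is LOWPART + HIGHPART * BASE.  */
}
#endif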
\f
/* T is an INT_CST node.  OVERFLOWABLE indicates whether we are
   interested in overflow of the value: when >0 we are only interested
   in signed overflow, for <0 we are interested in any overflow.
   OVERFLOWED indicates whether overflow has already occurred.
   CONST_OVERFLOWED indicates whether constant overflow has already
   occurred.  We force T's value to be within range of T's type (by
   setting to 0 or 1 all the bits outside the type's range).  We set
   TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
	CONST_OVERFLOWED is nonzero,
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT) 1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT) (-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
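
/* Illustrative sketch, not part of the original source: the carry out
   of the low word is detected by the unsigned comparison L < L1, and
   OVERFLOW_SUM_SIGN reports signed overflow of the full-width sum.  */
#if 0
static void
add_double_demo (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* -1 + 1: the low word wraps to 0 and carries into the high word,
     giving lv == 0, hv == 0 and an overflow result of 0.  */
  int ovf = add_double (~(unsigned HOST_WIDE_INT) 0, -1, 1, 0, &lv, &hv);
  (void) ovf;
}
#endif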

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
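
/* Illustrative sketch, not part of the original source: a rotate by
   COUNT within PREC bits is the OR of a left shift by COUNT and a
   logical right shift by PREC - COUNT, which is exactly how
   lrotate_double is built from the two shift routines.  The single-word
   helper below is hypothetical.  */
#if 0
static unsigned int
rotl32_demo (unsigned int x, unsigned int count)
{
  count %= 32;
  /* Guard count == 0 so neither shift amount reaches the word size.  */
  return count ? (x << count) | (x >> (32 - count)) : x;
}
#endif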

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }
      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;	/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
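
/* Illustrative worked example, not part of the original source:
   dividing -7 by 2 under the four rounding modes gives

     TRUNC_DIV_EXPR:  quo = -3, rem = -1   (round toward zero)
     FLOOR_DIV_EXPR:  quo = -4, rem =  1   (round toward -infinity)
     CEIL_DIV_EXPR:   quo = -3, rem = -1   (round toward +infinity)
     ROUND_DIV_EXPR:  quo = -4, rem =  1   (round to closest; the tie
					    at -3.5 goes away from zero)

   and in every case num == quo * den + rem, which is exactly the
   invariant the "compute true remainder" step above re-establishes.  */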

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
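
/* Illustrative sketch, not part of the original source: in 2's
   complement the most negative value of a signed type has no positive
   counterpart, so it is the one constant whose negation overflows.
   For an 8-bit type that value is -128, i.e. the bit pattern
   1 << (prec - 1).  The helper below is hypothetical.  */
#if 0
static int
may_negate_demo (signed char c)
{
  /* Every value except -128 (bit pattern 0x80) negates safely.  */
  return (unsigned char) c != (unsigned char) 0x80;
}
#endif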

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 0));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 1));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
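
/* Illustrative sketch, not part of the original source, of the
   RSHIFT_EXPR transformation above: on targets where signed >> is an
   arithmetic shift, -((int) x >> 31) and (unsigned) x >> 31 both
   compute "1 if the sign bit of x is set, else 0" for 32-bit int, so
   the negation can be folded away.  The helper name is hypothetical.  */
#if 0
static unsigned int
sign_bit_demo (int x)
{
  /* The rewritten, always well-defined form.  */
  return (unsigned int) x >> 31;
}
#endif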
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
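
/* Illustrative example, not part of the original source: splitting
   IN = a - 5 with CODE == PLUS_EXPR takes the MINUS_EXPR branch above;
   the literal 5 was subtracted, so it lands in *MINUS_LITP rather than
   *LITP, *CONP stays null, and the function returns a as the variable
   part.  The caller can then re-associate the pieces to recover IN.  */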

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
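
/* Illustrative sketch, not part of the original source: folding the
   constant expression 2 + 3 through int_const_binop, using only the
   API defined in this file and build_int_cst.  */
#if 0
static tree
int_const_binop_demo (void)
{
  tree a = build_int_cst (integer_type_node, 2);
  tree b = build_int_cst (integer_type_node, 3);
  /* Returns an INTEGER_CST with value 5; with NOTRUNC == 0 the result
     is passed through force_fit_type to fit integer_type_node.  */
  return int_const_binop (PLUS_EXPR, a, b, 0);
}
#endif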

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */

      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;
      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */

      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree t1, t2, real, imag;
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t1 = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	    t2 = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      {
		real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
		imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
	      }
	    else
	      {
		real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
		imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
		if (!real || !imag)
		  return NULL_TREE;
	      }

	    t = build_complex (type, real, imag);
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      return t;
    }
  return 0;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
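
/* Illustrative example, not part of the original source: with sizetype
   constants 4 and 12, size_diffop (4, 12) returns -8 in ssizetype.
   Since 12 > 4, the code computes 12 - 4 in the unsigned size type
   (which cannot overflow), converts the 8 to the signed type, and
   negates it by subtracting from zero.  */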
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
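
/* Illustrative example, not part of the original source: under these
   Java-style saturating semantics, converting the REAL_CST 1.0e30 to a
   32-bit signed type yields INT_MAX == 2147483647 with the overflow
   flags set on the result, converting -1.0e30 yields INT_MIN, and a
   NaN source converts to 0 (also flagged as overflowed).  */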

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */
1856
1857 static tree
1858 fold_convert_const_real_from_real (tree type, tree arg1)
1859 {
1860 REAL_VALUE_TYPE value;
1861 tree t;
1862
1863 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1864 t = build_real (type, value);
1865
1866 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1867 TREE_CONSTANT_OVERFLOW (t)
1868 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1869 return t;
1870 }
1871
1872 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1873 type TYPE. If no simplification can be done return NULL_TREE. */
1874
1875 static tree
1876 fold_convert_const (enum tree_code code, tree type, tree arg1)
1877 {
1878 if (TREE_TYPE (arg1) == type)
1879 return arg1;
1880
1881 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1882 {
1883 if (TREE_CODE (arg1) == INTEGER_CST)
1884 return fold_convert_const_int_from_int (type, arg1);
1885 else if (TREE_CODE (arg1) == REAL_CST)
1886 return fold_convert_const_int_from_real (code, type, arg1);
1887 }
1888 else if (TREE_CODE (type) == REAL_TYPE)
1889 {
1890 if (TREE_CODE (arg1) == INTEGER_CST)
1891 return build_real_from_int_cst (type, arg1);
1892 if (TREE_CODE (arg1) == REAL_CST)
1893 return fold_convert_const_real_from_real (type, arg1);
1894 }
1895 return NULL_TREE;
1896 }
1897
1898 /* Construct a vector of zero elements of vector type TYPE. */
1899
1900 static tree
1901 build_zero_vector (tree type)
1902 {
1903 tree elem, list;
1904 int i, units;
1905
1906 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1907 units = TYPE_VECTOR_SUBPARTS (type);
1908
1909 list = NULL_TREE;
1910 for (i = 0; i < units; i++)
1911 list = tree_cons (NULL_TREE, elem, list);
1912 return build_vector (type, list);
1913 }
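
/* For example (illustrative): for a four-element vector type this
   builds the VECTOR_CST "{ 0, 0, 0, 0 }"; the scalar zero is
   converted to the element type once and then chained UNITS times.  */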
1914
1915 /* Convert expression ARG to type TYPE. Used by the middle-end for
1916 simple conversions in preference to calling the front-end's convert. */
1917
1918 tree
1919 fold_convert (tree type, tree arg)
1920 {
1921 tree orig = TREE_TYPE (arg);
1922 tree tem;
1923
1924 if (type == orig)
1925 return arg;
1926
1927 if (TREE_CODE (arg) == ERROR_MARK
1928 || TREE_CODE (type) == ERROR_MARK
1929 || TREE_CODE (orig) == ERROR_MARK)
1930 return error_mark_node;
1931
1932 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1933 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1934 TYPE_MAIN_VARIANT (orig)))
1935 return fold_build1 (NOP_EXPR, type, arg);
1936
1937 switch (TREE_CODE (type))
1938 {
1939 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1940 case POINTER_TYPE: case REFERENCE_TYPE:
1941 case OFFSET_TYPE:
1942 if (TREE_CODE (arg) == INTEGER_CST)
1943 {
1944 tem = fold_convert_const (NOP_EXPR, type, arg);
1945 if (tem != NULL_TREE)
1946 return tem;
1947 }
1948 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1949 || TREE_CODE (orig) == OFFSET_TYPE)
1950 return fold_build1 (NOP_EXPR, type, arg);
1951 if (TREE_CODE (orig) == COMPLEX_TYPE)
1952 {
1953 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1954 return fold_convert (type, tem);
1955 }
1956 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1957 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1958 return fold_build1 (NOP_EXPR, type, arg);
1959
1960 case REAL_TYPE:
1961 if (TREE_CODE (arg) == INTEGER_CST)
1962 {
1963 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1964 if (tem != NULL_TREE)
1965 return tem;
1966 }
1967 else if (TREE_CODE (arg) == REAL_CST)
1968 {
1969 tem = fold_convert_const (NOP_EXPR, type, arg);
1970 if (tem != NULL_TREE)
1971 return tem;
1972 }
1973
1974 switch (TREE_CODE (orig))
1975 {
1976 case INTEGER_TYPE: case CHAR_TYPE:
1977 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1978 case POINTER_TYPE: case REFERENCE_TYPE:
1979 return fold_build1 (FLOAT_EXPR, type, arg);
1980
1981 case REAL_TYPE:
1982 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1983 type, arg);
1984
1985 case COMPLEX_TYPE:
1986 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1987 return fold_convert (type, tem);
1988
1989 default:
1990 gcc_unreachable ();
1991 }
1992
1993 case COMPLEX_TYPE:
1994 switch (TREE_CODE (orig))
1995 {
1996 case INTEGER_TYPE: case CHAR_TYPE:
1997 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1998 case POINTER_TYPE: case REFERENCE_TYPE:
1999 case REAL_TYPE:
2000 return build2 (COMPLEX_EXPR, type,
2001 fold_convert (TREE_TYPE (type), arg),
2002 fold_convert (TREE_TYPE (type), integer_zero_node));
2003 case COMPLEX_TYPE:
2004 {
2005 tree rpart, ipart;
2006
2007 if (TREE_CODE (arg) == COMPLEX_EXPR)
2008 {
2009 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2010 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2011 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2012 }
2013
2014 arg = save_expr (arg);
2015 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2016 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2017 rpart = fold_convert (TREE_TYPE (type), rpart);
2018 ipart = fold_convert (TREE_TYPE (type), ipart);
2019 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2020 }
2021
2022 default:
2023 gcc_unreachable ();
2024 }
2025
2026 case VECTOR_TYPE:
2027 if (integer_zerop (arg))
2028 return build_zero_vector (type);
2029 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2030 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2031 || TREE_CODE (orig) == VECTOR_TYPE);
2032 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2033
2034 case VOID_TYPE:
2035 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040 }
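
/* Two illustrative cases of the switch above: converting a
   COMPLEX_TYPE value C to a REAL_TYPE folds through its real part,
   roughly "(double) C -> (double) REALPART_EXPR <C>", while
   converting a scalar X to a COMPLEX_TYPE builds
   "COMPLEX_EXPR <X', 0>" with both parts converted to the complex
   element type.  */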
2041 \f
2042 /* Return false if X can be assumed not to be an lvalue, true
2043 otherwise. */
2044
2045 static bool
2046 maybe_lvalue_p (tree x)
2047 {
2048 /* We only need to wrap lvalue tree codes. */
2049 switch (TREE_CODE (x))
2050 {
2051 case VAR_DECL:
2052 case PARM_DECL:
2053 case RESULT_DECL:
2054 case LABEL_DECL:
2055 case FUNCTION_DECL:
2056 case SSA_NAME:
2057
2058 case COMPONENT_REF:
2059 case INDIRECT_REF:
2060 case ALIGN_INDIRECT_REF:
2061 case MISALIGNED_INDIRECT_REF:
2062 case ARRAY_REF:
2063 case ARRAY_RANGE_REF:
2064 case BIT_FIELD_REF:
2065 case OBJ_TYPE_REF:
2066
2067 case REALPART_EXPR:
2068 case IMAGPART_EXPR:
2069 case PREINCREMENT_EXPR:
2070 case PREDECREMENT_EXPR:
2071 case SAVE_EXPR:
2072 case TRY_CATCH_EXPR:
2073 case WITH_CLEANUP_EXPR:
2074 case COMPOUND_EXPR:
2075 case MODIFY_EXPR:
2076 case TARGET_EXPR:
2077 case COND_EXPR:
2078 case BIND_EXPR:
2079 case MIN_EXPR:
2080 case MAX_EXPR:
2081 break;
2082
2083 default:
2084 /* Assume the worst for front-end tree codes. */
2085 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2086 break;
2087 return false;
2088 }
2089
2090 return true;
2091 }
2092
2093 /* Return an expr equal to X but certainly not valid as an lvalue. */
2094
2095 tree
2096 non_lvalue (tree x)
2097 {
2098 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2099 us. */
2100 if (in_gimple_form)
2101 return x;
2102
2103 if (! maybe_lvalue_p (x))
2104 return x;
2105 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2106 }
2107
2108 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2109 Zero means allow extended lvalues. */
2110
2111 int pedantic_lvalues;
2112
2113 /* When pedantic, return an expr equal to X but certainly not valid as a
2114 pedantic lvalue. Otherwise, return X. */
2115
2116 static tree
2117 pedantic_non_lvalue (tree x)
2118 {
2119 if (pedantic_lvalues)
2120 return non_lvalue (x);
2121 else
2122 return x;
2123 }
2124 \f
2125 /* Given a tree comparison code, return the code that is the logical inverse
2126 of the given code. It is not safe to do this for floating-point
2127 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2128 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2129
2130 enum tree_code
2131 invert_tree_comparison (enum tree_code code, bool honor_nans)
2132 {
2133 if (honor_nans && flag_trapping_math)
2134 return ERROR_MARK;
2135
2136 switch (code)
2137 {
2138 case EQ_EXPR:
2139 return NE_EXPR;
2140 case NE_EXPR:
2141 return EQ_EXPR;
2142 case GT_EXPR:
2143 return honor_nans ? UNLE_EXPR : LE_EXPR;
2144 case GE_EXPR:
2145 return honor_nans ? UNLT_EXPR : LT_EXPR;
2146 case LT_EXPR:
2147 return honor_nans ? UNGE_EXPR : GE_EXPR;
2148 case LE_EXPR:
2149 return honor_nans ? UNGT_EXPR : GT_EXPR;
2150 case LTGT_EXPR:
2151 return UNEQ_EXPR;
2152 case UNEQ_EXPR:
2153 return LTGT_EXPR;
2154 case UNGT_EXPR:
2155 return LE_EXPR;
2156 case UNGE_EXPR:
2157 return LT_EXPR;
2158 case UNLT_EXPR:
2159 return GE_EXPR;
2160 case UNLE_EXPR:
2161 return GT_EXPR;
2162 case ORDERED_EXPR:
2163 return UNORDERED_EXPR;
2164 case UNORDERED_EXPR:
2165 return ORDERED_EXPR;
2166 default:
2167 gcc_unreachable ();
2168 }
2169 }
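
/* Example (illustrative, and assuming flag_trapping_math is off so we
   do not bail out above): with NaNs, "!(x < y)" is not "x >= y",
   because both comparisons are false when either operand is NaN.
   Hence

     invert_tree_comparison (LT_EXPR, false)  ==>  GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   ==>  UNGE_EXPR

   where UNGE_EXPR ("unordered or greater-equal") is true on NaN
   operands, exactly when "x < y" is false.  */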
2170
2171 /* Similar, but return the comparison that results if the operands are
2172 swapped. This is safe for floating-point. */
2173
2174 enum tree_code
2175 swap_tree_comparison (enum tree_code code)
2176 {
2177 switch (code)
2178 {
2179 case EQ_EXPR:
2180 case NE_EXPR:
2181 case ORDERED_EXPR:
2182 case UNORDERED_EXPR:
2183 case LTGT_EXPR:
2184 case UNEQ_EXPR:
2185 return code;
2186 case GT_EXPR:
2187 return LT_EXPR;
2188 case GE_EXPR:
2189 return LE_EXPR;
2190 case LT_EXPR:
2191 return GT_EXPR;
2192 case LE_EXPR:
2193 return GE_EXPR;
2194 case UNGT_EXPR:
2195 return UNLT_EXPR;
2196 case UNGE_EXPR:
2197 return UNLE_EXPR;
2198 case UNLT_EXPR:
2199 return UNGT_EXPR;
2200 case UNLE_EXPR:
2201 return UNGE_EXPR;
2202 default:
2203 gcc_unreachable ();
2204 }
2205 }
2206
2207
2208 /* Convert a comparison tree code from an enum tree_code representation
2209 into a compcode bit-based encoding. This function is the inverse of
2210 compcode_to_comparison. */
2211
2212 static enum comparison_code
2213 comparison_to_compcode (enum tree_code code)
2214 {
2215 switch (code)
2216 {
2217 case LT_EXPR:
2218 return COMPCODE_LT;
2219 case EQ_EXPR:
2220 return COMPCODE_EQ;
2221 case LE_EXPR:
2222 return COMPCODE_LE;
2223 case GT_EXPR:
2224 return COMPCODE_GT;
2225 case NE_EXPR:
2226 return COMPCODE_NE;
2227 case GE_EXPR:
2228 return COMPCODE_GE;
2229 case ORDERED_EXPR:
2230 return COMPCODE_ORD;
2231 case UNORDERED_EXPR:
2232 return COMPCODE_UNORD;
2233 case UNLT_EXPR:
2234 return COMPCODE_UNLT;
2235 case UNEQ_EXPR:
2236 return COMPCODE_UNEQ;
2237 case UNLE_EXPR:
2238 return COMPCODE_UNLE;
2239 case UNGT_EXPR:
2240 return COMPCODE_UNGT;
2241 case LTGT_EXPR:
2242 return COMPCODE_LTGT;
2243 case UNGE_EXPR:
2244 return COMPCODE_UNGE;
2245 default:
2246 gcc_unreachable ();
2247 }
2248 }
2249
2250 /* Convert a compcode bit-based encoding of a comparison operator back
2251 to GCC's enum tree_code representation. This function is the
2252 inverse of comparison_to_compcode. */
2253
2254 static enum tree_code
2255 compcode_to_comparison (enum comparison_code code)
2256 {
2257 switch (code)
2258 {
2259 case COMPCODE_LT:
2260 return LT_EXPR;
2261 case COMPCODE_EQ:
2262 return EQ_EXPR;
2263 case COMPCODE_LE:
2264 return LE_EXPR;
2265 case COMPCODE_GT:
2266 return GT_EXPR;
2267 case COMPCODE_NE:
2268 return NE_EXPR;
2269 case COMPCODE_GE:
2270 return GE_EXPR;
2271 case COMPCODE_ORD:
2272 return ORDERED_EXPR;
2273 case COMPCODE_UNORD:
2274 return UNORDERED_EXPR;
2275 case COMPCODE_UNLT:
2276 return UNLT_EXPR;
2277 case COMPCODE_UNEQ:
2278 return UNEQ_EXPR;
2279 case COMPCODE_UNLE:
2280 return UNLE_EXPR;
2281 case COMPCODE_UNGT:
2282 return UNGT_EXPR;
2283 case COMPCODE_LTGT:
2284 return LTGT_EXPR;
2285 case COMPCODE_UNGE:
2286 return UNGE_EXPR;
2287 default:
2288 gcc_unreachable ();
2289 }
2290 }
2291
2292 /* Return a tree for the comparison which is the combination of
2293 doing the AND or OR (depending on CODE) of the two operations LCODE
2294 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2295 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2296 if this makes the transformation invalid. */
2297
2298 tree
2299 combine_comparisons (enum tree_code code, enum tree_code lcode,
2300 enum tree_code rcode, tree truth_type,
2301 tree ll_arg, tree lr_arg)
2302 {
2303 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2304 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2305 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2306 enum comparison_code compcode;
2307
2308 switch (code)
2309 {
2310 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2311 compcode = lcompcode & rcompcode;
2312 break;
2313
2314 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2315 compcode = lcompcode | rcompcode;
2316 break;
2317
2318 default:
2319 return NULL_TREE;
2320 }
2321
2322 if (!honor_nans)
2323 {
2324 /* Eliminate unordered comparisons, as well as LTGT and ORD
2325 which are not used unless the mode has NaNs. */
2326 compcode &= ~COMPCODE_UNORD;
2327 if (compcode == COMPCODE_LTGT)
2328 compcode = COMPCODE_NE;
2329 else if (compcode == COMPCODE_ORD)
2330 compcode = COMPCODE_TRUE;
2331 }
2332 else if (flag_trapping_math)
2333 {
2334 /* Check that the original operation and the optimized ones will trap
2335 under the same condition. */
2336 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2337 && (lcompcode != COMPCODE_EQ)
2338 && (lcompcode != COMPCODE_ORD);
2339 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2340 && (rcompcode != COMPCODE_EQ)
2341 && (rcompcode != COMPCODE_ORD);
2342 bool trap = (compcode & COMPCODE_UNORD) == 0
2343 && (compcode != COMPCODE_EQ)
2344 && (compcode != COMPCODE_ORD);
2345
2346 /* In a short-circuited boolean expression the LHS might be
2347 such that the RHS, if evaluated, will never trap. For
2348 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2349 if neither x nor y is NaN. (This is a mixed blessing: for
2350 example, the expression above will never trap, hence
2351 optimizing it to x < y would be invalid). */
2352 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2353 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2354 rtrap = false;
2355
2356 /* If the comparison was short-circuited, and only the RHS
2357 trapped, we may now generate a spurious trap. */
2358 if (rtrap && !ltrap
2359 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2360 return NULL_TREE;
2361
2362 /* If we changed the conditions that cause a trap, we lose. */
2363 if ((ltrap || rtrap) != trap)
2364 return NULL_TREE;
2365 }
2366
2367 if (compcode == COMPCODE_TRUE)
2368 return constant_boolean_node (true, truth_type);
2369 else if (compcode == COMPCODE_FALSE)
2370 return constant_boolean_node (false, truth_type);
2371 else
2372 return fold_build2 (compcode_to_comparison (compcode),
2373 truth_type, ll_arg, lr_arg);
2374 }
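
/* A worked example (illustrative, ignoring the NaN handling above):
   for "a < b || a == b", LT_EXPR and EQ_EXPR map to COMPCODE_LT and
   COMPCODE_EQ, whose bitwise OR is COMPCODE_LE, so the pair folds to
   "a <= b".  For "a < b && a == b" the bitwise AND is COMPCODE_FALSE,
   so the pair folds to constant false.  */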
2375
2376 /* Return nonzero if CODE is a tree code that represents a truth value. */
2377
2378 static int
2379 truth_value_p (enum tree_code code)
2380 {
2381 return (TREE_CODE_CLASS (code) == tcc_comparison
2382 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2383 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2384 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2385 }
2386 \f
2387 /* Return nonzero if two operands (typically of the same tree node)
2388 are necessarily equal. If either argument has side-effects this
2389 function returns zero. FLAGS modifies behavior as follows:
2390
2391 If OEP_ONLY_CONST is set, only return nonzero for constants.
2392 This function tests whether the operands are indistinguishable;
2393 it does not test whether they are equal using C's == operation.
2394 The distinction is important for IEEE floating point, because
2395 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2396 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2397
2398 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2399 even though it may hold multiple values during a function.
2400 This is because a GCC tree node guarantees that nothing else is
2401 executed between the evaluation of its "operands" (which may often
2402 be evaluated in arbitrary order). Hence if the operands themselves
2403 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2404 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2405 unset means assuming isochronic (or instantaneous) tree equivalence.
2406 Unless comparing arbitrary expression trees, such as from different
2407 statements, this flag can usually be left unset.
2408
2409 If OEP_PURE_SAME is set, then pure functions with identical arguments
2410 are considered the same. It is used when the caller has other ways
2411 to ensure that global memory is unchanged in between. */
2412
2413 int
2414 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2415 {
2416 /* If either is ERROR_MARK, they aren't equal. */
2417 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2418 return 0;
2419
2420 /* If the two types don't have the same signedness, then we can't consider
2421 the operands equal. We must check this before the STRIP_NOPS calls
2422 because they may change the signedness of the arguments. */
2423 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2424 return 0;
2425
2426 STRIP_NOPS (arg0);
2427 STRIP_NOPS (arg1);
2428
2429 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2430 /* This is needed for conversions and for COMPONENT_REF.
2431 Might as well play it safe and always test this. */
2432 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2433 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2434 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2435 return 0;
2436
2437 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2438 We don't care about side effects in that case because the SAVE_EXPR
2439 takes care of that for us. In all other cases, two expressions are
2440 equal if they have no side effects. If we have two identical
2441 expressions with side effects that should be treated the same due
2442 to the only side effects being identical SAVE_EXPR's, that will
2443 be detected in the recursive calls below. */
2444 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2445 && (TREE_CODE (arg0) == SAVE_EXPR
2446 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2447 return 1;
2448
2449 /* Next handle constant cases, those for which we can return 1 even
2450 if ONLY_CONST is set. */
2451 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2452 switch (TREE_CODE (arg0))
2453 {
2454 case INTEGER_CST:
2455 return (! TREE_CONSTANT_OVERFLOW (arg0)
2456 && ! TREE_CONSTANT_OVERFLOW (arg1)
2457 && tree_int_cst_equal (arg0, arg1));
2458
2459 case REAL_CST:
2460 return (! TREE_CONSTANT_OVERFLOW (arg0)
2461 && ! TREE_CONSTANT_OVERFLOW (arg1)
2462 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2463 TREE_REAL_CST (arg1)));
2464
2465 case VECTOR_CST:
2466 {
2467 tree v1, v2;
2468
2469 if (TREE_CONSTANT_OVERFLOW (arg0)
2470 || TREE_CONSTANT_OVERFLOW (arg1))
2471 return 0;
2472
2473 v1 = TREE_VECTOR_CST_ELTS (arg0);
2474 v2 = TREE_VECTOR_CST_ELTS (arg1);
2475 while (v1 && v2)
2476 {
2477 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2478 flags))
2479 return 0;
2480 v1 = TREE_CHAIN (v1);
2481 v2 = TREE_CHAIN (v2);
2482 }
2483
2484 return v1 == v2;
2485 }
2486
2487 case COMPLEX_CST:
2488 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2489 flags)
2490 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2491 flags));
2492
2493 case STRING_CST:
2494 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2495 && ! memcmp (TREE_STRING_POINTER (arg0),
2496 TREE_STRING_POINTER (arg1),
2497 TREE_STRING_LENGTH (arg0)));
2498
2499 case ADDR_EXPR:
2500 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2501 0);
2502 default:
2503 break;
2504 }
2505
2506 if (flags & OEP_ONLY_CONST)
2507 return 0;
2508
2509 /* Define macros to test an operand from arg0 and arg1 for equality and a
2510 variant that allows null and views null as being different from any
2511 non-null value. In the latter case, if either is null, then both
2512 must be; otherwise, do the normal comparison. */
2513 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2514 TREE_OPERAND (arg1, N), flags)
2515
2516 #define OP_SAME_WITH_NULL(N) \
2517 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2518 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2519
2520 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2521 {
2522 case tcc_unary:
2523 /* Two conversions are equal only if signedness and modes match. */
2524 switch (TREE_CODE (arg0))
2525 {
2526 case NOP_EXPR:
2527 case CONVERT_EXPR:
2528 case FIX_CEIL_EXPR:
2529 case FIX_TRUNC_EXPR:
2530 case FIX_FLOOR_EXPR:
2531 case FIX_ROUND_EXPR:
2532 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2533 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2534 return 0;
2535 break;
2536 default:
2537 break;
2538 }
2539
2540 return OP_SAME (0);
2541
2542
2543 case tcc_comparison:
2544 case tcc_binary:
2545 if (OP_SAME (0) && OP_SAME (1))
2546 return 1;
2547
2548 /* For commutative ops, allow the other order. */
2549 return (commutative_tree_code (TREE_CODE (arg0))
2550 && operand_equal_p (TREE_OPERAND (arg0, 0),
2551 TREE_OPERAND (arg1, 1), flags)
2552 && operand_equal_p (TREE_OPERAND (arg0, 1),
2553 TREE_OPERAND (arg1, 0), flags));
2554
2555 case tcc_reference:
2556 /* If either of the pointer (or reference) expressions we are
2557 dereferencing contain a side effect, these cannot be equal. */
2558 if (TREE_SIDE_EFFECTS (arg0)
2559 || TREE_SIDE_EFFECTS (arg1))
2560 return 0;
2561
2562 switch (TREE_CODE (arg0))
2563 {
2564 case INDIRECT_REF:
2565 case ALIGN_INDIRECT_REF:
2566 case MISALIGNED_INDIRECT_REF:
2567 case REALPART_EXPR:
2568 case IMAGPART_EXPR:
2569 return OP_SAME (0);
2570
2571 case ARRAY_REF:
2572 case ARRAY_RANGE_REF:
2573 /* Operands 2 and 3 may be null. */
2574 return (OP_SAME (0)
2575 && OP_SAME (1)
2576 && OP_SAME_WITH_NULL (2)
2577 && OP_SAME_WITH_NULL (3));
2578
2579 case COMPONENT_REF:
2580 /* Handle operand 2 the same as for ARRAY_REF. */
2581 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2582
2583 case BIT_FIELD_REF:
2584 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2585
2586 default:
2587 return 0;
2588 }
2589
2590 case tcc_expression:
2591 switch (TREE_CODE (arg0))
2592 {
2593 case ADDR_EXPR:
2594 case TRUTH_NOT_EXPR:
2595 return OP_SAME (0);
2596
2597 case TRUTH_ANDIF_EXPR:
2598 case TRUTH_ORIF_EXPR:
2599 return OP_SAME (0) && OP_SAME (1);
2600
2601 case TRUTH_AND_EXPR:
2602 case TRUTH_OR_EXPR:
2603 case TRUTH_XOR_EXPR:
2604 if (OP_SAME (0) && OP_SAME (1))
2605 return 1;
2606
2607 /* Otherwise take into account this is a commutative operation. */
2608 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2609 TREE_OPERAND (arg1, 1), flags)
2610 && operand_equal_p (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 0), flags));
2612
2613 case CALL_EXPR:
2614 /* If the CALL_EXPRs call different functions, then they
2615 clearly cannot be equal. */
2616 if (!OP_SAME (0))
2617 return 0;
2618
2619 {
2620 unsigned int cef = call_expr_flags (arg0);
2621 if (flags & OEP_PURE_SAME)
2622 cef &= ECF_CONST | ECF_PURE;
2623 else
2624 cef &= ECF_CONST;
2625 if (!cef)
2626 return 0;
2627 }
2628
2629 /* Now see if all the arguments are the same. operand_equal_p
2630 does not handle TREE_LIST, so we walk the operands here
2631 feeding them to operand_equal_p. */
2632 arg0 = TREE_OPERAND (arg0, 1);
2633 arg1 = TREE_OPERAND (arg1, 1);
2634 while (arg0 && arg1)
2635 {
2636 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2637 flags))
2638 return 0;
2639
2640 arg0 = TREE_CHAIN (arg0);
2641 arg1 = TREE_CHAIN (arg1);
2642 }
2643
2644 /* If we get here and both argument lists are exhausted
2645 then the CALL_EXPRs are equal. */
2646 return ! (arg0 || arg1);
2647
2648 default:
2649 return 0;
2650 }
2651
2652 case tcc_declaration:
2653 /* Consider __builtin_sqrt equal to sqrt. */
2654 return (TREE_CODE (arg0) == FUNCTION_DECL
2655 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2656 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2657 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2658
2659 default:
2660 return 0;
2661 }
2662
2663 #undef OP_SAME
2664 #undef OP_SAME_WITH_NULL
2665 }
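
/* Usage sketch (illustrative): given trees T1 for "a + b" and T2 for
   "b + a", operand_equal_p (t1, t2, 0) returns nonzero, because
   PLUS_EXPR is commutative and the cross-matched operands compare
   equal; for "a - b" versus "b - a" it returns zero, since MINUS_EXPR
   is not commutative.  */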
2666 \f
2667 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2668 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2669
2670 When in doubt, return 0. */
2671
2672 static int
2673 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2674 {
2675 int unsignedp1, unsignedpo;
2676 tree primarg0, primarg1, primother;
2677 unsigned int correct_width;
2678
2679 if (operand_equal_p (arg0, arg1, 0))
2680 return 1;
2681
2682 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2683 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2684 return 0;
2685
2686 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2687 and see if the inner values are the same. This removes any
2688 signedness comparison, which doesn't matter here. */
2689 primarg0 = arg0, primarg1 = arg1;
2690 STRIP_NOPS (primarg0);
2691 STRIP_NOPS (primarg1);
2692 if (operand_equal_p (primarg0, primarg1, 0))
2693 return 1;
2694
2695 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2696 actual comparison operand, ARG0.
2697
2698 First throw away any conversions to wider types
2699 already present in the operands. */
2700
2701 primarg1 = get_narrower (arg1, &unsignedp1);
2702 primother = get_narrower (other, &unsignedpo);
2703
2704 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2705 if (unsignedp1 == unsignedpo
2706 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2707 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2708 {
2709 tree type = TREE_TYPE (arg0);
2710
2711 /* Make sure the shorter operand is extended the right way
2712 to match the longer operand. */
2713 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2714 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2715
2716 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2717 return 1;
2718 }
2719
2720 return 0;
2721 }
2722 \f
2723 /* See if ARG is an expression that is either a comparison or is performing
2724 arithmetic on comparisons. The comparisons must only be comparing
2725 two different values, which will be stored in *CVAL1 and *CVAL2; if
2726 they are nonzero it means that some operands have already been found.
2727 No variables may be used anywhere else in the expression except in the
2728 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2729 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2730
2731 If this is true, return 1. Otherwise, return zero. */
2732
2733 static int
2734 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2735 {
2736 enum tree_code code = TREE_CODE (arg);
2737 enum tree_code_class class = TREE_CODE_CLASS (code);
2738
2739 /* We can handle some of the tcc_expression cases here. */
2740 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2741 class = tcc_unary;
2742 else if (class == tcc_expression
2743 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2744 || code == COMPOUND_EXPR))
2745 class = tcc_binary;
2746
2747 else if (class == tcc_expression && code == SAVE_EXPR
2748 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2749 {
2750 /* If we've already found a CVAL1 or CVAL2, this expression is
2751 too complex to handle. */
2752 if (*cval1 || *cval2)
2753 return 0;
2754
2755 class = tcc_unary;
2756 *save_p = 1;
2757 }
2758
2759 switch (class)
2760 {
2761 case tcc_unary:
2762 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2763
2764 case tcc_binary:
2765 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p));
2768
2769 case tcc_constant:
2770 return 1;
2771
2772 case tcc_expression:
2773 if (code == COND_EXPR)
2774 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2775 cval1, cval2, save_p)
2776 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2777 cval1, cval2, save_p)
2778 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2779 cval1, cval2, save_p));
2780 return 0;
2781
2782 case tcc_comparison:
2783 /* First see if we can handle the first operand, then the second. For
2784 the second operand, we know *CVAL1 can't be zero. It must be that
2785 one side of the comparison is each of the values; test for the
2786 case where this isn't true by failing if the two operands
2787 are the same. */
2788
2789 if (operand_equal_p (TREE_OPERAND (arg, 0),
2790 TREE_OPERAND (arg, 1), 0))
2791 return 0;
2792
2793 if (*cval1 == 0)
2794 *cval1 = TREE_OPERAND (arg, 0);
2795 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2796 ;
2797 else if (*cval2 == 0)
2798 *cval2 = TREE_OPERAND (arg, 0);
2799 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2800 ;
2801 else
2802 return 0;
2803
2804 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2805 ;
2806 else if (*cval2 == 0)
2807 *cval2 = TREE_OPERAND (arg, 1);
2808 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2809 ;
2810 else
2811 return 0;
2812
2813 return 1;
2814
2815 default:
2816 return 0;
2817 }
2818 }
2819 \f
2820 /* ARG is a tree that is known to contain just arithmetic operations and
2821 comparisons. Evaluate the operations in the tree substituting NEW0 for
2822 any occurrence of OLD0 as an operand of a comparison and likewise for
2823 NEW1 and OLD1. */
2824
2825 static tree
2826 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2827 {
2828 tree type = TREE_TYPE (arg);
2829 enum tree_code code = TREE_CODE (arg);
2830 enum tree_code_class class = TREE_CODE_CLASS (code);
2831
2832 /* We can handle some of the tcc_expression cases here. */
2833 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2834 class = tcc_unary;
2835 else if (class == tcc_expression
2836 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2837 class = tcc_binary;
2838
2839 switch (class)
2840 {
2841 case tcc_unary:
2842 return fold_build1 (code, type,
2843 eval_subst (TREE_OPERAND (arg, 0),
2844 old0, new0, old1, new1));
2845
2846 case tcc_binary:
2847 return fold_build2 (code, type,
2848 eval_subst (TREE_OPERAND (arg, 0),
2849 old0, new0, old1, new1),
2850 eval_subst (TREE_OPERAND (arg, 1),
2851 old0, new0, old1, new1));
2852
2853 case tcc_expression:
2854 switch (code)
2855 {
2856 case SAVE_EXPR:
2857 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2858
2859 case COMPOUND_EXPR:
2860 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2861
2862 case COND_EXPR:
2863 return fold_build3 (code, type,
2864 eval_subst (TREE_OPERAND (arg, 0),
2865 old0, new0, old1, new1),
2866 eval_subst (TREE_OPERAND (arg, 1),
2867 old0, new0, old1, new1),
2868 eval_subst (TREE_OPERAND (arg, 2),
2869 old0, new0, old1, new1));
2870 default:
2871 break;
2872 }
2873 /* Fall through - ??? */
2874
2875 case tcc_comparison:
2876 {
2877 tree arg0 = TREE_OPERAND (arg, 0);
2878 tree arg1 = TREE_OPERAND (arg, 1);
2879
2880 /* We need to check both for exact equality and tree equality. The
2881 former will be true if the operand has a side-effect. In that
2882 case, we know the operand occurred exactly once. */
2883
2884 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2885 arg0 = new0;
2886 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2887 arg0 = new1;
2888
2889 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2890 arg1 = new0;
2891 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2892 arg1 = new1;
2893
2894 return fold_build2 (code, type, arg0, arg1);
2895 }
2896
2897 default:
2898 return arg;
2899 }
2900 }
2901 \f
2902 /* Return a tree for the case when the result of an expression is RESULT
2903 converted to TYPE and OMITTED was previously an operand of the expression
2904 but is now not needed (e.g., we folded OMITTED * 0).
2905
2906 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2907 the conversion of RESULT to TYPE. */
2908
2909 tree
2910 omit_one_operand (tree type, tree result, tree omitted)
2911 {
2912 tree t = fold_convert (type, result);
2913
2914 if (TREE_SIDE_EFFECTS (omitted))
2915 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2916
2917 return non_lvalue (t);
2918 }
2919
2920 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2921
2922 static tree
2923 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2924 {
2925 tree t = fold_convert (type, result);
2926
2927 if (TREE_SIDE_EFFECTS (omitted))
2928 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2929
2930 return pedantic_non_lvalue (t);
2931 }
2932
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2935 of the expression but are now not needed.
2936
2937 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2938 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2939 evaluated before OMITTED2. Otherwise, if neither has side effects,
2940 just do the conversion of RESULT to TYPE. */
2941
2942 tree
2943 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2944 {
2945 tree t = fold_convert (type, result);
2946
2947 if (TREE_SIDE_EFFECTS (omitted2))
2948 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2949 if (TREE_SIDE_EFFECTS (omitted1))
2950 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2951
2952 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2953 }
2954
2955 \f
2956 /* Return a simplified tree node for the truth-negation of ARG. This
2957 never alters ARG itself. We assume that ARG is an operation that
2958 returns a truth value (0 or 1).
2959
2960 FIXME: one would think we would fold the result, but it causes
2961 problems with the dominator optimizer. */
2962 tree
2963 invert_truthvalue (tree arg)
2964 {
2965 tree type = TREE_TYPE (arg);
2966 enum tree_code code = TREE_CODE (arg);
2967
2968 if (code == ERROR_MARK)
2969 return arg;
2970
2971 /* If this is a comparison, we can simply invert it, except for
2972 floating-point non-equality comparisons, in which case we just
2973 enclose a TRUTH_NOT_EXPR around what we have. */
2974
2975 if (TREE_CODE_CLASS (code) == tcc_comparison)
2976 {
2977 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2978 if (FLOAT_TYPE_P (op_type)
2979 && flag_trapping_math
2980 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2981 && code != NE_EXPR && code != EQ_EXPR)
2982 return build1 (TRUTH_NOT_EXPR, type, arg);
2983 else
2984 {
2985 code = invert_tree_comparison (code,
2986 HONOR_NANS (TYPE_MODE (op_type)));
2987 if (code == ERROR_MARK)
2988 return build1 (TRUTH_NOT_EXPR, type, arg);
2989 else
2990 return build2 (code, type,
2991 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2992 }
2993 }
2994
2995 switch (code)
2996 {
2997 case INTEGER_CST:
2998 return constant_boolean_node (integer_zerop (arg), type);
2999
3000 case TRUTH_AND_EXPR:
3001 return build2 (TRUTH_OR_EXPR, type,
3002 invert_truthvalue (TREE_OPERAND (arg, 0)),
3003 invert_truthvalue (TREE_OPERAND (arg, 1)));
3004
3005 case TRUTH_OR_EXPR:
3006 return build2 (TRUTH_AND_EXPR, type,
3007 invert_truthvalue (TREE_OPERAND (arg, 0)),
3008 invert_truthvalue (TREE_OPERAND (arg, 1)));
3009
3010 case TRUTH_XOR_EXPR:
3011 /* Here we can invert either operand. We invert the first operand
3012 unless the second operand is a TRUTH_NOT_EXPR in which case our
3013 result is the XOR of the first operand with the inside of the
3014 negation of the second operand. */
3015
3016 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3017 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3018 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3019 else
3020 return build2 (TRUTH_XOR_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 TREE_OPERAND (arg, 1));
3023
3024 case TRUTH_ANDIF_EXPR:
3025 return build2 (TRUTH_ORIF_EXPR, type,
3026 invert_truthvalue (TREE_OPERAND (arg, 0)),
3027 invert_truthvalue (TREE_OPERAND (arg, 1)));
3028
3029 case TRUTH_ORIF_EXPR:
3030 return build2 (TRUTH_ANDIF_EXPR, type,
3031 invert_truthvalue (TREE_OPERAND (arg, 0)),
3032 invert_truthvalue (TREE_OPERAND (arg, 1)));
3033
3034 case TRUTH_NOT_EXPR:
3035 return TREE_OPERAND (arg, 0);
3036
3037 case COND_EXPR:
3038 {
3039 tree arg1 = TREE_OPERAND (arg, 1);
3040 tree arg2 = TREE_OPERAND (arg, 2);
3041 /* A COND_EXPR may have a throw as one operand, which
3042 then has void type. Just leave void operands
3043 as they are. */
3044 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3045 VOID_TYPE_P (TREE_TYPE (arg1))
3046 ? arg1 : invert_truthvalue (arg1),
3047 VOID_TYPE_P (TREE_TYPE (arg2))
3048 ? arg2 : invert_truthvalue (arg2));
3049 }
3050
3051 case COMPOUND_EXPR:
3052 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3053 invert_truthvalue (TREE_OPERAND (arg, 1)));
3054
3055 case NON_LVALUE_EXPR:
3056 return invert_truthvalue (TREE_OPERAND (arg, 0));
3057
3058 case NOP_EXPR:
3059 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3060 break;
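      /* For a non-boolean type, fall through and handle the NOP like
         CONVERT_EXPR below.  */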
3061
3062 case CONVERT_EXPR:
3063 case FLOAT_EXPR:
3064 return build1 (TREE_CODE (arg), type,
3065 invert_truthvalue (TREE_OPERAND (arg, 0)));
3066
3067 case BIT_AND_EXPR:
3068 if (!integer_onep (TREE_OPERAND (arg, 1)))
3069 break;
3070 return build2 (EQ_EXPR, type, arg,
3071 fold_convert (type, integer_zero_node));
3072
3073 case SAVE_EXPR:
3074 return build1 (TRUTH_NOT_EXPR, type, arg);
3075
3076 case CLEANUP_POINT_EXPR:
3077 return build1 (CLEANUP_POINT_EXPR, type,
3078 invert_truthvalue (TREE_OPERAND (arg, 0)));
3079
3080 default:
3081 break;
3082 }
3083 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3084 return build1 (TRUTH_NOT_EXPR, type, arg);
3085 }
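
/* Illustrative instances of the rules above: inverting "a && b"
   yields "!a || !b" (De Morgan), inverting "p ? q : r" yields
   "p ? !q : !r" with void-typed arms (e.g. a throw) left untouched,
   and inverting "x & 1" yields "(x & 1) == 0".  */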
3086
3087 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3088 operands are another bit-wise operation with a common input. If so,
3089 distribute the bit operations to save an operation and possibly two if
3090 constants are involved. For example, convert
3091 (A | B) & (A | C) into A | (B & C)
3092 Further simplification will occur if B and C are constants.
3093
3094 If this optimization cannot be done, 0 will be returned. */
3095
3096 static tree
3097 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3098 {
3099 tree common;
3100 tree left, right;
3101
3102 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3103 || TREE_CODE (arg0) == code
3104 || (TREE_CODE (arg0) != BIT_AND_EXPR
3105 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3106 return 0;
3107
3108 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3109 {
3110 common = TREE_OPERAND (arg0, 0);
3111 left = TREE_OPERAND (arg0, 1);
3112 right = TREE_OPERAND (arg1, 1);
3113 }
3114 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3115 {
3116 common = TREE_OPERAND (arg0, 0);
3117 left = TREE_OPERAND (arg0, 1);
3118 right = TREE_OPERAND (arg1, 0);
3119 }
3120 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3121 {
3122 common = TREE_OPERAND (arg0, 1);
3123 left = TREE_OPERAND (arg0, 0);
3124 right = TREE_OPERAND (arg1, 1);
3125 }
3126 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3127 {
3128 common = TREE_OPERAND (arg0, 1);
3129 left = TREE_OPERAND (arg0, 0);
3130 right = TREE_OPERAND (arg1, 0);
3131 }
3132 else
3133 return 0;
3134
3135 return fold_build2 (TREE_CODE (arg0), type, common,
3136 fold_build2 (code, type, left, right));
3137 }
3138
3139 /* Knowing that ARG0 and ARG1 are each a MULT_EXPR or an RDIV_EXPR, simplify
3140 a binary operation with code CODE. This optimization is unsafe. */
3141 static tree
3142 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3143 {
3144 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3145 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3146
3147 /* (A / C) +- (B / C) -> (A +- B) / C. */
3148 if (mul0 == mul1
3149 && operand_equal_p (TREE_OPERAND (arg0, 1),
3150 TREE_OPERAND (arg1, 1), 0))
3151 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3152 fold_build2 (code, type,
3153 TREE_OPERAND (arg0, 0),
3154 TREE_OPERAND (arg1, 0)),
3155 TREE_OPERAND (arg0, 1));
3156
3157 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3158 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3159 TREE_OPERAND (arg1, 0), 0)
3160 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3161 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3162 {
3163 REAL_VALUE_TYPE r0, r1;
3164 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3165 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3166 if (!mul0)
3167 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3168 if (!mul1)
3169 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3170 real_arithmetic (&r0, code, &r0, &r1);
3171 return fold_build2 (MULT_EXPR, type,
3172 TREE_OPERAND (arg0, 0),
3173 build_real (type, r0));
3174 }
3175
3176 return NULL_TREE;
3177 }
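
/* Why this transformation is flagged unsafe (illustrative): with IEEE
   arithmetic each division rounds separately, so the two forms need
   not be bit-identical.  E.g.

     double a = 1.0, b = 1e-16, c = 3.0;
     double x = a / c + b / c;
     double y = (a + b) / c;

   x performs two roundings and then an add, y one add and one
   rounding, so x and y may differ in the last ulp.  */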
3178 \f
3179 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3180 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3181
3182 static tree
3183 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3184 int unsignedp)
3185 {
3186 tree result;
3187
3188 if (bitpos == 0)
3189 {
3190 tree size = TYPE_SIZE (TREE_TYPE (inner));
3191 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3192 || POINTER_TYPE_P (TREE_TYPE (inner)))
3193 && host_integerp (size, 0)
3194 && tree_low_cst (size, 0) == bitsize)
3195 return fold_convert (type, inner);
3196 }
3197
3198 result = build3 (BIT_FIELD_REF, type, inner,
3199 size_int (bitsize), bitsize_int (bitpos));
3200
3201 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3202
3203 return result;
3204 }
3205
3206 /* Optimize a bit-field compare.
3207
3208 There are two cases: the first is a compare against a constant and the
3209 second is a comparison of two items where the fields are at the same
3210 bit position relative to the start of a chunk (byte, halfword, word)
3211 large enough to contain it. In these cases we can avoid the shift
3212 implicit in bitfield extractions.
3213
3214 For constants, we emit a compare of the shifted constant with the
3215 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3216 compared. For two fields at the same position, we do the ANDs with the
3217 similar mask and compare the result of the ANDs.
3218
3219 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3220 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3221 are the left and right operands of the comparison, respectively.
3222
3223 If the optimization described above can be done, we return the resulting
3224 tree. Otherwise we return zero. */
3225
3226 static tree
3227 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3228 tree lhs, tree rhs)
3229 {
3230 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3231 tree type = TREE_TYPE (lhs);
3232 tree signed_type, unsigned_type;
3233 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3234 enum machine_mode lmode, rmode, nmode;
3235 int lunsignedp, runsignedp;
3236 int lvolatilep = 0, rvolatilep = 0;
3237 tree linner, rinner = NULL_TREE;
3238 tree mask;
3239 tree offset;
3240
3241 /* Get all the information about the extractions being done. If the bit size
3242 is the same as the size of the underlying object, we aren't doing an
3243 extraction at all and so can do nothing. We also don't want to
3244 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3245 then will no longer be able to replace it. */
3246 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3247 &lunsignedp, &lvolatilep, false);
3248 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3249 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3250 return 0;
3251
3252 if (!const_p)
3253 {
3254 /* If this is not a constant, we can only do something if bit positions,
3255 sizes, and signedness are the same. */
3256 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3257 &runsignedp, &rvolatilep, false);
3258
3259 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3260 || lunsignedp != runsignedp || offset != 0
3261 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3262 return 0;
3263 }
3264
3265 /* See if we can find a mode to refer to this field. We should be able to,
3266 but fail if we can't. */
3267 nmode = get_best_mode (lbitsize, lbitpos,
3268 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3269 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3270 TYPE_ALIGN (TREE_TYPE (rinner))),
3271 word_mode, lvolatilep || rvolatilep);
3272 if (nmode == VOIDmode)
3273 return 0;
3274
3275 /* Set signed and unsigned types of the precision of this mode for the
3276 shifts below. */
3277 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3278 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3279
3280 /* Compute the bit position and size for the new reference and our offset
3281 within it. If the new reference is the same size as the original, we
3282 won't optimize anything, so return zero. */
3283 nbitsize = GET_MODE_BITSIZE (nmode);
3284 nbitpos = lbitpos & ~ (nbitsize - 1);
3285 lbitpos -= nbitpos;
3286 if (nbitsize == lbitsize)
3287 return 0;
3288
3289 if (BYTES_BIG_ENDIAN)
3290 lbitpos = nbitsize - lbitsize - lbitpos;
3291
3292 /* Make the mask to be used against the extracted field. */
3293 mask = build_int_cst (unsigned_type, -1);
3294 mask = force_fit_type (mask, 0, false, false);
3295 mask = fold_convert (unsigned_type, mask);
3296 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3297 mask = const_binop (RSHIFT_EXPR, mask,
3298 size_int (nbitsize - lbitsize - lbitpos), 0);
3299
3300 if (! const_p)
3301 /* If not comparing with constant, just rework the comparison
3302 and return. */
3303 return build2 (code, compare_type,
3304 build2 (BIT_AND_EXPR, unsigned_type,
3305 make_bit_field_ref (linner, unsigned_type,
3306 nbitsize, nbitpos, 1),
3307 mask),
3308 build2 (BIT_AND_EXPR, unsigned_type,
3309 make_bit_field_ref (rinner, unsigned_type,
3310 nbitsize, nbitpos, 1),
3311 mask));
3312
3313 /* Otherwise, we are handling the constant case. See if the constant is too
3314 big for the field. Warn and return a tree for 0 (false) if so. We do
3315 this not only for its own sake, but to avoid having to test for this
3316 error case below. If we didn't, we might generate wrong code.
3317
3318 For unsigned fields, the constant shifted right by the field length should
3319 be all zero. For signed fields, the high-order bits should agree with
3320 the sign bit. */
3321
3322 if (lunsignedp)
3323 {
3324 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3325 fold_convert (unsigned_type, rhs),
3326 size_int (lbitsize), 0)))
3327 {
3328 warning (0, "comparison is always %d due to width of bit-field",
3329 code == NE_EXPR);
3330 return constant_boolean_node (code == NE_EXPR, compare_type);
3331 }
3332 }
3333 else
3334 {
3335 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3336 size_int (lbitsize - 1), 0);
3337 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3338 {
3339 warning (0, "comparison is always %d due to width of bit-field",
3340 code == NE_EXPR);
3341 return constant_boolean_node (code == NE_EXPR, compare_type);
3342 }
3343 }
3344
3345 /* Single-bit compares should always be against zero. */
3346 if (lbitsize == 1 && ! integer_zerop (rhs))
3347 {
3348 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3349 rhs = fold_convert (type, integer_zero_node);
3350 }
3351
3352 /* Make a new bitfield reference, shift the constant over the
3353 appropriate number of bits and mask it with the computed mask
3354 (in case this was a signed field). If we changed it, make a new one. */
3355 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3356 if (lvolatilep)
3357 {
3358 TREE_SIDE_EFFECTS (lhs) = 1;
3359 TREE_THIS_VOLATILE (lhs) = 1;
3360 }
3361
3362 rhs = const_binop (BIT_AND_EXPR,
3363 const_binop (LSHIFT_EXPR,
3364 fold_convert (unsigned_type, rhs),
3365 size_int (lbitpos), 0),
3366 mask, 0);
3367
3368 return build2 (code, compare_type,
3369 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3370 rhs);
3371 }
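
/* A worked example (illustrative; WORD, MASK and BITPOS are informal
   names invented for this example):

     struct s { unsigned int f : 3; } x;

   Here the constant case rewrites "x.f == 5" along the lines of

     (WORD & MASK) == ((5 << BITPOS) & MASK)

   where WORD is the mode-sized chunk containing the field, MASK has
   ones in the three field bits, and BITPOS is the field's
   (endian-adjusted) bit position, avoiding the shift that a plain
   bitfield extraction would need.  */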
3372 \f
3373 /* Subroutine for fold_truthop: decode a field reference.
3374
3375 If EXP is a comparison reference, we return the innermost reference.
3376
3377 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3378 set to the starting bit number.
3379
3380 If the innermost field can be completely contained in a mode-sized
3381 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3382
3383 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3384 otherwise it is not changed.
3385
3386 *PUNSIGNEDP is set to the signedness of the field.
3387
3388 *PMASK is set to the mask used. This is either contained in a
3389 BIT_AND_EXPR or derived from the width of the field.
3390
3391 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3392
3393 Return 0 if this is not a component reference or is one that we can't
3394 do anything with. */
3395
3396 static tree
3397 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3398 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3399 int *punsignedp, int *pvolatilep,
3400 tree *pmask, tree *pand_mask)
3401 {
3402 tree outer_type = 0;
3403 tree and_mask = 0;
3404 tree mask, inner, offset;
3405 tree unsigned_type;
3406 unsigned int precision;
3407
3408 /* All the optimizations using this function assume integer fields.
3409 There are problems with FP fields since the type_for_size call
3410 below can fail for, e.g., XFmode. */
3411 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3412 return 0;
3413
3414 /* We are interested in the bare arrangement of bits, so strip everything
3415 that doesn't affect the machine mode. However, record the type of the
3416 outermost expression if it may matter below. */
3417 if (TREE_CODE (exp) == NOP_EXPR
3418 || TREE_CODE (exp) == CONVERT_EXPR
3419 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3420 outer_type = TREE_TYPE (exp);
3421 STRIP_NOPS (exp);
3422
3423 if (TREE_CODE (exp) == BIT_AND_EXPR)
3424 {
3425 and_mask = TREE_OPERAND (exp, 1);
3426 exp = TREE_OPERAND (exp, 0);
3427 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3428 if (TREE_CODE (and_mask) != INTEGER_CST)
3429 return 0;
3430 }
3431
3432 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3433 punsignedp, pvolatilep, false);
3434 if ((inner == exp && and_mask == 0)
3435 || *pbitsize < 0 || offset != 0
3436 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3437 return 0;
3438
3439 /* If the number of bits in the reference is the same as the bitsize of
3440 the outer type, then the outer type gives the signedness. Otherwise
3441 (in case of a small bitfield) the signedness is unchanged. */
3442 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3443 *punsignedp = TYPE_UNSIGNED (outer_type);
3444
3445 /* Compute the mask to access the bitfield. */
3446 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3447 precision = TYPE_PRECISION (unsigned_type);
3448
3449 mask = build_int_cst (unsigned_type, -1);
3450 mask = force_fit_type (mask, 0, false, false);
3451
3452 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3453 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3454
3455 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3456 if (and_mask != 0)
3457 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3458 fold_convert (unsigned_type, and_mask), mask);
3459
3460 *pmask = mask;
3461 *pand_mask = and_mask;
3462 return inner;
3463 }
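
/* The mask construction above, traced for a 5-bit field whose
   smallest unsigned type has 8 bits (illustrative): starting from all
   ones 0xff, LSHIFT_EXPR by (8 - 5) gives 0xf8, and RSHIFT_EXPR by
   the same amount gives 0x1f, i.e. exactly *PBITSIZE low-order ones;
   the right shift is logical because the type is unsigned.  */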
3464
3465 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3466 bit positions. */
3467
3468 static int
3469 all_ones_mask_p (tree mask, int size)
3470 {
3471 tree type = TREE_TYPE (mask);
3472 unsigned int precision = TYPE_PRECISION (type);
3473 tree tmask;
3474
3475 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3476 tmask = force_fit_type (tmask, 0, false, false);
3477
3478 return
3479 tree_int_cst_equal (mask,
3480 const_binop (RSHIFT_EXPR,
3481 const_binop (LSHIFT_EXPR, tmask,
3482 size_int (precision - size),
3483 0),
3484 size_int (precision - size), 0));
3485 }
3486
3487 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3488 represents the sign bit of EXP's type. If EXP represents a sign
3489 or zero extension, also test VAL against the unextended type.
3490 The return value is the (sub)expression whose sign bit is VAL,
3491 or NULL_TREE otherwise. */
3492
3493 static tree
3494 sign_bit_p (tree exp, tree val)
3495 {
3496 unsigned HOST_WIDE_INT mask_lo, lo;
3497 HOST_WIDE_INT mask_hi, hi;
3498 int width;
3499 tree t;
3500
3501 /* Tree EXP must have an integral type. */
3502 t = TREE_TYPE (exp);
3503 if (! INTEGRAL_TYPE_P (t))
3504 return NULL_TREE;
3505
3506 /* Tree VAL must be an integer constant. */
3507 if (TREE_CODE (val) != INTEGER_CST
3508 || TREE_CONSTANT_OVERFLOW (val))
3509 return NULL_TREE;
3510
3511 width = TYPE_PRECISION (t);
3512 if (width > HOST_BITS_PER_WIDE_INT)
3513 {
3514 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3515 lo = 0;
3516
3517 mask_hi = ((unsigned HOST_WIDE_INT) -1
3518 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3519 mask_lo = -1;
3520 }
3521 else
3522 {
3523 hi = 0;
3524 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3525
3526 mask_hi = 0;
3527 mask_lo = ((unsigned HOST_WIDE_INT) -1
3528 >> (HOST_BITS_PER_WIDE_INT - width));
3529 }
3530
3531 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3532 treat VAL as if it were unsigned. */
3533 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3534 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3535 return exp;
3536
3537 /* Handle extension from a narrower type. */
3538 if (TREE_CODE (exp) == NOP_EXPR
3539 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3540 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3541
3542 return NULL_TREE;
3543 }
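
/* Example (illustrative): for a 32-bit type on a host where
   HOST_BITS_PER_WIDE_INT is 64, the else branch above computes
   lo = 1 << 31 = 0x80000000 and mask_lo = 0xffffffff, so a VAL whose
   masked low word equals 0x80000000 is recognized as the sign bit of
   EXP's type.  */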
3544
3545 /* Subroutine for fold_truthop: determine if an operand is simple enough
3546 to be evaluated unconditionally. */
3547
3548 static int
3549 simple_operand_p (tree exp)
3550 {
3551 /* Strip any conversions that don't change the machine mode. */
3552 STRIP_NOPS (exp);
3553
3554 return (CONSTANT_CLASS_P (exp)
3555 || TREE_CODE (exp) == SSA_NAME
3556 || (DECL_P (exp)
3557 && ! TREE_ADDRESSABLE (exp)
3558 && ! TREE_THIS_VOLATILE (exp)
3559 && ! DECL_NONLOCAL (exp)
3560 /* Don't regard global variables as simple. They may be
3561 allocated in ways unknown to the compiler (shared memory,
3562 #pragma weak, etc). */
3563 && ! TREE_PUBLIC (exp)
3564 && ! DECL_EXTERNAL (exp)
3565 /* Loading a static variable is unduly expensive, but global
3566 registers aren't expensive. */
3567 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3568 }
3569 \f
3570 /* The following functions are subroutines to fold_range_test and allow it to
3571 try to change a logical combination of comparisons into a range test.
3572
3573 For example, both
3574 X == 2 || X == 3 || X == 4 || X == 5
3575 and
3576 X >= 2 && X <= 5
3577 are converted to
3578 (unsigned) (X - 2) <= 3
3579
3580 We describe each set of comparisons as being either inside or outside
3581 a range, using a variable named like IN_P, and then describe the
3582 range with a lower and upper bound. If one of the bounds is omitted,
3583 it represents either the highest or lowest value of the type.
3584
3585 In the comments below, we represent a range by two numbers in brackets
3586 preceded by a "+" to designate being inside that range, or a "-" to
3587 designate being outside that range, so the condition can be inverted by
3588 flipping the prefix. An omitted bound is represented by a "-". For
3589 example, "- [-, 10]" means being outside the range starting at the lowest
3590 possible value and ending at 10, in other words, being greater than 10.
3591 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3592 always false.
3593
3594 We set up things so that the missing bounds are handled in a consistent
3595 manner so neither a missing bound nor "true" and "false" need to be
3596 handled using a special case. */
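
/* The single-compare trick, traced (illustrative, for a 32-bit X):
   for "X >= 2 && X <= 5", subtracting the low bound gives "X - 2" in
   [0, 3]; in unsigned arithmetic any X below 2 wraps to a huge value,
   so one comparison covers both bounds:

     X = 4  ->  (unsigned) (4 - 2) == 2           <= 3, so true
     X = 1  ->  (unsigned) (1 - 2) == 0xffffffff  <= 3 fails, so false  */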
3597
3598 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3599 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3600 and UPPER1_P are nonzero if the respective argument is an upper bound
3601 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3602 must be specified for a comparison. ARG1 will be converted to ARG0's
3603 type if both are specified. */
3604
3605 static tree
3606 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3607 tree arg1, int upper1_p)
3608 {
3609 tree tem;
3610 int result;
3611 int sgn0, sgn1;
3612
3613 /* If neither arg represents infinity, do the normal operation.
3614 Else, if not a comparison, return infinity. Else handle the special
3615 comparison rules. Note that most of the cases below won't occur, but
3616 are handled for consistency. */
3617
3618 if (arg0 != 0 && arg1 != 0)
3619 {
3620 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3621 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3622 STRIP_NOPS (tem);
3623 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3624 }
3625
3626 if (TREE_CODE_CLASS (code) != tcc_comparison)
3627 return 0;
3628
3629 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for an upper bound,
3630 and 0 for neither. In real mathematics we could not assume that
3631 open-ended ranges compare equal. But this is computer arithmetic,
3632 where numbers are finite, so we can substitute for any unbounded end
3633 a value Z greater than every representable number. This permits
3634 us to treat unbounded ranges as equal. */
3635 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3636 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3637 switch (code)
3638 {
3639 case EQ_EXPR:
3640 result = sgn0 == sgn1;
3641 break;
3642 case NE_EXPR:
3643 result = sgn0 != sgn1;
3644 break;
3645 case LT_EXPR:
3646 result = sgn0 < sgn1;
3647 break;
3648 case LE_EXPR:
3649 result = sgn0 <= sgn1;
3650 break;
3651 case GT_EXPR:
3652 result = sgn0 > sgn1;
3653 break;
3654 case GE_EXPR:
3655 result = sgn0 >= sgn1;
3656 break;
3657 default:
3658 gcc_unreachable ();
3659 }
3660
3661 return constant_boolean_node (result, type);
3662 }
3663 \f
3664 /* Given EXP, a logical expression, set the range it is testing into
3665 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3666 actually being tested. *PLOW and *PHIGH will be made of the same type
3667 as the returned expression. If EXP is not a comparison, we will most
3668 likely not be returning a useful value and range. */
3669
3670 static tree
3671 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3672 {
3673 enum tree_code code;
3674 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3675 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3676 int in_p, n_in_p;
3677 tree low, high, n_low, n_high;
3678
3679 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3680 and see if we can refine the range. Some of the cases below may not
3681 happen, but it doesn't seem worth worrying about this. We "continue"
3682 the outer loop when we've changed something; otherwise we "break"
3683 the switch, which will "break" the while. */
3684
3685 in_p = 0;
3686 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3687
3688 while (1)
3689 {
3690 code = TREE_CODE (exp);
3691 exp_type = TREE_TYPE (exp);
3692
3693 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3694 {
3695 if (TREE_CODE_LENGTH (code) > 0)
3696 arg0 = TREE_OPERAND (exp, 0);
3697 if (TREE_CODE_CLASS (code) == tcc_comparison
3698 || TREE_CODE_CLASS (code) == tcc_unary
3699 || TREE_CODE_CLASS (code) == tcc_binary)
3700 arg0_type = TREE_TYPE (arg0);
3701 if (TREE_CODE_CLASS (code) == tcc_binary
3702 || TREE_CODE_CLASS (code) == tcc_comparison
3703 || (TREE_CODE_CLASS (code) == tcc_expression
3704 && TREE_CODE_LENGTH (code) > 1))
3705 arg1 = TREE_OPERAND (exp, 1);
3706 }
3707
3708 switch (code)
3709 {
3710 case TRUTH_NOT_EXPR:
3711 in_p = ! in_p, exp = arg0;
3712 continue;
3713
3714 case EQ_EXPR: case NE_EXPR:
3715 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3716 /* We can only do something if the range is testing for zero
3717 and if the second operand is an integer constant. Note that
3718 saying something is "in" the range we make is done by
3719 complementing IN_P, since IN_P was set for the initial case
3720 of being not equal to zero; "out" means leaving it alone. */
3721 if (low == 0 || high == 0
3722 || ! integer_zerop (low) || ! integer_zerop (high)
3723 || TREE_CODE (arg1) != INTEGER_CST)
3724 break;
3725
3726 switch (code)
3727 {
3728 case NE_EXPR: /* - [c, c] */
3729 low = high = arg1;
3730 break;
3731 case EQ_EXPR: /* + [c, c] */
3732 in_p = ! in_p, low = high = arg1;
3733 break;
3734 case GT_EXPR: /* - [-, c] */
3735 low = 0, high = arg1;
3736 break;
3737 case GE_EXPR: /* + [c, -] */
3738 in_p = ! in_p, low = arg1, high = 0;
3739 break;
3740 case LT_EXPR: /* - [c, -] */
3741 low = arg1, high = 0;
3742 break;
3743 case LE_EXPR: /* + [-, c] */
3744 in_p = ! in_p, low = 0, high = arg1;
3745 break;
3746 default:
3747 gcc_unreachable ();
3748 }
3749
3750 /* If this is an unsigned comparison, we also know that EXP is
3751 greater than or equal to zero. We base the range tests we make
3752 on that fact, so we record it here so we can parse existing
3753 range tests. We test arg0_type since often the return type
3754 of, e.g. EQ_EXPR, is boolean. */
3755 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3756 {
3757 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3758 in_p, low, high, 1,
3759 fold_convert (arg0_type, integer_zero_node),
3760 NULL_TREE))
3761 break;
3762
3763 in_p = n_in_p, low = n_low, high = n_high;
3764
3765 /* If the high bound is missing, but we have a nonzero low
3766 bound, reverse the range so it goes from zero to the low bound
3767 minus 1. */
3768 if (high == 0 && low && ! integer_zerop (low))
3769 {
3770 in_p = ! in_p;
3771 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3772 integer_one_node, 0);
3773 low = fold_convert (arg0_type, integer_zero_node);
3774 }
3775 }
3776
3777 exp = arg0;
3778 continue;
3779
3780 case NEGATE_EXPR:
3781 /* (-x) IN [a,b] -> x in [-b, -a] */
3782 n_low = range_binop (MINUS_EXPR, exp_type,
3783 fold_convert (exp_type, integer_zero_node),
3784 0, high, 1);
3785 n_high = range_binop (MINUS_EXPR, exp_type,
3786 fold_convert (exp_type, integer_zero_node),
3787 0, low, 0);
3788 low = n_low, high = n_high;
3789 exp = arg0;
3790 continue;
3791
3792 case BIT_NOT_EXPR:
3793 /* ~ X -> -X - 1 */
3794 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3795 fold_convert (exp_type, integer_one_node));
3796 continue;
3797
3798 case PLUS_EXPR: case MINUS_EXPR:
3799 if (TREE_CODE (arg1) != INTEGER_CST)
3800 break;
3801
3802 /* If EXP is signed, any overflow in the computation is undefined,
3803 so we don't worry about it so long as our computations on
3804 the bounds don't overflow. For unsigned, overflow is defined
3805 and this is exactly the right thing. */
3806 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3807 arg0_type, low, 0, arg1, 0);
3808 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3809 arg0_type, high, 1, arg1, 0);
3810 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3811 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3812 break;
3813
3814 /* Check for an unsigned range which has wrapped around the maximum
3815 value thus making n_high < n_low, and normalize it. */
3816 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3817 {
3818 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3819 integer_one_node, 0);
3820 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3821 integer_one_node, 0);
3822
3823 /* If the range is of the form +/- [ x+1, x ], we won't
3824 be able to normalize it. But then, it represents the
3825 whole range or the empty set, so make it
3826 +/- [ -, - ]. */
3827 if (tree_int_cst_equal (n_low, low)
3828 && tree_int_cst_equal (n_high, high))
3829 low = high = 0;
3830 else
3831 in_p = ! in_p;
3832 }
3833 else
3834 low = n_low, high = n_high;
3835
3836 exp = arg0;
3837 continue;
3838
3839 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3840 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3841 break;
3842
3843 if (! INTEGRAL_TYPE_P (arg0_type)
3844 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3845 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3846 break;
3847
3848 n_low = low, n_high = high;
3849
3850 if (n_low != 0)
3851 n_low = fold_convert (arg0_type, n_low);
3852
3853 if (n_high != 0)
3854 n_high = fold_convert (arg0_type, n_high);
3855
3856
3857 /* If we're converting arg0 from an unsigned type to exp's
3858 signed type, we will be doing the comparison as unsigned.
3859 The tests above have already verified that LOW and HIGH
3860 are both positive.
3861
3862 So we have to ensure that we will handle large unsigned
3863 values the same way that the current signed bounds treat
3864 negative values. */
3865
3866 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3867 {
3868 tree high_positive;
3869 tree equiv_type = lang_hooks.types.type_for_mode
3870 (TYPE_MODE (arg0_type), 1);
3871
3872 /* A range without an upper bound is, naturally, unbounded.
3873 Since convert would have cropped a very large value, use
3874 the max value for the destination type. */
3875 high_positive
3876 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3877 : TYPE_MAX_VALUE (arg0_type);
3878
3879 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3880 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3881 fold_convert (arg0_type,
3882 high_positive),
3883 fold_convert (arg0_type,
3884 integer_one_node));
3885
3886 /* If the low bound is specified, "and" the range with the
3887 range for which the original unsigned value will be
3888 positive. */
3889 if (low != 0)
3890 {
3891 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3892 1, n_low, n_high, 1,
3893 fold_convert (arg0_type,
3894 integer_zero_node),
3895 high_positive))
3896 break;
3897
3898 in_p = (n_in_p == in_p);
3899 }
3900 else
3901 {
3902 /* Otherwise, "or" the range with the range of the input
3903 that will be interpreted as negative. */
3904 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3905 0, n_low, n_high, 1,
3906 fold_convert (arg0_type,
3907 integer_zero_node),
3908 high_positive))
3909 break;
3910
3911 in_p = (in_p != n_in_p);
3912 }
3913 }
3914
3915 exp = arg0;
3916 low = n_low, high = n_high;
3917 continue;
3918
3919 default:
3920 break;
3921 }
3922
3923 break;
3924 }
3925
3926 /* If EXP is a constant, we can evaluate whether this is true or false. */
3927 if (TREE_CODE (exp) == INTEGER_CST)
3928 {
3929 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3930 exp, 0, low, 0))
3931 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3932 exp, 1, high, 1)));
3933 low = high = 0;
3934 exp = 0;
3935 }
3936
3937 *pin_p = in_p, *plow = low, *phigh = high;
3938 return exp;
3939 }
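
/* An illustrative sketch, not part of GCC: spot-checks, in ordinary C,
   of two identities make_range relies on above.  The helper name is
   hypothetical; both identities assume two's complement and X away
   from INT_MIN.  */

static int
make_range_identities_hold (int x)
{
  /* BIT_NOT_EXPR case: ~X is rewritten as -X - 1.  */
  int bitnot_ok = ~x == -x - 1;

  /* NEGATE_EXPR case: -X in [a, b] exactly when X in [-b, -a].  */
  int negate_ok = (-x >= -50 && -x <= -40) == (x >= 40 && x <= 50);

  return bitnot_ok && negate_ok;
}
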
3940 \f
3941 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3942 type, TYPE, return an expression to test if EXP is in (or out of, depending
3943 on IN_P) the range. Return 0 if the test couldn't be created. */
3944
3945 static tree
3946 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3947 {
3948 tree etype = TREE_TYPE (exp);
3949 tree value;
3950
3951 #ifdef HAVE_canonicalize_funcptr_for_compare
3952 /* Disable this optimization for function pointer expressions
3953 on targets that require function pointer canonicalization. */
3954 if (HAVE_canonicalize_funcptr_for_compare
3955 && TREE_CODE (etype) == POINTER_TYPE
3956 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3957 return NULL_TREE;
3958 #endif
3959
3960 if (! in_p)
3961 {
3962 value = build_range_check (type, exp, 1, low, high);
3963 if (value != 0)
3964 return invert_truthvalue (value);
3965
3966 return 0;
3967 }
3968
3969 if (low == 0 && high == 0)
3970 return fold_convert (type, integer_one_node);
3971
3972 if (low == 0)
3973 return fold_build2 (LE_EXPR, type, exp,
3974 fold_convert (etype, high));
3975
3976 if (high == 0)
3977 return fold_build2 (GE_EXPR, type, exp,
3978 fold_convert (etype, low));
3979
3980 if (operand_equal_p (low, high, 0))
3981 return fold_build2 (EQ_EXPR, type, exp,
3982 fold_convert (etype, low));
3983
3984 if (integer_zerop (low))
3985 {
3986 if (! TYPE_UNSIGNED (etype))
3987 {
3988 etype = lang_hooks.types.unsigned_type (etype);
3989 high = fold_convert (etype, high);
3990 exp = fold_convert (etype, exp);
3991 }
3992 return build_range_check (type, exp, 1, 0, high);
3993 }
3994
3995 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3996 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3997 {
3998 unsigned HOST_WIDE_INT lo;
3999 HOST_WIDE_INT hi;
4000 int prec;
4001
4002 prec = TYPE_PRECISION (etype);
4003 if (prec <= HOST_BITS_PER_WIDE_INT)
4004 {
4005 hi = 0;
4006 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4007 }
4008 else
4009 {
4010 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4011 lo = (unsigned HOST_WIDE_INT) -1;
4012 }
4013
4014 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4015 {
4016 if (TYPE_UNSIGNED (etype))
4017 {
4018 etype = lang_hooks.types.signed_type (etype);
4019 exp = fold_convert (etype, exp);
4020 }
4021 return fold_build2 (GT_EXPR, type, exp,
4022 fold_convert (etype, integer_zero_node));
4023 }
4024 }
4025
4026 value = const_binop (MINUS_EXPR, high, low, 0);
4027 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4028 && ! TYPE_UNSIGNED (etype))
4029 {
4030 tree utype, minv, maxv;
4031
4032 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4033 for the type in question, as we rely on this here. */
4034 switch (TREE_CODE (etype))
4035 {
4036 case INTEGER_TYPE:
4037 case ENUMERAL_TYPE:
4038 case CHAR_TYPE:
4039 /* There is no requirement that LOW be within the range of ETYPE
4040 if the latter is a subtype. It must, however, be within the base
4041 type of ETYPE. So be sure we do the subtraction in that type. */
4042 if (TREE_TYPE (etype))
4043 etype = TREE_TYPE (etype);
4044 utype = lang_hooks.types.unsigned_type (etype);
4045 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4046 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4047 integer_one_node, 1);
4048 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4049 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4050 minv, 1, maxv, 1)))
4051 {
4052 etype = utype;
4053 high = fold_convert (etype, high);
4054 low = fold_convert (etype, low);
4055 exp = fold_convert (etype, exp);
4056 value = const_binop (MINUS_EXPR, high, low, 0);
4057 }
4058 break;
4059 default:
4060 break;
4061 }
4062 }
4063
4064 if (value != 0 && ! TREE_OVERFLOW (value))
4065 {
4066 /* There is no requirement that LOW be within the range of ETYPE
4067 if the latter is a subtype. It must, however, be within the base
4068 type of ETYPE. So be sure we do the subtraction in that type. */
4069 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4070 {
4071 etype = TREE_TYPE (etype);
4072 exp = fold_convert (etype, exp);
4073 low = fold_convert (etype, low);
4074 value = fold_convert (etype, value);
4075 }
4076
4077 return build_range_check (type,
4078 fold_build2 (MINUS_EXPR, etype, exp, low),
4079 1, build_int_cst (etype, 0), value);
4080 }
4081
4082 return 0;
4083 }
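
/* An illustrative sketch, not part of GCC: the (c >= 1 && c <= 127)
   to (signed char) c > 0 rewrite noted above, in ordinary C.  Helper
   names are hypothetical; the trick assumes an 8-bit two's-complement
   signed char.  */

static int
byte_range_naive (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
byte_range_folded (unsigned char c)
{
  /* Reinterpreted as signed, 128..255 become negative, so the
     two-ended test collapses into one signed comparison.  */
  return (signed char) c > 0;
}
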
4084 \f
4085 /* Given two ranges, see if we can merge them into one. Return 1 if we
4086 can, 0 if we can't. Set the output range into the specified parameters. */
4087
4088 static int
4089 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4090 tree high0, int in1_p, tree low1, tree high1)
4091 {
4092 int no_overlap;
4093 int subset;
4094 int temp;
4095 tree tem;
4096 int in_p;
4097 tree low, high;
4098 int lowequal = ((low0 == 0 && low1 == 0)
4099 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4100 low0, 0, low1, 0)));
4101 int highequal = ((high0 == 0 && high1 == 0)
4102 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4103 high0, 1, high1, 1)));
4104
4105 /* Make range 0 be the range that starts first, or ends last if they
4106 start at the same value. Swap them if that is not already the case. */
4107 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4108 low0, 0, low1, 0))
4109 || (lowequal
4110 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4111 high1, 1, high0, 1))))
4112 {
4113 temp = in0_p, in0_p = in1_p, in1_p = temp;
4114 tem = low0, low0 = low1, low1 = tem;
4115 tem = high0, high0 = high1, high1 = tem;
4116 }
4117
4118 /* Now flag two cases, whether the ranges are disjoint or whether the
4119 second range is totally subsumed in the first. Note that the tests
4120 below are simplified by the ones above. */
4121 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4122 high0, 1, low1, 0));
4123 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4124 high1, 1, high0, 1));
4125
4126 /* We now have four cases, depending on whether we are including or
4127 excluding the two ranges. */
4128 if (in0_p && in1_p)
4129 {
4130 /* If they don't overlap, the result is false. If the second range
4131 is a subset it is the result. Otherwise, the range is from the start
4132 of the second to the end of the first. */
4133 if (no_overlap)
4134 in_p = 0, low = high = 0;
4135 else if (subset)
4136 in_p = 1, low = low1, high = high1;
4137 else
4138 in_p = 1, low = low1, high = high0;
4139 }
4140
4141 else if (in0_p && ! in1_p)
4142 {
4143 /* If they don't overlap, the result is the first range. If they are
4144 equal, the result is false. If the second range is a subset of the
4145 first, and the ranges begin at the same place, we go from just after
4146 the end of the first range to the end of the second. If the second
4147 range is not a subset of the first, or if it is a subset and both
4148 ranges end at the same place, the range starts at the start of the
4149 first range and ends just before the second range.
4150 Otherwise, we can't describe this as a single range. */
4151 if (no_overlap)
4152 in_p = 1, low = low0, high = high0;
4153 else if (lowequal && highequal)
4154 in_p = 0, low = high = 0;
4155 else if (subset && lowequal)
4156 {
4157 in_p = 1, high = high0;
4158 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4159 integer_one_node, 0);
4160 }
4161 else if (! subset || highequal)
4162 {
4163 in_p = 1, low = low0;
4164 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4165 integer_one_node, 0);
4166 }
4167 else
4168 return 0;
4169 }
4170
4171 else if (! in0_p && in1_p)
4172 {
4173 /* If they don't overlap, the result is the second range. If the second
4174 is a subset of the first, the result is false. Otherwise,
4175 the range starts just after the first range and ends at the
4176 end of the second. */
4177 if (no_overlap)
4178 in_p = 1, low = low1, high = high1;
4179 else if (subset || highequal)
4180 in_p = 0, low = high = 0;
4181 else
4182 {
4183 in_p = 1, high = high1;
4184 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4185 integer_one_node, 0);
4186 }
4187 }
4188
4189 else
4190 {
4191 /* The case where we are excluding both ranges. Here the complex case
4192 is if they don't overlap. In that case, the only time we have a
4193 range is if they are adjacent. If the second is a subset of the
4194 first, the result is the first. Otherwise, the range to exclude
4195 starts at the beginning of the first range and ends at the end of the
4196 second. */
4197 if (no_overlap)
4198 {
4199 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4200 range_binop (PLUS_EXPR, NULL_TREE,
4201 high0, 1,
4202 integer_one_node, 1),
4203 1, low1, 0)))
4204 in_p = 0, low = low0, high = high1;
4205 else
4206 {
4207 /* Canonicalize - [min, x] into - [-, x]. */
4208 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4209 switch (TREE_CODE (TREE_TYPE (low0)))
4210 {
4211 case ENUMERAL_TYPE:
4212 if (TYPE_PRECISION (TREE_TYPE (low0))
4213 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4214 break;
4215 /* FALLTHROUGH */
4216 case INTEGER_TYPE:
4217 case CHAR_TYPE:
4218 if (tree_int_cst_equal (low0,
4219 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4220 low0 = 0;
4221 break;
4222 case POINTER_TYPE:
4223 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4224 && integer_zerop (low0))
4225 low0 = 0;
4226 break;
4227 default:
4228 break;
4229 }
4230
4231 /* Canonicalize - [x, max] into - [x, -]. */
4232 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4233 switch (TREE_CODE (TREE_TYPE (high1)))
4234 {
4235 case ENUMERAL_TYPE:
4236 if (TYPE_PRECISION (TREE_TYPE (high1))
4237 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4238 break;
4239 /* FALLTHROUGH */
4240 case INTEGER_TYPE:
4241 case CHAR_TYPE:
4242 if (tree_int_cst_equal (high1,
4243 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4244 high1 = 0;
4245 break;
4246 case POINTER_TYPE:
4247 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4248 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4249 high1, 1,
4250 integer_one_node, 1)))
4251 high1 = 0;
4252 break;
4253 default:
4254 break;
4255 }
4256
4257 /* The ranges might be also adjacent between the maximum and
4258 minimum values of the given type. For
4259 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4260 return + [x + 1, y - 1]. */
4261 if (low0 == 0 && high1 == 0)
4262 {
4263 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4264 integer_one_node, 1);
4265 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4266 integer_one_node, 0);
4267 if (low == 0 || high == 0)
4268 return 0;
4269
4270 in_p = 1;
4271 }
4272 else
4273 return 0;
4274 }
4275 }
4276 else if (subset)
4277 in_p = 0, low = low0, high = high0;
4278 else
4279 in_p = 0, low = low0, high = high1;
4280 }
4281
4282 *pin_p = in_p, *plow = low, *phigh = high;
4283 return 1;
4284 }
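
/* An illustrative sketch, not part of GCC: the in0_p && in1_p case of
   merge_ranges above.  ANDing + [2, 10] with + [5, 20] gives the range
   + [5, 10], i.e. from the start of the second range to the end of the
   first.  The helper name is hypothetical.  */

static int
merged_range_agrees (int x)
{
  int conjunction = (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
  int merged = x >= 5 && x <= 10;
  return conjunction == merged;
}
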
4285 \f
4286
4287 /* Subroutine of fold, looking inside expressions of the form
4288 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4289 of the COND_EXPR. This function is being used also to optimize
4290 A op B ? C : A, by reversing the comparison first.
4291
4292 Return a folded expression whose code is not a COND_EXPR
4293 anymore, or NULL_TREE if no folding opportunity is found. */
4294
4295 static tree
4296 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4297 {
4298 enum tree_code comp_code = TREE_CODE (arg0);
4299 tree arg00 = TREE_OPERAND (arg0, 0);
4300 tree arg01 = TREE_OPERAND (arg0, 1);
4301 tree arg1_type = TREE_TYPE (arg1);
4302 tree tem;
4303
4304 STRIP_NOPS (arg1);
4305 STRIP_NOPS (arg2);
4306
4307 /* If we have A op 0 ? A : -A, consider applying the following
4308 transformations:
4309
4310 A == 0? A : -A same as -A
4311 A != 0? A : -A same as A
4312 A >= 0? A : -A same as abs (A)
4313 A > 0? A : -A same as abs (A)
4314 A <= 0? A : -A same as -abs (A)
4315 A < 0? A : -A same as -abs (A)
4316
4317 None of these transformations work for modes with signed
4318 zeros. If A is +/-0, the first two transformations will
4319 change the sign of the result (from +0 to -0, or vice
4320 versa). The last four will fix the sign of the result,
4321 even though the original expressions could be positive or
4322 negative, depending on the sign of A.
4323
4324 Note that all these transformations are correct if A is
4325 NaN, since the two alternatives (A and -A) are also NaNs. */
4326 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4327 ? real_zerop (arg01)
4328 : integer_zerop (arg01))
4329 && ((TREE_CODE (arg2) == NEGATE_EXPR
4330 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4331 /* In the case that A is of the form X-Y, '-A' (arg2) may
4332 have already been folded to Y-X; check for that. */
4333 || (TREE_CODE (arg1) == MINUS_EXPR
4334 && TREE_CODE (arg2) == MINUS_EXPR
4335 && operand_equal_p (TREE_OPERAND (arg1, 0),
4336 TREE_OPERAND (arg2, 1), 0)
4337 && operand_equal_p (TREE_OPERAND (arg1, 1),
4338 TREE_OPERAND (arg2, 0), 0))))
4339 switch (comp_code)
4340 {
4341 case EQ_EXPR:
4342 case UNEQ_EXPR:
4343 tem = fold_convert (arg1_type, arg1);
4344 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4345 case NE_EXPR:
4346 case LTGT_EXPR:
4347 return pedantic_non_lvalue (fold_convert (type, arg1));
4348 case UNGE_EXPR:
4349 case UNGT_EXPR:
4350 if (flag_trapping_math)
4351 break;
4352 /* Fall through. */
4353 case GE_EXPR:
4354 case GT_EXPR:
4355 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4356 arg1 = fold_convert (lang_hooks.types.signed_type
4357 (TREE_TYPE (arg1)), arg1);
4358 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4359 return pedantic_non_lvalue (fold_convert (type, tem));
4360 case UNLE_EXPR:
4361 case UNLT_EXPR:
4362 if (flag_trapping_math)
4363 break;
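/* Fall through. */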
4364 case LE_EXPR:
4365 case LT_EXPR:
4366 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4367 arg1 = fold_convert (lang_hooks.types.signed_type
4368 (TREE_TYPE (arg1)), arg1);
4369 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4370 return negate_expr (fold_convert (type, tem));
4371 default:
4372 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4373 break;
4374 }
4375
4376 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4377 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4378 both transformations are correct when A is NaN: A != 0
4379 is then true, and A == 0 is false. */
4380
4381 if (integer_zerop (arg01) && integer_zerop (arg2))
4382 {
4383 if (comp_code == NE_EXPR)
4384 return pedantic_non_lvalue (fold_convert (type, arg1));
4385 else if (comp_code == EQ_EXPR)
4386 return fold_convert (type, integer_zero_node);
4387 }
4388
4389 /* Try some transformations of A op B ? A : B.
4390
4391 A == B? A : B same as B
4392 A != B? A : B same as A
4393 A >= B? A : B same as max (A, B)
4394 A > B? A : B same as max (B, A)
4395 A <= B? A : B same as min (A, B)
4396 A < B? A : B same as min (B, A)
4397
4398 As above, these transformations don't work in the presence
4399 of signed zeros. For example, if A and B are zeros of
4400 opposite sign, the first two transformations will change
4401 the sign of the result. In the last four, the original
4402 expressions give different results for (A=+0, B=-0) and
4403 (A=-0, B=+0), but the transformed expressions do not.
4404
4405 The first two transformations are correct if either A or B
4406 is a NaN. In the first transformation, the condition will
4407 be false, and B will indeed be chosen. In the case of the
4408 second transformation, the condition A != B will be true,
4409 and A will be chosen.
4410
4411 The conversions to max() and min() are not correct if B is
4412 a number and A is not. The conditions in the original
4413 expressions will be false, so all four give B. The min()
4414 and max() versions would give a NaN instead. */
4415 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4416 /* Avoid these transformations if the COND_EXPR may be used
4417 as an lvalue in the C++ front-end. PR c++/19199. */
4418 && (in_gimple_form
4419 || strcmp (lang_hooks.name, "GNU C++") != 0
4420 || ! maybe_lvalue_p (arg1)
4421 || ! maybe_lvalue_p (arg2)))
4422 {
4423 tree comp_op0 = arg00;
4424 tree comp_op1 = arg01;
4425 tree comp_type = TREE_TYPE (comp_op0);
4426
4427 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4428 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4429 {
4430 comp_type = type;
4431 comp_op0 = arg1;
4432 comp_op1 = arg2;
4433 }
4434
4435 switch (comp_code)
4436 {
4437 case EQ_EXPR:
4438 return pedantic_non_lvalue (fold_convert (type, arg2));
4439 case NE_EXPR:
4440 return pedantic_non_lvalue (fold_convert (type, arg1));
4441 case LE_EXPR:
4442 case LT_EXPR:
4443 case UNLE_EXPR:
4444 case UNLT_EXPR:
4445 /* In C++ a ?: expression can be an lvalue, so put the
4446 operand which will be used if they are equal first
4447 so that we can convert this back to the
4448 corresponding COND_EXPR. */
4449 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4450 {
4451 comp_op0 = fold_convert (comp_type, comp_op0);
4452 comp_op1 = fold_convert (comp_type, comp_op1);
4453 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4454 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4455 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4456 return pedantic_non_lvalue (fold_convert (type, tem));
4457 }
4458 break;
4459 case GE_EXPR:
4460 case GT_EXPR:
4461 case UNGE_EXPR:
4462 case UNGT_EXPR:
4463 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4464 {
4465 comp_op0 = fold_convert (comp_type, comp_op0);
4466 comp_op1 = fold_convert (comp_type, comp_op1);
4467 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4468 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4469 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4470 return pedantic_non_lvalue (fold_convert (type, tem));
4471 }
4472 break;
4473 case UNEQ_EXPR:
4474 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4475 return pedantic_non_lvalue (fold_convert (type, arg2));
4476 break;
4477 case LTGT_EXPR:
4478 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4479 return pedantic_non_lvalue (fold_convert (type, arg1));
4480 break;
4481 default:
4482 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4483 break;
4484 }
4485 }
4486
4487 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4488 we might still be able to simplify this. For example,
4489 if C1 is one less or one more than C2, this might have started
4490 out as a MIN or MAX and been transformed by this function.
4491 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4492
4493 if (INTEGRAL_TYPE_P (type)
4494 && TREE_CODE (arg01) == INTEGER_CST
4495 && TREE_CODE (arg2) == INTEGER_CST)
4496 switch (comp_code)
4497 {
4498 case EQ_EXPR:
4499 /* We can replace A with C1 in this case. */
4500 arg1 = fold_convert (type, arg01);
4501 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4502
4503 case LT_EXPR:
4504 /* If C1 is C2 + 1, this is min(A, C2). */
4505 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4506 OEP_ONLY_CONST)
4507 && operand_equal_p (arg01,
4508 const_binop (PLUS_EXPR, arg2,
4509 integer_one_node, 0),
4510 OEP_ONLY_CONST))
4511 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4512 type, arg1, arg2));
4513 break;
4514
4515 case LE_EXPR:
4516 /* If C1 is C2 - 1, this is min(A, C2). */
4517 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4518 OEP_ONLY_CONST)
4519 && operand_equal_p (arg01,
4520 const_binop (MINUS_EXPR, arg2,
4521 integer_one_node, 0),
4522 OEP_ONLY_CONST))
4523 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4524 type, arg1, arg2));
4525 break;
4526
4527 case GT_EXPR:
4528 /* If C1 is C2 - 1, this is max(A, C2). */
4529 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4530 OEP_ONLY_CONST)
4531 && operand_equal_p (arg01,
4532 const_binop (MINUS_EXPR, arg2,
4533 integer_one_node, 0),
4534 OEP_ONLY_CONST))
4535 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4536 type, arg1, arg2));
4537 break;
4538
4539 case GE_EXPR:
4540 /* If C1 is C2 + 1, this is max(A, C2). */
4541 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4542 OEP_ONLY_CONST)
4543 && operand_equal_p (arg01,
4544 const_binop (PLUS_EXPR, arg2,
4545 integer_one_node, 0),
4546 OEP_ONLY_CONST))
4547 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4548 type, arg1, arg2));
4549 break;
4550 case NE_EXPR:
4551 break;
4552 default:
4553 gcc_unreachable ();
4554 }
4555
4556 return NULL_TREE;
4557 }
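
/* An illustrative sketch, not part of GCC: two of the COND_EXPR
   rewrites above, checked in ordinary C.  The helper name is
   hypothetical; the ABS identity ignores x == INT_MIN, and integer
   operands sidestep the signed-zero and NaN caveats.  */

static int
cond_expr_rewrites_hold (int x, int y)
{
  /* A > 0 ? A : -A is abs (A).  */
  int abs_ok = (x > 0 ? x : -x) == (x < 0 ? -x : x);

  /* A <= B ? A : B is min (A, B).  */
  int min_ok = (x <= y ? x : y) == (y < x ? y : x);

  return abs_ok && min_ok;
}
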
4558
4559
4560 \f
4561 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4562 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4563 #endif
4564
4565 /* EXP is some logical combination of boolean tests. See if we can
4566 merge it into some range test. Return the new tree if so. */
4567
4568 static tree
4569 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4570 {
4571 int or_op = (code == TRUTH_ORIF_EXPR
4572 || code == TRUTH_OR_EXPR);
4573 int in0_p, in1_p, in_p;
4574 tree low0, low1, low, high0, high1, high;
4575 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4576 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4577 tree tem;
4578
4579 /* If this is an OR operation, invert both sides; we will invert
4580 again at the end. */
4581 if (or_op)
4582 in0_p = ! in0_p, in1_p = ! in1_p;
4583
4584 /* If both expressions are the same, if we can merge the ranges, and we
4585 can build the range test, return it or it inverted. If one of the
4586 ranges is always true or always false, consider it to be the same
4587 expression as the other. */
4588 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4589 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4590 in1_p, low1, high1)
4591 && 0 != (tem = (build_range_check (type,
4592 lhs != 0 ? lhs
4593 : rhs != 0 ? rhs : integer_zero_node,
4594 in_p, low, high))))
4595 return or_op ? invert_truthvalue (tem) : tem;
4596
4597 /* On machines where the branch cost is expensive, if this is a
4598 short-circuited branch and the underlying object on both sides
4599 is the same, make a non-short-circuit operation. */
4600 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4601 && lhs != 0 && rhs != 0
4602 && (code == TRUTH_ANDIF_EXPR
4603 || code == TRUTH_ORIF_EXPR)
4604 && operand_equal_p (lhs, rhs, 0))
4605 {
4606 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4607 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4608 which cases we can't do this. */
4609 if (simple_operand_p (lhs))
4610 return build2 (code == TRUTH_ANDIF_EXPR
4611 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4612 type, op0, op1);
4613
4614 else if (lang_hooks.decls.global_bindings_p () == 0
4615 && ! CONTAINS_PLACEHOLDER_P (lhs))
4616 {
4617 tree common = save_expr (lhs);
4618
4619 if (0 != (lhs = build_range_check (type, common,
4620 or_op ? ! in0_p : in0_p,
4621 low0, high0))
4622 && (0 != (rhs = build_range_check (type, common,
4623 or_op ? ! in1_p : in1_p,
4624 low1, high1))))
4625 return build2 (code == TRUTH_ANDIF_EXPR
4626 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4627 type, lhs, rhs);
4628 }
4629 }
4630
4631 return 0;
4632 }
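
/* An illustrative sketch, not part of GCC: with simple operands, the
   short-circuit form gives way to the unconditional range test, as in
   the classic digit check.  Helper names are hypothetical.  */

static int
digit_test_naive (int ch)
{
  return ch >= '0' && ch <= '9';
}

static int
digit_test_folded (int ch)
{
  /* One comparison and no branch between the two tests.  */
  return (unsigned) (ch - '0') <= 9;
}
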
4633 \f
4634 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4635 P-bit value. Arrange things so the extra bits will be set to zero if
4636 and only if C is sign-extended to its full width. If MASK is nonzero,
4637 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4638
4639 static tree
4640 unextend (tree c, int p, int unsignedp, tree mask)
4641 {
4642 tree type = TREE_TYPE (c);
4643 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4644 tree temp;
4645
4646 if (p == modesize || unsignedp)
4647 return c;
4648
4649 /* We work by getting just the sign bit into the low-order bit, then
4650 into the high-order bit, then sign-extend. We then XOR that value
4651 with C. */
4652 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4653 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4654
4655 /* We must use a signed type in order to get an arithmetic right shift.
4656 However, we must also avoid introducing accidental overflows, so that
4657 a subsequent call to integer_zerop will work. Hence we must
4658 do the type conversion here. At this point, the constant is either
4659 zero or one, and the conversion to a signed type can never overflow.
4660 We could get an overflow if this conversion is done anywhere else. */
4661 if (TYPE_UNSIGNED (type))
4662 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4663
4664 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4665 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4666 if (mask != 0)
4667 temp = const_binop (BIT_AND_EXPR, temp,
4668 fold_convert (TREE_TYPE (c), mask), 0);
4669 /* If necessary, convert the type back to match the type of C. */
4670 if (TYPE_UNSIGNED (type))
4671 temp = fold_convert (type, temp);
4672
4673 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4674 }
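
/* An illustrative sketch, not part of GCC: unextend's shift/XOR trick
   replayed in ordinary C on a 32-bit int.  The helper name is
   hypothetical; it assumes two's complement, an arithmetic right shift
   on signed ints (implementation-defined in ISO C), and 0 < P < 32
   (unextend itself returns early when P equals the mode width).  */

static int
sign_extend_via_xor (int c, int p)
{
  int bits = 32;
  int temp = (c >> (p - 1)) & 1;                 /* isolate the sign bit */
  temp = (int) ((unsigned) temp << (bits - 1));  /* move it to the top bit */
  temp >>= bits - p - 1;     /* arithmetic shift smears it over bits P and up */
  return c ^ temp;           /* flip the extra bits iff the sign bit was set */
}

/* For example, sign_extend_via_xor (0xF0, 8) yields -16, the value of
   0xF0 read as a signed 8-bit quantity.  */
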
4675 \f
4676 /* Find ways of folding logical expressions of LHS and RHS:
4677 Try to merge two comparisons to the same innermost item.
4678 Look for range tests like "ch >= '0' && ch <= '9'".
4679 Look for combinations of simple terms on machines with expensive branches
4680 and evaluate the RHS unconditionally.
4681
4682 For example, if we have p->a == 2 && p->b == 4 and we can make an
4683 object large enough to span both A and B, we can do this with a comparison
4684 against the object ANDed with the a mask.
4685
4686 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4687 operations to do this with one comparison.
4688
4689 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4690 function and the one above.
4691
4692 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4693 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4694
4695 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4696 two operands.
4697
4698 We return the simplified tree or 0 if no optimization is possible. */
4699
4700 static tree
4701 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4702 {
4703 /* If this is the "or" of two comparisons, we can do something if
4704 the comparisons are NE_EXPR. If this is the "and", we can do something
4705 if the comparisons are EQ_EXPR. I.e.,
4706 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4707
4708 WANTED_CODE is the comparison code implied by this operation. For
4709 one-bit fields we can convert EQ_EXPR to NE_EXPR, so we need not
4710 reject the "wrong" comparison for such fields. */
4711
4712 enum tree_code wanted_code;
4713 enum tree_code lcode, rcode;
4714 tree ll_arg, lr_arg, rl_arg, rr_arg;
4715 tree ll_inner, lr_inner, rl_inner, rr_inner;
4716 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4717 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4718 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4719 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4720 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4721 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4722 enum machine_mode lnmode, rnmode;
4723 tree ll_mask, lr_mask, rl_mask, rr_mask;
4724 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4725 tree l_const, r_const;
4726 tree lntype, rntype, result;
4727 int first_bit, end_bit;
4728 int volatilep;
4729
4730 /* Start by getting the comparison codes. Fail if anything is volatile.
4731 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4732 it were surrounded with a NE_EXPR. */
4733
4734 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4735 return 0;
4736
4737 lcode = TREE_CODE (lhs);
4738 rcode = TREE_CODE (rhs);
4739
4740 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4741 {
4742 lhs = build2 (NE_EXPR, truth_type, lhs,
4743 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4744 lcode = NE_EXPR;
4745 }
4746
4747 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4748 {
4749 rhs = build2 (NE_EXPR, truth_type, rhs,
4750 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4751 rcode = NE_EXPR;
4752 }
4753
4754 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4755 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4756 return 0;
4757
4758 ll_arg = TREE_OPERAND (lhs, 0);
4759 lr_arg = TREE_OPERAND (lhs, 1);
4760 rl_arg = TREE_OPERAND (rhs, 0);
4761 rr_arg = TREE_OPERAND (rhs, 1);
4762
4763 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4764 if (simple_operand_p (ll_arg)
4765 && simple_operand_p (lr_arg))
4766 {
4767 tree result;
4768 if (operand_equal_p (ll_arg, rl_arg, 0)
4769 && operand_equal_p (lr_arg, rr_arg, 0))
4770 {
4771 result = combine_comparisons (code, lcode, rcode,
4772 truth_type, ll_arg, lr_arg);
4773 if (result)
4774 return result;
4775 }
4776 else if (operand_equal_p (ll_arg, rr_arg, 0)
4777 && operand_equal_p (lr_arg, rl_arg, 0))
4778 {
4779 result = combine_comparisons (code, lcode,
4780 swap_tree_comparison (rcode),
4781 truth_type, ll_arg, lr_arg);
4782 if (result)
4783 return result;
4784 }
4785 }
4786
4787 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4788 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4789
4790 /* If the RHS can be evaluated unconditionally and its operands are
4791 simple, it wins to evaluate the RHS unconditionally on machines
4792 with expensive branches. In this case, this isn't a comparison
4793 that can be merged. Avoid doing this if the RHS is a floating-point
4794 comparison since those can trap. */
4795
4796 if (BRANCH_COST >= 2
4797 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4798 && simple_operand_p (rl_arg)
4799 && simple_operand_p (rr_arg))
4800 {
4801 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4802 if (code == TRUTH_OR_EXPR
4803 && lcode == NE_EXPR && integer_zerop (lr_arg)
4804 && rcode == NE_EXPR && integer_zerop (rr_arg)
4805 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4806 return build2 (NE_EXPR, truth_type,
4807 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4808 ll_arg, rl_arg),
4809 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4810
4811 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4812 if (code == TRUTH_AND_EXPR
4813 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4814 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4815 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4816 return build2 (EQ_EXPR, truth_type,
4817 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4818 ll_arg, rl_arg),
4819 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4820
4821 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4822 return build2 (code, truth_type, lhs, rhs);
4823 }
4824
4825 /* See if the comparisons can be merged. Then get all the parameters for
4826 each side. */
4827
4828 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4829 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4830 return 0;
4831
4832 volatilep = 0;
4833 ll_inner = decode_field_reference (ll_arg,
4834 &ll_bitsize, &ll_bitpos, &ll_mode,
4835 &ll_unsignedp, &volatilep, &ll_mask,
4836 &ll_and_mask);
4837 lr_inner = decode_field_reference (lr_arg,
4838 &lr_bitsize, &lr_bitpos, &lr_mode,
4839 &lr_unsignedp, &volatilep, &lr_mask,
4840 &lr_and_mask);
4841 rl_inner = decode_field_reference (rl_arg,
4842 &rl_bitsize, &rl_bitpos, &rl_mode,
4843 &rl_unsignedp, &volatilep, &rl_mask,
4844 &rl_and_mask);
4845 rr_inner = decode_field_reference (rr_arg,
4846 &rr_bitsize, &rr_bitpos, &rr_mode,
4847 &rr_unsignedp, &volatilep, &rr_mask,
4848 &rr_and_mask);
4849
4850 /* The inner operation on the lhs of each comparison must be the
4851 same if we are to be able to do anything. Then see if we have
4852 constants. If not, the same must be true for
4853 the rhs's. */
4854 if (volatilep || ll_inner == 0 || rl_inner == 0
4855 || ! operand_equal_p (ll_inner, rl_inner, 0))
4856 return 0;
4857
4858 if (TREE_CODE (lr_arg) == INTEGER_CST
4859 && TREE_CODE (rr_arg) == INTEGER_CST)
4860 l_const = lr_arg, r_const = rr_arg;
4861 else if (lr_inner == 0 || rr_inner == 0
4862 || ! operand_equal_p (lr_inner, rr_inner, 0))
4863 return 0;
4864 else
4865 l_const = r_const = 0;
4866
4867 /* If either comparison code is not correct for our logical operation,
4868 fail. However, we can convert a one-bit comparison against zero into
4869 the opposite comparison against that bit being set in the field. */
4870
4871 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4872 if (lcode != wanted_code)
4873 {
4874 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4875 {
4876 /* Make the left operand unsigned, since we are only interested
4877 in the value of one bit. Otherwise we are doing the wrong
4878 thing below. */
4879 ll_unsignedp = 1;
4880 l_const = ll_mask;
4881 }
4882 else
4883 return 0;
4884 }
4885
4886 /* This is analogous to the code for l_const above. */
4887 if (rcode != wanted_code)
4888 {
4889 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4890 {
4891 rl_unsignedp = 1;
4892 r_const = rl_mask;
4893 }
4894 else
4895 return 0;
4896 }
4897
4898 /* After this point all optimizations will generate bit-field
4899 references, which we might not want. */
4900 if (! lang_hooks.can_use_bit_fields_p ())
4901 return 0;
4902
4903 /* See if we can find a mode that contains both fields being compared on
4904 the left. If we can't, fail. Otherwise, update all constants and masks
4905 to be relative to a field of that size. */
4906 first_bit = MIN (ll_bitpos, rl_bitpos);
4907 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4908 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4909 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4910 volatilep);
4911 if (lnmode == VOIDmode)
4912 return 0;
4913
4914 lnbitsize = GET_MODE_BITSIZE (lnmode);
4915 lnbitpos = first_bit & ~ (lnbitsize - 1);
4916 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4917 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4918
4919 if (BYTES_BIG_ENDIAN)
4920 {
4921 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4922 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4923 }
4924
4925 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4926 size_int (xll_bitpos), 0);
4927 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4928 size_int (xrl_bitpos), 0);
4929
4930 if (l_const)
4931 {
4932 l_const = fold_convert (lntype, l_const);
4933 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4934 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4935 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4936 fold_build1 (BIT_NOT_EXPR,
4937 lntype, ll_mask),
4938 0)))
4939 {
4940 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4941
4942 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4943 }
4944 }
4945 if (r_const)
4946 {
4947 r_const = fold_convert (lntype, r_const);
4948 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4949 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4950 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4951 fold_build1 (BIT_NOT_EXPR,
4952 lntype, rl_mask),
4953 0)))
4954 {
4955 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4956
4957 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4958 }
4959 }
4960
4961 /* If the right sides are not constant, do the same for them. Also,
4962 disallow this optimization if a size or signedness mismatch occurs
4963 between the left and right sides. */
4964 if (l_const == 0)
4965 {
4966 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4967 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4968 /* Make sure the two fields on the right
4969 correspond to the left without being swapped. */
4970 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4971 return 0;
4972
4973 first_bit = MIN (lr_bitpos, rr_bitpos);
4974 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4975 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4976 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4977 volatilep);
4978 if (rnmode == VOIDmode)
4979 return 0;
4980
4981 rnbitsize = GET_MODE_BITSIZE (rnmode);
4982 rnbitpos = first_bit & ~ (rnbitsize - 1);
4983 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4984 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4985
4986 if (BYTES_BIG_ENDIAN)
4987 {
4988 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4989 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4990 }
4991
4992 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4993 size_int (xlr_bitpos), 0);
4994 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4995 size_int (xrr_bitpos), 0);
4996
4997 /* Make a mask that corresponds to both fields being compared.
4998 Do this for both items being compared. If the operands are the
4999 same size and the bits being compared are in the same position
5000 then we can do this by masking both and comparing the masked
5001 results. */
5002 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5003 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5004 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5005 {
5006 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5007 ll_unsignedp || rl_unsignedp);
5008 if (! all_ones_mask_p (ll_mask, lnbitsize))
5009 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5010
5011 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5012 lr_unsignedp || rr_unsignedp);
5013 if (! all_ones_mask_p (lr_mask, rnbitsize))
5014 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5015
5016 return build2 (wanted_code, truth_type, lhs, rhs);
5017 }
5018
5019 /* There is still another way we can do something: If both pairs of
5020 fields being compared are adjacent, we may be able to make a wider
5021 field containing them both.
5022
5023 Note that we still must mask the lhs/rhs expressions. Furthermore,
5024 the mask must be shifted to account for the shift done by
5025 make_bit_field_ref. */
5026 if ((ll_bitsize + ll_bitpos == rl_bitpos
5027 && lr_bitsize + lr_bitpos == rr_bitpos)
5028 || (ll_bitpos == rl_bitpos + rl_bitsize
5029 && lr_bitpos == rr_bitpos + rr_bitsize))
5030 {
5031 tree type;
5032
5033 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5034 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5035 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5036 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5037
5038 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5039 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5040 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5041 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5042
5043 /* Convert to the smaller type before masking out unwanted bits. */
5044 type = lntype;
5045 if (lntype != rntype)
5046 {
5047 if (lnbitsize > rnbitsize)
5048 {
5049 lhs = fold_convert (rntype, lhs);
5050 ll_mask = fold_convert (rntype, ll_mask);
5051 type = rntype;
5052 }
5053 else if (lnbitsize < rnbitsize)
5054 {
5055 rhs = fold_convert (lntype, rhs);
5056 lr_mask = fold_convert (lntype, lr_mask);
5057 type = lntype;
5058 }
5059 }
5060
5061 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5062 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5063
5064 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5065 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5066
5067 return build2 (wanted_code, truth_type, lhs, rhs);
5068 }
5069
5070 return 0;
5071 }
5072
5073 /* Handle the case of comparisons with constants. If there is something in
5074 common between the masks, those bits of the constants must be the same.
5075 If not, the condition is always false. Test for this to avoid generating
5076 incorrect code below. */
5077 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5078 if (! integer_zerop (result)
5079 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5080 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5081 {
5082 if (wanted_code == NE_EXPR)
5083 {
5084 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5085 return constant_boolean_node (true, truth_type);
5086 }
5087 else
5088 {
5089 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5090 return constant_boolean_node (false, truth_type);
5091 }
5092 }
5093
5094 /* Construct the expression we will return. First get the component
5095 reference we will make. Unless the mask is all ones for the width of
5096 that field, perform the mask operation. Then compare with the
5097 merged constant. */
5098 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5099 ll_unsignedp || rl_unsignedp);
5100
5101 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5102 if (! all_ones_mask_p (ll_mask, lnbitsize))
5103 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5104
5105 return build2 (wanted_code, truth_type, result,
5106 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5107 }
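
/* An illustrative sketch, not part of GCC: the (a != 0) || (b != 0)
   to (a | b) != 0 rewrite above, which replaces a short-circuit
   branch with one cheap bitwise OR.  Helper names are hypothetical.  */

static int
either_nonzero_naive (int a, int b)
{
  return a != 0 || b != 0;
}

static int
either_nonzero_folded (int a, int b)
{
  return (a | b) != 0;
}
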
5108 \f
5109 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5110 constant. */
5111
5112 static tree
5113 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5114 {
5115 tree arg0 = op0;
5116 enum tree_code op_code;
5117 tree comp_const = op1;
5118 tree minmax_const;
5119 int consts_equal, consts_lt;
5120 tree inner;
5121
5122 STRIP_SIGN_NOPS (arg0);
5123
5124 op_code = TREE_CODE (arg0);
5125 minmax_const = TREE_OPERAND (arg0, 1);
5126 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5127 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5128 inner = TREE_OPERAND (arg0, 0);
5129
5130 /* If something does not permit us to optimize, return NULL_TREE. */
5131 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5132 || TREE_CODE (comp_const) != INTEGER_CST
5133 || TREE_CONSTANT_OVERFLOW (comp_const)
5134 || TREE_CODE (minmax_const) != INTEGER_CST
5135 || TREE_CONSTANT_OVERFLOW (minmax_const))
5136 return NULL_TREE;
5137
5138 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5139 and GT_EXPR, doing the rest with recursive calls using logical
5140 simplifications. */
5141 switch (code)
5142 {
5143 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5144 {
5145 /* FIXME: We should be able to invert code without building a
5146 scratch tree node, but doing so would require us to
5147 duplicate a part of invert_truthvalue here. */
5148 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5149 tem = optimize_minmax_comparison (TREE_CODE (tem),
5150 TREE_TYPE (tem),
5151 TREE_OPERAND (tem, 0),
5152 TREE_OPERAND (tem, 1));
5153 return invert_truthvalue (tem);
5154 }
5155
5156 case GE_EXPR:
5157 return
5158 fold_build2 (TRUTH_ORIF_EXPR, type,
5159 optimize_minmax_comparison
5160 (EQ_EXPR, type, arg0, comp_const),
5161 optimize_minmax_comparison
5162 (GT_EXPR, type, arg0, comp_const));
5163
5164 case EQ_EXPR:
5165 if (op_code == MAX_EXPR && consts_equal)
5166 /* MAX (X, 0) == 0 -> X <= 0 */
5167 return fold_build2 (LE_EXPR, type, inner, comp_const);
5168
5169 else if (op_code == MAX_EXPR && consts_lt)
5170 /* MAX (X, 0) == 5 -> X == 5 */
5171 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5172
5173 else if (op_code == MAX_EXPR)
5174 /* MAX (X, 0) == -1 -> false */
5175 return omit_one_operand (type, integer_zero_node, inner);
5176
5177 else if (consts_equal)
5178 /* MIN (X, 0) == 0 -> X >= 0 */
5179 return fold_build2 (GE_EXPR, type, inner, comp_const);
5180
5181 else if (consts_lt)
5182 /* MIN (X, 0) == 5 -> false */
5183 return omit_one_operand (type, integer_zero_node, inner);
5184
5185 else
5186 /* MIN (X, 0) == -1 -> X == -1 */
5187 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5188
5189 case GT_EXPR:
5190 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5191 /* MAX (X, 0) > 0 -> X > 0
5192 MAX (X, 0) > 5 -> X > 5 */
5193 return fold_build2 (GT_EXPR, type, inner, comp_const);
5194
5195 else if (op_code == MAX_EXPR)
5196 /* MAX (X, 0) > -1 -> true */
5197 return omit_one_operand (type, integer_one_node, inner);
5198
5199 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5200 /* MIN (X, 0) > 0 -> false
5201 MIN (X, 0) > 5 -> false */
5202 return omit_one_operand (type, integer_zero_node, inner);
5203
5204 else
5205 /* MIN (X, 0) > -1 -> X > -1 */
5206 return fold_build2 (GT_EXPR, type, inner, comp_const);
5207
5208 default:
5209 return NULL_TREE;
5210 }
5211 }
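
/* An illustrative sketch, not part of GCC: the MAX (X, 0) == 0 case
   above, in ordinary C.  Helper names are hypothetical.  */

static int
max_zero_eq_naive (int x)
{
  return (x > 0 ? x : 0) == 0;
}

static int
max_zero_eq_folded (int x)
{
  /* MAX (X, 0) is zero exactly when X is not positive.  */
  return x <= 0;
}
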
5212 \f
5213 /* T is an integer expression that is being multiplied or divided by, or
5214 reduced modulo, a constant C (CODE says which operation and what kind
5215 of divide or modulus). See if we can eliminate that operation by folding it with
5216 other operations already in T. WIDE_TYPE, if non-null, is a type that
5217 should be used for the computation if wider than our type.
5218
5219 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5220 (X * 2) + (Y * 4). We must, however, be assured that either the original
5221 expression would not overflow or that overflow is undefined for the type
5222 in the language in question.
5223
5224 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5225 the machine has a multiply-accumulate insn or that this is part of an
5226 addressing calculation.
5227
5228 If we return a non-null expression, it is an equivalent form of the
5229 original computation, but need not be in the original type. */
5230
5231 static tree
5232 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5233 {
5234 /* To avoid exponential search depth, refuse to allow recursion past
5235 three levels. Beyond that (1) it's highly unlikely that we'll find
5236 something interesting and (2) we've probably processed it before
5237 when we built the inner expression. */
5238
5239 static int depth;
5240 tree ret;
5241
5242 if (depth > 3)
5243 return NULL;
5244
5245 depth++;
5246 ret = extract_muldiv_1 (t, c, code, wide_type);
5247 depth--;
5248
5249 return ret;
5250 }
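
/* An illustrative sketch, not part of GCC: the division elimination
   described in the comment above, in ordinary C.  Helper names are
   hypothetical; the equivalence relies on x * 8 + y * 16 not
   overflowing, exactly as the comment requires.  */

static long
muldiv_naive (long x, long y)
{
  return (x * 8 + y * 16) / 4;
}

static long
muldiv_folded (long x, long y)
{
  return x * 2 + y * 4;
}
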
5251
5252 static tree
5253 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5254 {
5255 tree type = TREE_TYPE (t);
5256 enum tree_code tcode = TREE_CODE (t);
5257 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5258 > GET_MODE_SIZE (TYPE_MODE (type)))
5259 ? wide_type : type);
5260 tree t1, t2;
5261 int same_p = tcode == code;
5262 tree op0 = NULL_TREE, op1 = NULL_TREE;
5263
5264 /* Don't deal with constants of zero here; they confuse the code below. */
5265 if (integer_zerop (c))
5266 return NULL_TREE;
5267
5268 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5269 op0 = TREE_OPERAND (t, 0);
5270
5271 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5272 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5273
5274 /* Note that we need not handle conditional operations here since fold
5275 already handles those cases. So just do arithmetic here. */
5276 switch (tcode)
5277 {
5278 case INTEGER_CST:
5279 /* For a constant, we can always simplify if we are a multiply
5280 or (for divide and modulus) if it is a multiple of our constant. */
5281 if (code == MULT_EXPR
5282 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5283 return const_binop (code, fold_convert (ctype, t),
5284 fold_convert (ctype, c), 0);
5285 break;
5286
5287 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5288 /* If op0 is an expression ... */
5289 if ((COMPARISON_CLASS_P (op0)
5290 || UNARY_CLASS_P (op0)
5291 || BINARY_CLASS_P (op0)
5292 || EXPRESSION_CLASS_P (op0))
5293 /* ... and is unsigned, and its type is smaller than ctype,
5294 then we cannot pass through as widening. */
5295 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5296 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5297 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5298 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5299 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5300 /* ... or this is a truncation (t is narrower than op0),
5301 then we cannot pass through this narrowing. */
5302 || (GET_MODE_SIZE (TYPE_MODE (type))
5303 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5304 /* ... or signedness changes for division or modulus,
5305 then we cannot pass through this conversion. */
5306 || (code != MULT_EXPR
5307 && (TYPE_UNSIGNED (ctype)
5308 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5309 break;
5310
5311 /* Pass the constant down and see if we can make a simplification. If
5312 we can, replace this expression with the inner simplification for
5313 possible later conversion to our or some other type. */
5314 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5315 && TREE_CODE (t2) == INTEGER_CST
5316 && ! TREE_CONSTANT_OVERFLOW (t2)
5317 && (0 != (t1 = extract_muldiv (op0, t2, code,
5318 code == MULT_EXPR
5319 ? ctype : NULL_TREE))))
5320 return t1;
5321 break;
5322
5323 case ABS_EXPR:
5324 /* If widening the type changes it from signed to unsigned, then we
5325 must avoid building ABS_EXPR itself as unsigned. */
5326 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5327 {
5328 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5329 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5330 {
5331 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5332 return fold_convert (ctype, t1);
5333 }
5334 break;
5335 }
5336 /* FALLTHROUGH */
5337 case NEGATE_EXPR:
5338 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5339 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5340 break;
5341
5342 case MIN_EXPR: case MAX_EXPR:
5343 /* If widening the type changes the signedness, then we can't perform
5344 this optimization as that changes the result. */
5345 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5346 break;
5347
5348 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5349 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5350 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5351 {
5352 if (tree_int_cst_sgn (c) < 0)
5353 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5354
5355 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5356 fold_convert (ctype, t2));
5357 }
5358 break;
5359
5360 case LSHIFT_EXPR: case RSHIFT_EXPR:
5361 /* If the second operand is constant, this is a multiplication
5362 or floor division by a power of two, so we can treat it that
5363 way unless the multiplier or divisor overflows. Signed
5364 left-shift overflow is implementation-defined rather than
5365 undefined in C90, so do not convert signed left shift into
5366 multiplication. */
5367 if (TREE_CODE (op1) == INTEGER_CST
5368 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5369 /* const_binop may not detect overflow correctly,
5370 so check for it explicitly here. */
5371 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5372 && TREE_INT_CST_HIGH (op1) == 0
5373 && 0 != (t1 = fold_convert (ctype,
5374 const_binop (LSHIFT_EXPR,
5375 size_one_node,
5376 op1, 0)))
5377 && ! TREE_OVERFLOW (t1))
5378 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5379 ? MULT_EXPR : FLOOR_DIV_EXPR,
5380 ctype, fold_convert (ctype, op0), t1),
5381 c, code, wide_type);
5382 break;
5383
5384 case PLUS_EXPR: case MINUS_EXPR:
5385 /* See if we can eliminate the operation on both sides. If we can, we
5386 can return a new PLUS or MINUS. If we can't, the only remaining
5387 cases where we can do anything are if the second operand is a
5388 constant. */
5389 t1 = extract_muldiv (op0, c, code, wide_type);
5390 t2 = extract_muldiv (op1, c, code, wide_type);
5391 if (t1 != 0 && t2 != 0
5392 && (code == MULT_EXPR
5393 /* If not multiplication, we can only do this if both operands
5394 are divisible by c. */
5395 || (multiple_of_p (ctype, op0, c)
5396 && multiple_of_p (ctype, op1, c))))
5397 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5398 fold_convert (ctype, t2));
5399
5400 /* If this was a subtraction, negate OP1 and set it to be an addition.
5401 This simplifies the logic below. */
5402 if (tcode == MINUS_EXPR)
5403 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5404
5405 if (TREE_CODE (op1) != INTEGER_CST)
5406 break;
5407
5408 /* If either OP1 or C is negative, this optimization is not safe for
5409 some of the division and remainder types while for others we need
5410 to change the code. */
5411 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5412 {
5413 if (code == CEIL_DIV_EXPR)
5414 code = FLOOR_DIV_EXPR;
5415 else if (code == FLOOR_DIV_EXPR)
5416 code = CEIL_DIV_EXPR;
5417 else if (code != MULT_EXPR
5418 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5419 break;
5420 }
5421
5422 /* If it's a multiply or a division/modulus operation of a multiple
5423 of our constant, do the operation and verify it doesn't overflow. */
5424 if (code == MULT_EXPR
5425 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5426 {
5427 op1 = const_binop (code, fold_convert (ctype, op1),
5428 fold_convert (ctype, c), 0);
5429 /* We allow the constant to overflow with wrapping semantics. */
5430 if (op1 == 0
5431 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5432 break;
5433 }
5434 else
5435 break;
5436
5437 /* If we have an unsigned type that is not a sizetype, we cannot widen
5438 the operation since it will change the result if the original
5439 computation overflowed. */
5440 if (TYPE_UNSIGNED (ctype)
5441 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5442 && ctype != type)
5443 break;
5444
5445 /* If we were able to eliminate our operation from the first side,
5446 apply our operation to the second side and reform the PLUS. */
5447 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5448 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5449
5450 /* The last case is if we are a multiply. In that case, we can
5451 apply the distributive law to commute the multiply and addition
5452 if the multiplication of the constants doesn't overflow. */
5453 if (code == MULT_EXPR)
5454 return fold_build2 (tcode, ctype,
5455 fold_build2 (code, ctype,
5456 fold_convert (ctype, op0),
5457 fold_convert (ctype, c)),
5458 op1);
5459
5460 break;
5461
5462 case MULT_EXPR:
5463 /* We have a special case here if we are doing something like
5464 (C * 8) % 4 since we know that's zero. */
5465 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5466 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5467 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5468 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5469 return omit_one_operand (type, integer_zero_node, op0);
5470
5471 /* ... fall through ... */
5472
5473 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5474 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5475 /* If we can extract our operation from the LHS, do so and return a
5476 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5477 do something only if the second operand is a constant. */
5478 if (same_p
5479 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5480 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5481 fold_convert (ctype, op1));
5482 else if (tcode == MULT_EXPR && code == MULT_EXPR
5483 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5484 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5485 fold_convert (ctype, t1));
5486 else if (TREE_CODE (op1) != INTEGER_CST)
5487 return 0;
5488
5489 /* If these are the same operation types, we can associate them
5490 assuming no overflow. */
5491 if (tcode == code
5492 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5493 fold_convert (ctype, c), 0))
5494 && ! TREE_OVERFLOW (t1))
5495 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5496
5497 /* If these operations "cancel" each other, we have the main
5498 optimizations of this pass, which occur when either constant is a
5499 multiple of the other, in which case we replace this with either an
5500 operation of CODE or TCODE.
5501
5502 If we have an unsigned type that is not a sizetype, we cannot do
5503 this since it will change the result if the original computation
5504 overflowed. */
5505 if ((! TYPE_UNSIGNED (ctype)
5506 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5507 && ! flag_wrapv
5508 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5509 || (tcode == MULT_EXPR
5510 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5511 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5512 {
5513 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5514 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5515 fold_convert (ctype,
5516 const_binop (TRUNC_DIV_EXPR,
5517 op1, c, 0)));
5518 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5519 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5520 fold_convert (ctype,
5521 const_binop (TRUNC_DIV_EXPR,
5522 c, op1, 0)));
5523 }
5524 break;
5525
5526 default:
5527 break;
5528 }
5529
5530 return 0;
5531 }
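
/* Editorial sketch, not part of the original source: the identity that
   extract_muldiv exploits, written out on plain integers.  It is valid
   only when the original computation cannot overflow (or overflow is
   undefined), as the comment before extract_muldiv requires.  The
   function name is illustrative only.  */
static long
extract_muldiv_example (long x, long y)
{
  /* ((x * 8) + (y * 16)) / 4 folds to (x * 2) + (y * 4).  */
  return x * 2 + y * 4;
}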
5532 \f
5533 /* Return a node which has the indicated constant VALUE (either 0 or
5534 1), and is of the indicated TYPE. */
5535
5536 tree
5537 constant_boolean_node (int value, tree type)
5538 {
5539 if (type == integer_type_node)
5540 return value ? integer_one_node : integer_zero_node;
5541 else if (type == boolean_type_node)
5542 return value ? boolean_true_node : boolean_false_node;
5543 else
5544 return build_int_cst (type, value);
5545 }
5546
5547
5548 /* Return true if expr looks like an ARRAY_REF and set base and
5549 offset to the appropriate trees. If there is no offset,
5550 offset is set to NULL_TREE. Base will be canonicalized to
5551 something you can get the element type from using
5552 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5553 in bytes from the base. */
5554
5555 static bool
5556 extract_array_ref (tree expr, tree *base, tree *offset)
5557 {
5558 /* One canonical form is a PLUS_EXPR with the first
5559 argument being an ADDR_EXPR with a possible NOP_EXPR
5560 attached. */
5561 if (TREE_CODE (expr) == PLUS_EXPR)
5562 {
5563 tree op0 = TREE_OPERAND (expr, 0);
5564 tree inner_base, dummy1;
5565 /* Strip NOP_EXPRs here because the C frontends and/or
5566 folders may present us with (int *)&x.a + 4B. */
5567 STRIP_NOPS (op0);
5568 if (extract_array_ref (op0, &inner_base, &dummy1))
5569 {
5570 *base = inner_base;
5571 if (dummy1 == NULL_TREE)
5572 *offset = TREE_OPERAND (expr, 1);
5573 else
5574 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5575 dummy1, TREE_OPERAND (expr, 1));
5576 return true;
5577 }
5578 }
5579 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5580 which we transform into an ADDR_EXPR with appropriate
5581 offset. For other arguments to the ADDR_EXPR we assume
5582 zero offset and as such do not care about the ADDR_EXPR
5583 type and strip possible nops from it. */
5584 else if (TREE_CODE (expr) == ADDR_EXPR)
5585 {
5586 tree op0 = TREE_OPERAND (expr, 0);
5587 if (TREE_CODE (op0) == ARRAY_REF)
5588 {
5589 tree idx = TREE_OPERAND (op0, 1);
5590 *base = TREE_OPERAND (op0, 0);
5591 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5592 array_ref_element_size (op0));
5593 }
5594 else
5595 {
5596 /* Handle array-to-pointer decay as &a. */
5597 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5598 *base = TREE_OPERAND (expr, 0);
5599 else
5600 *base = expr;
5601 *offset = NULL_TREE;
5602 }
5603 return true;
5604 }
5605 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5606 else if (SSA_VAR_P (expr)
5607 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5608 {
5609 *base = expr;
5610 *offset = NULL_TREE;
5611 return true;
5612 }
5613
5614 return false;
5615 }
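
/* Editorial sketch, not part of the original source: the source-level
   shapes extract_array_ref decomposes.  Both addresses below denote the
   same base with a byte offset of i * sizeof (int); the identifiers are
   illustrative only.  */
static int extract_array_ref_data[16];

static int *
extract_array_ref_example (long i)
{
  int *p = &extract_array_ref_data[i];   /* ADDR_EXPR of an ARRAY_REF.  */
  int *q = extract_array_ref_data + i;   /* PLUS_EXPR of base and offset.  */
  return p == q ? p : q;                 /* Always p; the forms agree.  */
}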
5616
5617
5618 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5619 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5620 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5621 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5622 COND is the first argument to CODE; otherwise (as in the example
5623 given here), it is the second argument. TYPE is the type of the
5624 original expression. Return NULL_TREE if no simplification is
5625 possible. */
5626
5627 static tree
5628 fold_binary_op_with_conditional_arg (enum tree_code code,
5629 tree type, tree op0, tree op1,
5630 tree cond, tree arg, int cond_first_p)
5631 {
5632 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5633 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5634 tree test, true_value, false_value;
5635 tree lhs = NULL_TREE;
5636 tree rhs = NULL_TREE;
5637
5638 /* This transformation is only worthwhile if we don't have to wrap
5639 arg in a SAVE_EXPR, and the operation can be simplified on at least
5640 one of the branches once it is pushed inside the COND_EXPR. */
5641 if (!TREE_CONSTANT (arg))
5642 return NULL_TREE;
5643
5644 if (TREE_CODE (cond) == COND_EXPR)
5645 {
5646 test = TREE_OPERAND (cond, 0);
5647 true_value = TREE_OPERAND (cond, 1);
5648 false_value = TREE_OPERAND (cond, 2);
5649 /* If either arm of the conditional throws an exception (and thus
5650 has void type), it does not make sense to try to perform a
5651 logical or arithmetic operation involving it. */
5652 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5653 lhs = true_value;
5654 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5655 rhs = false_value;
5656 }
5657 else
5658 {
5659 tree testtype = TREE_TYPE (cond);
5660 test = cond;
5661 true_value = constant_boolean_node (true, testtype);
5662 false_value = constant_boolean_node (false, testtype);
5663 }
5664
5665 arg = fold_convert (arg_type, arg);
5666 if (lhs == 0)
5667 {
5668 true_value = fold_convert (cond_type, true_value);
5669 if (cond_first_p)
5670 lhs = fold_build2 (code, type, true_value, arg);
5671 else
5672 lhs = fold_build2 (code, type, arg, true_value);
5673 }
5674 if (rhs == 0)
5675 {
5676 false_value = fold_convert (cond_type, false_value);
5677 if (cond_first_p)
5678 rhs = fold_build2 (code, type, false_value, arg);
5679 else
5680 rhs = fold_build2 (code, type, arg, false_value);
5681 }
5682
5683 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5684 return fold_convert (type, test);
5685 }
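
/* Editorial sketch, not part of the original source: the transformation
   above at the C level.  Pushing the constant operand into each arm of
   the conditional gives each branch a chance to fold further.  */
static int
cond_arg_example (int a, int b, int x, int y)
{
  int before = a + (b ? x : y);
  int after = b ? (a + x) : (a + y);
  return before == after;   /* Always 1.  */
}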
5686
5687 \f
5688 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5689
5690 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5691 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5692 ADDEND is the same as X.
5693
5694 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5695 and finite. The problematic cases are when X is zero, and its mode
5696 has signed zeros. In the case of rounding towards -infinity,
5697 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5698 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5699
5700 static bool
5701 fold_real_zero_addition_p (tree type, tree addend, int negate)
5702 {
5703 if (!real_zerop (addend))
5704 return false;
5705
5706 /* Don't allow the fold with -fsignaling-nans. */
5707 if (HONOR_SNANS (TYPE_MODE (type)))
5708 return false;
5709
5710 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5711 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5712 return true;
5713
5714 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5715 if (TREE_CODE (addend) == REAL_CST
5716 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5717 negate = !negate;
5718
5719 /* The mode has signed zeros, and we have to honor their sign.
5720 In this situation, there is only one case we can return true for.
5721 X - 0 is the same as X unless rounding towards -infinity is
5722 supported. */
5723 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5724 }
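
/* Editorial sketch, not part of the original source: why X + 0.0 cannot
   be folded to X when signed zeros are honored.  Under IEEE 754
   round-to-nearest, -0.0 + 0.0 yields +0.0, so the fold would lose the
   sign of a zero operand.  */
static double
signed_zero_example (void)
{
  double x = -0.0;
  return x + 0.0;   /* Evaluates to +0.0, not the original -0.0.  */
}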
5725
5726 /* Subroutine of fold() that checks comparisons of built-in math
5727 functions against real constants.
5728
5729 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5730 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5731 is the type of the result and ARG0 and ARG1 are the operands of the
5732 comparison. ARG1 must be a TREE_REAL_CST.
5733
5734 The function returns the constant folded tree if a simplification
5735 can be made, and NULL_TREE otherwise. */
5736
5737 static tree
5738 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5739 tree type, tree arg0, tree arg1)
5740 {
5741 REAL_VALUE_TYPE c;
5742
5743 if (BUILTIN_SQRT_P (fcode))
5744 {
5745 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5746 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5747
5748 c = TREE_REAL_CST (arg1);
5749 if (REAL_VALUE_NEGATIVE (c))
5750 {
5751 /* sqrt(x) < y is always false, if y is negative. */
5752 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5753 return omit_one_operand (type, integer_zero_node, arg);
5754
5755 /* sqrt(x) > y is always true, if y is negative and we
5756 don't care about NaNs, i.e. negative values of x. */
5757 if (code == NE_EXPR || !HONOR_NANS (mode))
5758 return omit_one_operand (type, integer_one_node, arg);
5759
5760 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5761 return fold_build2 (GE_EXPR, type, arg,
5762 build_real (TREE_TYPE (arg), dconst0));
5763 }
5764 else if (code == GT_EXPR || code == GE_EXPR)
5765 {
5766 REAL_VALUE_TYPE c2;
5767
5768 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5769 real_convert (&c2, mode, &c2);
5770
5771 if (REAL_VALUE_ISINF (c2))
5772 {
5773 /* sqrt(x) > y is x == +Inf, when y is very large. */
5774 if (HONOR_INFINITIES (mode))
5775 return fold_build2 (EQ_EXPR, type, arg,
5776 build_real (TREE_TYPE (arg), c2));
5777
5778 /* sqrt(x) > y is always false, when y is very large
5779 and we don't care about infinities. */
5780 return omit_one_operand (type, integer_zero_node, arg);
5781 }
5782
5783 /* sqrt(x) > c is the same as x > c*c. */
5784 return fold_build2 (code, type, arg,
5785 build_real (TREE_TYPE (arg), c2));
5786 }
5787 else if (code == LT_EXPR || code == LE_EXPR)
5788 {
5789 REAL_VALUE_TYPE c2;
5790
5791 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5792 real_convert (&c2, mode, &c2);
5793
5794 if (REAL_VALUE_ISINF (c2))
5795 {
5796 /* sqrt(x) < y is always true, when y is a very large
5797 value and we don't care about NaNs or Infinities. */
5798 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5799 return omit_one_operand (type, integer_one_node, arg);
5800
5801 /* sqrt(x) < y is x != +Inf when y is very large and we
5802 don't care about NaNs. */
5803 if (! HONOR_NANS (mode))
5804 return fold_build2 (NE_EXPR, type, arg,
5805 build_real (TREE_TYPE (arg), c2));
5806
5807 /* sqrt(x) < y is x >= 0 when y is very large and we
5808 don't care about Infinities. */
5809 if (! HONOR_INFINITIES (mode))
5810 return fold_build2 (GE_EXPR, type, arg,
5811 build_real (TREE_TYPE (arg), dconst0));
5812
5813 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5814 if (lang_hooks.decls.global_bindings_p () != 0
5815 || CONTAINS_PLACEHOLDER_P (arg))
5816 return NULL_TREE;
5817
5818 arg = save_expr (arg);
5819 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5820 fold_build2 (GE_EXPR, type, arg,
5821 build_real (TREE_TYPE (arg),
5822 dconst0)),
5823 fold_build2 (NE_EXPR, type, arg,
5824 build_real (TREE_TYPE (arg),
5825 c2)));
5826 }
5827
5828 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5829 if (! HONOR_NANS (mode))
5830 return fold_build2 (code, type, arg,
5831 build_real (TREE_TYPE (arg), c2));
5832
5833 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5834 if (lang_hooks.decls.global_bindings_p () == 0
5835 && ! CONTAINS_PLACEHOLDER_P (arg))
5836 {
5837 arg = save_expr (arg);
5838 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5839 fold_build2 (GE_EXPR, type, arg,
5840 build_real (TREE_TYPE (arg),
5841 dconst0)),
5842 fold_build2 (code, type, arg,
5843 build_real (TREE_TYPE (arg),
5844 c2)));
5845 }
5846 }
5847 }
5848
5849 return NULL_TREE;
5850 }
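
/* Editorial sketch, not part of the original source: the monotonicity
   fact behind the sqrt folds above.  For a nonnegative constant c with
   c*c finite, sqrt (x) > c holds exactly when x > c*c, modulo the NaN
   and infinity cases the routine checks explicitly.  */
static int
sqrt_compare_example (double x)
{
  return x > 4.0;   /* What sqrt (x) > 2.0 folds to.  */
}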
5851
5852 /* Subroutine of fold() that optimizes comparisons against Infinities,
5853 either +Inf or -Inf.
5854
5855 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5856 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5857 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5858
5859 The function returns the constant folded tree if a simplification
5860 can be made, and NULL_TREE otherwise. */
5861
5862 static tree
5863 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5864 {
5865 enum machine_mode mode;
5866 REAL_VALUE_TYPE max;
5867 tree temp;
5868 bool neg;
5869
5870 mode = TYPE_MODE (TREE_TYPE (arg0));
5871
5872 /* For negative infinity swap the sense of the comparison. */
5873 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5874 if (neg)
5875 code = swap_tree_comparison (code);
5876
5877 switch (code)
5878 {
5879 case GT_EXPR:
5880 /* x > +Inf is always false, if we ignore sNaNs. */
5881 if (HONOR_SNANS (mode))
5882 return NULL_TREE;
5883 return omit_one_operand (type, integer_zero_node, arg0);
5884
5885 case LE_EXPR:
5886 /* x <= +Inf is always true, if we don't care about NaNs. */
5887 if (! HONOR_NANS (mode))
5888 return omit_one_operand (type, integer_one_node, arg0);
5889
5890 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5891 if (lang_hooks.decls.global_bindings_p () == 0
5892 && ! CONTAINS_PLACEHOLDER_P (arg0))
5893 {
5894 arg0 = save_expr (arg0);
5895 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5896 }
5897 break;
5898
5899 case EQ_EXPR:
5900 case GE_EXPR:
5901 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5902 real_maxval (&max, neg, mode);
5903 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5904 arg0, build_real (TREE_TYPE (arg0), max));
5905
5906 case LT_EXPR:
5907 /* x < +Inf is always equal to x <= DBL_MAX. */
5908 real_maxval (&max, neg, mode);
5909 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5910 arg0, build_real (TREE_TYPE (arg0), max));
5911
5912 case NE_EXPR:
5913 /* x != +Inf is always equal to !(x > DBL_MAX). */
5914 real_maxval (&max, neg, mode);
5915 if (! HONOR_NANS (mode))
5916 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5917 arg0, build_real (TREE_TYPE (arg0), max));
5918
5919 /* The transformation below creates non-gimple code and thus is
5920 not appropriate if we are in gimple form. */
5921 if (in_gimple_form)
5922 return NULL_TREE;
5923
5924 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5925 arg0, build_real (TREE_TYPE (arg0), max));
5926 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5927
5928 default:
5929 break;
5930 }
5931
5932 return NULL_TREE;
5933 }
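
/* Editorial sketch, not part of the original source: one instance of the
   folds above, assuming IEEE 754 double.  +Inf is the only value greater
   than DBL_MAX, so x < +Inf is equivalent to x <= DBL_MAX; a NaN operand
   compares false in both forms.  */
static int
inf_compare_example (double x)
{
  return x <= 1.7976931348623157e308;   /* DBL_MAX; same as x < +Inf.  */
}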
5934
5935 /* Subroutine of fold() that optimizes comparisons of a division by
5936 a nonzero integer constant against an integer constant, i.e.
5937 X/C1 op C2.
5938
5939 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5940 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5941 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5942
5943 The function returns the constant folded tree if a simplification
5944 can be made, and NULL_TREE otherwise. */
5945
5946 static tree
5947 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5948 {
5949 tree prod, tmp, hi, lo;
5950 tree arg00 = TREE_OPERAND (arg0, 0);
5951 tree arg01 = TREE_OPERAND (arg0, 1);
5952 unsigned HOST_WIDE_INT lpart;
5953 HOST_WIDE_INT hpart;
5954 int overflow;
5955
5956 /* We have to do this the hard way to detect unsigned overflow.
5957 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5958 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5959 TREE_INT_CST_HIGH (arg01),
5960 TREE_INT_CST_LOW (arg1),
5961 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5962 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5963 prod = force_fit_type (prod, -1, overflow, false);
5964
5965 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5966 {
5967 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5968 lo = prod;
5969
5970 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5971 overflow = add_double (TREE_INT_CST_LOW (prod),
5972 TREE_INT_CST_HIGH (prod),
5973 TREE_INT_CST_LOW (tmp),
5974 TREE_INT_CST_HIGH (tmp),
5975 &lpart, &hpart);
5976 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5977 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5978 TREE_CONSTANT_OVERFLOW (prod));
5979 }
5980 else if (tree_int_cst_sgn (arg01) >= 0)
5981 {
5982 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5983 switch (tree_int_cst_sgn (arg1))
5984 {
5985 case -1:
5986 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5987 hi = prod;
5988 break;
5989
5990 case 0:
5991 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5992 hi = tmp;
5993 break;
5994
5995 case 1:
5996 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5997 lo = prod;
5998 break;
5999
6000 default:
6001 gcc_unreachable ();
6002 }
6003 }
6004 else
6005 {
6006 /* A negative divisor reverses the relational operators. */
6007 code = swap_tree_comparison (code);
6008
6009 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6010 switch (tree_int_cst_sgn (arg1))
6011 {
6012 case -1:
6013 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6014 lo = prod;
6015 break;
6016
6017 case 0:
6018 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6019 lo = tmp;
6020 break;
6021
6022 case 1:
6023 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6024 hi = prod;
6025 break;
6026
6027 default:
6028 gcc_unreachable ();
6029 }
6030 }
6031
6032 switch (code)
6033 {
6034 case EQ_EXPR:
6035 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6036 return omit_one_operand (type, integer_zero_node, arg00);
6037 if (TREE_OVERFLOW (hi))
6038 return fold_build2 (GE_EXPR, type, arg00, lo);
6039 if (TREE_OVERFLOW (lo))
6040 return fold_build2 (LE_EXPR, type, arg00, hi);
6041 return build_range_check (type, arg00, 1, lo, hi);
6042
6043 case NE_EXPR:
6044 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6045 return omit_one_operand (type, integer_one_node, arg00);
6046 if (TREE_OVERFLOW (hi))
6047 return fold_build2 (LT_EXPR, type, arg00, lo);
6048 if (TREE_OVERFLOW (lo))
6049 return fold_build2 (GT_EXPR, type, arg00, hi);
6050 return build_range_check (type, arg00, 0, lo, hi);
6051
6052 case LT_EXPR:
6053 if (TREE_OVERFLOW (lo))
6054 return omit_one_operand (type, integer_zero_node, arg00);
6055 return fold_build2 (LT_EXPR, type, arg00, lo);
6056
6057 case LE_EXPR:
6058 if (TREE_OVERFLOW (hi))
6059 return omit_one_operand (type, integer_one_node, arg00);
6060 return fold_build2 (LE_EXPR, type, arg00, hi);
6061
6062 case GT_EXPR:
6063 if (TREE_OVERFLOW (hi))
6064 return omit_one_operand (type, integer_zero_node, arg00);
6065 return fold_build2 (GT_EXPR, type, arg00, hi);
6066
6067 case GE_EXPR:
6068 if (TREE_OVERFLOW (lo))
6069 return omit_one_operand (type, integer_one_node, arg00);
6070 return fold_build2 (GE_EXPR, type, arg00, lo);
6071
6072 default:
6073 break;
6074 }
6075
6076 return NULL_TREE;
6077 }
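
/* Editorial sketch, not part of the original source: a worked instance
   of the range construction above.  With C's truncating division,
   x / 3 == 2 holds exactly for x in [6, 8], i.e. lo = 2 * 3 and
   hi = lo + (3 - 1).  */
static int
div_compare_example (int x)
{
  return x >= 6 && x <= 8;   /* Equivalent to x / 3 == 2.  */
}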
6078
6079
6080 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6081 equality/inequality test, then return a simplified form of the test
6082 using a sign test. Otherwise return NULL. TYPE is the desired
6083 result type. */
6084
6085 static tree
6086 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6087 tree result_type)
6088 {
6089 /* If this is testing a single bit, we can optimize the test. */
6090 if ((code == NE_EXPR || code == EQ_EXPR)
6091 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6092 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6093 {
6094 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6095 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6096 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6097
6098 if (arg00 != NULL_TREE
6099 /* This is only a win if casting to a signed type is cheap,
6100 i.e. when arg00's type is not a partial mode. */
6101 && TYPE_PRECISION (TREE_TYPE (arg00))
6102 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6103 {
6104 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6105 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6106 result_type, fold_convert (stype, arg00),
6107 fold_convert (stype, integer_zero_node));
6108 }
6109 }
6110
6111 return NULL_TREE;
6112 }
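
/* Editorial sketch, not part of the original source: the sign test fold
   above, assuming a 32-bit two's complement int whose sign bit is
   0x80000000.  */
static int
sign_test_example (int a)
{
  return a < 0;   /* What (a & 0x80000000) != 0 folds to.  */
}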
6113
6114 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6115 equality/inequality test, then return a simplified form of
6116 the test using shifts and logical operations. Otherwise return
6117 NULL. TYPE is the desired result type. */
6118
6119 tree
6120 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6121 tree result_type)
6122 {
6123 /* If this is testing a single bit, we can optimize the test. */
6124 if ((code == NE_EXPR || code == EQ_EXPR)
6125 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6126 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6127 {
6128 tree inner = TREE_OPERAND (arg0, 0);
6129 tree type = TREE_TYPE (arg0);
6130 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6131 enum machine_mode operand_mode = TYPE_MODE (type);
6132 int ops_unsigned;
6133 tree signed_type, unsigned_type, intermediate_type;
6134 tree tem;
6135
6136 /* First, see if we can fold the single bit test into a sign-bit
6137 test. */
6138 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6139 result_type);
6140 if (tem)
6141 return tem;
6142
6143 /* Otherwise we have (A & C) != 0 where C is a single bit,
6144 convert that into ((A >> C2) & 1), where C2 = log2(C).
6145 Similarly for (A & C) == 0. */
6146
6147 /* If INNER is a right shift of a constant and it plus BITNUM does
6148 not overflow, adjust BITNUM and INNER. */
6149 if (TREE_CODE (inner) == RSHIFT_EXPR
6150 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6151 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6152 && bitnum < TYPE_PRECISION (type)
6153 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6154 bitnum - TYPE_PRECISION (type)))
6155 {
6156 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6157 inner = TREE_OPERAND (inner, 0);
6158 }
6159
6160 /* If we are going to be able to omit the AND below, we must do our
6161 operations as unsigned. If we must use the AND, we have a choice.
6162 Normally unsigned is faster, but for some machines signed is. */
6163 #ifdef LOAD_EXTEND_OP
6164 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6165 && !flag_syntax_only) ? 0 : 1;
6166 #else
6167 ops_unsigned = 1;
6168 #endif
6169
6170 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6171 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6172 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6173 inner = fold_convert (intermediate_type, inner);
6174
6175 if (bitnum != 0)
6176 inner = build2 (RSHIFT_EXPR, intermediate_type,
6177 inner, size_int (bitnum));
6178
6179 if (code == EQ_EXPR)
6180 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6181 inner, integer_one_node);
6182
6183 /* Put the AND last so it can combine with more things. */
6184 inner = build2 (BIT_AND_EXPR, intermediate_type,
6185 inner, integer_one_node);
6186
6187 /* Make sure to return the proper type. */
6188 inner = fold_convert (result_type, inner);
6189
6190 return inner;
6191 }
6192 return NULL_TREE;
6193 }
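
/* Editorial sketch, not part of the original source: the shift form of
   the single-bit test above, for C == 8 and hence C2 == log2 (C) == 3.  */
static unsigned
single_bit_test_example (unsigned a)
{
  return (a >> 3) & 1;   /* What (a & 8) != 0 folds to.  */
}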
6194
6195 /* Check whether we are allowed to reorder operands arg0 and arg1,
6196 such that the evaluation of arg1 occurs before arg0. */
6197
6198 static bool
6199 reorder_operands_p (tree arg0, tree arg1)
6200 {
6201 if (! flag_evaluation_order)
6202 return true;
6203 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6204 return true;
6205 return ! TREE_SIDE_EFFECTS (arg0)
6206 && ! TREE_SIDE_EFFECTS (arg1);
6207 }
6208
6209 /* Test whether it is preferable to swap two operands, ARG0 and
6210 ARG1, for example because ARG0 is an integer constant and ARG1
6211 isn't. If REORDER is true, only recommend swapping if we can
6212 evaluate the operands in reverse order. */
6213
6214 bool
6215 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6216 {
6217 STRIP_SIGN_NOPS (arg0);
6218 STRIP_SIGN_NOPS (arg1);
6219
6220 if (TREE_CODE (arg1) == INTEGER_CST)
6221 return 0;
6222 if (TREE_CODE (arg0) == INTEGER_CST)
6223 return 1;
6224
6225 if (TREE_CODE (arg1) == REAL_CST)
6226 return 0;
6227 if (TREE_CODE (arg0) == REAL_CST)
6228 return 1;
6229
6230 if (TREE_CODE (arg1) == COMPLEX_CST)
6231 return 0;
6232 if (TREE_CODE (arg0) == COMPLEX_CST)
6233 return 1;
6234
6235 if (TREE_CONSTANT (arg1))
6236 return 0;
6237 if (TREE_CONSTANT (arg0))
6238 return 1;
6239
6240 if (optimize_size)
6241 return 0;
6242
6243 if (reorder && flag_evaluation_order
6244 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6245 return 0;
6246
6247 if (DECL_P (arg1))
6248 return 0;
6249 if (DECL_P (arg0))
6250 return 1;
6251
6252 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6253 for commutative and comparison operators. Ensuring a canonical
6254 form allows the optimizers to find additional redundancies without
6255 having to explicitly check for both orderings. */
6256 if (TREE_CODE (arg0) == SSA_NAME
6257 && TREE_CODE (arg1) == SSA_NAME
6258 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6259 return 1;
6260
6261 return 0;
6262 }
6263
6264 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6265 ARG0 is extended to a wider type. */
6266
6267 static tree
6268 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6269 {
6270 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6271 tree arg1_unw;
6272 tree shorter_type, outer_type;
6273 tree min, max;
6274 bool above, below;
6275
6276 if (arg0_unw == arg0)
6277 return NULL_TREE;
6278 shorter_type = TREE_TYPE (arg0_unw);
6279
6280 #ifdef HAVE_canonicalize_funcptr_for_compare
6281 /* Disable this optimization if we're casting a function pointer
6282 type on targets that require function pointer canonicalization. */
6283 if (HAVE_canonicalize_funcptr_for_compare
6284 && TREE_CODE (shorter_type) == POINTER_TYPE
6285 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6286 return NULL_TREE;
6287 #endif
6288
6289 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6290 return NULL_TREE;
6291
6292 arg1_unw = get_unwidened (arg1, shorter_type);
6293
6294 /* If possible, express the comparison in the shorter mode. */
6295 if ((code == EQ_EXPR || code == NE_EXPR
6296 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6297 && (TREE_TYPE (arg1_unw) == shorter_type
6298 || (TREE_CODE (arg1_unw) == INTEGER_CST
6299 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6300 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6301 && int_fits_type_p (arg1_unw, shorter_type))))
6302 return fold_build2 (code, type, arg0_unw,
6303 fold_convert (shorter_type, arg1_unw));
6304
6305 if (TREE_CODE (arg1_unw) != INTEGER_CST
6306 || TREE_CODE (shorter_type) != INTEGER_TYPE
6307 || !int_fits_type_p (arg1_unw, shorter_type))
6308 return NULL_TREE;
6309
6310 /* If we are comparing with an integer that does not fit into the range
6311 of the shorter type, the result is known. */
6312 outer_type = TREE_TYPE (arg1_unw);
6313 min = lower_bound_in_type (outer_type, shorter_type);
6314 max = upper_bound_in_type (outer_type, shorter_type);
6315
6316 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6317 max, arg1_unw));
6318 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6319 arg1_unw, min));
6320
6321 switch (code)
6322 {
6323 case EQ_EXPR:
6324 if (above || below)
6325 return omit_one_operand (type, integer_zero_node, arg0);
6326 break;
6327
6328 case NE_EXPR:
6329 if (above || below)
6330 return omit_one_operand (type, integer_one_node, arg0);
6331 break;
6332
6333 case LT_EXPR:
6334 case LE_EXPR:
6335 if (above)
6336 return omit_one_operand (type, integer_one_node, arg0);
6337 else if (below)
6338 return omit_one_operand (type, integer_zero_node, arg0);
6339 break;

6340 case GT_EXPR:
6341 case GE_EXPR:
6342 if (above)
6343 return omit_one_operand (type, integer_zero_node, arg0);
6344 else if (below)
6345 return omit_one_operand (type, integer_one_node, arg0);
6346 break;

6347 default:
6348 break;
6349 }
6350
6351 return NULL_TREE;
6352 }
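
/* Editorial sketch, not part of the original source: the out-of-range
   case above, on a target with 16-bit short.  The widened value can
   never reach 100000, so the comparison has a known result.  */
static int
widened_compare_example (short s)
{
  return (int) s == 100000;   /* Folds to 0: 100000 exceeds SHRT_MAX.  */
}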
6353
6354 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6355 ARG0 just the signedness is changed. */
6356
6357 static tree
6358 fold_sign_changed_comparison (enum tree_code code, tree type,
6359 tree arg0, tree arg1)
6360 {
6361 tree arg0_inner, tmp;
6362 tree inner_type, outer_type;
6363
6364 if (TREE_CODE (arg0) != NOP_EXPR
6365 && TREE_CODE (arg0) != CONVERT_EXPR)
6366 return NULL_TREE;
6367
6368 outer_type = TREE_TYPE (arg0);
6369 arg0_inner = TREE_OPERAND (arg0, 0);
6370 inner_type = TREE_TYPE (arg0_inner);
6371
6372 #ifdef HAVE_canonicalize_funcptr_for_compare
6373 /* Disable this optimization if we're casting a function pointer
6374 type on targets that require function pointer canonicalization. */
6375 if (HAVE_canonicalize_funcptr_for_compare
6376 && TREE_CODE (inner_type) == POINTER_TYPE
6377 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6378 return NULL_TREE;
6379 #endif
6380
6381 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6382 return NULL_TREE;
6383
6384 if (TREE_CODE (arg1) != INTEGER_CST
6385 && !((TREE_CODE (arg1) == NOP_EXPR
6386 || TREE_CODE (arg1) == CONVERT_EXPR)
6387 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6388 return NULL_TREE;
6389
6390 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6391 && code != NE_EXPR
6392 && code != EQ_EXPR)
6393 return NULL_TREE;
6394
6395 if (TREE_CODE (arg1) == INTEGER_CST)
6396 {
6397 tmp = build_int_cst_wide (inner_type,
6398 TREE_INT_CST_LOW (arg1),
6399 TREE_INT_CST_HIGH (arg1));
6400 arg1 = force_fit_type (tmp, 0,
6401 TREE_OVERFLOW (arg1),
6402 TREE_CONSTANT_OVERFLOW (arg1));
6403 }
6404 else
6405 arg1 = fold_convert (inner_type, arg1);
6406
6407 return fold_build2 (code, type, arg0_inner, arg1);
6408 }
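
/* Editorial sketch, not part of the original source: an equality
   comparison is unaffected by a same-precision change of signedness, so
   the cast can be dropped once the constant is converted.  */
static int
sign_changed_compare_example (int x)
{
  return (unsigned) x == 5u;   /* Folds to x == 5.  */
}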
6409
6410 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6411 the step of the array. Reconstructs s and delta in the case of s * delta
6412 being an integer constant (and thus already folded).
6413 ADDR is the address. MULT is the multiplicative expression.
6414 If the function succeeds, the new address expression is returned. Otherwise
6415 NULL_TREE is returned. */
6416
6417 static tree
6418 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6419 {
6420 tree s, delta, step;
6421 tree ref = TREE_OPERAND (addr, 0), pref;
6422 tree ret, pos;
6423 tree itype;
6424
6425 /* Canonicalize op1 into a possibly non-constant delta
6426 and an INTEGER_CST s. */
6427 if (TREE_CODE (op1) == MULT_EXPR)
6428 {
6429 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6430
6431 STRIP_NOPS (arg0);
6432 STRIP_NOPS (arg1);
6433
6434 if (TREE_CODE (arg0) == INTEGER_CST)
6435 {
6436 s = arg0;
6437 delta = arg1;
6438 }
6439 else if (TREE_CODE (arg1) == INTEGER_CST)
6440 {
6441 s = arg1;
6442 delta = arg0;
6443 }
6444 else
6445 return NULL_TREE;
6446 }
6447 else if (TREE_CODE (op1) == INTEGER_CST)
6448 {
6449 delta = op1;
6450 s = NULL_TREE;
6451 }
6452 else
6453 {
6454 /* Treat op1 as delta * 1. */
6455 delta = op1;
6456 s = integer_one_node;
6457 }
6458
6459 for (;; ref = TREE_OPERAND (ref, 0))
6460 {
6461 if (TREE_CODE (ref) == ARRAY_REF)
6462 {
6463 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6464 if (! itype)
6465 continue;
6466
6467 step = array_ref_element_size (ref);
6468 if (TREE_CODE (step) != INTEGER_CST)
6469 continue;
6470
6471 if (s)
6472 {
6473 if (! tree_int_cst_equal (step, s))
6474 continue;
6475 }
6476 else
6477 {
6478 /* Check whether delta is a multiple of step. */
6479 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6480 if (! tmp)
6481 continue;
6482 delta = tmp;
6483 }
6484
6485 break;
6486 }
6487
6488 if (!handled_component_p (ref))
6489 return NULL_TREE;
6490 }
6491
6492 /* We found a suitable array reference, so copy everything up to it
6493 and replace the index. */
6494
6495 pref = TREE_OPERAND (addr, 0);
6496 ret = copy_node (pref);
6497 pos = ret;
6498
6499 while (pref != ref)
6500 {
6501 pref = TREE_OPERAND (pref, 0);
6502 TREE_OPERAND (pos, 0) = copy_node (pref);
6503 pos = TREE_OPERAND (pos, 0);
6504 }
6505
6506 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6507 fold_convert (itype,
6508 TREE_OPERAND (pos, 1)),
6509 fold_convert (itype, delta));
6510
6511 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6512 }
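
/* Editorial sketch, not part of the original source: the address
   identity this routine exploits, for step s == sizeof (int).  Adding
   delta * sizeof (int) bytes to &a[idx] lands on &a[idx + delta], so the
   multiplication moves into the index.  */
static int *
move_mult_example (int *a, long idx, long delta)
{
  return &a[idx + delta];   /* What &a[idx] + delta folds to.  */
}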
6513
6514
6515 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6516 means A >= Y && A != MAX, but in this case we know that
6517 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6518
6519 static tree
6520 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6521 {
6522 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6523
6524 if (TREE_CODE (bound) == LT_EXPR)
6525 a = TREE_OPERAND (bound, 0);
6526 else if (TREE_CODE (bound) == GT_EXPR)
6527 a = TREE_OPERAND (bound, 1);
6528 else
6529 return NULL_TREE;
6530
6531 typea = TREE_TYPE (a);
6532 if (!INTEGRAL_TYPE_P (typea)
6533 && !POINTER_TYPE_P (typea))
6534 return NULL_TREE;
6535
6536 if (TREE_CODE (ineq) == LT_EXPR)
6537 {
6538 a1 = TREE_OPERAND (ineq, 1);
6539 y = TREE_OPERAND (ineq, 0);
6540 }
6541 else if (TREE_CODE (ineq) == GT_EXPR)
6542 {
6543 a1 = TREE_OPERAND (ineq, 0);
6544 y = TREE_OPERAND (ineq, 1);
6545 }
6546 else
6547 return NULL_TREE;
6548
6549 if (TREE_TYPE (a1) != typea)
6550 return NULL_TREE;
6551
6552 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6553 if (!integer_onep (diff))
6554 return NULL_TREE;
6555
6556 return fold_build2 (GE_EXPR, type, a, y);
6557 }
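
/* Editorial sketch, not part of the original source: the fold above on
   ints.  A + 1 > Y means A >= Y unless A + 1 wraps, and the bound
   A < X rules the wrap out, since A < X <= INT_MAX; the short-circuit
   in both forms below keeps a + 1 from ever overflowing.  */
static int
nonsharp_ineq_example (int a, int x, int y)
{
  int before = a < x && a + 1 > y;
  int after = a < x && a >= y;
  return before == after;   /* Always 1.  */
}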
6558
6559 /* Fold a unary expression of code CODE and type TYPE with operand
6560 OP0. Return the folded expression if folding is successful.
6561 Otherwise, return NULL_TREE. */
6562
6563 tree
6564 fold_unary (enum tree_code code, tree type, tree op0)
6565 {
6566 tree tem;
6567 tree arg0;
6568 enum tree_code_class kind = TREE_CODE_CLASS (code);
6569
6570 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6571 && TREE_CODE_LENGTH (code) == 1);
6572
6573 arg0 = op0;
6574 if (arg0)
6575 {
6576 if (code == NOP_EXPR || code == CONVERT_EXPR
6577 || code == FLOAT_EXPR || code == ABS_EXPR)
6578 {
6579 /* Don't use STRIP_NOPS, because signedness of argument type
6580 matters. */
6581 STRIP_SIGN_NOPS (arg0);
6582 }
6583 else
6584 {
6585 /* Strip any conversions that don't change the mode. This
6586 is safe for every expression, except for a comparison
6587 expression because its signedness is derived from its
6588 operands.
6589
6590 Note that this is done as an internal manipulation within
6591 the constant folder, in order to find the simplest
6592 representation of the arguments so that their form can be
6593 studied. In any case, the appropriate type conversions
6594 should be put back in the tree that will get out of the
6595 constant folder. */
6596 STRIP_NOPS (arg0);
6597 }
6598 }
6599
6600 if (TREE_CODE_CLASS (code) == tcc_unary)
6601 {
6602 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6603 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6604 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6605 else if (TREE_CODE (arg0) == COND_EXPR)
6606 {
6607 tree arg01 = TREE_OPERAND (arg0, 1);
6608 tree arg02 = TREE_OPERAND (arg0, 2);
6609 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6610 arg01 = fold_build1 (code, type, arg01);
6611 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6612 arg02 = fold_build1 (code, type, arg02);
6613 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6614 arg01, arg02);
6615
6616 /* If this was a conversion, and all we did was move it
6617 inside the COND_EXPR, bring it back out. But leave it if
6618 it is a conversion from integer to integer and the
6619 result precision is no wider than a word since such a
6620 conversion is cheap and may be optimized away by combine,
6621 while it couldn't if it were outside the COND_EXPR. Then return
6622 so we don't get into an infinite recursion loop taking the
6623 conversion out and then back in. */
6624
6625 if ((code == NOP_EXPR || code == CONVERT_EXPR
6626 || code == NON_LVALUE_EXPR)
6627 && TREE_CODE (tem) == COND_EXPR
6628 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6629 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6630 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
6631 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
6632 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6633 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6634 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6635 && (INTEGRAL_TYPE_P
6636 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6637 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6638 || flag_syntax_only))
6639 tem = build1 (code, type,
6640 build3 (COND_EXPR,
6641 TREE_TYPE (TREE_OPERAND
6642 (TREE_OPERAND (tem, 1), 0)),
6643 TREE_OPERAND (tem, 0),
6644 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6645 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6646 return tem;
6647 }
6648 else if (COMPARISON_CLASS_P (arg0))
6649 {
6650 if (TREE_CODE (type) == BOOLEAN_TYPE)
6651 {
6652 arg0 = copy_node (arg0);
6653 TREE_TYPE (arg0) = type;
6654 return arg0;
6655 }
6656 else if (TREE_CODE (type) != INTEGER_TYPE)
6657 return fold_build3 (COND_EXPR, type, arg0,
6658 fold_build1 (code, type,
6659 integer_one_node),
6660 fold_build1 (code, type,
6661 integer_zero_node));
6662 }
6663 }
6664
6665 switch (code)
6666 {
6667 case NOP_EXPR:
6668 case FLOAT_EXPR:
6669 case CONVERT_EXPR:
6670 case FIX_TRUNC_EXPR:
6671 case FIX_CEIL_EXPR:
6672 case FIX_FLOOR_EXPR:
6673 case FIX_ROUND_EXPR:
6674 if (TREE_TYPE (op0) == type)
6675 return op0;
6676
6677 /* Handle cases of two conversions in a row. */
6678 if (TREE_CODE (op0) == NOP_EXPR
6679 || TREE_CODE (op0) == CONVERT_EXPR)
6680 {
6681 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6682 tree inter_type = TREE_TYPE (op0);
6683 int inside_int = INTEGRAL_TYPE_P (inside_type);
6684 int inside_ptr = POINTER_TYPE_P (inside_type);
6685 int inside_float = FLOAT_TYPE_P (inside_type);
6686 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6687 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6688 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6689 int inter_int = INTEGRAL_TYPE_P (inter_type);
6690 int inter_ptr = POINTER_TYPE_P (inter_type);
6691 int inter_float = FLOAT_TYPE_P (inter_type);
6692 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6693 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6694 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6695 int final_int = INTEGRAL_TYPE_P (type);
6696 int final_ptr = POINTER_TYPE_P (type);
6697 int final_float = FLOAT_TYPE_P (type);
6698 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6699 unsigned int final_prec = TYPE_PRECISION (type);
6700 int final_unsignedp = TYPE_UNSIGNED (type);
6701
6702 /* In addition to the cases of two conversions in a row
6703 handled below, if we are converting something to its own
6704 type via an object of identical or wider precision, neither
6705 conversion is needed. */
6706 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6707 && ((inter_int && final_int) || (inter_float && final_float))
6708 && inter_prec >= final_prec)
6709 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6710
6711 /* Likewise, if the intermediate and final types are either both
6712 float or both integer, we don't need the middle conversion if
6713 it is wider than the final type and doesn't change the signedness
6714 (for integers). Avoid this if the final type is a pointer
6715 since then we sometimes need the inner conversion. Likewise if
6716 the outer has a precision not equal to the size of its mode. */
6717 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6718 || (inter_float && inside_float)
6719 || (inter_vec && inside_vec))
6720 && inter_prec >= inside_prec
6721 && (inter_float || inter_vec
6722 || inter_unsignedp == inside_unsignedp)
6723 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6724 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6725 && ! final_ptr
6726 && (! final_vec || inter_prec == inside_prec))
6727 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6728
6729 /* If we have a sign-extension of a zero-extended value, we can
6730 replace that by a single zero-extension. */
6731 if (inside_int && inter_int && final_int
6732 && inside_prec < inter_prec && inter_prec < final_prec
6733 && inside_unsignedp && !inter_unsignedp)
6734 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6735
6736 /* Two conversions in a row are not needed unless:
6737 - some conversion is floating-point (overstrict for now), or
6738 - some conversion is a vector (overstrict for now), or
6739 - the intermediate type is narrower than both initial and
6740 final, or
6741 - the intermediate type and innermost type differ in signedness,
6742 and the outermost type is wider than the intermediate, or
6743 - the initial type is a pointer type and the precisions of the
6744 intermediate and final types differ, or
6745 - the final type is a pointer type and the precisions of the
6746 initial and intermediate types differ. */
6747 if (! inside_float && ! inter_float && ! final_float
6748 && ! inside_vec && ! inter_vec && ! final_vec
6749 && (inter_prec > inside_prec || inter_prec > final_prec)
6750 && ! (inside_int && inter_int
6751 && inter_unsignedp != inside_unsignedp
6752 && inter_prec < final_prec)
6753 && ((inter_unsignedp && inter_prec > inside_prec)
6754 == (final_unsignedp && final_prec > inter_prec))
6755 && ! (inside_ptr && inter_prec != final_prec)
6756 && ! (final_ptr && inside_prec != inter_prec)
6757 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6758 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6759 && ! final_ptr)
6760 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6761 }
6762
6763 /* Handle (T *)&A.B.C for A being of type T and B and C
6764 living at offset zero. This occurs frequently in
6765 C++ upcasting and then accessing the base. */
6766 if (TREE_CODE (op0) == ADDR_EXPR
6767 && POINTER_TYPE_P (type)
6768 && handled_component_p (TREE_OPERAND (op0, 0)))
6769 {
6770 HOST_WIDE_INT bitsize, bitpos;
6771 tree offset;
6772 enum machine_mode mode;
6773 int unsignedp, volatilep;
6774 tree base = TREE_OPERAND (op0, 0);
6775 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6776 &mode, &unsignedp, &volatilep, false);
6777 /* If the reference was to a (constant) zero offset, we can use
6778 the address of the base if it has the same base type
6779 as the result type. */
6780 if (! offset && bitpos == 0
6781 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6782 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6783 return fold_convert (type, build_fold_addr_expr (base));
6784 }
6785
6786 if (TREE_CODE (op0) == MODIFY_EXPR
6787 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6788 /* Detect assigning a bitfield. */
6789 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6790 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6791 {
6792 /* Don't leave an assignment inside a conversion
6793 unless assigning a bitfield. */
6794 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6795 /* First do the assignment, then return converted constant. */
6796 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6797 TREE_NO_WARNING (tem) = 1;
6798 TREE_USED (tem) = 1;
6799 return tem;
6800 }
6801
6802 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6803 constant (if x has signed type, the sign bit cannot be set
6804 in c). This folds extension into the BIT_AND_EXPR. */
6805 if (INTEGRAL_TYPE_P (type)
6806 && TREE_CODE (type) != BOOLEAN_TYPE
6807 && TREE_CODE (op0) == BIT_AND_EXPR
6808 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6809 {
6810 tree and = op0;
6811 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6812 int change = 0;
6813
6814 if (TYPE_UNSIGNED (TREE_TYPE (and))
6815 || (TYPE_PRECISION (type)
6816 <= TYPE_PRECISION (TREE_TYPE (and))))
6817 change = 1;
6818 else if (TYPE_PRECISION (TREE_TYPE (and1))
6819 <= HOST_BITS_PER_WIDE_INT
6820 && host_integerp (and1, 1))
6821 {
6822 unsigned HOST_WIDE_INT cst;
6823
6824 cst = tree_low_cst (and1, 1);
6825 cst &= (HOST_WIDE_INT) -1
6826 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6827 change = (cst == 0);
6828 #ifdef LOAD_EXTEND_OP
6829 if (change
6830 && !flag_syntax_only
6831 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6832 == ZERO_EXTEND))
6833 {
6834 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6835 and0 = fold_convert (uns, and0);
6836 and1 = fold_convert (uns, and1);
6837 }
6838 #endif
6839 }
6840 if (change)
6841 {
6842 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6843 TREE_INT_CST_HIGH (and1));
6844 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6845 TREE_CONSTANT_OVERFLOW (and1));
6846 return fold_build2 (BIT_AND_EXPR, type,
6847 fold_convert (type, and0), tem);
6848 }
6849 }
6850
6851 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6852 T2 being pointers to types of the same size. */
6853 if (POINTER_TYPE_P (type)
6854 && BINARY_CLASS_P (arg0)
6855 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6856 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6857 {
6858 tree arg00 = TREE_OPERAND (arg0, 0);
6859 tree t0 = type;
6860 tree t1 = TREE_TYPE (arg00);
6861 tree tt0 = TREE_TYPE (t0);
6862 tree tt1 = TREE_TYPE (t1);
6863 tree s0 = TYPE_SIZE (tt0);
6864 tree s1 = TYPE_SIZE (tt1);
6865
6866 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6867 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6868 TREE_OPERAND (arg0, 1));
6869 }
6870
6871 tem = fold_convert_const (code, type, arg0);
6872 return tem ? tem : NULL_TREE;
6873
6874 case VIEW_CONVERT_EXPR:
6875 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6876 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6877 return NULL_TREE;
6878
6879 case NEGATE_EXPR:
6880 if (negate_expr_p (arg0))
6881 return fold_convert (type, negate_expr (arg0));
6882 /* Convert - (~A) to A + 1. */
6883 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6884 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6885 build_int_cst (type, 1));
6886 return NULL_TREE;
6887
6888 case ABS_EXPR:
6889 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6890 return fold_abs_const (arg0, type);
6891 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6892 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6893 /* Convert fabs((double)float) into (double)fabsf(float). */
6894 else if (TREE_CODE (arg0) == NOP_EXPR
6895 && TREE_CODE (type) == REAL_TYPE)
6896 {
6897 tree targ0 = strip_float_extensions (arg0);
6898 if (targ0 != arg0)
6899 return fold_convert (type, fold_build1 (ABS_EXPR,
6900 TREE_TYPE (targ0),
6901 targ0));
6902 }
6903 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6904 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6905 return arg0;
6906
6907 /* Strip sign ops from argument. */
6908 if (TREE_CODE (type) == REAL_TYPE)
6909 {
6910 tem = fold_strip_sign_ops (arg0);
6911 if (tem)
6912 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6913 }
6914 return NULL_TREE;
6915
6916 case CONJ_EXPR:
6917 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6918 return fold_convert (type, arg0);
6919 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6920 return build2 (COMPLEX_EXPR, type,
6921 TREE_OPERAND (arg0, 0),
6922 negate_expr (TREE_OPERAND (arg0, 1)));
6923 else if (TREE_CODE (arg0) == COMPLEX_CST)
6924 return build_complex (type, TREE_REALPART (arg0),
6925 negate_expr (TREE_IMAGPART (arg0)));
6926 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6927 return fold_build2 (TREE_CODE (arg0), type,
6928 fold_build1 (CONJ_EXPR, type,
6929 TREE_OPERAND (arg0, 0)),
6930 fold_build1 (CONJ_EXPR, type,
6931 TREE_OPERAND (arg0, 1)));
6932 else if (TREE_CODE (arg0) == CONJ_EXPR)
6933 return TREE_OPERAND (arg0, 0);
6934 return NULL_TREE;
6935
6936 case BIT_NOT_EXPR:
6937 if (TREE_CODE (arg0) == INTEGER_CST)
6938 return fold_not_const (arg0, type);
6939 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6940 return TREE_OPERAND (arg0, 0);
6941 /* Convert ~ (-A) to A - 1. */
6942 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6943 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6944 build_int_cst (type, 1));
6945 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
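      /* This follows from ~Y == -Y - 1 in two's complement: with Y == A - 1,
         ~(A - 1) == -(A - 1) - 1 == -A.  E.g. ~(5 - 1) == ~4 == -5.  */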
6946 else if (INTEGRAL_TYPE_P (type)
6947 && ((TREE_CODE (arg0) == MINUS_EXPR
6948 && integer_onep (TREE_OPERAND (arg0, 1)))
6949 || (TREE_CODE (arg0) == PLUS_EXPR
6950 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6951 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6952 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
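      /* Since ~Z == Z ^ -1, the NOT can be pushed onto either XOR operand.
         E.g. if X is the constant 5, ~(5 ^ Y) folds to (~5) ^ Y == -6 ^ Y.  */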
6953 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6954 && (tem = fold_unary (BIT_NOT_EXPR, type,
6955 fold_convert (type,
6956 TREE_OPERAND (arg0, 0)))))
6957 return fold_build2 (BIT_XOR_EXPR, type, tem,
6958 fold_convert (type, TREE_OPERAND (arg0, 1)));
6959 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6960 && (tem = fold_unary (BIT_NOT_EXPR, type,
6961 fold_convert (type,
6962 TREE_OPERAND (arg0, 1)))))
6963 return fold_build2 (BIT_XOR_EXPR, type,
6964 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6965
6966 return NULL_TREE;
6967
6968 case TRUTH_NOT_EXPR:
6969 /* The argument to invert_truthvalue must have Boolean type. */
6970 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6971 arg0 = fold_convert (boolean_type_node, arg0);
6972
6973 /* Note that the operand of this must be an int
6974 and its values must be 0 or 1.
6975 ("true" is a fixed value perhaps depending on the language,
6976 but we don't handle values other than 1 correctly yet.) */
6977 tem = invert_truthvalue (arg0);
6978 /* Avoid infinite recursion. */
6979 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6980 return NULL_TREE;
6981 return fold_convert (type, tem);
6982
6983 case REALPART_EXPR:
6984 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6985 return NULL_TREE;
6986 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6987 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6988 TREE_OPERAND (arg0, 1));
6989 else if (TREE_CODE (arg0) == COMPLEX_CST)
6990 return TREE_REALPART (arg0);
6991 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6992 return fold_build2 (TREE_CODE (arg0), type,
6993 fold_build1 (REALPART_EXPR, type,
6994 TREE_OPERAND (arg0, 0)),
6995 fold_build1 (REALPART_EXPR, type,
6996 TREE_OPERAND (arg0, 1)));
6997 return NULL_TREE;
6998
6999 case IMAGPART_EXPR:
7000 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7001 return fold_convert (type, integer_zero_node);
7002 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7003 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7004 TREE_OPERAND (arg0, 0));
7005 else if (TREE_CODE (arg0) == COMPLEX_CST)
7006 return TREE_IMAGPART (arg0);
7007 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7008 return fold_build2 (TREE_CODE (arg0), type,
7009 fold_build1 (IMAGPART_EXPR, type,
7010 TREE_OPERAND (arg0, 0)),
7011 fold_build1 (IMAGPART_EXPR, type,
7012 TREE_OPERAND (arg0, 1)));
7013 return NULL_TREE;
7014
7015 default:
7016 return NULL_TREE;
7017 } /* switch (code) */
7018 }
7019
7020 /* Fold a binary expression of code CODE and type TYPE with operands
7021 OP0 and OP1. Return the folded expression if folding is
7022 successful. Otherwise, return NULL_TREE. */
7023
7024 tree
7025 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7026 {
7027 tree t1 = NULL_TREE;
7028 tree tem;
7029 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7030 enum tree_code_class kind = TREE_CODE_CLASS (code);
7031
7032 /* WINS will be nonzero when the switch is done
7033 if all operands are constant. */
7034 int wins = 1;
7035
7036 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7037 && TREE_CODE_LENGTH (code) == 2);
7038
7039 arg0 = op0;
7040 arg1 = op1;
7041
7042 if (arg0)
7043 {
7044 tree subop;
7045
7046 /* Strip any conversions that don't change the mode. This is
7047 safe for every expression, except for a comparison expression
7048 because its signedness is derived from its operands. So, in
7049 the latter case, only strip conversions that don't change the
7050 signedness.
7051
7052 Note that this is done as an internal manipulation within the
7053 constant folder, in order to find the simplest representation
7054 of the arguments so that their form can be studied. In any
 7055 	 case, the appropriate type conversions should be put back in
7056 the tree that will get out of the constant folder. */
7057 if (kind == tcc_comparison)
7058 STRIP_SIGN_NOPS (arg0);
7059 else
7060 STRIP_NOPS (arg0);
7061
7062 if (TREE_CODE (arg0) == COMPLEX_CST)
7063 subop = TREE_REALPART (arg0);
7064 else
7065 subop = arg0;
7066
7067 if (TREE_CODE (subop) != INTEGER_CST
7068 && TREE_CODE (subop) != REAL_CST)
7069 /* Note that TREE_CONSTANT isn't enough:
7070 static var addresses are constant but we can't
7071 do arithmetic on them. */
7072 wins = 0;
7073 }
7074
7075 if (arg1)
7076 {
7077 tree subop;
7078
7079 /* Strip any conversions that don't change the mode. This is
7080 safe for every expression, except for a comparison expression
7081 because its signedness is derived from its operands. So, in
7082 the latter case, only strip conversions that don't change the
7083 signedness.
7084
7085 Note that this is done as an internal manipulation within the
7086 constant folder, in order to find the simplest representation
7087 of the arguments so that their form can be studied. In any
7088 cases, the appropriate type conversions should be put back in
7089 the tree that will get out of the constant folder. */
7090 if (kind == tcc_comparison)
7091 STRIP_SIGN_NOPS (arg1);
7092 else
7093 STRIP_NOPS (arg1);
7094
7095 if (TREE_CODE (arg1) == COMPLEX_CST)
7096 subop = TREE_REALPART (arg1);
7097 else
7098 subop = arg1;
7099
7100 if (TREE_CODE (subop) != INTEGER_CST
7101 && TREE_CODE (subop) != REAL_CST)
7102 /* Note that TREE_CONSTANT isn't enough:
7103 static var addresses are constant but we can't
7104 do arithmetic on them. */
7105 wins = 0;
7106 }
7107
7108 /* If this is a commutative operation, and ARG0 is a constant, move it
7109 to ARG1 to reduce the number of tests below. */
7110 if (commutative_tree_code (code)
7111 && tree_swap_operands_p (arg0, arg1, true))
7112 return fold_build2 (code, type, op1, op0);
7113
7114 /* Now WINS is set as described above,
7115 ARG0 is the first operand of EXPR,
 7116 	 and ARG1 is the second operand.
7117
7118 First check for cases where an arithmetic operation is applied to a
7119 compound, conditional, or comparison operation. Push the arithmetic
7120 operation inside the compound or conditional to see if any folding
7121 can then be done. Convert comparison to conditional for this purpose.
 7122 	 This also optimizes non-constant cases that used to be done in
7123 expand_expr.
7124
 7125 	 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
 7126 	 one of the operands is a comparison and the other is a comparison, a
7127 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7128 code below would make the expression more complex. Change it to a
7129 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7130 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
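  /* For example, (a < b) & (c < d) becomes (a < b) TRUTH_AND (c < d),
     and (a < b) == (c < d) becomes the inversion of
     (a < b) TRUTH_XOR (c < d), since both operands are known to be 0 or 1.  */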
7131
7132 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7133 || code == EQ_EXPR || code == NE_EXPR)
7134 && ((truth_value_p (TREE_CODE (arg0))
7135 && (truth_value_p (TREE_CODE (arg1))
7136 || (TREE_CODE (arg1) == BIT_AND_EXPR
7137 && integer_onep (TREE_OPERAND (arg1, 1)))))
7138 || (truth_value_p (TREE_CODE (arg1))
7139 && (truth_value_p (TREE_CODE (arg0))
7140 || (TREE_CODE (arg0) == BIT_AND_EXPR
7141 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7142 {
7143 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7144 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7145 : TRUTH_XOR_EXPR,
7146 boolean_type_node,
7147 fold_convert (boolean_type_node, arg0),
7148 fold_convert (boolean_type_node, arg1));
7149
7150 if (code == EQ_EXPR)
7151 tem = invert_truthvalue (tem);
7152
7153 return fold_convert (type, tem);
7154 }
7155
7156 if (TREE_CODE_CLASS (code) == tcc_binary
7157 || TREE_CODE_CLASS (code) == tcc_comparison)
7158 {
7159 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7160 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7161 fold_build2 (code, type,
7162 TREE_OPERAND (arg0, 1), op1));
7163 if (TREE_CODE (arg1) == COMPOUND_EXPR
7164 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7165 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7166 fold_build2 (code, type,
7167 op0, TREE_OPERAND (arg1, 1)));
7168
7169 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7170 {
7171 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7172 arg0, arg1,
7173 /*cond_first_p=*/1);
7174 if (tem != NULL_TREE)
7175 return tem;
7176 }
7177
7178 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7179 {
7180 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7181 arg1, arg0,
7182 /*cond_first_p=*/0);
7183 if (tem != NULL_TREE)
7184 return tem;
7185 }
7186 }
7187
7188 switch (code)
7189 {
7190 case PLUS_EXPR:
7191 /* A + (-B) -> A - B */
7192 if (TREE_CODE (arg1) == NEGATE_EXPR)
7193 return fold_build2 (MINUS_EXPR, type,
7194 fold_convert (type, arg0),
7195 fold_convert (type, TREE_OPERAND (arg1, 0)));
7196 /* (-A) + B -> B - A */
7197 if (TREE_CODE (arg0) == NEGATE_EXPR
7198 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7199 return fold_build2 (MINUS_EXPR, type,
7200 fold_convert (type, arg1),
7201 fold_convert (type, TREE_OPERAND (arg0, 0)));
7202 /* Convert ~A + 1 to -A. */
7203 if (INTEGRAL_TYPE_P (type)
7204 && TREE_CODE (arg0) == BIT_NOT_EXPR
7205 && integer_onep (arg1))
7206 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7207
7208 if (! FLOAT_TYPE_P (type))
7209 {
7210 if (integer_zerop (arg1))
7211 return non_lvalue (fold_convert (type, arg0));
7212
7213 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7214 with a constant, and the two constants have no bits in common,
7215 we should treat this as a BIT_IOR_EXPR since this may produce more
7216 simplifications. */
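	  /* E.g. (X & 0xf0) + (Y & 0x0f): the masks share no bits, so no
	     carry can propagate and the sum equals (X & 0xf0) | (Y & 0x0f).  */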
7217 if (TREE_CODE (arg0) == BIT_AND_EXPR
7218 && TREE_CODE (arg1) == BIT_AND_EXPR
7219 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7220 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7221 && integer_zerop (const_binop (BIT_AND_EXPR,
7222 TREE_OPERAND (arg0, 1),
7223 TREE_OPERAND (arg1, 1), 0)))
7224 {
7225 code = BIT_IOR_EXPR;
7226 goto bit_ior;
7227 }
7228
7229 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7230 (plus (plus (mult) (mult)) (foo)) so that we can
7231 take advantage of the factoring cases below. */
7232 if (((TREE_CODE (arg0) == PLUS_EXPR
7233 || TREE_CODE (arg0) == MINUS_EXPR)
7234 && TREE_CODE (arg1) == MULT_EXPR)
7235 || ((TREE_CODE (arg1) == PLUS_EXPR
7236 || TREE_CODE (arg1) == MINUS_EXPR)
7237 && TREE_CODE (arg0) == MULT_EXPR))
7238 {
7239 tree parg0, parg1, parg, marg;
7240 enum tree_code pcode;
7241
7242 if (TREE_CODE (arg1) == MULT_EXPR)
7243 parg = arg0, marg = arg1;
7244 else
7245 parg = arg1, marg = arg0;
7246 pcode = TREE_CODE (parg);
7247 parg0 = TREE_OPERAND (parg, 0);
7248 parg1 = TREE_OPERAND (parg, 1);
7249 STRIP_NOPS (parg0);
7250 STRIP_NOPS (parg1);
7251
7252 if (TREE_CODE (parg0) == MULT_EXPR
7253 && TREE_CODE (parg1) != MULT_EXPR)
7254 return fold_build2 (pcode, type,
7255 fold_build2 (PLUS_EXPR, type,
7256 fold_convert (type, parg0),
7257 fold_convert (type, marg)),
7258 fold_convert (type, parg1));
7259 if (TREE_CODE (parg0) != MULT_EXPR
7260 && TREE_CODE (parg1) == MULT_EXPR)
7261 return fold_build2 (PLUS_EXPR, type,
7262 fold_convert (type, parg0),
7263 fold_build2 (pcode, type,
7264 fold_convert (type, marg),
7265 fold_convert (type,
7266 parg1)));
7267 }
7268
7269 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7270 {
7271 tree arg00, arg01, arg10, arg11;
7272 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7273
7274 /* (A * C) + (B * C) -> (A+B) * C.
7275 We are most concerned about the case where C is a constant,
7276 but other combinations show up during loop reduction. Since
7277 it is not difficult, try all four possibilities. */
7278
7279 arg00 = TREE_OPERAND (arg0, 0);
7280 arg01 = TREE_OPERAND (arg0, 1);
7281 arg10 = TREE_OPERAND (arg1, 0);
7282 arg11 = TREE_OPERAND (arg1, 1);
7283 same = NULL_TREE;
7284
7285 if (operand_equal_p (arg01, arg11, 0))
7286 same = arg01, alt0 = arg00, alt1 = arg10;
7287 else if (operand_equal_p (arg00, arg10, 0))
7288 same = arg00, alt0 = arg01, alt1 = arg11;
7289 else if (operand_equal_p (arg00, arg11, 0))
7290 same = arg00, alt0 = arg01, alt1 = arg10;
7291 else if (operand_equal_p (arg01, arg10, 0))
7292 same = arg01, alt0 = arg00, alt1 = arg11;
7293
7294 /* No identical multiplicands; see if we can find a common
7295 power-of-two factor in non-power-of-two multiplies. This
7296 can help in multi-dimensional array access. */
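	    /* E.g. I*12 + J*4 becomes (I*3 + J)*4, since 4 is a power of
	       two and divides 12 exactly.  */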
7297 else if (TREE_CODE (arg01) == INTEGER_CST
7298 && TREE_CODE (arg11) == INTEGER_CST
7299 && TREE_INT_CST_HIGH (arg01) == 0
7300 && TREE_INT_CST_HIGH (arg11) == 0)
7301 {
7302 HOST_WIDE_INT int01, int11, tmp;
7303 int01 = TREE_INT_CST_LOW (arg01);
7304 int11 = TREE_INT_CST_LOW (arg11);
7305
7306 /* Move min of absolute values to int11. */
7307 if ((int01 >= 0 ? int01 : -int01)
7308 < (int11 >= 0 ? int11 : -int11))
7309 {
7310 tmp = int01, int01 = int11, int11 = tmp;
7311 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7312 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7313 }
7314
7315 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7316 {
7317 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7318 build_int_cst (NULL_TREE,
7319 int01 / int11));
7320 alt1 = arg10;
7321 same = arg11;
7322 }
7323 }
7324
7325 if (same)
7326 return fold_build2 (MULT_EXPR, type,
7327 fold_build2 (PLUS_EXPR, type,
7328 fold_convert (type, alt0),
7329 fold_convert (type, alt1)),
7330 fold_convert (type, same));
7331 }
7332
 7333 	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
 7334 	     of the array.  The loop optimizer sometimes produces this kind
 7335 	     of expression.  */
7336 if (TREE_CODE (arg0) == ADDR_EXPR)
7337 {
7338 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7339 if (tem)
7340 return fold_convert (type, tem);
7341 }
7342 else if (TREE_CODE (arg1) == ADDR_EXPR)
7343 {
7344 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7345 if (tem)
7346 return fold_convert (type, tem);
7347 }
7348 }
7349 else
7350 {
7351 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7352 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7353 return non_lvalue (fold_convert (type, arg0));
7354
7355 /* Likewise if the operands are reversed. */
7356 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7357 return non_lvalue (fold_convert (type, arg1));
7358
7359 /* Convert X + -C into X - C. */
7360 if (TREE_CODE (arg1) == REAL_CST
7361 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7362 {
7363 tem = fold_negate_const (arg1, type);
7364 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7365 return fold_build2 (MINUS_EXPR, type,
7366 fold_convert (type, arg0),
7367 fold_convert (type, tem));
7368 }
7369
7370 if (flag_unsafe_math_optimizations
7371 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7372 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7373 && (tem = distribute_real_division (code, type, arg0, arg1)))
7374 return tem;
7375
7376 /* Convert x+x into x*2.0. */
7377 if (operand_equal_p (arg0, arg1, 0)
7378 && SCALAR_FLOAT_TYPE_P (type))
7379 return fold_build2 (MULT_EXPR, type, arg0,
7380 build_real (type, dconst2));
7381
7382 /* Convert x*c+x into x*(c+1). */
7383 if (flag_unsafe_math_optimizations
7384 && TREE_CODE (arg0) == MULT_EXPR
7385 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7386 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7387 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7388 {
7389 REAL_VALUE_TYPE c;
7390
7391 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7392 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7393 return fold_build2 (MULT_EXPR, type, arg1,
7394 build_real (type, c));
7395 }
7396
7397 /* Convert x+x*c into x*(c+1). */
7398 if (flag_unsafe_math_optimizations
7399 && TREE_CODE (arg1) == MULT_EXPR
7400 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7401 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7402 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7403 {
7404 REAL_VALUE_TYPE c;
7405
7406 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7407 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7408 return fold_build2 (MULT_EXPR, type, arg0,
7409 build_real (type, c));
7410 }
7411
7412 /* Convert x*c1+x*c2 into x*(c1+c2). */
7413 if (flag_unsafe_math_optimizations
7414 && TREE_CODE (arg0) == MULT_EXPR
7415 && TREE_CODE (arg1) == MULT_EXPR
7416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7417 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7418 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7419 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7420 && operand_equal_p (TREE_OPERAND (arg0, 0),
7421 TREE_OPERAND (arg1, 0), 0))
7422 {
7423 REAL_VALUE_TYPE c1, c2;
7424
7425 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7426 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7427 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7428 return fold_build2 (MULT_EXPR, type,
7429 TREE_OPERAND (arg0, 0),
7430 build_real (type, c1));
7431 }
7432 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7433 if (flag_unsafe_math_optimizations
7434 && TREE_CODE (arg1) == PLUS_EXPR
7435 && TREE_CODE (arg0) != MULT_EXPR)
7436 {
7437 tree tree10 = TREE_OPERAND (arg1, 0);
7438 tree tree11 = TREE_OPERAND (arg1, 1);
7439 if (TREE_CODE (tree11) == MULT_EXPR
7440 && TREE_CODE (tree10) == MULT_EXPR)
7441 {
7442 tree tree0;
7443 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7444 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7445 }
7446 }
 7447 	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
7448 if (flag_unsafe_math_optimizations
7449 && TREE_CODE (arg0) == PLUS_EXPR
7450 && TREE_CODE (arg1) != MULT_EXPR)
7451 {
7452 tree tree00 = TREE_OPERAND (arg0, 0);
7453 tree tree01 = TREE_OPERAND (arg0, 1);
7454 if (TREE_CODE (tree01) == MULT_EXPR
7455 && TREE_CODE (tree00) == MULT_EXPR)
7456 {
7457 tree tree0;
7458 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7459 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7460 }
7461 }
7462 }
7463
7464 bit_rotate:
7465 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7466 is a rotate of A by C1 bits. */
7467 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7468 is a rotate of A by B bits. */
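      /* E.g. for unsigned 32-bit X, (X << 3) + (X >> 29) is X rotated
	 left by 3, and (X << B) + (X >> (32 - B)) rotates left by B.  */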
7469 {
7470 enum tree_code code0, code1;
7471 code0 = TREE_CODE (arg0);
7472 code1 = TREE_CODE (arg1);
7473 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7474 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7475 && operand_equal_p (TREE_OPERAND (arg0, 0),
7476 TREE_OPERAND (arg1, 0), 0)
7477 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7478 {
7479 tree tree01, tree11;
7480 enum tree_code code01, code11;
7481
7482 tree01 = TREE_OPERAND (arg0, 1);
7483 tree11 = TREE_OPERAND (arg1, 1);
7484 STRIP_NOPS (tree01);
7485 STRIP_NOPS (tree11);
7486 code01 = TREE_CODE (tree01);
7487 code11 = TREE_CODE (tree11);
7488 if (code01 == INTEGER_CST
7489 && code11 == INTEGER_CST
7490 && TREE_INT_CST_HIGH (tree01) == 0
7491 && TREE_INT_CST_HIGH (tree11) == 0
7492 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7493 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7494 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7495 code0 == LSHIFT_EXPR ? tree01 : tree11);
7496 else if (code11 == MINUS_EXPR)
7497 {
7498 tree tree110, tree111;
7499 tree110 = TREE_OPERAND (tree11, 0);
7500 tree111 = TREE_OPERAND (tree11, 1);
7501 STRIP_NOPS (tree110);
7502 STRIP_NOPS (tree111);
7503 if (TREE_CODE (tree110) == INTEGER_CST
7504 && 0 == compare_tree_int (tree110,
7505 TYPE_PRECISION
7506 (TREE_TYPE (TREE_OPERAND
7507 (arg0, 0))))
7508 && operand_equal_p (tree01, tree111, 0))
7509 return build2 ((code0 == LSHIFT_EXPR
7510 ? LROTATE_EXPR
7511 : RROTATE_EXPR),
7512 type, TREE_OPERAND (arg0, 0), tree01);
7513 }
7514 else if (code01 == MINUS_EXPR)
7515 {
7516 tree tree010, tree011;
7517 tree010 = TREE_OPERAND (tree01, 0);
7518 tree011 = TREE_OPERAND (tree01, 1);
7519 STRIP_NOPS (tree010);
7520 STRIP_NOPS (tree011);
7521 if (TREE_CODE (tree010) == INTEGER_CST
7522 && 0 == compare_tree_int (tree010,
7523 TYPE_PRECISION
7524 (TREE_TYPE (TREE_OPERAND
7525 (arg0, 0))))
7526 && operand_equal_p (tree11, tree011, 0))
7527 return build2 ((code0 != LSHIFT_EXPR
7528 ? LROTATE_EXPR
7529 : RROTATE_EXPR),
7530 type, TREE_OPERAND (arg0, 0), tree11);
7531 }
7532 }
7533 }
7534
7535 associate:
 7536       /* In most languages, we can't associate operations on floats through
7537 parentheses. Rather than remember where the parentheses were, we
7538 don't associate floats at all, unless the user has specified
7539 -funsafe-math-optimizations. */
7540
7541 if (! wins
7542 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7543 {
7544 tree var0, con0, lit0, minus_lit0;
7545 tree var1, con1, lit1, minus_lit1;
7546
7547 /* Split both trees into variables, constants, and literals. Then
7548 associate each group together, the constants with literals,
7549 then the result with variables. This increases the chances of
7550 literals being recombined later and of generating relocatable
7551 expressions for the sum of a constant and literal. */
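	  /* E.g. in (X + 1) + (Y + 2), the variables X and Y are grouped as
	     X + Y and the literals fold to 3, giving (X + Y) + 3.  */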
7552 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7553 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7554 code == MINUS_EXPR);
7555
7556 /* Only do something if we found more than two objects. Otherwise,
7557 nothing has changed and we risk infinite recursion. */
7558 if (2 < ((var0 != 0) + (var1 != 0)
7559 + (con0 != 0) + (con1 != 0)
7560 + (lit0 != 0) + (lit1 != 0)
7561 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7562 {
7563 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7564 if (code == MINUS_EXPR)
7565 code = PLUS_EXPR;
7566
7567 var0 = associate_trees (var0, var1, code, type);
7568 con0 = associate_trees (con0, con1, code, type);
7569 lit0 = associate_trees (lit0, lit1, code, type);
7570 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7571
7572 /* Preserve the MINUS_EXPR if the negative part of the literal is
7573 greater than the positive part. Otherwise, the multiplicative
 7574 	     folding code (i.e. extract_muldiv) may be fooled when
 7575 	     unsigned constants are subtracted, as in the following
7576 example: ((X*2 + 4) - 8U)/2. */
7577 if (minus_lit0 && lit0)
7578 {
7579 if (TREE_CODE (lit0) == INTEGER_CST
7580 && TREE_CODE (minus_lit0) == INTEGER_CST
7581 && tree_int_cst_lt (lit0, minus_lit0))
7582 {
7583 minus_lit0 = associate_trees (minus_lit0, lit0,
7584 MINUS_EXPR, type);
7585 lit0 = 0;
7586 }
7587 else
7588 {
7589 lit0 = associate_trees (lit0, minus_lit0,
7590 MINUS_EXPR, type);
7591 minus_lit0 = 0;
7592 }
7593 }
7594 if (minus_lit0)
7595 {
7596 if (con0 == 0)
7597 return fold_convert (type,
7598 associate_trees (var0, minus_lit0,
7599 MINUS_EXPR, type));
7600 else
7601 {
7602 con0 = associate_trees (con0, minus_lit0,
7603 MINUS_EXPR, type);
7604 return fold_convert (type,
7605 associate_trees (var0, con0,
7606 PLUS_EXPR, type));
7607 }
7608 }
7609
7610 con0 = associate_trees (con0, lit0, code, type);
7611 return fold_convert (type, associate_trees (var0, con0,
7612 code, type));
7613 }
7614 }
7615
7616 binary:
7617 if (wins)
7618 t1 = const_binop (code, arg0, arg1, 0);
7619 if (t1 != NULL_TREE)
7620 {
7621 /* The return value should always have
7622 the same type as the original expression. */
7623 if (TREE_TYPE (t1) != type)
7624 t1 = fold_convert (type, t1);
7625
7626 return t1;
7627 }
7628 return NULL_TREE;
7629
7630 case MINUS_EXPR:
7631 /* A - (-B) -> A + B */
7632 if (TREE_CODE (arg1) == NEGATE_EXPR)
7633 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7634 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7635 if (TREE_CODE (arg0) == NEGATE_EXPR
7636 && (FLOAT_TYPE_P (type)
7637 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7638 && negate_expr_p (arg1)
7639 && reorder_operands_p (arg0, arg1))
7640 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7641 TREE_OPERAND (arg0, 0));
7642 /* Convert -A - 1 to ~A. */
7643 if (INTEGRAL_TYPE_P (type)
7644 && TREE_CODE (arg0) == NEGATE_EXPR
7645 && integer_onep (arg1))
7646 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7647
7648 /* Convert -1 - A to ~A. */
7649 if (INTEGRAL_TYPE_P (type)
7650 && integer_all_onesp (arg0))
7651 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7652
7653 if (! FLOAT_TYPE_P (type))
7654 {
7655 if (! wins && integer_zerop (arg0))
7656 return negate_expr (fold_convert (type, arg1));
7657 if (integer_zerop (arg1))
7658 return non_lvalue (fold_convert (type, arg0));
7659
7660 /* Fold A - (A & B) into ~B & A. */
7661 if (!TREE_SIDE_EFFECTS (arg0)
7662 && TREE_CODE (arg1) == BIT_AND_EXPR)
7663 {
7664 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7665 return fold_build2 (BIT_AND_EXPR, type,
7666 fold_build1 (BIT_NOT_EXPR, type,
7667 TREE_OPERAND (arg1, 0)),
7668 arg0);
7669 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7670 return fold_build2 (BIT_AND_EXPR, type,
7671 fold_build1 (BIT_NOT_EXPR, type,
7672 TREE_OPERAND (arg1, 1)),
7673 arg0);
7674 }
7675
7676 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7677 any power of 2 minus 1. */
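	  /* E.g. with B == 7 and A == 13: (13 & ~7) - (13 & 7) == 8 - 5 == 3,
	     and likewise (13 ^ 7) - 7 == 10 - 7 == 3.  */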
7678 if (TREE_CODE (arg0) == BIT_AND_EXPR
7679 && TREE_CODE (arg1) == BIT_AND_EXPR
7680 && operand_equal_p (TREE_OPERAND (arg0, 0),
7681 TREE_OPERAND (arg1, 0), 0))
7682 {
7683 tree mask0 = TREE_OPERAND (arg0, 1);
7684 tree mask1 = TREE_OPERAND (arg1, 1);
7685 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7686
7687 if (operand_equal_p (tem, mask1, 0))
7688 {
7689 tem = fold_build2 (BIT_XOR_EXPR, type,
7690 TREE_OPERAND (arg0, 0), mask1);
7691 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7692 }
7693 }
7694 }
7695
7696 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7697 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7698 return non_lvalue (fold_convert (type, arg0));
7699
7700 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7701 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7702 (-ARG1 + ARG0) reduces to -ARG1. */
7703 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7704 return negate_expr (fold_convert (type, arg1));
7705
7706 /* Fold &x - &x. This can happen from &x.foo - &x.
7707 This is unsafe for certain floats even in non-IEEE formats.
 7708 	 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7709 Also note that operand_equal_p is always false if an operand
7710 is volatile. */
7711
7712 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7713 && operand_equal_p (arg0, arg1, 0))
7714 return fold_convert (type, integer_zero_node);
7715
7716 /* A - B -> A + (-B) if B is easily negatable. */
7717 if (!wins && negate_expr_p (arg1)
7718 && ((FLOAT_TYPE_P (type)
7719 /* Avoid this transformation if B is a positive REAL_CST. */
7720 && (TREE_CODE (arg1) != REAL_CST
7721 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7722 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7723 return fold_build2 (PLUS_EXPR, type,
7724 fold_convert (type, arg0),
7725 fold_convert (type, negate_expr (arg1)));
7726
7727 /* Try folding difference of addresses. */
7728 {
7729 HOST_WIDE_INT diff;
7730
7731 if ((TREE_CODE (arg0) == ADDR_EXPR
7732 || TREE_CODE (arg1) == ADDR_EXPR)
7733 && ptr_difference_const (arg0, arg1, &diff))
7734 return build_int_cst_type (type, diff);
7735 }
7736
7737 /* Fold &a[i] - &a[j] to i-j. */
7738 if (TREE_CODE (arg0) == ADDR_EXPR
7739 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7740 && TREE_CODE (arg1) == ADDR_EXPR
7741 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7742 {
7743 tree aref0 = TREE_OPERAND (arg0, 0);
7744 tree aref1 = TREE_OPERAND (arg1, 0);
7745 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7746 TREE_OPERAND (aref1, 0), 0))
7747 {
7748 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7749 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7750 tree esz = array_ref_element_size (aref0);
7751 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7752 return fold_build2 (MULT_EXPR, type, diff,
7753 fold_convert (type, esz));
7754
7755 }
7756 }
7757
 7758       /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
 7759 	 of the array.  The loop optimizer sometimes produces this kind
 7760 	 of expression.  */
7761 if (TREE_CODE (arg0) == ADDR_EXPR)
7762 {
7763 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7764 if (tem)
7765 return fold_convert (type, tem);
7766 }
7767
7768 if (flag_unsafe_math_optimizations
7769 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7770 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7771 && (tem = distribute_real_division (code, type, arg0, arg1)))
7772 return tem;
7773
7774 if (TREE_CODE (arg0) == MULT_EXPR
7775 && TREE_CODE (arg1) == MULT_EXPR
7776 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7777 {
7778 /* (A * C) - (B * C) -> (A-B) * C. */
7779 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7780 TREE_OPERAND (arg1, 1), 0))
7781 return fold_build2 (MULT_EXPR, type,
7782 fold_build2 (MINUS_EXPR, type,
7783 TREE_OPERAND (arg0, 0),
7784 TREE_OPERAND (arg1, 0)),
7785 TREE_OPERAND (arg0, 1));
7786 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7787 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7788 TREE_OPERAND (arg1, 0), 0))
7789 return fold_build2 (MULT_EXPR, type,
7790 TREE_OPERAND (arg0, 0),
7791 fold_build2 (MINUS_EXPR, type,
7792 TREE_OPERAND (arg0, 1),
7793 TREE_OPERAND (arg1, 1)));
7794 }
7795
7796 goto associate;
7797
7798 case MULT_EXPR:
7799 /* (-A) * (-B) -> A * B */
7800 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7801 return fold_build2 (MULT_EXPR, type,
7802 TREE_OPERAND (arg0, 0),
7803 negate_expr (arg1));
7804 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7805 return fold_build2 (MULT_EXPR, type,
7806 negate_expr (arg0),
7807 TREE_OPERAND (arg1, 0));
7808
7809 if (! FLOAT_TYPE_P (type))
7810 {
7811 if (integer_zerop (arg1))
7812 return omit_one_operand (type, arg1, arg0);
7813 if (integer_onep (arg1))
7814 return non_lvalue (fold_convert (type, arg0));
7815 /* Transform x * -1 into -x. */
7816 if (integer_all_onesp (arg1))
7817 return fold_convert (type, negate_expr (arg0));
7818
7819 /* (a * (1 << b)) is (a << b) */
7820 if (TREE_CODE (arg1) == LSHIFT_EXPR
7821 && integer_onep (TREE_OPERAND (arg1, 0)))
7822 return fold_build2 (LSHIFT_EXPR, type, arg0,
7823 TREE_OPERAND (arg1, 1));
7824 if (TREE_CODE (arg0) == LSHIFT_EXPR
7825 && integer_onep (TREE_OPERAND (arg0, 0)))
7826 return fold_build2 (LSHIFT_EXPR, type, arg1,
7827 TREE_OPERAND (arg0, 1));
7828
7829 if (TREE_CODE (arg1) == INTEGER_CST
7830 && 0 != (tem = extract_muldiv (op0,
7831 fold_convert (type, arg1),
7832 code, NULL_TREE)))
7833 return fold_convert (type, tem);
7834
7835 }
7836 else
7837 {
7838 /* Maybe fold x * 0 to 0. The expressions aren't the same
7839 when x is NaN, since x * 0 is also NaN. Nor are they the
7840 same in modes with signed zeros, since multiplying a
7841 negative value by 0 gives -0, not +0. */
7842 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7843 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7844 && real_zerop (arg1))
7845 return omit_one_operand (type, arg1, arg0);
7846 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7847 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7848 && real_onep (arg1))
7849 return non_lvalue (fold_convert (type, arg0));
7850
7851 /* Transform x * -1.0 into -x. */
7852 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7853 && real_minus_onep (arg1))
7854 return fold_convert (type, negate_expr (arg0));
7855
7856 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7857 if (flag_unsafe_math_optimizations
7858 && TREE_CODE (arg0) == RDIV_EXPR
7859 && TREE_CODE (arg1) == REAL_CST
7860 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7861 {
7862 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7863 arg1, 0);
7864 if (tem)
7865 return fold_build2 (RDIV_EXPR, type, tem,
7866 TREE_OPERAND (arg0, 1));
7867 }
7868
7869 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7870 if (operand_equal_p (arg0, arg1, 0))
7871 {
7872 tree tem = fold_strip_sign_ops (arg0);
7873 if (tem != NULL_TREE)
7874 {
7875 tem = fold_convert (type, tem);
7876 return fold_build2 (MULT_EXPR, type, tem, tem);
7877 }
7878 }
7879
7880 if (flag_unsafe_math_optimizations)
7881 {
7882 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7883 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7884
7885 /* Optimizations of root(...)*root(...). */
7886 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7887 {
7888 tree rootfn, arg, arglist;
7889 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7890 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7891
7892 /* Optimize sqrt(x)*sqrt(x) as x. */
7893 if (BUILTIN_SQRT_P (fcode0)
7894 && operand_equal_p (arg00, arg10, 0)
7895 && ! HONOR_SNANS (TYPE_MODE (type)))
7896 return arg00;
7897
7898 /* Optimize root(x)*root(y) as root(x*y). */
7899 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7900 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7901 arglist = build_tree_list (NULL_TREE, arg);
7902 return build_function_call_expr (rootfn, arglist);
7903 }
7904
7905 /* Optimize expN(x)*expN(y) as expN(x+y). */
7906 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7907 {
7908 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7909 tree arg = fold_build2 (PLUS_EXPR, type,
7910 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7911 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7912 tree arglist = build_tree_list (NULL_TREE, arg);
7913 return build_function_call_expr (expfn, arglist);
7914 }
7915
7916 /* Optimizations of pow(...)*pow(...). */
7917 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7918 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7919 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7920 {
7921 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7922 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7923 1)));
7924 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7925 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7926 1)));
7927
7928 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7929 if (operand_equal_p (arg01, arg11, 0))
7930 {
7931 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7932 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7933 tree arglist = tree_cons (NULL_TREE, arg,
7934 build_tree_list (NULL_TREE,
7935 arg01));
7936 return build_function_call_expr (powfn, arglist);
7937 }
7938
7939 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7940 if (operand_equal_p (arg00, arg10, 0))
7941 {
7942 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7943 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7944 tree arglist = tree_cons (NULL_TREE, arg00,
7945 build_tree_list (NULL_TREE,
7946 arg));
7947 return build_function_call_expr (powfn, arglist);
7948 }
7949 }
7950
7951 /* Optimize tan(x)*cos(x) as sin(x). */
7952 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7953 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7954 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7955 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7956 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7957 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7958 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7959 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7960 {
7961 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7962
7963 if (sinfn != NULL_TREE)
7964 return build_function_call_expr (sinfn,
7965 TREE_OPERAND (arg0, 1));
7966 }
7967
7968 /* Optimize x*pow(x,c) as pow(x,c+1). */
7969 if (fcode1 == BUILT_IN_POW
7970 || fcode1 == BUILT_IN_POWF
7971 || fcode1 == BUILT_IN_POWL)
7972 {
7973 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7974 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7975 1)));
7976 if (TREE_CODE (arg11) == REAL_CST
7977 && ! TREE_CONSTANT_OVERFLOW (arg11)
7978 && operand_equal_p (arg0, arg10, 0))
7979 {
7980 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7981 REAL_VALUE_TYPE c;
7982 tree arg, arglist;
7983
7984 c = TREE_REAL_CST (arg11);
7985 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7986 arg = build_real (type, c);
7987 arglist = build_tree_list (NULL_TREE, arg);
7988 arglist = tree_cons (NULL_TREE, arg0, arglist);
7989 return build_function_call_expr (powfn, arglist);
7990 }
7991 }
7992
7993 /* Optimize pow(x,c)*x as pow(x,c+1). */
7994 if (fcode0 == BUILT_IN_POW
7995 || fcode0 == BUILT_IN_POWF
7996 || fcode0 == BUILT_IN_POWL)
7997 {
7998 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7999 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8000 1)));
8001 if (TREE_CODE (arg01) == REAL_CST
8002 && ! TREE_CONSTANT_OVERFLOW (arg01)
8003 && operand_equal_p (arg1, arg00, 0))
8004 {
8005 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8006 REAL_VALUE_TYPE c;
8007 tree arg, arglist;
8008
8009 c = TREE_REAL_CST (arg01);
8010 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8011 arg = build_real (type, c);
8012 arglist = build_tree_list (NULL_TREE, arg);
8013 arglist = tree_cons (NULL_TREE, arg1, arglist);
8014 return build_function_call_expr (powfn, arglist);
8015 }
8016 }
8017
8018 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8019 if (! optimize_size
8020 && operand_equal_p (arg0, arg1, 0))
8021 {
8022 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8023
8024 if (powfn)
8025 {
8026 tree arg = build_real (type, dconst2);
8027 tree arglist = build_tree_list (NULL_TREE, arg);
8028 arglist = tree_cons (NULL_TREE, arg0, arglist);
8029 return build_function_call_expr (powfn, arglist);
8030 }
8031 }
8032 }
8033 }
8034 goto associate;
8035
8036 case BIT_IOR_EXPR:
8037 bit_ior:
8038 if (integer_all_onesp (arg1))
8039 return omit_one_operand (type, arg1, arg0);
8040 if (integer_zerop (arg1))
8041 return non_lvalue (fold_convert (type, arg0));
8042 if (operand_equal_p (arg0, arg1, 0))
8043 return non_lvalue (fold_convert (type, arg0));
8044
8045 /* ~X | X is -1. */
8046 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8047 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8048 {
8049 t1 = build_int_cst (type, -1);
8050 t1 = force_fit_type (t1, 0, false, false);
8051 return omit_one_operand (type, t1, arg1);
8052 }
8053
8054 /* X | ~X is -1. */
8055 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8056 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8057 {
8058 t1 = build_int_cst (type, -1);
8059 t1 = force_fit_type (t1, 0, false, false);
8060 return omit_one_operand (type, t1, arg0);
8061 }
8062
8063 t1 = distribute_bit_expr (code, type, arg0, arg1);
8064 if (t1 != NULL_TREE)
8065 return t1;
8066
8067 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8068
8069 This results in more efficient code for machines without a NAND
8070 instruction. Combine will canonicalize to the first form
8071 which will allow use of NAND instructions provided by the
8072 backend if they exist. */
8073 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8074 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8075 {
8076 return fold_build1 (BIT_NOT_EXPR, type,
8077 build2 (BIT_AND_EXPR, type,
8078 TREE_OPERAND (arg0, 0),
8079 TREE_OPERAND (arg1, 0)));
8080 }
8081
8082 /* See if this can be simplified into a rotate first. If that
8083 is unsuccessful continue in the association code. */
8084 goto bit_rotate;
8085
8086 case BIT_XOR_EXPR:
8087 if (integer_zerop (arg1))
8088 return non_lvalue (fold_convert (type, arg0));
8089 if (integer_all_onesp (arg1))
8090 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8091 if (operand_equal_p (arg0, arg1, 0))
8092 return omit_one_operand (type, integer_zero_node, arg0);
8093
8094 /* ~X ^ X is -1. */
8095 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8096 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8097 {
8098 t1 = build_int_cst (type, -1);
8099 t1 = force_fit_type (t1, 0, false, false);
8100 return omit_one_operand (type, t1, arg1);
8101 }
8102
8103 /* X ^ ~X is -1. */
8104 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8105 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8106 {
8107 t1 = build_int_cst (type, -1);
8108 t1 = force_fit_type (t1, 0, false, false);
8109 return omit_one_operand (type, t1, arg0);
8110 }
8111
8112 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8113 with a constant, and the two constants have no bits in common,
8114 we should treat this as a BIT_IOR_EXPR since this may produce more
8115 simplifications. */
8116 if (TREE_CODE (arg0) == BIT_AND_EXPR
8117 && TREE_CODE (arg1) == BIT_AND_EXPR
8118 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8119 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8120 && integer_zerop (const_binop (BIT_AND_EXPR,
8121 TREE_OPERAND (arg0, 1),
8122 TREE_OPERAND (arg1, 1), 0)))
8123 {
8124 code = BIT_IOR_EXPR;
8125 goto bit_ior;
8126 }
8127
 8128       /* (X | Y) ^ X -> Y & ~X.  */
8129 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8130 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8131 {
8132 tree t2 = TREE_OPERAND (arg0, 1);
8133 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8134 arg1);
8135 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8136 fold_convert (type, t1));
8137 return t1;
8138 }
8139
 8140       /* (Y | X) ^ X -> Y & ~X.  */
8141 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8142 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8143 {
8144 tree t2 = TREE_OPERAND (arg0, 0);
8145 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8146 arg1);
8147 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8148 fold_convert (type, t1));
8149 return t1;
8150 }
8151
 8152       /* X ^ (X | Y) -> Y & ~X.  */
8153 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8154 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8155 {
8156 tree t2 = TREE_OPERAND (arg1, 1);
8157 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8158 arg0);
8159 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8160 fold_convert (type, t1));
8161 return t1;
8162 }
8163
 8164       /* X ^ (Y | X) -> Y & ~X.  */
8165 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8166 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8167 {
8168 tree t2 = TREE_OPERAND (arg1, 0);
8169 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8170 arg0);
8171 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8172 fold_convert (type, t1));
8173 return t1;
8174 }
8175
8176 /* Convert ~X ^ ~Y to X ^ Y. */
8177 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8178 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8179 return fold_build2 (code, type,
8180 fold_convert (type, TREE_OPERAND (arg0, 0)),
8181 fold_convert (type, TREE_OPERAND (arg1, 0)));
8182
8183 /* See if this can be simplified into a rotate first. If that
8184 is unsuccessful continue in the association code. */
8185 goto bit_rotate;
8186
8187 case BIT_AND_EXPR:
8188 if (integer_all_onesp (arg1))
8189 return non_lvalue (fold_convert (type, arg0));
8190 if (integer_zerop (arg1))
8191 return omit_one_operand (type, arg1, arg0);
8192 if (operand_equal_p (arg0, arg1, 0))
8193 return non_lvalue (fold_convert (type, arg0));
8194
8195 /* ~X & X is always zero. */
8196 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8197 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8198 return omit_one_operand (type, integer_zero_node, arg1);
8199
8200 /* X & ~X is always zero. */
8201 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8202 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8203 return omit_one_operand (type, integer_zero_node, arg0);
8204
8205 t1 = distribute_bit_expr (code, type, arg0, arg1);
8206 if (t1 != NULL_TREE)
8207 return t1;
8208 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8209 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8210 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8211 {
8212 unsigned int prec
8213 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8214
8215 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8216 && (~TREE_INT_CST_LOW (arg1)
8217 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8218 return fold_convert (type, TREE_OPERAND (arg0, 0));
8219 }
8220
8221 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8222
8223 This results in more efficient code for machines without a NOR
8224 instruction. Combine will canonicalize to the first form
8225 which will allow use of NOR instructions provided by the
8226 backend if they exist. */
8227 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8228 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8229 {
8230 return fold_build1 (BIT_NOT_EXPR, type,
8231 build2 (BIT_IOR_EXPR, type,
8232 TREE_OPERAND (arg0, 0),
8233 TREE_OPERAND (arg1, 0)));
8234 }
8235
8236 goto associate;
8237
8238 case RDIV_EXPR:
8239 /* Don't touch a floating-point divide by zero unless the mode
8240 of the constant can represent infinity. */
8241 if (TREE_CODE (arg1) == REAL_CST
8242 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8243 && real_zerop (arg1))
8244 return NULL_TREE;
8245
8246 /* (-A) / (-B) -> A / B */
8247 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8248 return fold_build2 (RDIV_EXPR, type,
8249 TREE_OPERAND (arg0, 0),
8250 negate_expr (arg1));
8251 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8252 return fold_build2 (RDIV_EXPR, type,
8253 negate_expr (arg0),
8254 TREE_OPERAND (arg1, 0));
8255
8256 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8257 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8258 && real_onep (arg1))
8259 return non_lvalue (fold_convert (type, arg0));
8260
8261 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8262 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8263 && real_minus_onep (arg1))
8264 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8265
8266 /* If ARG1 is a constant, we can convert this to a multiply by the
8267 reciprocal. This does not have the same rounding properties,
8268 so only do this if -funsafe-math-optimizations. We can actually
8269 always safely do it if ARG1 is a power of two, but it's hard to
8270 tell if it is or not in a portable manner. */
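      /* E.g. X / 2.0 can become X * 0.5, which is exact because 0.5 is a
	 power of two; X / 10.0 becomes X * 0.1 only under
	 -funsafe-math-optimizations, since 0.1 has no exact binary
	 representation.  */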
8271 if (TREE_CODE (arg1) == REAL_CST)
8272 {
8273 if (flag_unsafe_math_optimizations
8274 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8275 arg1, 0)))
8276 return fold_build2 (MULT_EXPR, type, arg0, tem);
8277 /* Find the reciprocal if optimizing and the result is exact. */
8278 if (optimize)
8279 {
8280 REAL_VALUE_TYPE r;
8281 r = TREE_REAL_CST (arg1);
 8282 	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8283 {
8284 tem = build_real (type, r);
8285 return fold_build2 (MULT_EXPR, type,
8286 fold_convert (type, arg0), tem);
8287 }
8288 }
8289 }
8290 /* Convert A/B/C to A/(B*C). */
8291 if (flag_unsafe_math_optimizations
8292 && TREE_CODE (arg0) == RDIV_EXPR)
8293 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8294 fold_build2 (MULT_EXPR, type,
8295 TREE_OPERAND (arg0, 1), arg1));
8296
8297 /* Convert A/(B/C) to (A/B)*C. */
8298 if (flag_unsafe_math_optimizations
8299 && TREE_CODE (arg1) == RDIV_EXPR)
8300 return fold_build2 (MULT_EXPR, type,
8301 fold_build2 (RDIV_EXPR, type, arg0,
8302 TREE_OPERAND (arg1, 0)),
8303 TREE_OPERAND (arg1, 1));
8304
8305 /* Convert C1/(X*C2) into (C1/C2)/X. */
8306 if (flag_unsafe_math_optimizations
8307 && TREE_CODE (arg1) == MULT_EXPR
8308 && TREE_CODE (arg0) == REAL_CST
8309 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8310 {
8311 tree tem = const_binop (RDIV_EXPR, arg0,
8312 TREE_OPERAND (arg1, 1), 0);
8313 if (tem)
8314 return fold_build2 (RDIV_EXPR, type, tem,
8315 TREE_OPERAND (arg1, 0));
8316 }
8317
8318 if (flag_unsafe_math_optimizations)
8319 {
8320 enum built_in_function fcode = builtin_mathfn_code (arg1);
8321 /* Optimize x/expN(y) into x*expN(-y). */
8322 if (BUILTIN_EXPONENT_P (fcode))
8323 {
8324 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8325 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8326 tree arglist = build_tree_list (NULL_TREE,
8327 fold_convert (type, arg));
8328 arg1 = build_function_call_expr (expfn, arglist);
8329 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8330 }
8331
8332 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8333 if (fcode == BUILT_IN_POW
8334 || fcode == BUILT_IN_POWF
8335 || fcode == BUILT_IN_POWL)
8336 {
8337 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8338 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8339 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8340 tree neg11 = fold_convert (type, negate_expr (arg11));
 8341 	      tree arglist = tree_cons (NULL_TREE, arg10,
8342 build_tree_list (NULL_TREE, neg11));
8343 arg1 = build_function_call_expr (powfn, arglist);
8344 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8345 }
8346 }
8347
8348 if (flag_unsafe_math_optimizations)
8349 {
8350 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8351 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8352
8353 /* Optimize sin(x)/cos(x) as tan(x). */
8354 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8355 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8356 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8357 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8358 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8359 {
8360 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8361
8362 if (tanfn != NULL_TREE)
8363 return build_function_call_expr (tanfn,
8364 TREE_OPERAND (arg0, 1));
8365 }
8366
8367 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8368 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8369 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8370 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8371 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8372 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8373 {
8374 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8375
8376 if (tanfn != NULL_TREE)
8377 {
8378 tree tmp = TREE_OPERAND (arg0, 1);
8379 tmp = build_function_call_expr (tanfn, tmp);
8380 return fold_build2 (RDIV_EXPR, type,
8381 build_real (type, dconst1), tmp);
8382 }
8383 }
8384
8385 /* Optimize pow(x,c)/x as pow(x,c-1). */
8386 if (fcode0 == BUILT_IN_POW
8387 || fcode0 == BUILT_IN_POWF
8388 || fcode0 == BUILT_IN_POWL)
8389 {
8390 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8391 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8392 if (TREE_CODE (arg01) == REAL_CST
8393 && ! TREE_CONSTANT_OVERFLOW (arg01)
8394 && operand_equal_p (arg1, arg00, 0))
8395 {
8396 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8397 REAL_VALUE_TYPE c;
8398 tree arg, arglist;
8399
8400 c = TREE_REAL_CST (arg01);
8401 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8402 arg = build_real (type, c);
8403 arglist = build_tree_list (NULL_TREE, arg);
8404 arglist = tree_cons (NULL_TREE, arg1, arglist);
8405 return build_function_call_expr (powfn, arglist);
8406 }
8407 }
8408 }
8409 goto binary;
8410
8411 case TRUNC_DIV_EXPR:
8412 case ROUND_DIV_EXPR:
8413 case FLOOR_DIV_EXPR:
8414 case CEIL_DIV_EXPR:
8415 case EXACT_DIV_EXPR:
8416 if (integer_onep (arg1))
8417 return non_lvalue (fold_convert (type, arg0));
8418 if (integer_zerop (arg1))
8419 return NULL_TREE;
8420 /* X / -1 is -X. */
8421 if (!TYPE_UNSIGNED (type)
8422 && TREE_CODE (arg1) == INTEGER_CST
8423 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8424 && TREE_INT_CST_HIGH (arg1) == -1)
8425 return fold_convert (type, negate_expr (arg0));
8426
8427 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8428 operation, EXACT_DIV_EXPR.
8429
8430 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
 8431 	 At one time others generated faster code; it's not clear if they do
 8432 	 after the last round of changes to the DIV code in expmed.c.  */
8433 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8434 && multiple_of_p (type, arg0, arg1))
8435 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8436
8437 if (TREE_CODE (arg1) == INTEGER_CST
8438 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8439 return fold_convert (type, tem);
8440
8441 goto binary;
8442
8443 case CEIL_MOD_EXPR:
8444 case FLOOR_MOD_EXPR:
8445 case ROUND_MOD_EXPR:
8446 case TRUNC_MOD_EXPR:
8447 /* X % 1 is always zero, but be sure to preserve any side
8448 effects in X. */
8449 if (integer_onep (arg1))
8450 return omit_one_operand (type, integer_zero_node, arg0);
8451
 8452       /* For X % 0, return X % 0 unchanged so that we can get the
8453 proper warnings and errors. */
8454 if (integer_zerop (arg1))
8455 return NULL_TREE;
8456
8457 /* 0 % X is always zero, but be sure to preserve any side
8458 effects in X. Place this after checking for X == 0. */
8459 if (integer_zerop (arg0))
8460 return omit_one_operand (type, integer_zero_node, arg1);
8461
8462 /* X % -1 is zero. */
8463 if (!TYPE_UNSIGNED (type)
8464 && TREE_CODE (arg1) == INTEGER_CST
8465 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8466 && TREE_INT_CST_HIGH (arg1) == -1)
8467 return omit_one_operand (type, integer_zero_node, arg0);
8468
8469 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
 8470 	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
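      /* E.g. X % 8 becomes X & 7 when X is unsigned or known nonnegative.  */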
8471 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8472 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8473 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8474 {
8475 unsigned HOST_WIDE_INT high, low;
8476 tree mask;
8477 int l;
8478
8479 l = tree_log2 (arg1);
8480 if (l >= HOST_BITS_PER_WIDE_INT)
8481 {
8482 high = ((unsigned HOST_WIDE_INT) 1
8483 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8484 low = -1;
8485 }
8486 else
8487 {
8488 high = 0;
8489 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8490 }
8491
8492 mask = build_int_cst_wide (type, low, high);
8493 return fold_build2 (BIT_AND_EXPR, type,
8494 fold_convert (type, arg0), mask);
8495 }
8496
8497 /* X % -C is the same as X % C. */
8498 if (code == TRUNC_MOD_EXPR
8499 && !TYPE_UNSIGNED (type)
8500 && TREE_CODE (arg1) == INTEGER_CST
8501 && !TREE_CONSTANT_OVERFLOW (arg1)
8502 && TREE_INT_CST_HIGH (arg1) < 0
8503 && !flag_trapv
8504 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8505 && !sign_bit_p (arg1, arg1))
8506 return fold_build2 (code, type, fold_convert (type, arg0),
8507 fold_convert (type, negate_expr (arg1)));
8508
8509 /* X % -Y is the same as X % Y. */
8510 if (code == TRUNC_MOD_EXPR
8511 && !TYPE_UNSIGNED (type)
8512 && TREE_CODE (arg1) == NEGATE_EXPR
8513 && !flag_trapv)
8514 return fold_build2 (code, type, fold_convert (type, arg0),
8515 fold_convert (type, TREE_OPERAND (arg1, 0)));
8516
8517 if (TREE_CODE (arg1) == INTEGER_CST
8518 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8519 return fold_convert (type, tem);
8520
8521 goto binary;
8522
8523 case LROTATE_EXPR:
8524 case RROTATE_EXPR:
8525 if (integer_all_onesp (arg0))
8526 return omit_one_operand (type, arg0, arg1);
8527 goto shift;
8528
8529 case RSHIFT_EXPR:
8530 /* Optimize -1 >> x for arithmetic right shifts. */
8531 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8532 return omit_one_operand (type, arg0, arg1);
8533 /* ... fall through ... */
8534
8535 case LSHIFT_EXPR:
8536 shift:
8537 if (integer_zerop (arg1))
8538 return non_lvalue (fold_convert (type, arg0));
8539 if (integer_zerop (arg0))
8540 return omit_one_operand (type, arg0, arg1);
8541
8542 /* Since negative shift count is not well-defined,
8543 don't try to compute it in the compiler. */
8544 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8545 return NULL_TREE;
8546
8547 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8548 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8549 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8550 && host_integerp (TREE_OPERAND (arg0, 1), false)
8551 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8552 {
8553 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8554 + TREE_INT_CST_LOW (arg1));
8555
8556 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8557 being well defined. */
8558 if (low >= TYPE_PRECISION (type))
8559 {
8560 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8561 low = low % TYPE_PRECISION (type);
8562 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8563 return build_int_cst (type, 0);
8564 else
8565 low = TYPE_PRECISION (type) - 1;
8566 }
8567
8568 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8569 build_int_cst (type, low));
8570 }
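/* As concrete illustrations, "(X << 3) << 5" folds to "X << 8".
   Assuming a 32-bit type, "(X << 30) << 5" folds to 0, a signed
   "(X >> 30) >> 5" folds to "X >> 31", and a rotate by 30 followed
   by a rotate by 12 folds to a rotate by 42 % 32 == 10. */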
8571
8572 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8573 into x & ((unsigned)-1 >> c) for unsigned types. */
8574 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8575 || (TYPE_UNSIGNED (type)
8576 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8577 && host_integerp (arg1, false)
8578 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8579 && host_integerp (TREE_OPERAND (arg0, 1), false)
8580 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8581 {
8582 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8583 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8584 tree lshift;
8585 tree arg00;
8586
8587 if (low0 == low1)
8588 {
8589 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8590
8591 lshift = build_int_cst (type, -1);
8592 lshift = int_const_binop (code, lshift, arg1, 0);
8593
8594 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8595 }
8596 }
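/* For instance, with a 32-bit type, "(X >> 4) << 4" folds to
   "X & 0xfffffff0", and for unsigned X, "(X << 4) >> 4" folds to
   "X & 0x0fffffff". */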
8597
8598 /* Rewrite an LROTATE_EXPR by a constant into an
8599 RROTATE_EXPR by a new constant. */
8600 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8601 {
8602 tree tem = build_int_cst (NULL_TREE,
8603 GET_MODE_BITSIZE (TYPE_MODE (type)));
8604 tem = fold_convert (TREE_TYPE (arg1), tem);
8605 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8606 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8607 }
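/* E.g. with a 32-bit mode, "X lrotate 5" becomes "X rrotate 27". */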
8608
8609 /* If we have a rotate of a bit operation with the rotate count and
8610 the second operand of the bit operation both constant,
8611 permute the two operations. */
8612 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8613 && (TREE_CODE (arg0) == BIT_AND_EXPR
8614 || TREE_CODE (arg0) == BIT_IOR_EXPR
8615 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8616 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8617 return fold_build2 (TREE_CODE (arg0), type,
8618 fold_build2 (code, type,
8619 TREE_OPERAND (arg0, 0), arg1),
8620 fold_build2 (code, type,
8621 TREE_OPERAND (arg0, 1), arg1));
8622
8623 /* Two consecutive rotates adding up to the width of the mode can
8624 be ignored. */
8625 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8626 && TREE_CODE (arg0) == RROTATE_EXPR
8627 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8628 && TREE_INT_CST_HIGH (arg1) == 0
8629 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8630 && ((TREE_INT_CST_LOW (arg1)
8631 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8632 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8633 return TREE_OPERAND (arg0, 0);
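/* E.g. with a 32-bit mode, "(X rrotate 22) rrotate 10" is simply X. */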
8634
8635 goto binary;
8636
8637 case MIN_EXPR:
8638 if (operand_equal_p (arg0, arg1, 0))
8639 return omit_one_operand (type, arg0, arg1);
8640 if (INTEGRAL_TYPE_P (type)
8641 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8642 return omit_one_operand (type, arg1, arg0);
8643 goto associate;
8644
8645 case MAX_EXPR:
8646 if (operand_equal_p (arg0, arg1, 0))
8647 return omit_one_operand (type, arg0, arg1);
8648 if (INTEGRAL_TYPE_P (type)
8649 && TYPE_MAX_VALUE (type)
8650 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8651 return omit_one_operand (type, arg1, arg0);
8652 goto associate;
8653
8654 case TRUTH_ANDIF_EXPR:
8655 /* Note that the operands of this must be ints
8656 and their values must be 0 or 1.
8657 ("true" is a fixed value perhaps depending on the language.) */
8658 /* If first arg is constant zero, return it. */
8659 if (integer_zerop (arg0))
8660 return fold_convert (type, arg0);
8661 case TRUTH_AND_EXPR:
8662 /* If either arg is constant true, drop it. */
8663 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8664 return non_lvalue (fold_convert (type, arg1));
8665 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8666 /* Preserve sequence points. */
8667 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8668 return non_lvalue (fold_convert (type, arg0));
8669 /* If second arg is constant zero, result is zero, but first arg
8670 must be evaluated. */
8671 if (integer_zerop (arg1))
8672 return omit_one_operand (type, arg1, arg0);
8673 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8674 case will be handled here. */
8675 if (integer_zerop (arg0))
8676 return omit_one_operand (type, arg0, arg1);
8677
8678 /* !X && X is always false. */
8679 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8680 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8681 return omit_one_operand (type, integer_zero_node, arg1);
8682 /* X && !X is always false. */
8683 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8684 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8685 return omit_one_operand (type, integer_zero_node, arg0);
8686
8687 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8688 means A >= Y && A != MAX, but in this case we know that
8689 A < X <= MAX. */
8690
8691 if (!TREE_SIDE_EFFECTS (arg0)
8692 && !TREE_SIDE_EFFECTS (arg1))
8693 {
8694 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8695 if (tem && !operand_equal_p (tem, arg0, 0))
8696 return fold_build2 (code, type, tem, arg1);
8697
8698 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8699 if (tem && !operand_equal_p (tem, arg1, 0))
8700 return fold_build2 (code, type, arg0, tem);
8701 }
8702
8703 truth_andor:
8704 /* We only do these simplifications if we are optimizing. */
8705 if (!optimize)
8706 return NULL_TREE;
8707
8708 /* Check for things like (A || B) && (A || C). We can convert this
8709 to A || (B && C). Note that either operator can be any of the four
8710 truth and/or operations and the transformation will still be
8711 valid. Also note that we only care about order for the
8712 ANDIF and ORIF operators. If B contains side effects, this
8713 might change the truth-value of A. */
8714 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8715 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8716 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8717 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8718 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8719 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8720 {
8721 tree a00 = TREE_OPERAND (arg0, 0);
8722 tree a01 = TREE_OPERAND (arg0, 1);
8723 tree a10 = TREE_OPERAND (arg1, 0);
8724 tree a11 = TREE_OPERAND (arg1, 1);
8725 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8726 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8727 && (code == TRUTH_AND_EXPR
8728 || code == TRUTH_OR_EXPR));
8729
8730 if (operand_equal_p (a00, a10, 0))
8731 return fold_build2 (TREE_CODE (arg0), type, a00,
8732 fold_build2 (code, type, a01, a11));
8733 else if (commutative && operand_equal_p (a00, a11, 0))
8734 return fold_build2 (TREE_CODE (arg0), type, a00,
8735 fold_build2 (code, type, a01, a10));
8736 else if (commutative && operand_equal_p (a01, a10, 0))
8737 return fold_build2 (TREE_CODE (arg0), type, a01,
8738 fold_build2 (code, type, a00, a11));
8739
8740 /* This case is tricky because we must either have commutative
8741 operators or else A10 must not have side-effects. */
8742
8743 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8744 && operand_equal_p (a01, a11, 0))
8745 return fold_build2 (TREE_CODE (arg0), type,
8746 fold_build2 (code, type, a00, a10),
8747 a01);
8748 }
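/* For example, "(a || b) && (a || c)" folds to "a || (b && c)",
   which is valid here because b is known to have no side effects. */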
8749
8750 /* See if we can build a range comparison. */
8751 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8752 return tem;
8753
8754 /* Check for the possibility of merging component references. If our
8755 lhs is another similar operation, try to merge its rhs with our
8756 rhs. Then try to merge our lhs and rhs. */
8757 if (TREE_CODE (arg0) == code
8758 && 0 != (tem = fold_truthop (code, type,
8759 TREE_OPERAND (arg0, 1), arg1)))
8760 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8761
8762 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8763 return tem;
8764
8765 return NULL_TREE;
8766
8767 case TRUTH_ORIF_EXPR:
8768 /* Note that the operands of this must be ints
8769 and their values must be 0 or 1.
8770 ("true" is a fixed value perhaps depending on the language.) */
8771 /* If first arg is constant true, return it. */
8772 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8773 return fold_convert (type, arg0);
8774 case TRUTH_OR_EXPR:
8775 /* If either arg is constant zero, drop it. */
8776 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8777 return non_lvalue (fold_convert (type, arg1));
8778 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8779 /* Preserve sequence points. */
8780 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8781 return non_lvalue (fold_convert (type, arg0));
8782 /* If second arg is constant true, result is true, but we must
8783 evaluate first arg. */
8784 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8785 return omit_one_operand (type, arg1, arg0);
8786 /* Likewise for first arg, but note this only occurs here for
8787 TRUTH_OR_EXPR. */
8788 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8789 return omit_one_operand (type, arg0, arg1);
8790
8791 /* !X || X is always true. */
8792 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8793 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8794 return omit_one_operand (type, integer_one_node, arg1);
8795 /* X || !X is always true. */
8796 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8797 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8798 return omit_one_operand (type, integer_one_node, arg0);
8799
8800 goto truth_andor;
8801
8802 case TRUTH_XOR_EXPR:
8803 /* If the second arg is constant zero, drop it. */
8804 if (integer_zerop (arg1))
8805 return non_lvalue (fold_convert (type, arg0));
8806 /* If the second arg is constant true, this is a logical inversion. */
8807 if (integer_onep (arg1))
8808 {
8809 /* Only call invert_truthvalue if operand is a truth value. */
8810 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8811 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8812 else
8813 tem = invert_truthvalue (arg0);
8814 return non_lvalue (fold_convert (type, tem));
8815 }
8816 /* Identical arguments cancel to zero. */
8817 if (operand_equal_p (arg0, arg1, 0))
8818 return omit_one_operand (type, integer_zero_node, arg0);
8819
8820 /* !X ^ X is always true. */
8821 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8822 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8823 return omit_one_operand (type, integer_one_node, arg1);
8824
8825 /* X ^ !X is always true. */
8826 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8827 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8828 return omit_one_operand (type, integer_one_node, arg0);
8829
8830 return NULL_TREE;
8831
8832 case EQ_EXPR:
8833 case NE_EXPR:
8834 case LT_EXPR:
8835 case GT_EXPR:
8836 case LE_EXPR:
8837 case GE_EXPR:
8838 /* If one arg is a real or integer constant, put it last. */
8839 if (tree_swap_operands_p (arg0, arg1, true))
8840 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8841
8842 /* bool_var != 0 becomes bool_var. */
8843 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8844 && code == NE_EXPR)
8845 return non_lvalue (fold_convert (type, arg0));
8846
8847 /* bool_var == 1 becomes bool_var. */
8848 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8849 && code == EQ_EXPR)
8850 return non_lvalue (fold_convert (type, arg0));
8851
8852 /* If this is an equality comparison of the address of a non-weak
8853 object against zero, then we know the result. */
8854 if ((code == EQ_EXPR || code == NE_EXPR)
8855 && TREE_CODE (arg0) == ADDR_EXPR
8856 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8857 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8858 && integer_zerop (arg1))
8859 return constant_boolean_node (code != EQ_EXPR, type);
8860
8861 /* If this is an equality comparison of the address of two non-weak,
8862 unaliased symbols neither of which are extern (since we do not
8863 have access to attributes for externs), then we know the result. */
8864 if ((code == EQ_EXPR || code == NE_EXPR)
8865 && TREE_CODE (arg0) == ADDR_EXPR
8866 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8867 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8868 && ! lookup_attribute ("alias",
8869 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8870 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8871 && TREE_CODE (arg1) == ADDR_EXPR
8872 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8873 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8874 && ! lookup_attribute ("alias",
8875 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8876 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8877 {
8878 /* We know that we're looking at the address of two
8879 non-weak, unaliased, static _DECL nodes.
8880
8881 It is both wasteful and incorrect to call operand_equal_p
8882 to compare the two ADDR_EXPR nodes. It is wasteful in that
8883 all we need to do is test pointer equality for the arguments
8884 to the two ADDR_EXPR nodes. It is incorrect to use
8885 operand_equal_p as that function is NOT equivalent to a
8886 C equality test. It can in fact return false for two
8887 objects which would test as equal using the C equality
8888 operator. */
8889 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8890 return constant_boolean_node (equal
8891 ? code == EQ_EXPR : code != EQ_EXPR,
8892 type);
8893 }
8894
8895 /* If this is a comparison of two exprs that look like an
8896 ARRAY_REF of the same object, then we can fold this to a
8897 comparison of the two offsets. */
8898 if (TREE_CODE_CLASS (code) == tcc_comparison)
8899 {
8900 tree base0, offset0, base1, offset1;
8901
8902 if (extract_array_ref (arg0, &base0, &offset0)
8903 && extract_array_ref (arg1, &base1, &offset1)
8904 && operand_equal_p (base0, base1, 0))
8905 {
8906 /* Handle no offsets on both sides specially. */
8907 if (offset0 == NULL_TREE
8908 && offset1 == NULL_TREE)
8909 return fold_build2 (code, type, integer_zero_node,
8910 integer_zero_node);
8911
8912 if (!offset0 || !offset1
8913 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8914 {
8915 if (offset0 == NULL_TREE)
8916 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8917 if (offset1 == NULL_TREE)
8918 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8919 return fold_build2 (code, type, offset0, offset1);
8920 }
8921 }
8922 }
8923
8924 /* Transform comparisons of the form X +- C CMP X. */
8925 if ((code != EQ_EXPR && code != NE_EXPR)
8926 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8927 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8928 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8929 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8930 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8931 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8932 && !(flag_wrapv || flag_trapv))))
8933 {
8934 tree arg01 = TREE_OPERAND (arg0, 1);
8935 enum tree_code code0 = TREE_CODE (arg0);
8936 int is_positive;
8937
8938 if (TREE_CODE (arg01) == REAL_CST)
8939 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8940 else
8941 is_positive = tree_int_cst_sgn (arg01);
8942
8943 /* (X - c) > X becomes false. */
8944 if (code == GT_EXPR
8945 && ((code0 == MINUS_EXPR && is_positive >= 0)
8946 || (code0 == PLUS_EXPR && is_positive <= 0)))
8947 return constant_boolean_node (0, type);
8948
8949 /* Likewise (X + c) < X becomes false. */
8950 if (code == LT_EXPR
8951 && ((code0 == PLUS_EXPR && is_positive >= 0)
8952 || (code0 == MINUS_EXPR && is_positive <= 0)))
8953 return constant_boolean_node (0, type);
8954
8955 /* Convert (X - c) <= X to true. */
8956 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8957 && code == LE_EXPR
8958 && ((code0 == MINUS_EXPR && is_positive >= 0)
8959 || (code0 == PLUS_EXPR && is_positive <= 0)))
8960 return constant_boolean_node (1, type);
8961
8962 /* Convert (X + c) >= X to true. */
8963 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8964 && code == GE_EXPR
8965 && ((code0 == PLUS_EXPR && is_positive >= 0)
8966 || (code0 == MINUS_EXPR && is_positive <= 0)))
8967 return constant_boolean_node (1, type);
8968
8969 if (TREE_CODE (arg01) == INTEGER_CST)
8970 {
8971 /* Convert X + c > X and X - c < X to true for integers. */
8972 if (code == GT_EXPR
8973 && ((code0 == PLUS_EXPR && is_positive > 0)
8974 || (code0 == MINUS_EXPR && is_positive < 0)))
8975 return constant_boolean_node (1, type);
8976
8977 if (code == LT_EXPR
8978 && ((code0 == MINUS_EXPR && is_positive > 0)
8979 || (code0 == PLUS_EXPR && is_positive < 0)))
8980 return constant_boolean_node (1, type);
8981
8982 /* Convert X + c <= X and X - c >= X to false for integers. */
8983 if (code == LE_EXPR
8984 && ((code0 == PLUS_EXPR && is_positive > 0)
8985 || (code0 == MINUS_EXPR && is_positive < 0)))
8986 return constant_boolean_node (0, type);
8987
8988 if (code == GE_EXPR
8989 && ((code0 == MINUS_EXPR && is_positive > 0)
8990 || (code0 == PLUS_EXPR && is_positive < 0)))
8991 return constant_boolean_node (0, type);
8992 }
8993 }
8994
8995 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8996 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8997 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8998 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8999 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9000 && !(flag_wrapv || flag_trapv))
9001 && (TREE_CODE (arg1) == INTEGER_CST
9002 && !TREE_OVERFLOW (arg1)))
9003 {
9004 tree const1 = TREE_OPERAND (arg0, 1);
9005 tree const2 = arg1;
9006 tree variable = TREE_OPERAND (arg0, 0);
9007 tree lhs;
9008 int lhs_add;
9009 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9010
9011 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9012 TREE_TYPE (arg1), const2, const1);
9013 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9014 && (TREE_CODE (lhs) != INTEGER_CST
9015 || !TREE_OVERFLOW (lhs)))
9016 return fold_build2 (code, type, variable, lhs);
9017 }
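/* For example, "X + 5 < 10" folds to "X < 5" and "X - 3 == 7" folds
   to "X == 10", provided the adjusted constant does not overflow. */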
9018
9019 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9020 {
9021 tree targ0 = strip_float_extensions (arg0);
9022 tree targ1 = strip_float_extensions (arg1);
9023 tree newtype = TREE_TYPE (targ0);
9024
9025 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9026 newtype = TREE_TYPE (targ1);
9027
9028 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9029 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9030 return fold_build2 (code, type, fold_convert (newtype, targ0),
9031 fold_convert (newtype, targ1));
9032
9033 /* (-a) CMP (-b) -> b CMP a */
9034 if (TREE_CODE (arg0) == NEGATE_EXPR
9035 && TREE_CODE (arg1) == NEGATE_EXPR)
9036 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9037 TREE_OPERAND (arg0, 0));
9038
9039 if (TREE_CODE (arg1) == REAL_CST)
9040 {
9041 REAL_VALUE_TYPE cst;
9042 cst = TREE_REAL_CST (arg1);
9043
9044 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9045 if (TREE_CODE (arg0) == NEGATE_EXPR)
9046 return
9047 fold_build2 (swap_tree_comparison (code), type,
9048 TREE_OPERAND (arg0, 0),
9049 build_real (TREE_TYPE (arg1),
9050 REAL_VALUE_NEGATE (cst)));
9051
9052 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9053 /* a CMP (-0) -> a CMP 0 */
9054 if (REAL_VALUE_MINUS_ZERO (cst))
9055 return fold_build2 (code, type, arg0,
9056 build_real (TREE_TYPE (arg1), dconst0));
9057
9058 /* x != NaN is always true, other ops are always false. */
9059 if (REAL_VALUE_ISNAN (cst)
9060 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9061 {
9062 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9063 return omit_one_operand (type, tem, arg0);
9064 }
9065
9066 /* Fold comparisons against infinity. */
9067 if (REAL_VALUE_ISINF (cst))
9068 {
9069 tem = fold_inf_compare (code, type, arg0, arg1);
9070 if (tem != NULL_TREE)
9071 return tem;
9072 }
9073 }
9074
9075 /* If this is a comparison of a real constant with a PLUS_EXPR
9076 or a MINUS_EXPR of a real constant, we can convert it into a
9077 comparison with a revised real constant as long as no overflow
9078 occurs when unsafe_math_optimizations are enabled. */
9079 if (flag_unsafe_math_optimizations
9080 && TREE_CODE (arg1) == REAL_CST
9081 && (TREE_CODE (arg0) == PLUS_EXPR
9082 || TREE_CODE (arg0) == MINUS_EXPR)
9083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9084 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9085 ? MINUS_EXPR : PLUS_EXPR,
9086 arg1, TREE_OPERAND (arg0, 1), 0))
9087 && ! TREE_CONSTANT_OVERFLOW (tem))
9088 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9089
9090 /* Likewise, we can simplify a comparison of a real constant with
9091 a MINUS_EXPR whose first operand is also a real constant, i.e.
9092 (c1 - x) < c2 becomes x > c1-c2. */
9093 if (flag_unsafe_math_optimizations
9094 && TREE_CODE (arg1) == REAL_CST
9095 && TREE_CODE (arg0) == MINUS_EXPR
9096 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9097 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9098 arg1, 0))
9099 && ! TREE_CONSTANT_OVERFLOW (tem))
9100 return fold_build2 (swap_tree_comparison (code), type,
9101 TREE_OPERAND (arg0, 1), tem);
9102
9103 /* Fold comparisons against built-in math functions. */
9104 if (TREE_CODE (arg1) == REAL_CST
9105 && flag_unsafe_math_optimizations
9106 && ! flag_errno_math)
9107 {
9108 enum built_in_function fcode = builtin_mathfn_code (arg0);
9109
9110 if (fcode != END_BUILTINS)
9111 {
9112 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9113 if (tem != NULL_TREE)
9114 return tem;
9115 }
9116 }
9117 }
9118
9119 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9120 if (TREE_CONSTANT (arg1)
9121 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9122 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9123 /* This optimization is invalid for ordered comparisons
9124 if CONST+INCR overflows or if foo+incr might overflow.
9125 This optimization is invalid for floating point due to rounding.
9126 For pointer types we assume overflow doesn't happen. */
9127 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9128 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9129 && (code == EQ_EXPR || code == NE_EXPR))))
9130 {
9131 tree varop, newconst;
9132
9133 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9134 {
9135 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9136 arg1, TREE_OPERAND (arg0, 1));
9137 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9138 TREE_OPERAND (arg0, 0),
9139 TREE_OPERAND (arg0, 1));
9140 }
9141 else
9142 {
9143 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9144 arg1, TREE_OPERAND (arg0, 1));
9145 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9146 TREE_OPERAND (arg0, 0),
9147 TREE_OPERAND (arg0, 1));
9148 }
9149
9150
9151 /* If VAROP is a reference to a bitfield, we must mask
9152 the constant by the width of the field. */
9153 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9154 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9155 && host_integerp (DECL_SIZE (TREE_OPERAND
9156 (TREE_OPERAND (varop, 0), 1)), 1))
9157 {
9158 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9159 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9160 tree folded_compare, shift;
9161
9162 /* First check whether the comparison would always come out
9163 the same. Without this check, the masking below could
9164 change the meaning of the comparison. */
9165 folded_compare = fold_build2 (code, type,
9166 TREE_OPERAND (varop, 0), arg1);
9167 if (integer_zerop (folded_compare)
9168 || integer_onep (folded_compare))
9169 return omit_one_operand (type, folded_compare, varop);
9170
9171 shift = build_int_cst (NULL_TREE,
9172 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9173 shift = fold_convert (TREE_TYPE (varop), shift);
9174 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9175 newconst, shift);
9176 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9177 newconst, shift);
9178 }
9179
9180 return fold_build2 (code, type, varop, newconst);
9181 }
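/* For example, "i++ == 5" folds to "++i == 6" and "i-- != 0" folds
   to "--i != -1". */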
9182
9183 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9184 This transformation affects the cases which are handled in later
9185 optimizations involving comparisons with non-negative constants. */
9186 if (TREE_CODE (arg1) == INTEGER_CST
9187 && TREE_CODE (arg0) != INTEGER_CST
9188 && tree_int_cst_sgn (arg1) > 0)
9189 {
9190 switch (code)
9191 {
9192 case GE_EXPR:
9193 arg1 = const_binop (MINUS_EXPR, arg1,
9194 build_int_cst (TREE_TYPE (arg1), 1), 0);
9195 return fold_build2 (GT_EXPR, type, arg0,
9196 fold_convert (TREE_TYPE (arg0), arg1));
9197
9198 case LT_EXPR:
9199 arg1 = const_binop (MINUS_EXPR, arg1,
9200 build_int_cst (TREE_TYPE (arg1), 1), 0);
9201 return fold_build2 (LE_EXPR, type, arg0,
9202 fold_convert (TREE_TYPE (arg0), arg1));
9203
9204 default:
9205 break;
9206 }
9207 }
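/* For example, "X >= 3" becomes "X > 2" and "X < 3" becomes
   "X <= 2". */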
9208
9209 /* Comparisons with the highest or lowest possible integer of
9210 the specified size will have known values. */
9211 {
9212 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9213
9214 if (TREE_CODE (arg1) == INTEGER_CST
9215 && ! TREE_CONSTANT_OVERFLOW (arg1)
9216 && width <= 2 * HOST_BITS_PER_WIDE_INT
9217 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9218 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9219 {
9220 HOST_WIDE_INT signed_max_hi;
9221 unsigned HOST_WIDE_INT signed_max_lo;
9222 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9223
9224 if (width <= HOST_BITS_PER_WIDE_INT)
9225 {
9226 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9227 - 1;
9228 signed_max_hi = 0;
9229 max_hi = 0;
9230
9231 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9232 {
9233 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9234 min_lo = 0;
9235 min_hi = 0;
9236 }
9237 else
9238 {
9239 max_lo = signed_max_lo;
9240 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9241 min_hi = -1;
9242 }
9243 }
9244 else
9245 {
9246 width -= HOST_BITS_PER_WIDE_INT;
9247 signed_max_lo = -1;
9248 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9249 - 1;
9250 max_lo = -1;
9251 min_lo = 0;
9252
9253 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9254 {
9255 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9256 min_hi = 0;
9257 }
9258 else
9259 {
9260 max_hi = signed_max_hi;
9261 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9262 }
9263 }
9264
9265 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9266 && TREE_INT_CST_LOW (arg1) == max_lo)
9267 switch (code)
9268 {
9269 case GT_EXPR:
9270 return omit_one_operand (type, integer_zero_node, arg0);
9271
9272 case GE_EXPR:
9273 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9274
9275 case LE_EXPR:
9276 return omit_one_operand (type, integer_one_node, arg0);
9277
9278 case LT_EXPR:
9279 return fold_build2 (NE_EXPR, type, arg0, arg1);
9280
9281 /* The GE_EXPR and LT_EXPR cases above are not normally
9282 reached because of previous transformations. */
9283
9284 default:
9285 break;
9286 }
9287 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9288 == max_hi
9289 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9290 switch (code)
9291 {
9292 case GT_EXPR:
9293 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9294 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9295 case LE_EXPR:
9296 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9297 return fold_build2 (NE_EXPR, type, arg0, arg1);
9298 default:
9299 break;
9300 }
9301 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9302 == min_hi
9303 && TREE_INT_CST_LOW (arg1) == min_lo)
9304 switch (code)
9305 {
9306 case LT_EXPR:
9307 return omit_one_operand (type, integer_zero_node, arg0);
9308
9309 case LE_EXPR:
9310 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9311
9312 case GE_EXPR:
9313 return omit_one_operand (type, integer_one_node, arg0);
9314
9315 case GT_EXPR:
9316 return fold_build2 (NE_EXPR, type, op0, op1);
9317
9318 default:
9319 break;
9320 }
9321 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9322 == min_hi
9323 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9324 switch (code)
9325 {
9326 case GE_EXPR:
9327 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9328 return fold_build2 (NE_EXPR, type, arg0, arg1);
9329 case LT_EXPR:
9330 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9331 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9332 default:
9333 break;
9334 }
9335
9336 else if (!in_gimple_form
9337 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9338 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9339 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9340 /* signed_type does not work on pointer types. */
9341 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9342 {
9343 /* The following case also applies to X < signed_max+1
9344 and X >= signed_max+1 because of previous transformations. */
9345 if (code == LE_EXPR || code == GT_EXPR)
9346 {
9347 tree st0, st1;
9348 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9349 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9350 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9351 type, fold_convert (st0, arg0),
9352 build_int_cst (st1, 0));
9353 }
9354 }
9355 }
9356 }
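/* As concrete illustrations, for unsigned 32-bit X, "X > 4294967295"
   folds to 0, "X <= 4294967295" folds to 1, and "X > 4294967294"
   becomes "X == 4294967295"; for signed 32-bit X, "X < -2147483648"
   folds to 0; and the unsigned "X > 2147483647" becomes
   "(int) X < 0". */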
9357
9358 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9359 a MINUS_EXPR of a constant, we can convert it into a comparison with
9360 a revised constant as long as no overflow occurs. */
9361 if ((code == EQ_EXPR || code == NE_EXPR)
9362 && TREE_CODE (arg1) == INTEGER_CST
9363 && (TREE_CODE (arg0) == PLUS_EXPR
9364 || TREE_CODE (arg0) == MINUS_EXPR)
9365 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9366 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9367 ? MINUS_EXPR : PLUS_EXPR,
9368 arg1, TREE_OPERAND (arg0, 1), 0))
9369 && ! TREE_CONSTANT_OVERFLOW (tem))
9370 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9371
9372 /* Similarly for a NEGATE_EXPR. */
9373 else if ((code == EQ_EXPR || code == NE_EXPR)
9374 && TREE_CODE (arg0) == NEGATE_EXPR
9375 && TREE_CODE (arg1) == INTEGER_CST
9376 && 0 != (tem = negate_expr (arg1))
9377 && TREE_CODE (tem) == INTEGER_CST
9378 && ! TREE_CONSTANT_OVERFLOW (tem))
9379 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9380
9381 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9382 for !=. Don't do this for ordered comparisons due to overflow. */
9383 else if ((code == NE_EXPR || code == EQ_EXPR)
9384 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9385 return fold_build2 (code, type,
9386 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9387
9388 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9389 && (TREE_CODE (arg0) == NOP_EXPR
9390 || TREE_CODE (arg0) == CONVERT_EXPR))
9391 {
9392 /* If we are widening one operand of an integer comparison,
9393 see if the other operand is similarly being widened. Perhaps we
9394 can do the comparison in the narrower type. */
9395 tem = fold_widened_comparison (code, type, arg0, arg1);
9396 if (tem)
9397 return tem;
9398
9399 /* Or if we are changing signedness. */
9400 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9401 if (tem)
9402 return tem;
9403 }
9404
9405 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9406 constant, we can simplify it. */
9407 else if (TREE_CODE (arg1) == INTEGER_CST
9408 && (TREE_CODE (arg0) == MIN_EXPR
9409 || TREE_CODE (arg0) == MAX_EXPR)
9410 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9411 {
9412 tem = optimize_minmax_comparison (code, type, op0, op1);
9413 if (tem)
9414 return tem;
9415
9416 return NULL_TREE;
9417 }
9418
9419 /* If we are comparing an ABS_EXPR with a constant, we can
9420 convert all the cases into explicit comparisons, but they may
9421 well not be faster than doing the ABS and one comparison.
9422 But ABS (X) <= C is a range comparison, which becomes a subtraction
9423 and a comparison, and is probably faster. */
9424 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9425 && TREE_CODE (arg0) == ABS_EXPR
9426 && ! TREE_SIDE_EFFECTS (arg0)
9427 && (0 != (tem = negate_expr (arg1)))
9428 && TREE_CODE (tem) == INTEGER_CST
9429 && ! TREE_CONSTANT_OVERFLOW (tem))
9430 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9431 build2 (GE_EXPR, type,
9432 TREE_OPERAND (arg0, 0), tem),
9433 build2 (LE_EXPR, type,
9434 TREE_OPERAND (arg0, 0), arg1));
9435
9436 /* Convert ABS_EXPR<x> >= 0 to true. */
9437 else if (code == GE_EXPR
9438 && tree_expr_nonnegative_p (arg0)
9439 && (integer_zerop (arg1)
9440 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9441 && real_zerop (arg1))))
9442 return omit_one_operand (type, integer_one_node, arg0);
9443
9444 /* Convert ABS_EXPR<x> < 0 to false. */
9445 else if (code == LT_EXPR
9446 && tree_expr_nonnegative_p (arg0)
9447 && (integer_zerop (arg1) || real_zerop (arg1)))
9448 return omit_one_operand (type, integer_zero_node, arg0);
9449
9450 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9451 else if ((code == EQ_EXPR || code == NE_EXPR)
9452 && TREE_CODE (arg0) == ABS_EXPR
9453 && (integer_zerop (arg1) || real_zerop (arg1)))
9454 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9455
9456 /* If this is an EQ or NE comparison with zero and ARG0 is
9457 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9458 two operations, but the latter can be done in one less insn
9459 on machines that have only two-operand insns or on which a
9460 constant cannot be the first operand. */
9461 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9462 && TREE_CODE (arg0) == BIT_AND_EXPR)
9463 {
9464 tree arg00 = TREE_OPERAND (arg0, 0);
9465 tree arg01 = TREE_OPERAND (arg0, 1);
9466 if (TREE_CODE (arg00) == LSHIFT_EXPR
9467 && integer_onep (TREE_OPERAND (arg00, 0)))
9468 return
9469 fold_build2 (code, type,
9470 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9471 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9472 arg01, TREE_OPERAND (arg00, 1)),
9473 fold_convert (TREE_TYPE (arg0),
9474 integer_one_node)),
9475 arg1);
9476 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9477 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9478 return
9479 fold_build2 (code, type,
9480 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9481 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9482 arg00, TREE_OPERAND (arg01, 1)),
9483 fold_convert (TREE_TYPE (arg0),
9484 integer_one_node)),
9485 arg1);
9486 }
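/* For example, "((1 << n) & x) == 0" becomes "((x >> n) & 1) == 0". */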
9487
9488 /* If this is an NE or EQ comparison of zero against the result of a
9489 signed MOD operation whose second operand is a power of 2, make
9490 the MOD operation unsigned since it is simpler and equivalent. */
9491 if ((code == NE_EXPR || code == EQ_EXPR)
9492 && integer_zerop (arg1)
9493 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9494 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9495 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9496 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9497 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9498 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9499 {
9500 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9501 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9502 fold_convert (newtype,
9503 TREE_OPERAND (arg0, 0)),
9504 fold_convert (newtype,
9505 TREE_OPERAND (arg0, 1)));
9506
9507 return fold_build2 (code, type, newmod,
9508 fold_convert (newtype, arg1));
9509 }
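/* For example, with signed X, "(X % 4) == 0" becomes
   "((unsigned) X % 4) == 0"; the two agree because divisibility by
   a power of two does not depend on the sign. */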
9510
9511 /* If this is an NE comparison of zero with an AND of one, remove the
9512 comparison since the AND will give the correct value. */
9513 if (code == NE_EXPR && integer_zerop (arg1)
9514 && TREE_CODE (arg0) == BIT_AND_EXPR
9515 && integer_onep (TREE_OPERAND (arg0, 1)))
9516 return fold_convert (type, arg0);
9517
9518 /* If we have (A & C) == C where C is a power of 2, convert this into
9519 (A & C) != 0. Similarly for NE_EXPR. */
9520 if ((code == EQ_EXPR || code == NE_EXPR)
9521 && TREE_CODE (arg0) == BIT_AND_EXPR
9522 && integer_pow2p (TREE_OPERAND (arg0, 1))
9523 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9524 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9525 arg0, fold_convert (TREE_TYPE (arg0),
9526 integer_zero_node));
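/* For example, "(X & 8) == 8" becomes "(X & 8) != 0". */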
9527
9528 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9529 bit, then fold the expression into A < 0 or A >= 0. */
9530 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9531 if (tem)
9532 return tem;
9533
9534 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9535 Similarly for NE_EXPR. */
9536 if ((code == EQ_EXPR || code == NE_EXPR)
9537 && TREE_CODE (arg0) == BIT_AND_EXPR
9538 && TREE_CODE (arg1) == INTEGER_CST
9539 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9540 {
9541 tree notc = fold_build1 (BIT_NOT_EXPR,
9542 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9543 TREE_OPERAND (arg0, 1));
9544 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9545 arg1, notc);
9546 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9547 if (integer_nonzerop (dandnotc))
9548 return omit_one_operand (type, rslt, arg0);
9549 }
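/* For example, "(X & 4) == 3" folds to 0 (and "!=" to 1), since
   3 & ~4 == 3 is nonzero: D has bits set that C always clears. */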
9550
9551 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9552 Similarly for NE_EXPR. */
9553 if ((code == EQ_EXPR || code == NE_EXPR)
9554 && TREE_CODE (arg0) == BIT_IOR_EXPR
9555 && TREE_CODE (arg1) == INTEGER_CST
9556 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9557 {
9558 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9559 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9560 TREE_OPERAND (arg0, 1), notd);
9561 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9562 if (integer_nonzerop (candnotd))
9563 return omit_one_operand (type, rslt, arg0);
9564 }
9565
9566 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9567 and similarly for >= into !=. */
9568 if ((code == LT_EXPR || code == GE_EXPR)
9569 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9570 && TREE_CODE (arg1) == LSHIFT_EXPR
9571 && integer_onep (TREE_OPERAND (arg1, 0)))
9572 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9573 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9574 TREE_OPERAND (arg1, 1)),
9575 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9576
9577 else if ((code == LT_EXPR || code == GE_EXPR)
9578 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9579 && (TREE_CODE (arg1) == NOP_EXPR
9580 || TREE_CODE (arg1) == CONVERT_EXPR)
9581 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9582 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9583 return
9584 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9585 fold_convert (TREE_TYPE (arg0),
9586 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9587 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9588 1))),
9589 fold_convert (TREE_TYPE (arg0), integer_zero_node));
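/* For example, for unsigned X, "X < (1 << Y)" becomes
   "(X >> Y) == 0" and "X >= (1 << Y)" becomes "(X >> Y) != 0". */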
9590
9591 /* Simplify comparison of something with itself. (For IEEE
9592 floating-point, we can only do some of these simplifications.) */
9593 if (operand_equal_p (arg0, arg1, 0))
9594 {
9595 switch (code)
9596 {
9597 case EQ_EXPR:
9598 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9599 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9600 return constant_boolean_node (1, type);
9601 break;
9602
9603 case GE_EXPR:
9604 case LE_EXPR:
9605 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9606 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9607 return constant_boolean_node (1, type);
9608 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9609
9610 case NE_EXPR:
9611 /* For NE, we can only do this simplification for integer types
9612 or when we don't honor IEEE floating point NaNs. */
9613 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9614 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9615 break;
9616 /* ... fall through ... */
9617 case GT_EXPR:
9618 case LT_EXPR:
9619 return constant_boolean_node (0, type);
9620 default:
9621 gcc_unreachable ();
9622 }
9623 }
9624
9625 /* If we are comparing an expression that just has comparisons
9626 of two integer values, arithmetic expressions of those comparisons,
9627 and constants, we can simplify it. There are only three cases
9628 to check: the two values can either be equal, the first can be
9629 greater, or the second can be greater. Fold the expression for
9630 those three values. Since each value must be 0 or 1, we have
9631 eight possibilities, each of which corresponds to the constant 0
9632 or 1 or one of the six possible comparisons.
9633
9634 This handles common cases like (a > b) == 0 but also handles
9635 expressions like ((x > y) - (y > x)) > 0, which supposedly
9636 occur in macroized code. */
9637
9638 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9639 {
9640 tree cval1 = 0, cval2 = 0;
9641 int save_p = 0;
9642
9643 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9644 /* Don't handle degenerate cases here; they should already
9645 have been handled anyway. */
9646 && cval1 != 0 && cval2 != 0
9647 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9648 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9649 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9650 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9651 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9652 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9653 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9654 {
9655 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9656 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9657
9658 /* We can't just pass T to eval_subst in case cval1 or cval2
9659 was the same as ARG1. */
9660
9661 tree high_result
9662 = fold_build2 (code, type,
9663 eval_subst (arg0, cval1, maxval,
9664 cval2, minval),
9665 arg1);
9666 tree equal_result
9667 = fold_build2 (code, type,
9668 eval_subst (arg0, cval1, maxval,
9669 cval2, maxval),
9670 arg1);
9671 tree low_result
9672 = fold_build2 (code, type,
9673 eval_subst (arg0, cval1, minval,
9674 cval2, maxval),
9675 arg1);
9676
9677 /* All three of these results should be 0 or 1. Confirm they
9678 are. Then use those values to select the proper code
9679 to use. */
9680
9681 if ((integer_zerop (high_result)
9682 || integer_onep (high_result))
9683 && (integer_zerop (equal_result)
9684 || integer_onep (equal_result))
9685 && (integer_zerop (low_result)
9686 || integer_onep (low_result)))
9687 {
9688 /* Make a 3-bit mask with the high-order bit being the
9689 value for `>', the next for `=', and the low for `<'. */
9690 switch ((integer_onep (high_result) * 4)
9691 + (integer_onep (equal_result) * 2)
9692 + integer_onep (low_result))
9693 {
9694 case 0:
9695 /* Always false. */
9696 return omit_one_operand (type, integer_zero_node, arg0);
9697 case 1:
9698 code = LT_EXPR;
9699 break;
9700 case 2:
9701 code = EQ_EXPR;
9702 break;
9703 case 3:
9704 code = LE_EXPR;
9705 break;
9706 case 4:
9707 code = GT_EXPR;
9708 break;
9709 case 5:
9710 code = NE_EXPR;
9711 break;
9712 case 6:
9713 code = GE_EXPR;
9714 break;
9715 case 7:
9716 /* Always true. */
9717 return omit_one_operand (type, integer_one_node, arg0);
9718 }
9719
9720 if (save_p)
9721 return save_expr (build2 (code, type, cval1, cval2));
9722 else
9723 return fold_build2 (code, type, cval1, cval2);
9724 }
9725 }
9726 }
9727
9728 /* If this is a comparison of a field, we may be able to simplify it. */
9729 if (((TREE_CODE (arg0) == COMPONENT_REF
9730 && lang_hooks.can_use_bit_fields_p ())
9731 || TREE_CODE (arg0) == BIT_FIELD_REF)
9732 && (code == EQ_EXPR || code == NE_EXPR)
9733 /* Handle the constant case even without -O
9734 to make sure the warnings are given. */
9735 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9736 {
9737 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9738 if (t1)
9739 return t1;
9740 }
9741
9742 /* Fold a comparison of the address of COMPONENT_REFs with the same
9743 type and component to a comparison of the address of the base
9744 object. In short, &x->a OP &y->a becomes x OP y, and
9745 &x->a OP &y.a becomes x OP &y. */
9746 if (TREE_CODE (arg0) == ADDR_EXPR
9747 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9748 && TREE_CODE (arg1) == ADDR_EXPR
9749 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9750 {
9751 tree cref0 = TREE_OPERAND (arg0, 0);
9752 tree cref1 = TREE_OPERAND (arg1, 0);
9753 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9754 {
9755 tree op0 = TREE_OPERAND (cref0, 0);
9756 tree op1 = TREE_OPERAND (cref1, 0);
9757 return fold_build2 (code, type,
9758 build_fold_addr_expr (op0),
9759 build_fold_addr_expr (op1));
9760 }
9761 }
9762
9763 /* Optimize comparisons of strlen vs zero to a compare of the
9764 first character of the string vs zero. To wit,
9765 strlen(ptr) == 0 => *ptr == 0
9766 strlen(ptr) != 0 => *ptr != 0
9767 Other cases should reduce to one of these two (or a constant)
9768 due to the return value of strlen being unsigned. */
9769 if ((code == EQ_EXPR || code == NE_EXPR)
9770 && integer_zerop (arg1)
9771 && TREE_CODE (arg0) == CALL_EXPR)
9772 {
9773 tree fndecl = get_callee_fndecl (arg0);
9774 tree arglist;
9775
9776 if (fndecl
9777 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9778 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9779 && (arglist = TREE_OPERAND (arg0, 1))
9780 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9781 && ! TREE_CHAIN (arglist))
9782 {
9783 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9784 return fold_build2 (code, type, iref,
9785 build_int_cst (TREE_TYPE (iref), 0));
9786 }
9787 }
9788
9789 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9790 into a single range test. */
9791 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9792 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9793 && TREE_CODE (arg1) == INTEGER_CST
9794 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9795 && !integer_zerop (TREE_OPERAND (arg0, 1))
9796 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9797 && !TREE_OVERFLOW (arg1))
9798 {
9799 t1 = fold_div_compare (code, type, arg0, arg1);
9800 if (t1 != NULL_TREE)
9801 return t1;
9802 }
9803
9804 if ((code == EQ_EXPR || code == NE_EXPR)
9805 && integer_zerop (arg1)
9806 && tree_expr_nonzero_p (arg0))
9807 {
9808 tree res = constant_boolean_node (code == NE_EXPR, type);
9809 return omit_one_operand (type, res, arg0);
9810 }
9811
9812 t1 = fold_relational_const (code, type, arg0, arg1);
9813 return t1 == NULL_TREE ? NULL_TREE : t1;
9814
9815 case UNORDERED_EXPR:
9816 case ORDERED_EXPR:
9817 case UNLT_EXPR:
9818 case UNLE_EXPR:
9819 case UNGT_EXPR:
9820 case UNGE_EXPR:
9821 case UNEQ_EXPR:
9822 case LTGT_EXPR:
9823 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9824 {
9825 t1 = fold_relational_const (code, type, arg0, arg1);
9826 if (t1 != NULL_TREE)
9827 return t1;
9828 }
9829
9830 /* If the first operand is NaN, the result is constant. */
9831 if (TREE_CODE (arg0) == REAL_CST
9832 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9833 && (code != LTGT_EXPR || ! flag_trapping_math))
9834 {
9835 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9836 ? integer_zero_node
9837 : integer_one_node;
9838 return omit_one_operand (type, t1, arg1);
9839 }
9840
9841 /* If the second operand is NaN, the result is constant. */
9842 if (TREE_CODE (arg1) == REAL_CST
9843 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9844 && (code != LTGT_EXPR || ! flag_trapping_math))
9845 {
9846 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9847 ? integer_zero_node
9848 : integer_one_node;
9849 return omit_one_operand (type, t1, arg0);
9850 }
9851
9852 /* Simplify unordered comparison of something with itself. */
9853 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9854 && operand_equal_p (arg0, arg1, 0))
9855 return constant_boolean_node (1, type);
9856
9857 if (code == LTGT_EXPR
9858 && !flag_trapping_math
9859 && operand_equal_p (arg0, arg1, 0))
9860 return constant_boolean_node (0, type);
9861
9862 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9863 {
9864 tree targ0 = strip_float_extensions (arg0);
9865 tree targ1 = strip_float_extensions (arg1);
9866 tree newtype = TREE_TYPE (targ0);
9867
9868 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9869 newtype = TREE_TYPE (targ1);
9870
9871 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9872 return fold_build2 (code, type, fold_convert (newtype, targ0),
9873 fold_convert (newtype, targ1));
9874 }
9875
9876 return NULL_TREE;
9877
9878 case COMPOUND_EXPR:
9879 /* When pedantic, a compound expression can be neither an lvalue
9880 nor an integer constant expression. */
9881 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9882 return NULL_TREE;
9883 /* Don't let (0, 0) be a null pointer constant. */
9884 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9885 : fold_convert (type, arg1);
9886 return pedantic_non_lvalue (tem);
9887
9888 case COMPLEX_EXPR:
9889 if (wins)
9890 return build_complex (type, arg0, arg1);
9891 return NULL_TREE;
9892
9893 case ASSERT_EXPR:
9894 /* An ASSERT_EXPR should never be passed to fold_binary. */
9895 gcc_unreachable ();
9896
9897 default:
9898 return NULL_TREE;
9899 } /* switch (code) */
9900 }
9901
9902 /* Callback for walk_tree, looking for LABEL_EXPR.
9903 Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
9904 Do not check the sub-tree of GOTO_EXPR. */
9905
9906 static tree
9907 contains_label_1 (tree *tp,
9908 int *walk_subtrees,
9909 void *data ATTRIBUTE_UNUSED)
9910 {
9911 switch (TREE_CODE (*tp))
9912 {
9913 case LABEL_EXPR:
9914 return *tp;
9915 case GOTO_EXPR:
9916 *walk_subtrees = 0;
9917 /* ... fall through ... */
9918 default:
9919 return NULL_TREE;
9920 }
9921 }
9922
9923 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which
9924 is accessible from outside the sub-tree. Returns true if such a
9925 label is found, false otherwise. */
9926
9927 static bool
9928 contains_label_p (tree st)
9929 {
9930 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9931 }
9932
9933 /* Fold a ternary expression of code CODE and type TYPE with operands
9934 OP0, OP1, and OP2. Return the folded expression if folding is
9935 successful. Otherwise, return NULL_TREE. */
9936
9937 tree
9938 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9939 {
9940 tree tem;
9941 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9942 enum tree_code_class kind = TREE_CODE_CLASS (code);
9943
9944 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9945 && TREE_CODE_LENGTH (code) == 3);
9946
9947 /* Strip any conversions that don't change the mode. This is safe
9948 for every expression, except for a comparison expression because
9949 its signedness is derived from its operands. So, in the latter
9950 case, only strip conversions that don't change the signedness.
9951
9952 Note that this is done as an internal manipulation within the
9953 constant folder, in order to find the simplest representation of
9954 the arguments so that their form can be studied. In any case,
9955 the appropriate type conversions should be put back in the tree
9956 that will get out of the constant folder. */
9957 if (op0)
9958 {
9959 arg0 = op0;
9960 STRIP_NOPS (arg0);
9961 }
9962
9963 if (op1)
9964 {
9965 arg1 = op1;
9966 STRIP_NOPS (arg1);
9967 }
9968
9969 switch (code)
9970 {
9971 case COMPONENT_REF:
9972 if (TREE_CODE (arg0) == CONSTRUCTOR
9973 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9974 {
9975 unsigned HOST_WIDE_INT idx;
9976 tree field, value;
9977 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9978 if (field == arg1)
9979 return value;
9980 }
9981 return NULL_TREE;
9982
9983 case COND_EXPR:
9984 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9985 so all simple results must be passed through pedantic_non_lvalue. */
9986 if (TREE_CODE (arg0) == INTEGER_CST)
9987 {
9988 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9989 tem = integer_zerop (arg0) ? op2 : op1;
9990 /* Only optimize constant conditions when the selected branch
9991 has the same type as the COND_EXPR. This avoids optimizing
9992 away "c ? x : throw", where the throw has a void type.
9993 Avoid throwing away an operand that contains a label. */
9994 if ((!TREE_SIDE_EFFECTS (unused_op)
9995 || !contains_label_p (unused_op))
9996 && (! VOID_TYPE_P (TREE_TYPE (tem))
9997 || VOID_TYPE_P (type)))
9998 return pedantic_non_lvalue (tem);
9999 return NULL_TREE;
10000 }
10001 if (operand_equal_p (arg1, op2, 0))
10002 return pedantic_omit_one_operand (type, arg1, arg0);
10003
10004 /* If we have A op B ? A : C, we may be able to convert this to a
10005 simpler expression, depending on the operation and the values
10006 of B and C. Signed zeros prevent all of these transformations,
10007 for reasons given above each one.
10008
10009 Also try swapping the arguments and inverting the conditional. */
10010 if (COMPARISON_CLASS_P (arg0)
10011 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10012 arg1, TREE_OPERAND (arg0, 1))
10013 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10014 {
10015 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10016 if (tem)
10017 return tem;
10018 }
10019
10020 if (COMPARISON_CLASS_P (arg0)
10021 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10022 op2,
10023 TREE_OPERAND (arg0, 1))
10024 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10025 {
10026 tem = invert_truthvalue (arg0);
10027 if (COMPARISON_CLASS_P (tem))
10028 {
10029 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10030 if (tem)
10031 return tem;
10032 }
10033 }
10034
10035 /* If the second operand is simpler than the third, swap them
10036 since that produces better jump optimization results. */
10037 if (truth_value_p (TREE_CODE (arg0))
10038 && tree_swap_operands_p (op1, op2, false))
10039 {
10040 /* See if this can be inverted. If it can't, possibly because
10041 it was a floating-point inequality comparison, don't do
10042 anything. */
10043 tem = invert_truthvalue (arg0);
10044
10045 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10046 return fold_build3 (code, type, tem, op2, op1);
10047 }
10048
10049 /* Convert A ? 1 : 0 to simply A. */
10050 if (integer_onep (op1)
10051 && integer_zerop (op2)
10052 /* If we try to convert OP0 to our type, the
10053 call to fold will try to move the conversion inside
10054 a COND, which will recurse. In that case, the COND_EXPR
10055 is probably the best choice, so leave it alone. */
10056 && type == TREE_TYPE (arg0))
10057 return pedantic_non_lvalue (arg0);
10058
10059 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10060 over COND_EXPR in cases such as floating point comparisons. */
10061 if (integer_zerop (op1)
10062 && integer_onep (op2)
10063 && truth_value_p (TREE_CODE (arg0)))
10064 return pedantic_non_lvalue (fold_convert (type,
10065 invert_truthvalue (arg0)));
10066
10067 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10068 if (TREE_CODE (arg0) == LT_EXPR
10069 && integer_zerop (TREE_OPERAND (arg0, 1))
10070 && integer_zerop (op2)
10071 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10072 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10073 TREE_TYPE (tem), tem, arg1));
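/* E.g. for 32-bit signed A, "A < 0 ? 0x80000000 : 0" folds to
   "A & 0x80000000". */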
10074
10075 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10076 already handled above. */
10077 if (TREE_CODE (arg0) == BIT_AND_EXPR
10078 && integer_onep (TREE_OPERAND (arg0, 1))
10079 && integer_zerop (op2)
10080 && integer_pow2p (arg1))
10081 {
10082 tree tem = TREE_OPERAND (arg0, 0);
10083 STRIP_NOPS (tem);
10084 if (TREE_CODE (tem) == RSHIFT_EXPR
10085 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10086 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10087 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10088 return fold_build2 (BIT_AND_EXPR, type,
10089 TREE_OPERAND (tem, 0), arg1);
10090 }
10091
10092 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10093 is probably obsolete because the first operand should be a
10094 truth value (that's why we have the two cases above), but let's
10095 leave it in until we can confirm this for all front-ends. */
10096 if (integer_zerop (op2)
10097 && TREE_CODE (arg0) == NE_EXPR
10098 && integer_zerop (TREE_OPERAND (arg0, 1))
10099 && integer_pow2p (arg1)
10100 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10101 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10102 arg1, OEP_ONLY_CONST))
10103 return pedantic_non_lvalue (fold_convert (type,
10104 TREE_OPERAND (arg0, 0)));
10105
10106 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10107 if (integer_zerop (op2)
10108 && truth_value_p (TREE_CODE (arg0))
10109 && truth_value_p (TREE_CODE (arg1)))
10110 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10111
10112 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10113 if (integer_onep (op2)
10114 && truth_value_p (TREE_CODE (arg0))
10115 && truth_value_p (TREE_CODE (arg1)))
10116 {
10117 /* Only perform transformation if ARG0 is easily inverted. */
10118 tem = invert_truthvalue (arg0);
10119 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10120 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10121 }
10122
10123 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10124 if (integer_zerop (arg1)
10125 && truth_value_p (TREE_CODE (arg0))
10126 && truth_value_p (TREE_CODE (op2)))
10127 {
10128 /* Only perform transformation if ARG0 is easily inverted. */
10129 tem = invert_truthvalue (arg0);
10130 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10131 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10132 }
10133
10134 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10135 if (integer_onep (arg1)
10136 && truth_value_p (TREE_CODE (arg0))
10137 && truth_value_p (TREE_CODE (op2)))
10138 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10139
10140 return NULL_TREE;
10141
10142 case CALL_EXPR:
10143 /* Check for a built-in function. */
10144 if (TREE_CODE (op0) == ADDR_EXPR
10145 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10146 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10147 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10148 return NULL_TREE;
10149
10150 case BIT_FIELD_REF:
10151 if (TREE_CODE (arg0) == VECTOR_CST
10152 && type == TREE_TYPE (TREE_TYPE (arg0))
10153 && host_integerp (arg1, 1)
10154 && host_integerp (op2, 1))
10155 {
10156 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10157 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10158
10159 if (width != 0
10160 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10161 && (idx % width) == 0
10162 && (idx = idx / width)
10163 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10164 {
10165 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10166 while (idx-- > 0 && elements)
10167 elements = TREE_CHAIN (elements);
10168 if (elements)
10169 return TREE_VALUE (elements);
10170 else
10171 return fold_convert (type, integer_zero_node);
10172 }
10173 }
10174 return NULL_TREE;
10175
10176 default:
10177 return NULL_TREE;
10178 } /* switch (code) */
10179 }
10180
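/* Illustrative sketch, not part of the original source: the COND_EXPR
   transformations handled above correspond to ordinary boolean and bit
   identities, spot-checked here on plain ints (two's complement
   assumed).  */
#include <assert.h>
#include <limits.h>

static void
cond_expr_identities_demo (void)
{
  int a = -5;                               /* arbitrary test value */
  unsigned int ua = (unsigned int) a;       /* its bit pattern */
  unsigned int sign = ~(UINT_MAX >> 1);     /* <sign bit of A> */
  unsigned int n = 7;

  /* A < 0 ? <sign bit of A> : 0  is simply  A & <sign bit of A>.  */
  assert ((a < 0 ? sign : 0u) == (ua & sign));

  /* (A >> N) & 1 ? (1 << N) : 0  is simply  A & (1 << N).  */
  assert ((((ua >> n) & 1u) ? (1u << n) : 0u) == (ua & (1u << n)));

  /* A ? 1 : 0  is simply  A  when A is already a truth value.  */
  assert (((a < 0) ? 1 : 0) == (a < 0));
}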
10181 /* Perform constant folding and related simplification of EXPR.
10182 The related simplifications include x*1 => x, x*0 => 0, etc.,
10183 and application of the associative law.
10184 NOP_EXPR conversions may be removed freely (as long as we
10185 are careful not to change the type of the overall expression).
10186 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10187 but we can constant-fold them if they have constant operands. */
10188
10189 #ifdef ENABLE_FOLD_CHECKING
10190 # define fold(x) fold_1 (x)
10191 static tree fold_1 (tree);
10192 static
10193 #endif
10194 tree
10195 fold (tree expr)
10196 {
10197 const tree t = expr;
10198 enum tree_code code = TREE_CODE (t);
10199 enum tree_code_class kind = TREE_CODE_CLASS (code);
10200 tree tem;
10201
10202 /* Return right away if a constant. */
10203 if (kind == tcc_constant)
10204 return t;
10205
10206 if (IS_EXPR_CODE_CLASS (kind))
10207 {
10208 tree type = TREE_TYPE (t);
10209 tree op0, op1, op2;
10210
10211 switch (TREE_CODE_LENGTH (code))
10212 {
10213 case 1:
10214 op0 = TREE_OPERAND (t, 0);
10215 tem = fold_unary (code, type, op0);
10216 return tem ? tem : expr;
10217 case 2:
10218 op0 = TREE_OPERAND (t, 0);
10219 op1 = TREE_OPERAND (t, 1);
10220 tem = fold_binary (code, type, op0, op1);
10221 return tem ? tem : expr;
10222 case 3:
10223 op0 = TREE_OPERAND (t, 0);
10224 op1 = TREE_OPERAND (t, 1);
10225 op2 = TREE_OPERAND (t, 2);
10226 tem = fold_ternary (code, type, op0, op1, op2);
10227 return tem ? tem : expr;
10228 default:
10229 break;
10230 }
10231 }
10232
10233 switch (code)
10234 {
10235 case CONST_DECL:
10236 return fold (DECL_INITIAL (t));
10237
10238 default:
10239 return t;
10240 } /* switch (code) */
10241 }
10242
10243 #ifdef ENABLE_FOLD_CHECKING
10244 #undef fold
10245
10246 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10247 static void fold_check_failed (tree, tree);
10248 void print_fold_checksum (tree);
10249
10250 /* When --enable-checking=fold is given, compute a digest of EXPR before
10251 and after the actual fold call to verify that fold did not
10252 accidentally change the original EXPR. */
10253
10254 tree
10255 fold (tree expr)
10256 {
10257 tree ret;
10258 struct md5_ctx ctx;
10259 unsigned char checksum_before[16], checksum_after[16];
10260 htab_t ht;
10261
10262 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10263 md5_init_ctx (&ctx);
10264 fold_checksum_tree (expr, &ctx, ht);
10265 md5_finish_ctx (&ctx, checksum_before);
10266 htab_empty (ht);
10267
10268 ret = fold_1 (expr);
10269
10270 md5_init_ctx (&ctx);
10271 fold_checksum_tree (expr, &ctx, ht);
10272 md5_finish_ctx (&ctx, checksum_after);
10273 htab_delete (ht);
10274
10275 if (memcmp (checksum_before, checksum_after, 16))
10276 fold_check_failed (expr, ret);
10277
10278 return ret;
10279 }
10280
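/* Illustrative sketch, not part of the original source: the checking
   wrapper above digests EXPR before and after folding and aborts on a
   mismatch.  The same shape with a toy FNV-1a hash standing in for MD5
   and a flat buffer standing in for the tree:  */
#include <assert.h>
#include <stddef.h>

static unsigned long long
toy_digest (const unsigned char *p, size_t len)
{
  unsigned long long h = 1469598103934665603ull;  /* FNV-1a offset basis */
  while (len--)
    h = (h ^ *p++) * 1099511628211ull;            /* FNV-1a prime */
  return h;
}

static int
checked_transform (unsigned char *buf, size_t len,
                   int (*transform) (const unsigned char *, size_t))
{
  unsigned long long before = toy_digest (buf, len);
  int ret = transform (buf, len);       /* must not modify its input */
  assert (before == toy_digest (buf, len));
  return ret;
}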
10281 void
10282 print_fold_checksum (tree expr)
10283 {
10284 struct md5_ctx ctx;
10285 unsigned char checksum[16], cnt;
10286 htab_t ht;
10287
10288 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10289 md5_init_ctx (&ctx);
10290 fold_checksum_tree (expr, &ctx, ht);
10291 md5_finish_ctx (&ctx, checksum);
10292 htab_delete (ht);
10293 for (cnt = 0; cnt < 16; ++cnt)
10294 fprintf (stderr, "%02x", checksum[cnt]);
10295 putc ('\n', stderr);
10296 }
10297
10298 static void
10299 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10300 {
10301 internal_error ("fold check: original tree changed by fold");
10302 }
10303
10304 static void
10305 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10306 {
10307 void **slot;
10308 enum tree_code code;
10309 char buf[sizeof (struct tree_function_decl)];
10310 int i, len;
10311
10312 recursive_label:
10313
10314 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10315 <= sizeof (struct tree_function_decl))
10316 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10317 if (expr == NULL)
10318 return;
10319 slot = htab_find_slot (ht, expr, INSERT);
10320 if (*slot != NULL)
10321 return;
10322 *slot = expr;
10323 code = TREE_CODE (expr);
10324 if (TREE_CODE_CLASS (code) == tcc_declaration
10325 && DECL_ASSEMBLER_NAME_SET_P (expr))
10326 {
10327 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10328 memcpy (buf, expr, tree_size (expr));
10329 expr = (tree) buf;
10330 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10331 }
10332 else if (TREE_CODE_CLASS (code) == tcc_type
10333 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10334 || TYPE_CACHED_VALUES_P (expr)
10335 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10336 {
10337 /* Allow these fields to be modified. */
10338 memcpy (buf, expr, tree_size (expr));
10339 expr = (tree) buf;
10340 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10341 TYPE_POINTER_TO (expr) = NULL;
10342 TYPE_REFERENCE_TO (expr) = NULL;
10343 if (TYPE_CACHED_VALUES_P (expr))
10344 {
10345 TYPE_CACHED_VALUES_P (expr) = 0;
10346 TYPE_CACHED_VALUES (expr) = NULL;
10347 }
10348 }
10349 md5_process_bytes (expr, tree_size (expr), ctx);
10350 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10351 if (TREE_CODE_CLASS (code) != tcc_type
10352 && TREE_CODE_CLASS (code) != tcc_declaration
10353 && code != TREE_LIST)
10354 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10355 switch (TREE_CODE_CLASS (code))
10356 {
10357 case tcc_constant:
10358 switch (code)
10359 {
10360 case STRING_CST:
10361 md5_process_bytes (TREE_STRING_POINTER (expr),
10362 TREE_STRING_LENGTH (expr), ctx);
10363 break;
10364 case COMPLEX_CST:
10365 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10366 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10367 break;
10368 case VECTOR_CST:
10369 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10370 break;
10371 default:
10372 break;
10373 }
10374 break;
10375 case tcc_exceptional:
10376 switch (code)
10377 {
10378 case TREE_LIST:
10379 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10380 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10381 expr = TREE_CHAIN (expr);
10382 goto recursive_label;
10383 break;
10384 case TREE_VEC:
10385 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10386 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10387 break;
10388 default:
10389 break;
10390 }
10391 break;
10392 case tcc_expression:
10393 case tcc_reference:
10394 case tcc_comparison:
10395 case tcc_unary:
10396 case tcc_binary:
10397 case tcc_statement:
10398 len = TREE_CODE_LENGTH (code);
10399 for (i = 0; i < len; ++i)
10400 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10401 break;
10402 case tcc_declaration:
10403 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10404 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10405 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10406 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10407 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10408 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10409 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10410 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10411 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10412
10413 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10414 {
10415 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10416 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10417 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10418 }
10419 break;
10420 case tcc_type:
10421 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10422 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10423 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10424 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10425 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10426 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10427 if (INTEGRAL_TYPE_P (expr)
10428 || SCALAR_FLOAT_TYPE_P (expr))
10429 {
10430 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10431 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10432 }
10433 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10434 if (TREE_CODE (expr) == RECORD_TYPE
10435 || TREE_CODE (expr) == UNION_TYPE
10436 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10437 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10438 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10439 break;
10440 default:
10441 break;
10442 }
10443 }
10444
10445 #endif
10446
10447 /* Fold a unary tree expression with code CODE of type TYPE with an
10448 operand OP0. Return a folded expression if successful. Otherwise,
10449 return a tree expression with code CODE of type TYPE with an
10450 operand OP0. */
10451
10452 tree
10453 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10454 {
10455 tree tem;
10456 #ifdef ENABLE_FOLD_CHECKING
10457 unsigned char checksum_before[16], checksum_after[16];
10458 struct md5_ctx ctx;
10459 htab_t ht;
10460
10461 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10462 md5_init_ctx (&ctx);
10463 fold_checksum_tree (op0, &ctx, ht);
10464 md5_finish_ctx (&ctx, checksum_before);
10465 htab_empty (ht);
10466 #endif
10467
10468 tem = fold_unary (code, type, op0);
10469 if (!tem)
10470 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10471
10472 #ifdef ENABLE_FOLD_CHECKING
10473 md5_init_ctx (&ctx);
10474 fold_checksum_tree (op0, &ctx, ht);
10475 md5_finish_ctx (&ctx, checksum_after);
10476 htab_delete (ht);
10477
10478 if (memcmp (checksum_before, checksum_after, 16))
10479 fold_check_failed (op0, tem);
10480 #endif
10481 return tem;
10482 }
10483
10484 /* Fold a binary tree expression with code CODE of type TYPE with
10485 operands OP0 and OP1. Return a folded expression if successful.
10486 Otherwise, return a tree expression with code CODE of type TYPE
10487 with operands OP0 and OP1. */
10488
10489 tree
10490 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10491 MEM_STAT_DECL)
10492 {
10493 tree tem;
10494 #ifdef ENABLE_FOLD_CHECKING
10495 unsigned char checksum_before_op0[16],
10496 checksum_before_op1[16],
10497 checksum_after_op0[16],
10498 checksum_after_op1[16];
10499 struct md5_ctx ctx;
10500 htab_t ht;
10501
10502 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10503 md5_init_ctx (&ctx);
10504 fold_checksum_tree (op0, &ctx, ht);
10505 md5_finish_ctx (&ctx, checksum_before_op0);
10506 htab_empty (ht);
10507
10508 md5_init_ctx (&ctx);
10509 fold_checksum_tree (op1, &ctx, ht);
10510 md5_finish_ctx (&ctx, checksum_before_op1);
10511 htab_empty (ht);
10512 #endif
10513
10514 tem = fold_binary (code, type, op0, op1);
10515 if (!tem)
10516 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10517
10518 #ifdef ENABLE_FOLD_CHECKING
10519 md5_init_ctx (&ctx);
10520 fold_checksum_tree (op0, &ctx, ht);
10521 md5_finish_ctx (&ctx, checksum_after_op0);
10522 htab_empty (ht);
10523
10524 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10525 fold_check_failed (op0, tem);
10526
10527 md5_init_ctx (&ctx);
10528 fold_checksum_tree (op1, &ctx, ht);
10529 md5_finish_ctx (&ctx, checksum_after_op1);
10530 htab_delete (ht);
10531
10532 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10533 fold_check_failed (op1, tem);
10534 #endif
10535 return tem;
10536 }
10537
10538 /* Fold a ternary tree expression with code CODE of type TYPE with
10539 operands OP0, OP1, and OP2. Return a folded expression if
10540 successful. Otherwise, return a tree expression with code CODE of
10541 type TYPE with operands OP0, OP1, and OP2. */
10542
10543 tree
10544 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10545 MEM_STAT_DECL)
10546 {
10547 tree tem;
10548 #ifdef ENABLE_FOLD_CHECKING
10549 unsigned char checksum_before_op0[16],
10550 checksum_before_op1[16],
10551 checksum_before_op2[16],
10552 checksum_after_op0[16],
10553 checksum_after_op1[16],
10554 checksum_after_op2[16];
10555 struct md5_ctx ctx;
10556 htab_t ht;
10557
10558 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10559 md5_init_ctx (&ctx);
10560 fold_checksum_tree (op0, &ctx, ht);
10561 md5_finish_ctx (&ctx, checksum_before_op0);
10562 htab_empty (ht);
10563
10564 md5_init_ctx (&ctx);
10565 fold_checksum_tree (op1, &ctx, ht);
10566 md5_finish_ctx (&ctx, checksum_before_op1);
10567 htab_empty (ht);
10568
10569 md5_init_ctx (&ctx);
10570 fold_checksum_tree (op2, &ctx, ht);
10571 md5_finish_ctx (&ctx, checksum_before_op2);
10572 htab_empty (ht);
10573 #endif
10574
10575 tem = fold_ternary (code, type, op0, op1, op2);
10576 if (!tem)
10577 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10578
10579 #ifdef ENABLE_FOLD_CHECKING
10580 md5_init_ctx (&ctx);
10581 fold_checksum_tree (op0, &ctx, ht);
10582 md5_finish_ctx (&ctx, checksum_after_op0);
10583 htab_empty (ht);
10584
10585 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10586 fold_check_failed (op0, tem);
10587
10588 md5_init_ctx (&ctx);
10589 fold_checksum_tree (op1, &ctx, ht);
10590 md5_finish_ctx (&ctx, checksum_after_op1);
10591 htab_empty (ht);
10592
10593 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10594 fold_check_failed (op1, tem);
10595
10596 md5_init_ctx (&ctx);
10597 fold_checksum_tree (op2, &ctx, ht);
10598 md5_finish_ctx (&ctx, checksum_after_op2);
10599 htab_delete (ht);
10600
10601 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10602 fold_check_failed (op2, tem);
10603 #endif
10604 return tem;
10605 }
10606
10607 /* Perform constant folding and related simplification of initializer
10608 expression EXPR. These behave identically to "fold_buildN" but ignore
10609 potential run-time traps and exceptions that fold must preserve. */
10610
10611 #define START_FOLD_INIT \
10612 int saved_signaling_nans = flag_signaling_nans;\
10613 int saved_trapping_math = flag_trapping_math;\
10614 int saved_rounding_math = flag_rounding_math;\
10615 int saved_trapv = flag_trapv;\
10616 flag_signaling_nans = 0;\
10617 flag_trapping_math = 0;\
10618 flag_rounding_math = 0;\
10619 flag_trapv = 0
10620
10621 #define END_FOLD_INIT \
10622 flag_signaling_nans = saved_signaling_nans;\
10623 flag_trapping_math = saved_trapping_math;\
10624 flag_rounding_math = saved_rounding_math;\
10625 flag_trapv = saved_trapv
10626
10627 tree
10628 fold_build1_initializer (enum tree_code code, tree type, tree op)
10629 {
10630 tree result;
10631 START_FOLD_INIT;
10632
10633 result = fold_build1 (code, type, op);
10634
10635 END_FOLD_INIT;
10636 return result;
10637 }
10638
10639 tree
10640 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10641 {
10642 tree result;
10643 START_FOLD_INIT;
10644
10645 result = fold_build2 (code, type, op0, op1);
10646
10647 END_FOLD_INIT;
10648 return result;
10649 }
10650
10651 tree
10652 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10653 tree op2)
10654 {
10655 tree result;
10656 START_FOLD_INIT;
10657
10658 result = fold_build3 (code, type, op0, op1, op2);
10659
10660 END_FOLD_INIT;
10661 return result;
10662 }
10663
10664 #undef START_FOLD_INIT
10665 #undef END_FOLD_INIT
10666
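/* Illustrative sketch, not part of the original source: the
   START_FOLD_INIT/END_FOLD_INIT pair implements a save, clear,
   compute, restore pattern around global mode flags.  The same shape
   with a single hypothetical flag:  */
static int toy_flag_trapping_math = 1;   /* hypothetical global flag */

static int
toy_fold_initializer (int (*fold_fn) (int), int op)
{
  int saved_trapping_math = toy_flag_trapping_math;
  int result;

  toy_flag_trapping_math = 0;   /* initializers cannot trap at run time */
  result = fold_fn (op);
  toy_flag_trapping_math = saved_trapping_math;
  return result;
}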
10667 /* Determine if the first argument is a multiple of the second argument.
10668 Return 0 if it is not, or if we cannot easily determine that it is.
10669
10670 An example of the sort of thing we care about (at this point; this routine
10671 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10672 fold cases do now) is discovering that
10673
10674 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10675
10676 is a multiple of
10677
10678 SAVE_EXPR (J * 8)
10679
10680 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10681
10682 This code also handles discovering that
10683
10684 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10685
10686 is a multiple of 8 so we don't have to worry about dealing with a
10687 possible remainder.
10688
10689 Note that we *look* inside a SAVE_EXPR only to determine how it was
10690 calculated; it is not safe for fold to do much of anything else with the
10691 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10692 at run time. For example, the latter example above *cannot* be implemented
10693 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10694 evaluation time of the original SAVE_EXPR is not necessarily the same at
10695 the time the new expression is evaluated. The only optimization of this
10696 sort that would be valid is changing
10697
10698 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10699
10700 divided by 8 to
10701
10702 SAVE_EXPR (I) * SAVE_EXPR (J)
10703
10704 (where the same SAVE_EXPR (J) is used in the original and the
10705 transformed version). */
10706
10707 static int
10708 multiple_of_p (tree type, tree top, tree bottom)
10709 {
10710 if (operand_equal_p (top, bottom, 0))
10711 return 1;
10712
10713 if (TREE_CODE (type) != INTEGER_TYPE)
10714 return 0;
10715
10716 switch (TREE_CODE (top))
10717 {
10718 case BIT_AND_EXPR:
10719 /* Bitwise and provides a power of two multiple. If the mask is
10720 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10721 if (!integer_pow2p (bottom))
10722 return 0;
10723 /* FALLTHRU */
10724
10725 case MULT_EXPR:
10726 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10727 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10728
10729 case PLUS_EXPR:
10730 case MINUS_EXPR:
10731 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10732 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10733
10734 case LSHIFT_EXPR:
10735 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10736 {
10737 tree op1, t1;
10738
10739 op1 = TREE_OPERAND (top, 1);
10740 /* const_binop may not detect overflow correctly,
10741 so check for it explicitly here. */
10742 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10743 > TREE_INT_CST_LOW (op1)
10744 && TREE_INT_CST_HIGH (op1) == 0
10745 && 0 != (t1 = fold_convert (type,
10746 const_binop (LSHIFT_EXPR,
10747 size_one_node,
10748 op1, 0)))
10749 && ! TREE_OVERFLOW (t1))
10750 return multiple_of_p (type, t1, bottom);
10751 }
10752 return 0;
10753
10754 case NOP_EXPR:
10755 /* Can't handle conversions from non-integral or wider integral type. */
10756 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10757 || (TYPE_PRECISION (type)
10758 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10759 return 0;
10760
10761 /* ... fall through ... */
10762
10763 case SAVE_EXPR:
10764 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10765
10766 case INTEGER_CST:
10767 if (TREE_CODE (bottom) != INTEGER_CST
10768 || (TYPE_UNSIGNED (type)
10769 && (tree_int_cst_sgn (top) < 0
10770 || tree_int_cst_sgn (bottom) < 0)))
10771 return 0;
10772 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10773 top, bottom, 0));
10774
10775 default:
10776 return 0;
10777 }
10778 }
10779
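/* Illustrative sketch, not part of the original source: the recursion
   in multiple_of_p relies on these algebraic facts, spot-checked on
   plain integers ("b | x" meaning "b divides x"):
     MULT_EXPR:     b | t1  or  b | t2   implies  b | t1 * t2
     PLUS/MINUS:    b | t1  and b | t2   implies  b | t1 +/- t2
     BIT_AND_EXPR:  b a power of two and b | mask  implies  b | (x & mask).  */
#include <assert.h>

static void
multiple_of_demo (void)
{
  int b = 8, t1 = 24, t2 = 5, x = 12345;

  assert (t1 % b == 0);                   /* b | t1 */
  assert ((t1 * t2) % b == 0);            /* MULT_EXPR rule */
  assert ((t1 + 16) % b == 0 && (t1 - 16) % b == 0);  /* PLUS/MINUS rule */
  assert ((x & ~(b - 1)) % b == 0);       /* BIT_AND_EXPR, mask ~(b-1) */
}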
10780 /* Return true if `t' is known to be non-negative. */
10781
10782 int
10783 tree_expr_nonnegative_p (tree t)
10784 {
10785 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10786 return 1;
10787
10788 switch (TREE_CODE (t))
10789 {
10790 case ABS_EXPR:
10791 /* We can't return 1 if flag_wrapv is set because
10792 ABS_EXPR<INT_MIN> = INT_MIN. */
10793 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10794 return 1;
10795 break;
10796
10797 case INTEGER_CST:
10798 return tree_int_cst_sgn (t) >= 0;
10799
10800 case REAL_CST:
10801 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10802
10803 case PLUS_EXPR:
10804 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10805 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10806 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10807
10808 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10809 both unsigned and at least 2 bits shorter than the result. */
10810 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10811 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10812 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10813 {
10814 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10815 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10816 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10817 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10818 {
10819 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10820 TYPE_PRECISION (inner2)) + 1;
10821 return prec < TYPE_PRECISION (TREE_TYPE (t));
10822 }
10823 }
10824 break;
10825
10826 case MULT_EXPR:
10827 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10828 {
10829 /* x * x for floating point x is always non-negative. */
10830 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10831 return 1;
10832 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10833 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10834 }
10835
10836 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10837 both unsigned and their total bits is shorter than the result. */
10838 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10839 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10840 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10841 {
10842 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10843 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10844 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10845 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10846 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10847 < TYPE_PRECISION (TREE_TYPE (t));
10848 }
10849 return 0;
10850
10851 case BIT_AND_EXPR:
10852 case MAX_EXPR:
10853 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10854 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10855
10856 case BIT_IOR_EXPR:
10857 case BIT_XOR_EXPR:
10858 case MIN_EXPR:
10859 case RDIV_EXPR:
10860 case TRUNC_DIV_EXPR:
10861 case CEIL_DIV_EXPR:
10862 case FLOOR_DIV_EXPR:
10863 case ROUND_DIV_EXPR:
10864 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10865 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10866
10867 case TRUNC_MOD_EXPR:
10868 case CEIL_MOD_EXPR:
10869 case FLOOR_MOD_EXPR:
10870 case ROUND_MOD_EXPR:
10871 case SAVE_EXPR:
10872 case NON_LVALUE_EXPR:
10873 case FLOAT_EXPR:
10874 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10875
10876 case COMPOUND_EXPR:
10877 case MODIFY_EXPR:
10878 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10879
10880 case BIND_EXPR:
10881 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10882
10883 case COND_EXPR:
10884 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10885 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10886
10887 case NOP_EXPR:
10888 {
10889 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10890 tree outer_type = TREE_TYPE (t);
10891
10892 if (TREE_CODE (outer_type) == REAL_TYPE)
10893 {
10894 if (TREE_CODE (inner_type) == REAL_TYPE)
10895 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10896 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10897 {
10898 if (TYPE_UNSIGNED (inner_type))
10899 return 1;
10900 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10901 }
10902 }
10903 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10904 {
10905 if (TREE_CODE (inner_type) == REAL_TYPE)
10906 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10907 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10908 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10909 && TYPE_UNSIGNED (inner_type);
10910 }
10911 }
10912 break;
10913
10914 case TARGET_EXPR:
10915 {
10916 tree temp = TARGET_EXPR_SLOT (t);
10917 t = TARGET_EXPR_INITIAL (t);
10918
10919 /* If the initializer is non-void, then it's a normal expression
10920 that will be assigned to the slot. */
10921 if (!VOID_TYPE_P (t))
10922 return tree_expr_nonnegative_p (t);
10923
10924 /* Otherwise, the initializer sets the slot in some way. One common
10925 way is an assignment statement at the end of the initializer. */
10926 while (1)
10927 {
10928 if (TREE_CODE (t) == BIND_EXPR)
10929 t = expr_last (BIND_EXPR_BODY (t));
10930 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10931 || TREE_CODE (t) == TRY_CATCH_EXPR)
10932 t = expr_last (TREE_OPERAND (t, 0));
10933 else if (TREE_CODE (t) == STATEMENT_LIST)
10934 t = expr_last (t);
10935 else
10936 break;
10937 }
10938 if (TREE_CODE (t) == MODIFY_EXPR
10939 && TREE_OPERAND (t, 0) == temp)
10940 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10941
10942 return 0;
10943 }
10944
10945 case CALL_EXPR:
10946 {
10947 tree fndecl = get_callee_fndecl (t);
10948 tree arglist = TREE_OPERAND (t, 1);
10949 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10950 switch (DECL_FUNCTION_CODE (fndecl))
10951 {
10952 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10953 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10954 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10955 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10956
10957 CASE_BUILTIN_F (BUILT_IN_ACOS)
10958 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10959 CASE_BUILTIN_F (BUILT_IN_CABS)
10960 CASE_BUILTIN_F (BUILT_IN_COSH)
10961 CASE_BUILTIN_F (BUILT_IN_ERFC)
10962 CASE_BUILTIN_F (BUILT_IN_EXP)
10963 CASE_BUILTIN_F (BUILT_IN_EXP10)
10964 CASE_BUILTIN_F (BUILT_IN_EXP2)
10965 CASE_BUILTIN_F (BUILT_IN_FABS)
10966 CASE_BUILTIN_F (BUILT_IN_FDIM)
10967 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10968 CASE_BUILTIN_F (BUILT_IN_POW10)
10969 CASE_BUILTIN_I (BUILT_IN_FFS)
10970 CASE_BUILTIN_I (BUILT_IN_PARITY)
10971 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10972 /* Always true. */
10973 return 1;
10974
10975 CASE_BUILTIN_F (BUILT_IN_SQRT)
10976 /* sqrt(-0.0) is -0.0. */
10977 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10978 return 1;
10979 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10980
10981 CASE_BUILTIN_F (BUILT_IN_ASINH)
10982 CASE_BUILTIN_F (BUILT_IN_ATAN)
10983 CASE_BUILTIN_F (BUILT_IN_ATANH)
10984 CASE_BUILTIN_F (BUILT_IN_CBRT)
10985 CASE_BUILTIN_F (BUILT_IN_CEIL)
10986 CASE_BUILTIN_F (BUILT_IN_ERF)
10987 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10988 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10989 CASE_BUILTIN_F (BUILT_IN_FMOD)
10990 CASE_BUILTIN_F (BUILT_IN_FREXP)
10991 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10992 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10993 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10994 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10995 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10996 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10997 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10998 CASE_BUILTIN_F (BUILT_IN_LRINT)
10999 CASE_BUILTIN_F (BUILT_IN_LROUND)
11000 CASE_BUILTIN_F (BUILT_IN_MODF)
11001 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
11002 CASE_BUILTIN_F (BUILT_IN_POW)
11003 CASE_BUILTIN_F (BUILT_IN_RINT)
11004 CASE_BUILTIN_F (BUILT_IN_ROUND)
11005 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
11006 CASE_BUILTIN_F (BUILT_IN_SINH)
11007 CASE_BUILTIN_F (BUILT_IN_TANH)
11008 CASE_BUILTIN_F (BUILT_IN_TRUNC)
11009 /* True if the 1st argument is nonnegative. */
11010 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11011
11012 CASE_BUILTIN_F (BUILT_IN_FMAX)
11013 /* True if the 1st OR 2nd arguments are nonnegative. */
11014 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11015 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11016
11017 CASE_BUILTIN_F (BUILT_IN_FMIN)
11018 /* True if the 1st AND 2nd arguments are nonnegative. */
11019 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11020 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11021
11022 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11023 /* True if the 2nd argument is nonnegative. */
11024 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11025
11026 default:
11027 break;
11028 #undef CASE_BUILTIN_F
11029 #undef CASE_BUILTIN_I
11030 }
11031 }
11032
11033 /* ... fall through ... */
11034
11035 default:
11036 if (truth_value_p (TREE_CODE (t)))
11037 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11038 return 1;
11039 }
11040
11041 /* We don't know the sign of `t', so be conservative and return false. */
11042 return 0;
11043 }
11044
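/* Illustrative sketch, not part of the original source: the
   zero_extend cases above.  Two unsigned 8-bit values widened to a
   32-bit signed type cannot produce a negative sum or product, since
   8 + 1 carry bits (for PLUS) and 8 + 8 bits (for MULT) both fit
   strictly below the 32-bit sign bit.  */
#include <assert.h>

static void
zero_extend_demo (void)
{
  unsigned char x = 255, y = 255;     /* worst case */
  int sum = (int) x + (int) y;        /* zero_extend(x) + zero_extend(y) */
  int prod = (int) x * (int) y;       /* zero_extend(x) * zero_extend(y) */

  assert (sum == 510 && sum >= 0);
  assert (prod == 65025 && prod >= 0);
}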
11045 /* Return true when T is an expression known to be nonzero.
11046 For floating point we further ensure that T is not denormal.
11047 Similar logic is present in nonzero_address in rtlanal.c. */
11048
11049 bool
11050 tree_expr_nonzero_p (tree t)
11051 {
11052 tree type = TREE_TYPE (t);
11053
11054 /* Doing something useful for floating point would need more work. */
11055 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11056 return false;
11057
11058 switch (TREE_CODE (t))
11059 {
11060 case ABS_EXPR:
11061 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11062
11063 case INTEGER_CST:
11064 /* We used to test for !integer_zerop here. This does not work correctly
11065 if TREE_CONSTANT_OVERFLOW (t). */
11066 return (TREE_INT_CST_LOW (t) != 0
11067 || TREE_INT_CST_HIGH (t) != 0);
11068
11069 case PLUS_EXPR:
11070 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11071 {
11072 /* In the presence of negative values it is hard
11073 to say anything. */
11074 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11075 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11076 return false;
11077 /* One of the operands must be positive and the other non-negative. */
11078 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11079 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11080 }
11081 break;
11082
11083 case MULT_EXPR:
11084 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11085 {
11086 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11087 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11088 }
11089 break;
11090
11091 case NOP_EXPR:
11092 {
11093 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11094 tree outer_type = TREE_TYPE (t);
11095
11096 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11097 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11098 }
11099 break;
11100
11101 case ADDR_EXPR:
11102 {
11103 tree base = get_base_address (TREE_OPERAND (t, 0));
11104
11105 if (!base)
11106 return false;
11107
11108 /* Weak declarations may link to NULL. */
11109 if (VAR_OR_FUNCTION_DECL_P (base))
11110 return !DECL_WEAK (base);
11111
11112 /* Constants are never weak. */
11113 if (CONSTANT_CLASS_P (base))
11114 return true;
11115
11116 return false;
11117 }
11118
11119 case COND_EXPR:
11120 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11121 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11122
11123 case MIN_EXPR:
11124 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11125 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11126
11127 case MAX_EXPR:
11128 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11129 {
11130 /* When both operands are nonzero, then MAX must be too. */
11131 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11132 return true;
11133
11134 /* MAX where operand 0 is positive is positive. */
11135 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11136 }
11137 /* MAX where operand 1 is positive is positive. */
11138 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11139 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11140 return true;
11141 break;
11142
11143 case COMPOUND_EXPR:
11144 case MODIFY_EXPR:
11145 case BIND_EXPR:
11146 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11147
11148 case SAVE_EXPR:
11149 case NON_LVALUE_EXPR:
11150 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11151
11152 case BIT_IOR_EXPR:
11153 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11154 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11155
11156 case CALL_EXPR:
11157 return alloca_call_p (t);
11158
11159 default:
11160 break;
11161 }
11162 return false;
11163 }
11164
11165 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11166 attempt to fold the expression to a constant without modifying TYPE,
11167 OP0 or OP1.
11168
11169 If the expression could be simplified to a constant, then return
11170 the constant. If the expression would not be simplified to a
11171 constant, then return NULL_TREE. */
11172
11173 tree
11174 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11175 {
11176 tree tem = fold_binary (code, type, op0, op1);
11177 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11178 }
11179
11180 /* Given the components of a unary expression CODE, TYPE and OP0,
11181 attempt to fold the expression to a constant without modifying
11182 TYPE or OP0.
11183
11184 If the expression could be simplified to a constant, then return
11185 the constant. If the expression would not be simplified to a
11186 constant, then return NULL_TREE. */
11187
11188 tree
11189 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11190 {
11191 tree tem = fold_unary (code, type, op0);
11192 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11193 }
11194
11195 /* If EXP represents referencing an element in a constant string
11196 (either via pointer arithmetic or array indexing), return the
11197 tree representing the value accessed, otherwise return NULL. */
11198
11199 tree
11200 fold_read_from_constant_string (tree exp)
11201 {
11202 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11203 {
11204 tree exp1 = TREE_OPERAND (exp, 0);
11205 tree index;
11206 tree string;
11207
11208 if (TREE_CODE (exp) == INDIRECT_REF)
11209 string = string_constant (exp1, &index);
11210 else
11211 {
11212 tree low_bound = array_ref_low_bound (exp);
11213 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11214
11215 /* Optimize the special-case of a zero lower bound.
11216
11217 We convert the low_bound to sizetype to avoid some problems
11218 with constant folding. (E.g. suppose the lower bound is 1,
11219 and its mode is QI. Without the conversion, (ARRAY
11220 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11221 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11222 if (! integer_zerop (low_bound))
11223 index = size_diffop (index, fold_convert (sizetype, low_bound));
11224
11225 string = exp1;
11226 }
11227
11228 if (string
11229 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11230 && TREE_CODE (string) == STRING_CST
11231 && TREE_CODE (index) == INTEGER_CST
11232 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11233 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11234 == MODE_INT)
11235 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11236 return fold_convert (TREE_TYPE (exp),
11237 build_int_cst (NULL_TREE,
11238 (TREE_STRING_POINTER (string)
11239 [TREE_INT_CST_LOW (index)])));
11240 }
11241 return NULL;
11242 }
11243
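/* Illustrative sketch, not part of the original source: the "Oops"
   pitfall described above.  Negating a narrow unsigned low bound
   before widening wraps in the narrow type: -(unsigned char)1 is 255,
   not -1, which is why the code converts to sizetype first.  */
#include <assert.h>
#include <stddef.h>

static void
low_bound_demo (void)
{
  unsigned char low_bound = 1;
  size_t index = 4;

  /* Wrong: negate in the narrow type, then widen.  */
  size_t bad = index + (size_t) (unsigned char) -low_bound;  /* index + 255 */

  /* Right: widen first, then subtract, as the code above does.  */
  size_t good = index - (size_t) low_bound;                  /* index - 1 */

  assert (good == 3 && bad != 3);
}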
11244 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11245 an integer constant or real constant.
11246
11247 TYPE is the type of the result. */
11248
11249 static tree
11250 fold_negate_const (tree arg0, tree type)
11251 {
11252 tree t = NULL_TREE;
11253
11254 switch (TREE_CODE (arg0))
11255 {
11256 case INTEGER_CST:
11257 {
11258 unsigned HOST_WIDE_INT low;
11259 HOST_WIDE_INT high;
11260 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11261 TREE_INT_CST_HIGH (arg0),
11262 &low, &high);
11263 t = build_int_cst_wide (type, low, high);
11264 t = force_fit_type (t, 1,
11265 (overflow | TREE_OVERFLOW (arg0))
11266 && !TYPE_UNSIGNED (type),
11267 TREE_CONSTANT_OVERFLOW (arg0));
11268 break;
11269 }
11270
11271 case REAL_CST:
11272 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11273 break;
11274
11275 default:
11276 gcc_unreachable ();
11277 }
11278
11279 return t;
11280 }
11281
11282 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11283 an integer constant or real constant.
11284
11285 TYPE is the type of the result. */
11286
11287 tree
11288 fold_abs_const (tree arg0, tree type)
11289 {
11290 tree t = NULL_TREE;
11291
11292 switch (TREE_CODE (arg0))
11293 {
11294 case INTEGER_CST:
11295 /* If the value is unsigned, then the absolute value is
11296 the same as the ordinary value. */
11297 if (TYPE_UNSIGNED (type))
11298 t = arg0;
11299 /* Similarly, if the value is non-negative. */
11300 else if (INT_CST_LT (integer_minus_one_node, arg0))
11301 t = arg0;
11302 /* If the value is negative, then the absolute value is
11303 its negation. */
11304 else
11305 {
11306 unsigned HOST_WIDE_INT low;
11307 HOST_WIDE_INT high;
11308 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11309 TREE_INT_CST_HIGH (arg0),
11310 &low, &high);
11311 t = build_int_cst_wide (type, low, high);
11312 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11313 TREE_CONSTANT_OVERFLOW (arg0));
11314 }
11315 break;
11316
11317 case REAL_CST:
11318 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11319 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11320 else
11321 t = arg0;
11322 break;
11323
11324 default:
11325 gcc_unreachable ();
11326 }
11327
11328 return t;
11329 }
11330
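/* Illustrative sketch, not part of the original source: why
   fold_abs_const must propagate overflow, and why
   tree_expr_nonnegative_p refuses ABS_EXPR under flag_wrapv.  In
   two's complement, negating INT_MIN wraps back to INT_MIN, so the
   "absolute value" can be negative.  The demo negates via unsigned
   arithmetic to keep the demo itself free of signed-overflow
   undefined behavior.  */
#include <assert.h>
#include <limits.h>

static void
abs_int_min_demo (void)
{
  unsigned int u = (unsigned int) INT_MIN;
  int wrapped = (int) (0u - u);     /* wrapping -INT_MIN (two's complement) */

  assert (wrapped == INT_MIN);      /* ABS_EXPR<INT_MIN> stays negative */
}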
11331 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11332 constant. TYPE is the type of the result. */
11333
11334 static tree
11335 fold_not_const (tree arg0, tree type)
11336 {
11337 tree t = NULL_TREE;
11338
11339 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11340
11341 t = build_int_cst_wide (type,
11342 ~ TREE_INT_CST_LOW (arg0),
11343 ~ TREE_INT_CST_HIGH (arg0));
11344 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11345 TREE_CONSTANT_OVERFLOW (arg0));
11346
11347 return t;
11348 }
11349
11350 /* Given CODE, a relational operator, the target type, TYPE and two
11351 constant operands OP0 and OP1, return the result of the
11352 relational operation. If the result is not a compile time
11353 constant, then return NULL_TREE. */
11354
11355 static tree
11356 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11357 {
11358 int result, invert;
11359
11360 /* From here on, the only cases we handle are when the result is
11361 known to be a constant. */
11362
11363 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11364 {
11365 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11366 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11367
11368 /* Handle the cases where either operand is a NaN. */
11369 if (real_isnan (c0) || real_isnan (c1))
11370 {
11371 switch (code)
11372 {
11373 case EQ_EXPR:
11374 case ORDERED_EXPR:
11375 result = 0;
11376 break;
11377
11378 case NE_EXPR:
11379 case UNORDERED_EXPR:
11380 case UNLT_EXPR:
11381 case UNLE_EXPR:
11382 case UNGT_EXPR:
11383 case UNGE_EXPR:
11384 case UNEQ_EXPR:
11385 result = 1;
11386 break;
11387
11388 case LT_EXPR:
11389 case LE_EXPR:
11390 case GT_EXPR:
11391 case GE_EXPR:
11392 case LTGT_EXPR:
11393 if (flag_trapping_math)
11394 return NULL_TREE;
11395 result = 0;
11396 break;
11397
11398 default:
11399 gcc_unreachable ();
11400 }
11401
11402 return constant_boolean_node (result, type);
11403 }
11404
11405 return constant_boolean_node (real_compare (code, c0, c1), type);
11406 }
11407
11408 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11409
11410 To compute GT, swap the arguments and do LT.
11411 To compute GE, do LT and invert the result.
11412 To compute LE, swap the arguments, do LT and invert the result.
11413 To compute NE, do EQ and invert the result.
11414
11415 Therefore, the code below must handle only EQ and LT. */
11416
11417 if (code == LE_EXPR || code == GT_EXPR)
11418 {
11419 tree tem = op0;
11420 op0 = op1;
11421 op1 = tem;
11422 code = swap_tree_comparison (code);
11423 }
11424
11425 /* Note that it is safe to invert for real values here because we
11426 have already handled the one case that it matters. */
11427
11428 invert = 0;
11429 if (code == NE_EXPR || code == GE_EXPR)
11430 {
11431 invert = 1;
11432 code = invert_tree_comparison (code, false);
11433 }
11434
11435 /* Compute a result for LT or EQ if the args permit;
11436 otherwise return NULL_TREE. */
11437 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11438 {
11439 if (code == EQ_EXPR)
11440 result = tree_int_cst_equal (op0, op1);
11441 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11442 result = INT_CST_LT_UNSIGNED (op0, op1);
11443 else
11444 result = INT_CST_LT (op0, op1);
11445 }
11446 else
11447 return NULL_TREE;
11448
11449 if (invert)
11450 result ^= 1;
11451 return constant_boolean_node (result, type);
11452 }
11453
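/* Illustrative sketch, not part of the original source: the
   swap-and-invert reduction used by fold_relational_const, written
   out for plain integers.  Every ordering test is expressed through
   lt() and eq() alone, exactly as the comment above describes.  */
static int lt (int a, int b) { return a < b; }
static int eq (int a, int b) { return a == b; }

static int gt (int a, int b) { return lt (b, a); }    /* swap */
static int ge (int a, int b) { return !lt (a, b); }   /* invert */
static int le (int a, int b) { return !lt (b, a); }   /* swap + invert */
static int ne (int a, int b) { return !eq (a, b); }   /* invert */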
11454 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11455 Don't build a cleanup point expression if EXPR doesn't have side
11456 effects. */
11457
11458 tree
11459 fold_build_cleanup_point_expr (tree type, tree expr)
11460 {
11461 /* If the expression does not have side effects then we don't have to wrap
11462 it with a cleanup point expression. */
11463 if (!TREE_SIDE_EFFECTS (expr))
11464 return expr;
11465
11466 /* If the expression is a return, check whether the expression inside the
11467 return, or the right-hand side of the MODIFY_EXPR it contains, has no
11468 side effects. If either has no side effects, we don't need to wrap the
11469 expression in a cleanup point expression. Note we don't check the
11470 left-hand side of the modify because it should always be the return decl. */
11471 if (TREE_CODE (expr) == RETURN_EXPR)
11472 {
11473 tree op = TREE_OPERAND (expr, 0);
11474 if (!op || !TREE_SIDE_EFFECTS (op))
11475 return expr;
11476 op = TREE_OPERAND (op, 1);
11477 if (!TREE_SIDE_EFFECTS (op))
11478 return expr;
11479 }
11480
11481 return build1 (CLEANUP_POINT_EXPR, type, expr);
11482 }
11483
11484 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11485 avoid confusing the gimplify process. */
11486
11487 tree
11488 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11489 {
11490 /* The size of the object is not relevant when talking about its address. */
11491 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11492 t = TREE_OPERAND (t, 0);
11493
11494 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11495 if (TREE_CODE (t) == INDIRECT_REF
11496 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11497 {
11498 t = TREE_OPERAND (t, 0);
11499 if (TREE_TYPE (t) != ptrtype)
11500 t = build1 (NOP_EXPR, ptrtype, t);
11501 }
11502 else
11503 {
11504 tree base = t;
11505
11506 while (handled_component_p (base))
11507 base = TREE_OPERAND (base, 0);
11508 if (DECL_P (base))
11509 TREE_ADDRESSABLE (base) = 1;
11510
11511 t = build1 (ADDR_EXPR, ptrtype, t);
11512 }
11513
11514 return t;
11515 }
11516
11517 tree
11518 build_fold_addr_expr (tree t)
11519 {
11520 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11521 }
11522
11523 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11524 of an indirection through OP0, or NULL_TREE if no simplification is
11525 possible. */
11526
11527 tree
11528 fold_indirect_ref_1 (tree type, tree op0)
11529 {
11530 tree sub = op0;
11531 tree subtype;
11532
11533 STRIP_NOPS (sub);
11534 subtype = TREE_TYPE (sub);
11535 if (!POINTER_TYPE_P (subtype))
11536 return NULL_TREE;
11537
11538 if (TREE_CODE (sub) == ADDR_EXPR)
11539 {
11540 tree op = TREE_OPERAND (sub, 0);
11541 tree optype = TREE_TYPE (op);
11542 /* *&p => p */
11543 if (type == optype)
11544 return op;
11545 /* *(foo *)&fooarray => fooarray[0] */
11546 else if (TREE_CODE (optype) == ARRAY_TYPE
11547 && type == TREE_TYPE (optype))
11548 {
11549 tree type_domain = TYPE_DOMAIN (optype);
11550 tree min_val = size_zero_node;
11551 if (type_domain && TYPE_MIN_VALUE (type_domain))
11552 min_val = TYPE_MIN_VALUE (type_domain);
11553 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11554 }
11555 }
11556
11557 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11558 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11559 && type == TREE_TYPE (TREE_TYPE (subtype)))
11560 {
11561 tree type_domain;
11562 tree min_val = size_zero_node;
11563 sub = build_fold_indirect_ref (sub);
11564 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11565 if (type_domain && TYPE_MIN_VALUE (type_domain))
11566 min_val = TYPE_MIN_VALUE (type_domain);
11567 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11568 }
11569
11570 return NULL_TREE;
11571 }
11572
11573 /* Builds an expression for an indirection through T, simplifying some
11574 cases. */
11575
11576 tree
11577 build_fold_indirect_ref (tree t)
11578 {
11579 tree type = TREE_TYPE (TREE_TYPE (t));
11580 tree sub = fold_indirect_ref_1 (type, t);
11581
11582 if (sub)
11583 return sub;
11584 else
11585 return build1 (INDIRECT_REF, type, t);
11586 }
11587
11588 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11589
11590 tree
11591 fold_indirect_ref (tree t)
11592 {
11593 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11594
11595 if (sub)
11596 return sub;
11597 else
11598 return t;
11599 }
11600
11601 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11602 whose result is ignored. The type of the returned tree need not be
11603 the same as that of the original expression. */
11604
11605 tree
11606 fold_ignored_result (tree t)
11607 {
11608 if (!TREE_SIDE_EFFECTS (t))
11609 return integer_zero_node;
11610
11611 for (;;)
11612 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11613 {
11614 case tcc_unary:
11615 t = TREE_OPERAND (t, 0);
11616 break;
11617
11618 case tcc_binary:
11619 case tcc_comparison:
11620 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11621 t = TREE_OPERAND (t, 0);
11622 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11623 t = TREE_OPERAND (t, 1);
11624 else
11625 return t;
11626 break;
11627
11628 case tcc_expression:
11629 switch (TREE_CODE (t))
11630 {
11631 case COMPOUND_EXPR:
11632 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11633 return t;
11634 t = TREE_OPERAND (t, 0);
11635 break;
11636
11637 case COND_EXPR:
11638 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11639 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11640 return t;
11641 t = TREE_OPERAND (t, 0);
11642 break;
11643
11644 default:
11645 return t;
11646 }
11647 break;
11648
11649 default:
11650 return t;
11651 }
11652 }
11653
11654 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11655 This can only be applied to objects of a sizetype. */
11656
11657 tree
11658 round_up (tree value, int divisor)
11659 {
11660 tree div = NULL_TREE;
11661
11662 gcc_assert (divisor > 0);
11663 if (divisor == 1)
11664 return value;
11665
11666 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11667 have to do anything. Only do this when we are not given a constant,
11668 because for a constant this check is more expensive than just
11669 doing the rounding. */
11670 if (TREE_CODE (value) != INTEGER_CST)
11671 {
11672 div = build_int_cst (TREE_TYPE (value), divisor);
11673
11674 if (multiple_of_p (TREE_TYPE (value), value, div))
11675 return value;
11676 }
11677
11678 /* If divisor is a power of two, simplify this to bit manipulation. */
11679 if (divisor == (divisor & -divisor))
11680 {
11681 tree t;
11682
11683 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11684 value = size_binop (PLUS_EXPR, value, t);
11685 t = build_int_cst (TREE_TYPE (value), -divisor);
11686 value = size_binop (BIT_AND_EXPR, value, t);
11687 }
11688 else
11689 {
11690 if (!div)
11691 div = build_int_cst (TREE_TYPE (value), divisor);
11692 value = size_binop (CEIL_DIV_EXPR, value, div);
11693 value = size_binop (MULT_EXPR, value, div);
11694 }
11695
11696 return value;
11697 }
11698
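/* Illustrative sketch, not part of the original source: the two
   strategies used by round_up above, on plain unsigned values.  For a
   power-of-two divisor, add divisor-1 and mask; otherwise take the
   ceiling division and multiply back.  */
#include <assert.h>

static unsigned int
toy_round_up (unsigned int value, unsigned int divisor)
{
  if ((divisor & (divisor - 1)) == 0)     /* power of two */
    return (value + divisor - 1) & ~(divisor - 1);
  return (value + divisor - 1) / divisor * divisor;
}

static void
toy_round_up_demo (void)
{
  assert (toy_round_up (13, 8) == 16);    /* bit-manipulation path */
  assert (toy_round_up (13, 6) == 18);    /* ceil-div path */
  assert (toy_round_up (16, 8) == 16);    /* already a multiple */
}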
11699 /* Likewise, but round down. */
11700
11701 tree
11702 round_down (tree value, int divisor)
11703 {
11704 tree div = NULL_TREE;
11705
11706 gcc_assert (divisor > 0);
11707 if (divisor == 1)
11708 return value;
11709
11710 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11711 have to do anything. Only do this when we are not given a constant,
11712 because for a constant this check is more expensive than just
11713 doing the rounding. */
11714 if (TREE_CODE (value) != INTEGER_CST)
11715 {
11716 div = build_int_cst (TREE_TYPE (value), divisor);
11717
11718 if (multiple_of_p (TREE_TYPE (value), value, div))
11719 return value;
11720 }
11721
11722 /* If divisor is a power of two, simplify this to bit manipulation. */
11723 if (divisor == (divisor & -divisor))
11724 {
11725 tree t;
11726
11727 t = build_int_cst (TREE_TYPE (value), -divisor);
11728 value = size_binop (BIT_AND_EXPR, value, t);
11729 }
11730 else
11731 {
11732 if (!div)
11733 div = build_int_cst (TREE_TYPE (value), divisor);
11734 value = size_binop (FLOOR_DIV_EXPR, value, div);
11735 value = size_binop (MULT_EXPR, value, div);
11736 }
11737
11738 return value;
11739 }
11740
11741 /* Returns the pointer to the base of the object addressed by EXP and
11742 extracts the information about the offset of the access, storing it
11743 in *PBITPOS and *POFFSET. */
11744
11745 static tree
11746 split_address_to_core_and_offset (tree exp,
11747 HOST_WIDE_INT *pbitpos, tree *poffset)
11748 {
11749 tree core;
11750 enum machine_mode mode;
11751 int unsignedp, volatilep;
11752 HOST_WIDE_INT bitsize;
11753
11754 if (TREE_CODE (exp) == ADDR_EXPR)
11755 {
11756 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11757 poffset, &mode, &unsignedp, &volatilep,
11758 false);
11759 core = build_fold_addr_expr (core);
11760 }
11761 else
11762 {
11763 core = exp;
11764 *pbitpos = 0;
11765 *poffset = NULL_TREE;
11766 }
11767
11768 return core;
11769 }
11770
11771 /* Returns true if addresses of E1 and E2 differ by a constant, false
11772 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11773
11774 bool
11775 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11776 {
11777 tree core1, core2;
11778 HOST_WIDE_INT bitpos1, bitpos2;
11779 tree toffset1, toffset2, tdiff, type;
11780
11781 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11782 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11783
11784 if (bitpos1 % BITS_PER_UNIT != 0
11785 || bitpos2 % BITS_PER_UNIT != 0
11786 || !operand_equal_p (core1, core2, 0))
11787 return false;
11788
11789 if (toffset1 && toffset2)
11790 {
11791 type = TREE_TYPE (toffset1);
11792 if (type != TREE_TYPE (toffset2))
11793 toffset2 = fold_convert (type, toffset2);
11794
11795 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11796 if (!cst_and_fits_in_hwi (tdiff))
11797 return false;
11798
11799 *diff = int_cst_value (tdiff);
11800 }
11801 else if (toffset1 || toffset2)
11802 {
11803 /* If only one of the offsets is non-constant, the difference cannot
11804 be a constant. */
11805 return false;
11806 }
11807 else
11808 *diff = 0;
11809
11810 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11811 return true;
11812 }
11813
11814 /* Simplify the floating point expression EXP when the sign of the
11815 result is not significant. Return NULL_TREE if no simplification
11816 is possible. */
11817
11818 tree
11819 fold_strip_sign_ops (tree exp)
11820 {
11821 tree arg0, arg1;
11822
11823 switch (TREE_CODE (exp))
11824 {
11825 case ABS_EXPR:
11826 case NEGATE_EXPR:
11827 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11828 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11829
11830 case MULT_EXPR:
11831 case RDIV_EXPR:
11832 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11833 return NULL_TREE;
11834 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11835 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11836 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11837 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11838 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11839 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11840 break;
11841
11842 default:
11843 break;
11844 }
11845 return NULL_TREE;
11846 }
11847
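/* Illustrative sketch, not part of the original source: the identity
   behind fold_strip_sign_ops.  When only the magnitude of the result
   matters (e.g. under fabs), NEGATE_EXPR and ABS_EXPR factors inside
   a MULT_EXPR or RDIV_EXPR can be dropped, barring sign-dependent
   rounding as checked above.  */
#include <assert.h>
#include <math.h>

static void
strip_sign_ops_demo (void)
{
  double x = -3.5, y = 2.0;

  assert (fabs (-x * y) == fabs (x * y));        /* strip NEGATE_EXPR */
  assert (fabs (fabs (x) / y) == fabs (x / y));  /* strip ABS_EXPR */
}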