/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
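/* For example, on a host where HOST_WIDE_INT is 32 bits wide, adding
   a = 0x7fffffff and b = 1 gives sum = 0x80000000: a and b have the
   same sign, so ~(a ^ b) has the sign bit set, a ^ sum also has the
   sign bit set, and the macro yields nonzero (signed overflow).  */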
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
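/* For example, with 32-bit HOST_WIDE_INT (so BASE == 1 << 16),
   encode (words, 0x12345678, 0) stores words[0] = 0x5678,
   words[1] = 0x1234, words[2] = words[3] = 0, and decode on that
   array recovers low = 0x12345678, hi = 0 exactly.  */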
\f
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
     CONST_OVERFLOWED is nonzero
     or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
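/* For example, forcing low = 0xff into a signed type of precision 8
   first leaves the low bits as 0xff, then sign extends: low becomes
   all-ones and high becomes -1, i.e. the two-word value -1.  Since the
   value changed, a new node is built, and with OVERFLOWABLE > 0 on a
   sign-extended type the copy also gets TREE_OVERFLOW and
   TREE_CONSTANT_OVERFLOW set.  */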
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
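/* The (l < l1) term is the carry out of the low word: unsigned addition
   wrapped iff the result is smaller than an operand.  For example,
   adding the doubleword values (l1 = ~0, h1 = 0) and (l2 = 1, h2 = 0)
   gives l = 0 with a carry, so h = 1; no signed overflow is reported.  */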

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
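/* Negation of a two-word value is -(l1, h1) = (-l1, ~h1 + (l1 == 0)).
   The only signed overflow case is negating the most negative value,
   where l1 == 0 and -h1 == h1 with the sign bit still set, which is
   exactly what (*hv & h1) < 0 detects.  */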
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
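/* Note the double shift in the cross-word case: l1 is shifted right by
   HOST_BITS_PER_WIDE_INT - count - 1 and then by 1 more, because a
   single shift by HOST_BITS_PER_WIDE_INT - count would be undefined
   when count == 0.  For example, shifting the doubleword (l1 = 1 << 31,
   h1 = 0) left by 1 on a 32-bit host (with prec == 64) moves the top
   bit of l1 into *hv.  */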

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
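/* A rotate is composed from two logical shifts using the usual identity
   rot_left (x, count) == (x << count) | (x >> (prec - count)), applied
   here to the doubleword pair.  E.g. rotating an 8-bit 0x81 left by 1
   within prec == 8 gives (0x02 | 0x01) == 0x03.  */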

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
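/* The rounding modes only adjust the truncated quotient.  For example,
   -7 / 2 gives the trial quotient -3 with remainder -1; FLOOR_DIV_EXPR
   subtracts 1 to get -4 while CEIL_DIV_EXPR keeps -3.  For
   ROUND_DIV_EXPR, 7 / 4 has remainder 3, and since 2 * |rem| > |den|
   the quotient 1 is bumped away from zero to 2.  */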

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
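/* For example, dividing the constants 12 and 4 yields the constant 3,
   whereas 13 and 4 leave a remainder of 1, so NULL_TREE is returned
   and the caller knows the division cannot be folded exactly.  */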
\f
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
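/* The only signed value whose negation overflows is the minimum, whose
   bit pattern within the type's precision is exactly 1 << (prec - 1).
   For a signed 8-bit type, -128 is 0x80 == 1 << 7, so the function
   returns false for it and true for every other value.  */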

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
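/* For example, splitting IN = x - 5 with CODE == PLUS_EXPR returns the
   variable part x and, because the literal 5 was subtracted, stores it
   in *MINUS_LITP rather than *LITP; *CONP stays null.  */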

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
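/* The complex cases above are the textbook identities
   (a + bi)(c + di) = (ac - bd) + (ad + bc)i and
   (a + bi)/(c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d),
   computed componentwise with const_binop on the part constants.  */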

/* Create a size type INTEGER_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
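/* For example, with constant sizetype operands 4 and 8, the difference
   is computed as 0 - (8 - 4) in ssizetype, i.e. -4, rather than letting
   the unsigned subtraction 4 - 8 wrap around.  */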
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
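/* For example, converting the REAL_CST 3.7 to a 32-bit signed type with
   FIX_TRUNC_EXPR yields 3 and with FIX_CEIL_EXPR yields 4; converting
   1e30 saturates to the type's maximum value and marks the result as
   overflowed, and a NaN folds to zero with the overflow flag set.  */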

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                              type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1988 case POINTER_TYPE: case REFERENCE_TYPE:
1989 case REAL_TYPE:
1990 return build2 (COMPLEX_EXPR, type,
1991 fold_convert (TREE_TYPE (type), arg),
1992 fold_convert (TREE_TYPE (type), integer_zero_node));
1993 case COMPLEX_TYPE:
1994 {
1995 tree rpart, ipart;
1996
1997 if (TREE_CODE (arg) == COMPLEX_EXPR)
1998 {
1999 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2000 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2001 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2002 }
2003
2004 arg = save_expr (arg);
2005 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2006 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2007 rpart = fold_convert (TREE_TYPE (type), rpart);
2008 ipart = fold_convert (TREE_TYPE (type), ipart);
2009 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2010 }
2011
2012 default:
2013 gcc_unreachable ();
2014 }
2015
2016 case VECTOR_TYPE:
2017 if (integer_zerop (arg))
2018 return build_zero_vector (type);
2019 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2020 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == VECTOR_TYPE);
2022 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2023
2024 case VOID_TYPE:
2025 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2026
2027 default:
2028 gcc_unreachable ();
2029 }
2030 }
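
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): the COMPLEX_TYPE to REAL_TYPE path above matches
   C99 semantics, where converting a complex value to a real type
   discards the imaginary part; fold_convert builds the corresponding
   REALPART_EXPR.  */
#if 0
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;

  /* Only the real part survives the conversion.  */
  double d = (double) z;
  printf ("%g\n", d);  /* 3 */
  return 0;
}
#endif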
2031 \f
2032 /* Return false if X can be assumed not to be an lvalue, true
2033 otherwise. */
2034
2035 static bool
2036 maybe_lvalue_p (tree x)
2037 {
2038 /* We only need to wrap lvalue tree codes. */
2039 switch (TREE_CODE (x))
2040 {
2041 case VAR_DECL:
2042 case PARM_DECL:
2043 case RESULT_DECL:
2044 case LABEL_DECL:
2045 case FUNCTION_DECL:
2046 case SSA_NAME:
2047
2048 case COMPONENT_REF:
2049 case INDIRECT_REF:
2050 case ALIGN_INDIRECT_REF:
2051 case MISALIGNED_INDIRECT_REF:
2052 case ARRAY_REF:
2053 case ARRAY_RANGE_REF:
2054 case BIT_FIELD_REF:
2055 case OBJ_TYPE_REF:
2056
2057 case REALPART_EXPR:
2058 case IMAGPART_EXPR:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2061 case SAVE_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2064 case COMPOUND_EXPR:
2065 case MODIFY_EXPR:
2066 case TARGET_EXPR:
2067 case COND_EXPR:
2068 case BIND_EXPR:
2069 case MIN_EXPR:
2070 case MAX_EXPR:
2071 break;
2072
2073 default:
2074 /* Assume the worst for front-end tree codes. */
2075 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2076 break;
2077 return false;
2078 }
2079
2080 return true;
2081 }
2082
2083 /* Return an expr equal to X but certainly not valid as an lvalue. */
2084
2085 tree
2086 non_lvalue (tree x)
2087 {
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2089 us. */
2090 if (in_gimple_form)
2091 return x;
2092
2093 if (! maybe_lvalue_p (x))
2094 return x;
2095 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2096 }
2097
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2100
2101 int pedantic_lvalues;
2102
2103 /* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2105
2106 static tree
2107 pedantic_non_lvalue (tree x)
2108 {
2109 if (pedantic_lvalues)
2110 return non_lvalue (x);
2111 else
2112 return x;
2113 }
2114 \f
2115 /* Given a tree comparison code, return the code that is the logical inverse
2116    of the given code.  It is not safe to do this for floating-point
2117    comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2118    as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2119
2120 enum tree_code
2121 invert_tree_comparison (enum tree_code code, bool honor_nans)
2122 {
2123 if (honor_nans && flag_trapping_math)
2124 return ERROR_MARK;
2125
2126 switch (code)
2127 {
2128 case EQ_EXPR:
2129 return NE_EXPR;
2130 case NE_EXPR:
2131 return EQ_EXPR;
2132 case GT_EXPR:
2133 return honor_nans ? UNLE_EXPR : LE_EXPR;
2134 case GE_EXPR:
2135 return honor_nans ? UNLT_EXPR : LT_EXPR;
2136 case LT_EXPR:
2137 return honor_nans ? UNGE_EXPR : GE_EXPR;
2138 case LE_EXPR:
2139 return honor_nans ? UNGT_EXPR : GT_EXPR;
2140 case LTGT_EXPR:
2141 return UNEQ_EXPR;
2142 case UNEQ_EXPR:
2143 return LTGT_EXPR;
2144 case UNGT_EXPR:
2145 return LE_EXPR;
2146 case UNGE_EXPR:
2147 return LT_EXPR;
2148 case UNLT_EXPR:
2149 return GE_EXPR;
2150 case UNLE_EXPR:
2151 return GT_EXPR;
2152 case ORDERED_EXPR:
2153 return UNORDERED_EXPR;
2154 case UNORDERED_EXPR:
2155 return ORDERED_EXPR;
2156 default:
2157 gcc_unreachable ();
2158 }
2159 }
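
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): why HONOR_NANS forces the unordered inverses
   above.  With a NaN operand every ordered comparison is false, so the
   plain opposite comparison is not the logical inverse.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = nan (""), y = 1.0;

  printf ("x < y   : %d\n", x < y);     /* 0 */
  printf ("x >= y  : %d\n", x >= y);    /* 0, so GE is not the inverse of LT */
  printf ("!(x < y): %d\n", !(x < y));  /* 1, i.e. the unordered UNGE */
  return 0;
}
#endif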
2160
2161 /* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
2163
2164 enum tree_code
2165 swap_tree_comparison (enum tree_code code)
2166 {
2167 switch (code)
2168 {
2169 case EQ_EXPR:
2170 case NE_EXPR:
2171 case ORDERED_EXPR:
2172 case UNORDERED_EXPR:
2173 case LTGT_EXPR:
2174 case UNEQ_EXPR:
2175 return code;
2176 case GT_EXPR:
2177 return LT_EXPR;
2178 case GE_EXPR:
2179 return LE_EXPR;
2180 case LT_EXPR:
2181 return GT_EXPR;
2182 case LE_EXPR:
2183 return GE_EXPR;
2184 case UNGT_EXPR:
2185 return UNLT_EXPR;
2186 case UNGE_EXPR:
2187 return UNLE_EXPR;
2188 case UNLT_EXPR:
2189 return UNGT_EXPR;
2190 case UNLE_EXPR:
2191 return UNGE_EXPR;
2192 default:
2193 gcc_unreachable ();
2194 }
2195 }
2196
2197
2198 /* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2201
2202 static enum comparison_code
2203 comparison_to_compcode (enum tree_code code)
2204 {
2205 switch (code)
2206 {
2207 case LT_EXPR:
2208 return COMPCODE_LT;
2209 case EQ_EXPR:
2210 return COMPCODE_EQ;
2211 case LE_EXPR:
2212 return COMPCODE_LE;
2213 case GT_EXPR:
2214 return COMPCODE_GT;
2215 case NE_EXPR:
2216 return COMPCODE_NE;
2217 case GE_EXPR:
2218 return COMPCODE_GE;
2219 case ORDERED_EXPR:
2220 return COMPCODE_ORD;
2221 case UNORDERED_EXPR:
2222 return COMPCODE_UNORD;
2223 case UNLT_EXPR:
2224 return COMPCODE_UNLT;
2225 case UNEQ_EXPR:
2226 return COMPCODE_UNEQ;
2227 case UNLE_EXPR:
2228 return COMPCODE_UNLE;
2229 case UNGT_EXPR:
2230 return COMPCODE_UNGT;
2231 case LTGT_EXPR:
2232 return COMPCODE_LTGT;
2233 case UNGE_EXPR:
2234 return COMPCODE_UNGE;
2235 default:
2236 gcc_unreachable ();
2237 }
2238 }
2239
2240 /* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2243
2244 static enum tree_code
2245 compcode_to_comparison (enum comparison_code code)
2246 {
2247 switch (code)
2248 {
2249 case COMPCODE_LT:
2250 return LT_EXPR;
2251 case COMPCODE_EQ:
2252 return EQ_EXPR;
2253 case COMPCODE_LE:
2254 return LE_EXPR;
2255 case COMPCODE_GT:
2256 return GT_EXPR;
2257 case COMPCODE_NE:
2258 return NE_EXPR;
2259 case COMPCODE_GE:
2260 return GE_EXPR;
2261 case COMPCODE_ORD:
2262 return ORDERED_EXPR;
2263 case COMPCODE_UNORD:
2264 return UNORDERED_EXPR;
2265 case COMPCODE_UNLT:
2266 return UNLT_EXPR;
2267 case COMPCODE_UNEQ:
2268 return UNEQ_EXPR;
2269 case COMPCODE_UNLE:
2270 return UNLE_EXPR;
2271 case COMPCODE_UNGT:
2272 return UNGT_EXPR;
2273 case COMPCODE_LTGT:
2274 return LTGT_EXPR;
2275 case COMPCODE_UNGE:
2276 return UNGE_EXPR;
2277 default:
2278 gcc_unreachable ();
2279 }
2280 }
2281
2282 /* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
2287
2288 tree
2289 combine_comparisons (enum tree_code code, enum tree_code lcode,
2290 enum tree_code rcode, tree truth_type,
2291 tree ll_arg, tree lr_arg)
2292 {
2293 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2294 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2295 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2296 enum comparison_code compcode;
2297
2298 switch (code)
2299 {
2300 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2301 compcode = lcompcode & rcompcode;
2302 break;
2303
2304 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2305 compcode = lcompcode | rcompcode;
2306 break;
2307
2308 default:
2309 return NULL_TREE;
2310 }
2311
2312 if (!honor_nans)
2313 {
2314 /* Eliminate unordered comparisons, as well as LTGT and ORD
2315 which are not used unless the mode has NaNs. */
2316 compcode &= ~COMPCODE_UNORD;
2317 if (compcode == COMPCODE_LTGT)
2318 compcode = COMPCODE_NE;
2319 else if (compcode == COMPCODE_ORD)
2320 compcode = COMPCODE_TRUE;
2321 }
2322 else if (flag_trapping_math)
2323 {
2324 /* Check that the original operation and the optimized ones will trap
2325 under the same condition. */
2326 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2327 && (lcompcode != COMPCODE_EQ)
2328 && (lcompcode != COMPCODE_ORD);
2329 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2330 && (rcompcode != COMPCODE_EQ)
2331 && (rcompcode != COMPCODE_ORD);
2332 bool trap = (compcode & COMPCODE_UNORD) == 0
2333 && (compcode != COMPCODE_EQ)
2334 && (compcode != COMPCODE_ORD);
2335
2336 /* In a short-circuited boolean expression the LHS might be
2337 such that the RHS, if evaluated, will never trap. For
2338 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2339 if neither x nor y is NaN. (This is a mixed blessing: for
2340 example, the expression above will never trap, hence
2341 optimizing it to x < y would be invalid). */
2342 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2343 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2344 rtrap = false;
2345
2346 /* If the comparison was short-circuited, and only the RHS
2347 trapped, we may now generate a spurious trap. */
2348 if (rtrap && !ltrap
2349 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2350 return NULL_TREE;
2351
2352 /* If we changed the conditions that cause a trap, we lose. */
2353 if ((ltrap || rtrap) != trap)
2354 return NULL_TREE;
2355 }
2356
2357 if (compcode == COMPCODE_TRUE)
2358 return constant_boolean_node (true, truth_type);
2359 else if (compcode == COMPCODE_FALSE)
2360 return constant_boolean_node (false, truth_type);
2361 else
2362 return fold_build2 (compcode_to_comparison (compcode),
2363 truth_type, ll_arg, lr_arg);
2364 }
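
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): the bit-based encoding that makes the AND/OR
   combination above a single bitwise operation.  The CC_* values below
   mirror the low three bits of enum comparison_code (LT = 1, EQ = 2,
   GT = 4); the real enum adds an unordered bit on top for NaNs.  */
#if 0
#include <stdio.h>

enum { CC_LT = 1, CC_EQ = 2, CC_LE = 3, CC_GT = 4, CC_GE = 6 };

/* Evaluate an encoded comparison on two integers.  */
static int
cc_eval (int cc, int a, int b)
{
  return ((a < b && (cc & CC_LT))
	  || (a == b && (cc & CC_EQ))
	  || (a > b && (cc & CC_GT)));
}

int
main (void)
{
  int a, b, ok = 1;

  for (a = -2; a <= 2; a++)
    for (b = -2; b <= 2; b++)
      {
	/* OR of codes is their disjunction, AND is their conjunction.  */
	ok &= cc_eval (CC_LT | CC_EQ, a, b) == (a <= b);
	ok &= cc_eval (CC_LE & CC_GE, a, b) == (a == b);
      }
  printf (ok ? "encoding holds\n" : "encoding broken\n");
  return 0;
}
#endif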
2365
2366 /* Return nonzero if CODE is a tree code that represents a truth value. */
2367
2368 static int
2369 truth_value_p (enum tree_code code)
2370 {
2371 return (TREE_CODE_CLASS (code) == tcc_comparison
2372 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2373 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2374 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2375 }
2376 \f
2377 /* Return nonzero if two operands (typically of the same tree node)
2378 are necessarily equal. If either argument has side-effects this
2379 function returns zero. FLAGS modifies behavior as follows:
2380
2381 If OEP_ONLY_CONST is set, only return nonzero for constants.
2382 This function tests whether the operands are indistinguishable;
2383 it does not test whether they are equal using C's == operation.
2384 The distinction is important for IEEE floating point, because
2385 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2386 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2387
2388 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2389 even though it may hold multiple values during a function.
2390 This is because a GCC tree node guarantees that nothing else is
2391 executed between the evaluation of its "operands" (which may often
2392 be evaluated in arbitrary order). Hence if the operands themselves
2393 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2394 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2395 unset means assuming isochronic (or instantaneous) tree equivalence.
2396 Unless comparing arbitrary expression trees, such as from different
2397 statements, this flag can usually be left unset.
2398
2399 If OEP_PURE_SAME is set, then pure functions with identical arguments
2400 are considered the same. It is used when the caller has other ways
2401 to ensure that global memory is unchanged in between. */
2402
2403 int
2404 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2405 {
2406 /* If either is ERROR_MARK, they aren't equal. */
2407 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2408 return 0;
2409
2410 /* If both types don't have the same signedness, then we can't consider
2411 them equal. We must check this before the STRIP_NOPS calls
2412 because they may change the signedness of the arguments. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2414 return 0;
2415
2416 STRIP_NOPS (arg0);
2417 STRIP_NOPS (arg1);
2418
2419 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2420 /* This is needed for conversions and for COMPONENT_REF.
2421 Might as well play it safe and always test this. */
2422 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2423 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2424 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2425 return 0;
2426
2427 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2428 We don't care about side effects in that case because the SAVE_EXPR
2429 takes care of that for us. In all other cases, two expressions are
2430 equal if they have no side effects. If we have two identical
2431 expressions with side effects that should be treated the same due
2432 to the only side effects being identical SAVE_EXPR's, that will
2433 be detected in the recursive calls below. */
2434 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2435 && (TREE_CODE (arg0) == SAVE_EXPR
2436 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2437 return 1;
2438
2439 /* Next handle constant cases, those for which we can return 1 even
2440 if ONLY_CONST is set. */
2441 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2442 switch (TREE_CODE (arg0))
2443 {
2444 case INTEGER_CST:
2445 return (! TREE_CONSTANT_OVERFLOW (arg0)
2446 && ! TREE_CONSTANT_OVERFLOW (arg1)
2447 && tree_int_cst_equal (arg0, arg1));
2448
2449 case REAL_CST:
2450 return (! TREE_CONSTANT_OVERFLOW (arg0)
2451 && ! TREE_CONSTANT_OVERFLOW (arg1)
2452 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2453 TREE_REAL_CST (arg1)));
2454
2455 case VECTOR_CST:
2456 {
2457 tree v1, v2;
2458
2459 if (TREE_CONSTANT_OVERFLOW (arg0)
2460 || TREE_CONSTANT_OVERFLOW (arg1))
2461 return 0;
2462
2463 v1 = TREE_VECTOR_CST_ELTS (arg0);
2464 v2 = TREE_VECTOR_CST_ELTS (arg1);
2465 while (v1 && v2)
2466 {
2467 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2468 flags))
2469 return 0;
2470 v1 = TREE_CHAIN (v1);
2471 v2 = TREE_CHAIN (v2);
2472 }
2473
2474 return v1 == v2;
2475 }
2476
2477 case COMPLEX_CST:
2478 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2479 flags)
2480 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2481 flags));
2482
2483 case STRING_CST:
2484 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2485 && ! memcmp (TREE_STRING_POINTER (arg0),
2486 TREE_STRING_POINTER (arg1),
2487 TREE_STRING_LENGTH (arg0)));
2488
2489 case ADDR_EXPR:
2490 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2491 0);
2492 default:
2493 break;
2494 }
2495
2496 if (flags & OEP_ONLY_CONST)
2497 return 0;
2498
2499 /* Define macros to test an operand from arg0 and arg1 for equality and a
2500 variant that allows null and views null as being different from any
2501    non-null value.  In the latter case, if either is null, then both
2502    must be; otherwise, do the normal comparison.  */
2503 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2504 TREE_OPERAND (arg1, N), flags)
2505
2506 #define OP_SAME_WITH_NULL(N) \
2507 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2508 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2509
2510 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2511 {
2512 case tcc_unary:
2513 /* Two conversions are equal only if signedness and modes match. */
2514 switch (TREE_CODE (arg0))
2515 {
2516 case NOP_EXPR:
2517 case CONVERT_EXPR:
2518 case FIX_CEIL_EXPR:
2519 case FIX_TRUNC_EXPR:
2520 case FIX_FLOOR_EXPR:
2521 case FIX_ROUND_EXPR:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2524 return 0;
2525 break;
2526 default:
2527 break;
2528 }
2529
2530 return OP_SAME (0);
2531
2532
2533 case tcc_comparison:
2534 case tcc_binary:
2535 if (OP_SAME (0) && OP_SAME (1))
2536 return 1;
2537
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0))
2540 && operand_equal_p (TREE_OPERAND (arg0, 0),
2541 TREE_OPERAND (arg1, 1), flags)
2542 && operand_equal_p (TREE_OPERAND (arg0, 1),
2543 TREE_OPERAND (arg1, 0), flags));
2544
2545 case tcc_reference:
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1))
2550 return 0;
2551
2552 switch (TREE_CODE (arg0))
2553 {
2554 case INDIRECT_REF:
2555 case ALIGN_INDIRECT_REF:
2556 case MISALIGNED_INDIRECT_REF:
2557 case REALPART_EXPR:
2558 case IMAGPART_EXPR:
2559 return OP_SAME (0);
2560
2561 case ARRAY_REF:
2562 case ARRAY_RANGE_REF:
2563 /* Operands 2 and 3 may be null. */
2564 return (OP_SAME (0)
2565 && OP_SAME (1)
2566 && OP_SAME_WITH_NULL (2)
2567 && OP_SAME_WITH_NULL (3));
2568
2569 case COMPONENT_REF:
2570 /* Handle operand 2 the same as for ARRAY_REF. */
2571 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2572
2573 case BIT_FIELD_REF:
2574 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2575
2576 default:
2577 return 0;
2578 }
2579
2580 case tcc_expression:
2581 switch (TREE_CODE (arg0))
2582 {
2583 case ADDR_EXPR:
2584 case TRUTH_NOT_EXPR:
2585 return OP_SAME (0);
2586
2587 case TRUTH_ANDIF_EXPR:
2588 case TRUTH_ORIF_EXPR:
2589 return OP_SAME (0) && OP_SAME (1);
2590
2591 case TRUTH_AND_EXPR:
2592 case TRUTH_OR_EXPR:
2593 case TRUTH_XOR_EXPR:
2594 if (OP_SAME (0) && OP_SAME (1))
2595 return 1;
2596
2597 /* Otherwise take into account this is a commutative operation. */
2598 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2599 TREE_OPERAND (arg1, 1), flags)
2600 && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 TREE_OPERAND (arg1, 0), flags));
2602
2603 case CALL_EXPR:
2604 /* If the CALL_EXPRs call different functions, then they
2605 clearly can not be equal. */
2606 if (!OP_SAME (0))
2607 return 0;
2608
2609 {
2610 unsigned int cef = call_expr_flags (arg0);
2611 if (flags & OEP_PURE_SAME)
2612 cef &= ECF_CONST | ECF_PURE;
2613 else
2614 cef &= ECF_CONST;
2615 if (!cef)
2616 return 0;
2617 }
2618
2619 /* Now see if all the arguments are the same. operand_equal_p
2620 does not handle TREE_LIST, so we walk the operands here
2621 feeding them to operand_equal_p. */
2622 arg0 = TREE_OPERAND (arg0, 1);
2623 arg1 = TREE_OPERAND (arg1, 1);
2624 while (arg0 && arg1)
2625 {
2626 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2627 flags))
2628 return 0;
2629
2630 arg0 = TREE_CHAIN (arg0);
2631 arg1 = TREE_CHAIN (arg1);
2632 }
2633
2634 /* If we get here and both argument lists are exhausted
2635 then the CALL_EXPRs are equal. */
2636 return ! (arg0 || arg1);
2637
2638 default:
2639 return 0;
2640 }
2641
2642 case tcc_declaration:
2643 /* Consider __builtin_sqrt equal to sqrt. */
2644 return (TREE_CODE (arg0) == FUNCTION_DECL
2645 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2646 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2647 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2648
2649 default:
2650 return 0;
2651 }
2652
2653 #undef OP_SAME
2654 #undef OP_SAME_WITH_NULL
2655 }
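
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): the IEEE distinctions drawn in the comment
   before operand_equal_p.  -0.0 and 0.0 compare equal with == yet are
   distinguishable, and a NaN compares unequal to itself; this assumes
   IEEE floating point on the host.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  double n = nan ("");

  printf ("-0.0 == 0.0 : %d\n", nz == pz);                          /* 1 */
  printf ("signbits    : %d %d\n", !!signbit (nz), !!signbit (pz)); /* 1 0 */
  printf ("NaN == NaN  : %d\n", n == n);                            /* 0 */
  return 0;
}
#endif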
2656 \f
2657 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2658 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2659
2660 When in doubt, return 0. */
2661
2662 static int
2663 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2664 {
2665 int unsignedp1, unsignedpo;
2666 tree primarg0, primarg1, primother;
2667 unsigned int correct_width;
2668
2669 if (operand_equal_p (arg0, arg1, 0))
2670 return 1;
2671
2672 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2673 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2674 return 0;
2675
2676 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2677 and see if the inner values are the same. This removes any
2678 signedness comparison, which doesn't matter here. */
2679 primarg0 = arg0, primarg1 = arg1;
2680 STRIP_NOPS (primarg0);
2681 STRIP_NOPS (primarg1);
2682 if (operand_equal_p (primarg0, primarg1, 0))
2683 return 1;
2684
2685 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2686 actual comparison operand, ARG0.
2687
2688 First throw away any conversions to wider types
2689 already present in the operands. */
2690
2691 primarg1 = get_narrower (arg1, &unsignedp1);
2692 primother = get_narrower (other, &unsignedpo);
2693
2694 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2695 if (unsignedp1 == unsignedpo
2696 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2697 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2698 {
2699 tree type = TREE_TYPE (arg0);
2700
2701 /* Make sure shorter operand is extended the right way
2702 to match the longer operand. */
2703 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2704 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2705
2706 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2707 return 1;
2708 }
2709
2710 return 0;
2711 }
2712 \f
2713 /* See if ARG is an expression that is either a comparison or is performing
2714 arithmetic on comparisons. The comparisons must only be comparing
2715 two different values, which will be stored in *CVAL1 and *CVAL2; if
2716 they are nonzero it means that some operands have already been found.
2717 No variables may be used anywhere else in the expression except in the
2718 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2719 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2720
2721 If this is true, return 1. Otherwise, return zero. */
2722
2723 static int
2724 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2725 {
2726 enum tree_code code = TREE_CODE (arg);
2727 enum tree_code_class class = TREE_CODE_CLASS (code);
2728
2729 /* We can handle some of the tcc_expression cases here. */
2730 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2731 class = tcc_unary;
2732 else if (class == tcc_expression
2733 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2734 || code == COMPOUND_EXPR))
2735 class = tcc_binary;
2736
2737 else if (class == tcc_expression && code == SAVE_EXPR
2738 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2739 {
2740 /* If we've already found a CVAL1 or CVAL2, this expression is
2741 	 too complex to handle.  */
2742 if (*cval1 || *cval2)
2743 return 0;
2744
2745 class = tcc_unary;
2746 *save_p = 1;
2747 }
2748
2749 switch (class)
2750 {
2751 case tcc_unary:
2752 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2753
2754 case tcc_binary:
2755 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2756 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2757 cval1, cval2, save_p));
2758
2759 case tcc_constant:
2760 return 1;
2761
2762 case tcc_expression:
2763 if (code == COND_EXPR)
2764 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2765 cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p)
2768 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2769 cval1, cval2, save_p));
2770 return 0;
2771
2772 case tcc_comparison:
2773 /* First see if we can handle the first operand, then the second. For
2774 the second operand, we know *CVAL1 can't be zero. It must be that
2775 one side of the comparison is each of the values; test for the
2776 case where this isn't true by failing if the two operands
2777 are the same. */
2778
2779 if (operand_equal_p (TREE_OPERAND (arg, 0),
2780 TREE_OPERAND (arg, 1), 0))
2781 return 0;
2782
2783 if (*cval1 == 0)
2784 *cval1 = TREE_OPERAND (arg, 0);
2785 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2786 ;
2787 else if (*cval2 == 0)
2788 *cval2 = TREE_OPERAND (arg, 0);
2789 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2790 ;
2791 else
2792 return 0;
2793
2794 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2795 ;
2796 else if (*cval2 == 0)
2797 *cval2 = TREE_OPERAND (arg, 1);
2798 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2799 ;
2800 else
2801 return 0;
2802
2803 return 1;
2804
2805 default:
2806 return 0;
2807 }
2808 }
2809 \f
2810 /* ARG is a tree that is known to contain just arithmetic operations and
2811 comparisons. Evaluate the operations in the tree substituting NEW0 for
2812 any occurrence of OLD0 as an operand of a comparison and likewise for
2813 NEW1 and OLD1. */
2814
2815 static tree
2816 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2817 {
2818 tree type = TREE_TYPE (arg);
2819 enum tree_code code = TREE_CODE (arg);
2820 enum tree_code_class class = TREE_CODE_CLASS (code);
2821
2822 /* We can handle some of the tcc_expression cases here. */
2823 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2824 class = tcc_unary;
2825 else if (class == tcc_expression
2826 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2827 class = tcc_binary;
2828
2829 switch (class)
2830 {
2831 case tcc_unary:
2832 return fold_build1 (code, type,
2833 eval_subst (TREE_OPERAND (arg, 0),
2834 old0, new0, old1, new1));
2835
2836 case tcc_binary:
2837 return fold_build2 (code, type,
2838 eval_subst (TREE_OPERAND (arg, 0),
2839 old0, new0, old1, new1),
2840 eval_subst (TREE_OPERAND (arg, 1),
2841 old0, new0, old1, new1));
2842
2843 case tcc_expression:
2844 switch (code)
2845 {
2846 case SAVE_EXPR:
2847 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2848
2849 case COMPOUND_EXPR:
2850 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2851
2852 case COND_EXPR:
2853 return fold_build3 (code, type,
2854 eval_subst (TREE_OPERAND (arg, 0),
2855 old0, new0, old1, new1),
2856 eval_subst (TREE_OPERAND (arg, 1),
2857 old0, new0, old1, new1),
2858 eval_subst (TREE_OPERAND (arg, 2),
2859 old0, new0, old1, new1));
2860 default:
2861 break;
2862 }
2863 /* Fall through - ??? */
2864
2865 case tcc_comparison:
2866 {
2867 tree arg0 = TREE_OPERAND (arg, 0);
2868 tree arg1 = TREE_OPERAND (arg, 1);
2869
2870 /* We need to check both for exact equality and tree equality. The
2871 former will be true if the operand has a side-effect. In that
2872 case, we know the operand occurred exactly once. */
2873
2874 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2875 arg0 = new0;
2876 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2877 arg0 = new1;
2878
2879 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2880 arg1 = new0;
2881 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2882 arg1 = new1;
2883
2884 return fold_build2 (code, type, arg0, arg1);
2885 }
2886
2887 default:
2888 return arg;
2889 }
2890 }
2891 \f
2892 /* Return a tree for the case when the result of an expression is RESULT
2893 converted to TYPE and OMITTED was previously an operand of the expression
2894 but is now not needed (e.g., we folded OMITTED * 0).
2895
2896 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2897 the conversion of RESULT to TYPE. */
2898
2899 tree
2900 omit_one_operand (tree type, tree result, tree omitted)
2901 {
2902 tree t = fold_convert (type, result);
2903
2904 if (TREE_SIDE_EFFECTS (omitted))
2905 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2906
2907 return non_lvalue (t);
2908 }
2909
2910 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2911
2912 static tree
2913 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2914 {
2915 tree t = fold_convert (type, result);
2916
2917 if (TREE_SIDE_EFFECTS (omitted))
2918 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2919
2920 return pedantic_non_lvalue (t);
2921 }
2922
2923 /* Return a tree for the case when the result of an expression is RESULT
2924 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2925 of the expression but are now not needed.
2926
2927 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2928 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2929 evaluated before OMITTED2. Otherwise, if neither has side effects,
2930 just do the conversion of RESULT to TYPE. */
2931
2932 tree
2933 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2934 {
2935 tree t = fold_convert (type, result);
2936
2937 if (TREE_SIDE_EFFECTS (omitted2))
2938 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2939 if (TREE_SIDE_EFFECTS (omitted1))
2940 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2941
2942 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2943 }
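
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): why an omitted operand with side effects must
   still be evaluated.  Folding "f () * 0" may drop the multiplication
   but not the call; the COMPOUND_EXPR built above corresponds to the
   comma expression here.  */
#if 0
#include <stdio.h>

static int calls;

static int
f (void)
{
  calls++;
  return 42;
}

int
main (void)
{
  /* A valid fold of "f () * 0" keeps the side effect: (f (), 0).  */
  int x = (f (), 0);

  printf ("x = %d, calls = %d\n", x, calls);  /* x = 0, calls = 1 */
  return 0;
}
#endif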
2944
2945 \f
2946 /* Return a simplified tree node for the truth-negation of ARG. This
2947 never alters ARG itself. We assume that ARG is an operation that
2948 returns a truth value (0 or 1).
2949
2950 FIXME: one would think we would fold the result, but it causes
2951 problems with the dominator optimizer. */
2952 tree
2953 invert_truthvalue (tree arg)
2954 {
2955 tree type = TREE_TYPE (arg);
2956 enum tree_code code = TREE_CODE (arg);
2957
2958 if (code == ERROR_MARK)
2959 return arg;
2960
2961 /* If this is a comparison, we can simply invert it, except for
2962 floating-point non-equality comparisons, in which case we just
2963 enclose a TRUTH_NOT_EXPR around what we have. */
2964
2965 if (TREE_CODE_CLASS (code) == tcc_comparison)
2966 {
2967 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2968 if (FLOAT_TYPE_P (op_type)
2969 && flag_trapping_math
2970 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2971 && code != NE_EXPR && code != EQ_EXPR)
2972 return build1 (TRUTH_NOT_EXPR, type, arg);
2973 else
2974 {
2975 code = invert_tree_comparison (code,
2976 HONOR_NANS (TYPE_MODE (op_type)));
2977 if (code == ERROR_MARK)
2978 return build1 (TRUTH_NOT_EXPR, type, arg);
2979 else
2980 return build2 (code, type,
2981 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2982 }
2983 }
2984
2985 switch (code)
2986 {
2987 case INTEGER_CST:
2988 return constant_boolean_node (integer_zerop (arg), type);
2989
2990 case TRUTH_AND_EXPR:
2991 return build2 (TRUTH_OR_EXPR, type,
2992 invert_truthvalue (TREE_OPERAND (arg, 0)),
2993 invert_truthvalue (TREE_OPERAND (arg, 1)));
2994
2995 case TRUTH_OR_EXPR:
2996 return build2 (TRUTH_AND_EXPR, type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)),
2998 invert_truthvalue (TREE_OPERAND (arg, 1)));
2999
3000 case TRUTH_XOR_EXPR:
3001 /* Here we can invert either operand. We invert the first operand
3002 unless the second operand is a TRUTH_NOT_EXPR in which case our
3003 result is the XOR of the first operand with the inside of the
3004 negation of the second operand. */
3005
3006 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3007 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3008 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3009 else
3010 return build2 (TRUTH_XOR_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)),
3012 TREE_OPERAND (arg, 1));
3013
3014 case TRUTH_ANDIF_EXPR:
3015 return build2 (TRUTH_ORIF_EXPR, type,
3016 invert_truthvalue (TREE_OPERAND (arg, 0)),
3017 invert_truthvalue (TREE_OPERAND (arg, 1)));
3018
3019 case TRUTH_ORIF_EXPR:
3020 return build2 (TRUTH_ANDIF_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 invert_truthvalue (TREE_OPERAND (arg, 1)));
3023
3024 case TRUTH_NOT_EXPR:
3025 return TREE_OPERAND (arg, 0);
3026
3027 case COND_EXPR:
3028 {
3029 tree arg1 = TREE_OPERAND (arg, 1);
3030 tree arg2 = TREE_OPERAND (arg, 2);
3031 /* A COND_EXPR may have a throw as one operand, which
3032 then has void type. Just leave void operands
3033 as they are. */
3034 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3035 VOID_TYPE_P (TREE_TYPE (arg1))
3036 ? arg1 : invert_truthvalue (arg1),
3037 VOID_TYPE_P (TREE_TYPE (arg2))
3038 ? arg2 : invert_truthvalue (arg2));
3039 }
3040
3041 case COMPOUND_EXPR:
3042 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3043 invert_truthvalue (TREE_OPERAND (arg, 1)));
3044
3045 case NON_LVALUE_EXPR:
3046 return invert_truthvalue (TREE_OPERAND (arg, 0));
3047
3048 case NOP_EXPR:
3049 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3050 break;
3051
3052 case CONVERT_EXPR:
3053 case FLOAT_EXPR:
3054 return build1 (TREE_CODE (arg), type,
3055 invert_truthvalue (TREE_OPERAND (arg, 0)));
3056
3057 case BIT_AND_EXPR:
3058 if (!integer_onep (TREE_OPERAND (arg, 1)))
3059 break;
3060 return build2 (EQ_EXPR, type, arg,
3061 fold_convert (type, integer_zero_node));
3062
3063 case SAVE_EXPR:
3064 return build1 (TRUTH_NOT_EXPR, type, arg);
3065
3066 case CLEANUP_POINT_EXPR:
3067 return build1 (CLEANUP_POINT_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)));
3069
3070 default:
3071 break;
3072 }
3073 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3074 return build1 (TRUTH_NOT_EXPR, type, arg);
3075 }
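
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): two of the inversion identities used above,
   checked over all truth values: De Morgan for TRUTH_AND_EXPR and
   TRUTH_OR_EXPR, and the TRUTH_XOR_EXPR rule that negating a single
   operand negates the whole XOR.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int a, b, ok = 1;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	ok &= (!(a && b)) == (!a || !b);  /* invert AND -> OR */
	ok &= (!(a || b)) == (!a && !b);  /* invert OR -> AND */
	ok &= (!(a ^ b)) == ((!a) ^ b);   /* invert one XOR operand */
      }
  printf (ok ? "identities hold\n" : "identities broken\n");
  return 0;
}
#endif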
3076
3077 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3078 operands are another bit-wise operation with a common input. If so,
3079 distribute the bit operations to save an operation and possibly two if
3080 constants are involved. For example, convert
3081 (A | B) & (A | C) into A | (B & C)
3082 Further simplification will occur if B and C are constants.
3083
3084 If this optimization cannot be done, 0 will be returned. */
3085
3086 static tree
3087 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3088 {
3089 tree common;
3090 tree left, right;
3091
3092 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3093 || TREE_CODE (arg0) == code
3094 || (TREE_CODE (arg0) != BIT_AND_EXPR
3095 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3096 return 0;
3097
3098 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3099 {
3100 common = TREE_OPERAND (arg0, 0);
3101 left = TREE_OPERAND (arg0, 1);
3102 right = TREE_OPERAND (arg1, 1);
3103 }
3104 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3105 {
3106 common = TREE_OPERAND (arg0, 0);
3107 left = TREE_OPERAND (arg0, 1);
3108 right = TREE_OPERAND (arg1, 0);
3109 }
3110 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3111 {
3112 common = TREE_OPERAND (arg0, 1);
3113 left = TREE_OPERAND (arg0, 0);
3114 right = TREE_OPERAND (arg1, 1);
3115 }
3116 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3117 {
3118 common = TREE_OPERAND (arg0, 1);
3119 left = TREE_OPERAND (arg0, 0);
3120 right = TREE_OPERAND (arg1, 0);
3121 }
3122 else
3123 return 0;
3124
3125 return fold_build2 (TREE_CODE (arg0), type, common,
3126 fold_build2 (code, type, left, right));
3127 }
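
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): the distribution law above, checked
   exhaustively on small bit patterns in both directions.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned a, b, c;
  int ok = 1;

  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
	{
	  ok &= ((a | b) & (a | c)) == (a | (b & c));
	  ok &= ((a & b) | (a & c)) == (a & (b | c));
	}
  printf (ok ? "distributes\n" : "does not distribute\n");
  return 0;
}
#endif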
3128
3129 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or MULT_EXPRs, simplify
3130    a binary operation with code CODE.  This optimization is unsafe.  */
3131 static tree
3132 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3133 {
3134 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3135 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3136
3137 /* (A / C) +- (B / C) -> (A +- B) / C. */
3138 if (mul0 == mul1
3139 && operand_equal_p (TREE_OPERAND (arg0, 1),
3140 TREE_OPERAND (arg1, 1), 0))
3141 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3142 fold_build2 (code, type,
3143 TREE_OPERAND (arg0, 0),
3144 TREE_OPERAND (arg1, 0)),
3145 TREE_OPERAND (arg0, 1));
3146
3147 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3148 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3149 TREE_OPERAND (arg1, 0), 0)
3150 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3151 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3152 {
3153 REAL_VALUE_TYPE r0, r1;
3154 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3155 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3156 if (!mul0)
3157 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3158 if (!mul1)
3159 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3160 real_arithmetic (&r0, code, &r0, &r1);
3161 return fold_build2 (MULT_EXPR, type,
3162 TREE_OPERAND (arg0, 0),
3163 build_real (type, r0));
3164 }
3165
3166 return NULL_TREE;
3167 }
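
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): why the transformation above is marked unsafe.
   With IEEE rounding, (A / C) + (B / C) and (A + B) / C need not agree;
   the search below usually finds a disagreeing pair, though the outcome
   depends on the host's floating-point arithmetic.  */
#if 0
#include <stdio.h>

int
main (void)
{
  double a, b, c = 3.0;

  for (a = 0.1; a < 10.0; a += 0.1)
    for (b = 0.1; b < 10.0; b += 0.1)
      if (a / c + b / c != (a + b) / c)
	{
	  printf ("a=%.17g b=%.17g: %.17g != %.17g\n",
		  a, b, a / c + b / c, (a + b) / c);
	  return 0;
	}
  printf ("no difference found in this range\n");
  return 0;
}
#endif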
3168 \f
3169 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3170 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3171
3172 static tree
3173 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3174 int unsignedp)
3175 {
3176 tree result;
3177
3178 if (bitpos == 0)
3179 {
3180 tree size = TYPE_SIZE (TREE_TYPE (inner));
3181 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3182 || POINTER_TYPE_P (TREE_TYPE (inner)))
3183 && host_integerp (size, 0)
3184 && tree_low_cst (size, 0) == bitsize)
3185 return fold_convert (type, inner);
3186 }
3187
3188 result = build3 (BIT_FIELD_REF, type, inner,
3189 size_int (bitsize), bitsize_int (bitpos));
3190
3191 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3192
3193 return result;
3194 }
3195
3196 /* Optimize a bit-field compare.
3197
3198 There are two cases: First is a compare against a constant and the
3199 second is a comparison of two items where the fields are at the same
3200 bit position relative to the start of a chunk (byte, halfword, word)
3201 large enough to contain it. In these cases we can avoid the shift
3202 implicit in bitfield extractions.
3203
3204 For constants, we emit a compare of the shifted constant with the
3205 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3206 compared. For two fields at the same position, we do the ANDs with the
3207 similar mask and compare the result of the ANDs.
3208
3209 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3210 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3211 are the left and right operands of the comparison, respectively.
3212
3213 If the optimization described above can be done, we return the resulting
3214 tree. Otherwise we return zero. */
3215
3216 static tree
3217 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3218 tree lhs, tree rhs)
3219 {
3220 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3221 tree type = TREE_TYPE (lhs);
3222 tree signed_type, unsigned_type;
3223 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3224 enum machine_mode lmode, rmode, nmode;
3225 int lunsignedp, runsignedp;
3226 int lvolatilep = 0, rvolatilep = 0;
3227 tree linner, rinner = NULL_TREE;
3228 tree mask;
3229 tree offset;
3230
3231 /* Get all the information about the extractions being done. If the bit size
3232    is the same as the size of the underlying object, we aren't doing an
3233 extraction at all and so can do nothing. We also don't want to
3234 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3235 then will no longer be able to replace it. */
3236 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3237 &lunsignedp, &lvolatilep, false);
3238 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3239 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3240 return 0;
3241
3242 if (!const_p)
3243 {
3244 /* If this is not a constant, we can only do something if bit positions,
3245 sizes, and signedness are the same. */
3246 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3247 &runsignedp, &rvolatilep, false);
3248
3249 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3250 || lunsignedp != runsignedp || offset != 0
3251 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3252 return 0;
3253 }
3254
3255 /* See if we can find a mode to refer to this field. We should be able to,
3256 but fail if we can't. */
3257 nmode = get_best_mode (lbitsize, lbitpos,
3258 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3259 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3260 TYPE_ALIGN (TREE_TYPE (rinner))),
3261 word_mode, lvolatilep || rvolatilep);
3262 if (nmode == VOIDmode)
3263 return 0;
3264
3265 /* Set signed and unsigned types of the precision of this mode for the
3266 shifts below. */
3267 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3268 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3269
3270 /* Compute the bit position and size for the new reference and our offset
3271 within it. If the new reference is the same size as the original, we
3272 won't optimize anything, so return zero. */
3273 nbitsize = GET_MODE_BITSIZE (nmode);
3274 nbitpos = lbitpos & ~ (nbitsize - 1);
3275 lbitpos -= nbitpos;
3276 if (nbitsize == lbitsize)
3277 return 0;
3278
3279 if (BYTES_BIG_ENDIAN)
3280 lbitpos = nbitsize - lbitsize - lbitpos;
3281
3282 /* Make the mask to be used against the extracted field. */
3283 mask = build_int_cst (unsigned_type, -1);
3284 mask = force_fit_type (mask, 0, false, false);
3285 mask = fold_convert (unsigned_type, mask);
3286 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3287 mask = const_binop (RSHIFT_EXPR, mask,
3288 size_int (nbitsize - lbitsize - lbitpos), 0);
3289
3290 if (! const_p)
3291 /* If not comparing with constant, just rework the comparison
3292 and return. */
3293 return build2 (code, compare_type,
3294 build2 (BIT_AND_EXPR, unsigned_type,
3295 make_bit_field_ref (linner, unsigned_type,
3296 nbitsize, nbitpos, 1),
3297 mask),
3298 build2 (BIT_AND_EXPR, unsigned_type,
3299 make_bit_field_ref (rinner, unsigned_type,
3300 nbitsize, nbitpos, 1),
3301 mask));
3302
3303 /* Otherwise, we are handling the constant case. See if the constant is too
3304    big for the field.  Warn and return a tree for 0 (false) if so.  We do
3305 this not only for its own sake, but to avoid having to test for this
3306 error case below. If we didn't, we might generate wrong code.
3307
3308 For unsigned fields, the constant shifted right by the field length should
3309 be all zero. For signed fields, the high-order bits should agree with
3310 the sign bit. */
3311
3312 if (lunsignedp)
3313 {
3314 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3315 fold_convert (unsigned_type, rhs),
3316 size_int (lbitsize), 0)))
3317 {
3318 warning (0, "comparison is always %d due to width of bit-field",
3319 code == NE_EXPR);
3320 return constant_boolean_node (code == NE_EXPR, compare_type);
3321 }
3322 }
3323 else
3324 {
3325 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3326 size_int (lbitsize - 1), 0);
3327 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3328 {
3329 warning (0, "comparison is always %d due to width of bit-field",
3330 code == NE_EXPR);
3331 return constant_boolean_node (code == NE_EXPR, compare_type);
3332 }
3333 }
3334
3335 /* Single-bit compares should always be against zero. */
3336 if (lbitsize == 1 && ! integer_zerop (rhs))
3337 {
3338 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3339 rhs = fold_convert (type, integer_zero_node);
3340 }
3341
3342 /* Make a new bitfield reference, shift the constant over the
3343 appropriate number of bits and mask it with the computed mask
3344 (in case this was a signed field). If we changed it, make a new one. */
3345 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3346 if (lvolatilep)
3347 {
3348 TREE_SIDE_EFFECTS (lhs) = 1;
3349 TREE_THIS_VOLATILE (lhs) = 1;
3350 }
3351
3352 rhs = const_binop (BIT_AND_EXPR,
3353 const_binop (LSHIFT_EXPR,
3354 fold_convert (unsigned_type, rhs),
3355 size_int (lbitpos), 0),
3356 mask, 0);
3357
3358 return build2 (code, compare_type,
3359 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3360 rhs);
3361 }
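
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): the mask arithmetic used above.  Shifting an
   all-ones word left and then right isolates LBITSIZE bits starting at
   LBITPOS; the values below assume a 32-bit word and little-endian bit
   numbering.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned nbitsize = 32;  /* Width of the containing word.  */
  unsigned lbitsize = 5;   /* Width of the bit-field.  */
  unsigned lbitpos = 7;    /* Bit position within the word.  */

  unsigned mask = ~0u;
  mask <<= nbitsize - lbitsize;            /* LBITSIZE ones at the top.  */
  mask >>= nbitsize - lbitsize - lbitpos;  /* Slide them down to LBITPOS.  */

  printf ("mask = %#x\n", mask);  /* 0xf80: five ones at bit 7.  */
  return 0;
}
#endif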
3362 \f
3363 /* Subroutine for fold_truthop: decode a field reference.
3364
3365 If EXP is a comparison reference, we return the innermost reference.
3366
3367 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3368 set to the starting bit number.
3369
3370 If the innermost field can be completely contained in a mode-sized
3371 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3372
3373    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3374 otherwise it is not changed.
3375
3376 *PUNSIGNEDP is set to the signedness of the field.
3377
3378 *PMASK is set to the mask used. This is either contained in a
3379 BIT_AND_EXPR or derived from the width of the field.
3380
3381 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3382
3383 Return 0 if this is not a component reference or is one that we can't
3384 do anything with. */
3385
3386 static tree
3387 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3388 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3389 int *punsignedp, int *pvolatilep,
3390 tree *pmask, tree *pand_mask)
3391 {
3392 tree outer_type = 0;
3393 tree and_mask = 0;
3394 tree mask, inner, offset;
3395 tree unsigned_type;
3396 unsigned int precision;
3397
3398 /* All the optimizations using this function assume integer fields.
3399 There are problems with FP fields since the type_for_size call
3400 below can fail for, e.g., XFmode. */
3401 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3402 return 0;
3403
3404 /* We are interested in the bare arrangement of bits, so strip everything
3405 that doesn't affect the machine mode. However, record the type of the
3406 outermost expression if it may matter below. */
3407 if (TREE_CODE (exp) == NOP_EXPR
3408 || TREE_CODE (exp) == CONVERT_EXPR
3409 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3410 outer_type = TREE_TYPE (exp);
3411 STRIP_NOPS (exp);
3412
3413 if (TREE_CODE (exp) == BIT_AND_EXPR)
3414 {
3415 and_mask = TREE_OPERAND (exp, 1);
3416 exp = TREE_OPERAND (exp, 0);
3417 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3418 if (TREE_CODE (and_mask) != INTEGER_CST)
3419 return 0;
3420 }
3421
3422 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3423 punsignedp, pvolatilep, false);
3424 if ((inner == exp && and_mask == 0)
3425 || *pbitsize < 0 || offset != 0
3426 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3427 return 0;
3428
3429 /* If the number of bits in the reference is the same as the bitsize of
3430 the outer type, then the outer type gives the signedness. Otherwise
3431 (in case of a small bitfield) the signedness is unchanged. */
3432 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3433 *punsignedp = TYPE_UNSIGNED (outer_type);
3434
3435 /* Compute the mask to access the bitfield. */
3436 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3437 precision = TYPE_PRECISION (unsigned_type);
3438
3439 mask = build_int_cst (unsigned_type, -1);
3440 mask = force_fit_type (mask, 0, false, false);
3441
3442 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3443 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3444
3445 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3446 if (and_mask != 0)
3447 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3448 fold_convert (unsigned_type, and_mask), mask);
3449
3450 *pmask = mask;
3451 *pand_mask = and_mask;
3452 return inner;
3453 }
3454
3455 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3456 bit positions. */
3457
3458 static int
3459 all_ones_mask_p (tree mask, int size)
3460 {
3461 tree type = TREE_TYPE (mask);
3462 unsigned int precision = TYPE_PRECISION (type);
3463 tree tmask;
3464
3465 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3466 tmask = force_fit_type (tmask, 0, false, false);
3467
3468 return
3469 tree_int_cst_equal (mask,
3470 const_binop (RSHIFT_EXPR,
3471 const_binop (LSHIFT_EXPR, tmask,
3472 size_int (precision - size),
3473 0),
3474 size_int (precision - size), 0));
3475 }
3476
3477 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3478 represents the sign bit of EXP's type. If EXP represents a sign
3479 or zero extension, also test VAL against the unextended type.
3480 The return value is the (sub)expression whose sign bit is VAL,
3481 or NULL_TREE otherwise. */
3482
3483 static tree
3484 sign_bit_p (tree exp, tree val)
3485 {
3486 unsigned HOST_WIDE_INT mask_lo, lo;
3487 HOST_WIDE_INT mask_hi, hi;
3488 int width;
3489 tree t;
3490
3491 /* Tree EXP must have an integral type. */
3492 t = TREE_TYPE (exp);
3493 if (! INTEGRAL_TYPE_P (t))
3494 return NULL_TREE;
3495
3496 /* Tree VAL must be an integer constant. */
3497 if (TREE_CODE (val) != INTEGER_CST
3498 || TREE_CONSTANT_OVERFLOW (val))
3499 return NULL_TREE;
3500
3501 width = TYPE_PRECISION (t);
3502 if (width > HOST_BITS_PER_WIDE_INT)
3503 {
3504 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3505 lo = 0;
3506
3507 mask_hi = ((unsigned HOST_WIDE_INT) -1
3508 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3509 mask_lo = -1;
3510 }
3511 else
3512 {
3513 hi = 0;
3514 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3515
3516 mask_hi = 0;
3517 mask_lo = ((unsigned HOST_WIDE_INT) -1
3518 >> (HOST_BITS_PER_WIDE_INT - width));
3519 }
3520
3521 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3522 treat VAL as if it were unsigned. */
3523 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3524 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3525 return exp;
3526
3527 /* Handle extension from a narrower type. */
3528 if (TREE_CODE (exp) == NOP_EXPR
3529 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3530 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3531
3532 return NULL_TREE;
3533 }
3534
3535 /* Subroutine for fold_truthop: determine if an operand is simple enough
3536 to be evaluated unconditionally. */
3537
3538 static int
3539 simple_operand_p (tree exp)
3540 {
3541 /* Strip any conversions that don't change the machine mode. */
3542 STRIP_NOPS (exp);
3543
3544 return (CONSTANT_CLASS_P (exp)
3545 || TREE_CODE (exp) == SSA_NAME
3546 || (DECL_P (exp)
3547 && ! TREE_ADDRESSABLE (exp)
3548 && ! TREE_THIS_VOLATILE (exp)
3549 && ! DECL_NONLOCAL (exp)
3550 /* Don't regard global variables as simple. They may be
3551 allocated in ways unknown to the compiler (shared memory,
3552 #pragma weak, etc). */
3553 && ! TREE_PUBLIC (exp)
3554 && ! DECL_EXTERNAL (exp)
3555 /* Loading a static variable is unduly expensive, but global
3556 registers aren't expensive. */
3557 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3558 }
3559 \f
3560 /* The following functions are subroutines to fold_range_test and allow it to
3561 try to change a logical combination of comparisons into a range test.
3562
3563 For example, both
3564 X == 2 || X == 3 || X == 4 || X == 5
3565 and
3566 X >= 2 && X <= 5
3567 are converted to
3568 (unsigned) (X - 2) <= 3
3569
3570 We describe each set of comparisons as being either inside or outside
3571 a range, using a variable named like IN_P, and then describe the
3572 range with a lower and upper bound. If one of the bounds is omitted,
3573 it represents either the highest or lowest value of the type.
3574
3575 In the comments below, we represent a range by two numbers in brackets
3576 preceded by a "+" to designate being inside that range, or a "-" to
3577 designate being outside that range, so the condition can be inverted by
3578 flipping the prefix. An omitted bound is represented by a "-". For
3579 example, "- [-, 10]" means being outside the range starting at the lowest
3580 possible value and ending at 10, in other words, being greater than 10.
3581 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3582 always false.
3583
3584 We set up things so that the missing bounds are handled in a consistent
3585 manner so neither a missing bound nor "true" and "false" need to be
3586 handled using a special case. */
3587
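
/* Illustrative sketch (hypothetical, fenced off with #if 0 so it is
   never compiled here): the canonical range-test transformation
   described above, checked exhaustively.  The subtraction slides
   [2, 5] down to [0, 3]; values below 2 wrap around to huge unsigned
   numbers and fail the test.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int x, ok = 1;

  for (x = -100; x <= 100; x++)
    {
      int orig = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = ((unsigned) (x - 2) <= 3);

      ok &= orig == range;
    }
  printf (ok ? "equivalent\n" : "not equivalent\n");
  return 0;
}
#endif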
3588 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3589 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3590 and UPPER1_P are nonzero if the respective argument is an upper bound
3591 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3592 must be specified for a comparison. ARG1 will be converted to ARG0's
3593 type if both are specified. */
3594
3595 static tree
3596 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3597 tree arg1, int upper1_p)
3598 {
3599 tree tem;
3600 int result;
3601 int sgn0, sgn1;
3602
3603 /* If neither arg represents infinity, do the normal operation.
3604 Else, if not a comparison, return infinity. Else handle the special
3605 comparison rules. Note that most of the cases below won't occur, but
3606 are handled for consistency. */
3607
3608 if (arg0 != 0 && arg1 != 0)
3609 {
3610 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3611 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3612 STRIP_NOPS (tem);
3613 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3614 }
3615
3616 if (TREE_CODE_CLASS (code) != tcc_comparison)
3617 return 0;
3618
3619 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3620 for neither. In real mathematics, we cannot assume open-ended ranges
3621 are equal. But this is computer arithmetic, where numbers are finite,
3622 so we can stand in for any missing bound with a value Z greater in
3623 magnitude than any representable number. This permits us to treat
3624 unbounded ranges as equal. */
3625 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3626 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3627 switch (code)
3628 {
3629 case EQ_EXPR:
3630 result = sgn0 == sgn1;
3631 break;
3632 case NE_EXPR:
3633 result = sgn0 != sgn1;
3634 break;
3635 case LT_EXPR:
3636 result = sgn0 < sgn1;
3637 break;
3638 case LE_EXPR:
3639 result = sgn0 <= sgn1;
3640 break;
3641 case GT_EXPR:
3642 result = sgn0 > sgn1;
3643 break;
3644 case GE_EXPR:
3645 result = sgn0 >= sgn1;
3646 break;
3647 default:
3648 gcc_unreachable ();
3649 }
3650
3651 return constant_boolean_node (result, type);
3652 }
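/* For instance, under the conventions above a missing lower bound
   compares LT_EXPR-true against a missing upper bound (sgn0 == -1,
   sgn1 == 1), and two missing lower bounds compare EQ_EXPR-true,
   since all missing bounds of the same direction stand for the same
   value Z (or -Z).  */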
3653 \f
3654 /* Given EXP, a logical expression, set the range it is testing into
3655 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3656 actually being tested. *PLOW and *PHIGH will be given the same type
3657 as the returned expression. If EXP is not a comparison, we will most
3658 likely not return a useful value and range. */
3659
3660 static tree
3661 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3662 {
3663 enum tree_code code;
3664 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3665 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3666 int in_p, n_in_p;
3667 tree low, high, n_low, n_high;
3668
3669 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3670 and see if we can refine the range. Some of the cases below may not
3671 happen, but it doesn't seem worth worrying about this. We "continue"
3672 the outer loop when we've changed something; otherwise we "break"
3673 the switch, which will "break" the while. */
3674
3675 in_p = 0;
3676 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3677
3678 while (1)
3679 {
3680 code = TREE_CODE (exp);
3681 exp_type = TREE_TYPE (exp);
3682
3683 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3684 {
3685 if (TREE_CODE_LENGTH (code) > 0)
3686 arg0 = TREE_OPERAND (exp, 0);
3687 if (TREE_CODE_CLASS (code) == tcc_comparison
3688 || TREE_CODE_CLASS (code) == tcc_unary
3689 || TREE_CODE_CLASS (code) == tcc_binary)
3690 arg0_type = TREE_TYPE (arg0);
3691 if (TREE_CODE_CLASS (code) == tcc_binary
3692 || TREE_CODE_CLASS (code) == tcc_comparison
3693 || (TREE_CODE_CLASS (code) == tcc_expression
3694 && TREE_CODE_LENGTH (code) > 1))
3695 arg1 = TREE_OPERAND (exp, 1);
3696 }
3697
3698 switch (code)
3699 {
3700 case TRUTH_NOT_EXPR:
3701 in_p = ! in_p, exp = arg0;
3702 continue;
3703
3704 case EQ_EXPR: case NE_EXPR:
3705 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3706 /* We can only do something if the range is testing for zero
3707 and if the second operand is an integer constant. Note that
3708 saying something is "in" the range we make is done by
3709 complementing IN_P, since IN_P is set for the initial case of
3710 being not equal to zero; "out" leaves it alone. */
3711 if (low == 0 || high == 0
3712 || ! integer_zerop (low) || ! integer_zerop (high)
3713 || TREE_CODE (arg1) != INTEGER_CST)
3714 break;
3715
3716 switch (code)
3717 {
3718 case NE_EXPR: /* - [c, c] */
3719 low = high = arg1;
3720 break;
3721 case EQ_EXPR: /* + [c, c] */
3722 in_p = ! in_p, low = high = arg1;
3723 break;
3724 case GT_EXPR: /* - [-, c] */
3725 low = 0, high = arg1;
3726 break;
3727 case GE_EXPR: /* + [c, -] */
3728 in_p = ! in_p, low = arg1, high = 0;
3729 break;
3730 case LT_EXPR: /* - [c, -] */
3731 low = arg1, high = 0;
3732 break;
3733 case LE_EXPR: /* + [-, c] */
3734 in_p = ! in_p, low = 0, high = arg1;
3735 break;
3736 default:
3737 gcc_unreachable ();
3738 }
3739
3740 /* If this is an unsigned comparison, we also know that EXP is
3741 greater than or equal to zero. We base the range tests we make
3742 on that fact, so we record it here in order to parse existing
3743 range tests. We test arg0_type since often the return type
3744 of, e.g. EQ_EXPR, is boolean. */
3745 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3746 {
3747 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3748 in_p, low, high, 1,
3749 fold_convert (arg0_type, integer_zero_node),
3750 NULL_TREE))
3751 break;
3752
3753 in_p = n_in_p, low = n_low, high = n_high;
3754
3755 /* If the high bound is missing, but we have a nonzero low
3756 bound, reverse the range so it goes from zero to the low bound
3757 minus 1. */
3758 if (high == 0 && low && ! integer_zerop (low))
3759 {
3760 in_p = ! in_p;
3761 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3762 integer_one_node, 0);
3763 low = fold_convert (arg0_type, integer_zero_node);
3764 }
3765 }
3766
3767 exp = arg0;
3768 continue;
3769
3770 case NEGATE_EXPR:
3771 /* (-x) IN [a,b] -> x in [-b, -a] */
3772 n_low = range_binop (MINUS_EXPR, exp_type,
3773 fold_convert (exp_type, integer_zero_node),
3774 0, high, 1);
3775 n_high = range_binop (MINUS_EXPR, exp_type,
3776 fold_convert (exp_type, integer_zero_node),
3777 0, low, 0);
3778 low = n_low, high = n_high;
3779 exp = arg0;
3780 continue;
3781
3782 case BIT_NOT_EXPR:
3783 /* ~ X -> -X - 1 */
3784 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3785 fold_convert (exp_type, integer_one_node));
3786 continue;
3787
3788 case PLUS_EXPR: case MINUS_EXPR:
3789 if (TREE_CODE (arg1) != INTEGER_CST)
3790 break;
3791
3792 /* If EXP is signed, any overflow in the computation is undefined,
3793 so we don't worry about it so long as our computations on
3794 the bounds don't overflow. For unsigned, overflow is defined
3795 and this is exactly the right thing. */
3796 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3797 arg0_type, low, 0, arg1, 0);
3798 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3799 arg0_type, high, 1, arg1, 0);
3800 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3801 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3802 break;
3803
3804 /* Check for an unsigned range which has wrapped around the maximum
3805 value thus making n_high < n_low, and normalize it. */
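	  /* A hypothetical instance: for unsigned char X, parsing
	     X - 250 in + [0, 10] gives candidate bounds 250 and
	     260 mod 256 == 4; since 4 < 250 the range has wrapped, and
	     it is rewritten as X in - [5, 249], the same set.  */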
3806 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3807 {
3808 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3809 integer_one_node, 0);
3810 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3811 integer_one_node, 0);
3812
3813 /* If the range is of the form +/- [ x+1, x ], we won't
3814 be able to normalize it. But then, it represents the
3815 whole range or the empty set, so make it
3816 +/- [ -, - ]. */
3817 if (tree_int_cst_equal (n_low, low)
3818 && tree_int_cst_equal (n_high, high))
3819 low = high = 0;
3820 else
3821 in_p = ! in_p;
3822 }
3823 else
3824 low = n_low, high = n_high;
3825
3826 exp = arg0;
3827 continue;
3828
3829 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3830 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3831 break;
3832
3833 if (! INTEGRAL_TYPE_P (arg0_type)
3834 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3835 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3836 break;
3837
3838 n_low = low, n_high = high;
3839
3840 if (n_low != 0)
3841 n_low = fold_convert (arg0_type, n_low);
3842
3843 if (n_high != 0)
3844 n_high = fold_convert (arg0_type, n_high);
3845
3846
3847 /* If we're converting arg0 from an unsigned type to the signed
3848 type of exp, we will be doing the comparison as unsigned.
3849 The tests above have already verified that LOW and HIGH
3850 are both positive.
3851
3852 So we have to ensure that we will handle large unsigned
3853 values the same way that the current signed bounds treat
3854 negative values. */
3855
3856 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3857 {
3858 tree high_positive;
3859 tree equiv_type = lang_hooks.types.type_for_mode
3860 (TYPE_MODE (arg0_type), 1);
3861
3862 /* A range without an upper bound is, naturally, unbounded.
3863 Since convert would have cropped a very large value, use
3864 the max value for the destination type. */
3865 high_positive
3866 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3867 : TYPE_MAX_VALUE (arg0_type);
3868
3869 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3870 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3871 fold_convert (arg0_type,
3872 high_positive),
3873 fold_convert (arg0_type,
3874 integer_one_node));
3875
3876 /* If the low bound is specified, "and" the range with the
3877 range for which the original unsigned value will be
3878 positive. */
3879 if (low != 0)
3880 {
3881 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3882 1, n_low, n_high, 1,
3883 fold_convert (arg0_type,
3884 integer_zero_node),
3885 high_positive))
3886 break;
3887
3888 in_p = (n_in_p == in_p);
3889 }
3890 else
3891 {
3892 /* Otherwise, "or" the range with the range of the input
3893 that will be interpreted as negative. */
3894 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3895 0, n_low, n_high, 1,
3896 fold_convert (arg0_type,
3897 integer_zero_node),
3898 high_positive))
3899 break;
3900
3901 in_p = (in_p != n_in_p);
3902 }
3903 }
3904
3905 exp = arg0;
3906 low = n_low, high = n_high;
3907 continue;
3908
3909 default:
3910 break;
3911 }
3912
3913 break;
3914 }
3915
3916 /* If EXP is a constant, we can evaluate whether this is true or false. */
3917 if (TREE_CODE (exp) == INTEGER_CST)
3918 {
3919 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3920 exp, 0, low, 0))
3921 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3922 exp, 1, high, 1)));
3923 low = high = 0;
3924 exp = 0;
3925 }
3926
3927 *pin_p = in_p, *plow = low, *phigh = high;
3928 return exp;
3929 }
3930 \f
3931 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3932 type, TYPE, return an expression to test if EXP is in (or out of, depending
3933 on IN_P) the range. Return 0 if the test couldn't be created. */
3934
3935 static tree
3936 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3937 {
3938 tree etype = TREE_TYPE (exp);
3939 tree value;
3940
3941 #ifdef HAVE_canonicalize_funcptr_for_compare
3942 /* Disable this optimization for function pointer expressions
3943 on targets that require function pointer canonicalization. */
3944 if (HAVE_canonicalize_funcptr_for_compare
3945 && TREE_CODE (etype) == POINTER_TYPE
3946 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3947 return NULL_TREE;
3948 #endif
3949
3950 if (! in_p)
3951 {
3952 value = build_range_check (type, exp, 1, low, high);
3953 if (value != 0)
3954 return invert_truthvalue (value);
3955
3956 return 0;
3957 }
3958
3959 if (low == 0 && high == 0)
3960 return fold_convert (type, integer_one_node);
3961
3962 if (low == 0)
3963 return fold_build2 (LE_EXPR, type, exp,
3964 fold_convert (etype, high));
3965
3966 if (high == 0)
3967 return fold_build2 (GE_EXPR, type, exp,
3968 fold_convert (etype, low));
3969
3970 if (operand_equal_p (low, high, 0))
3971 return fold_build2 (EQ_EXPR, type, exp,
3972 fold_convert (etype, low));
3973
3974 if (integer_zerop (low))
3975 {
3976 if (! TYPE_UNSIGNED (etype))
3977 {
3978 etype = lang_hooks.types.unsigned_type (etype);
3979 high = fold_convert (etype, high);
3980 exp = fold_convert (etype, exp);
3981 }
3982 return build_range_check (type, exp, 1, 0, high);
3983 }
3984
3985 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3986 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3987 {
3988 unsigned HOST_WIDE_INT lo;
3989 HOST_WIDE_INT hi;
3990 int prec;
3991
3992 prec = TYPE_PRECISION (etype);
3993 if (prec <= HOST_BITS_PER_WIDE_INT)
3994 {
3995 hi = 0;
3996 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3997 }
3998 else
3999 {
4000 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4001 lo = (unsigned HOST_WIDE_INT) -1;
4002 }
4003
4004 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4005 {
4006 if (TYPE_UNSIGNED (etype))
4007 {
4008 etype = lang_hooks.types.signed_type (etype);
4009 exp = fold_convert (etype, exp);
4010 }
4011 return fold_build2 (GT_EXPR, type, exp,
4012 fold_convert (etype, integer_zero_node));
4013 }
4014 }
4015
4016 value = const_binop (MINUS_EXPR, high, low, 0);
4017 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
4018 {
4019 tree utype, minv, maxv;
4020
4021 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4022 for the type in question, as we rely on this here. */
4023 switch (TREE_CODE (etype))
4024 {
4025 case INTEGER_TYPE:
4026 case ENUMERAL_TYPE:
4027 case CHAR_TYPE:
4028 utype = lang_hooks.types.unsigned_type (etype);
4029 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4030 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4031 integer_one_node, 1);
4032 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4033 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4034 minv, 1, maxv, 1)))
4035 {
4036 etype = utype;
4037 high = fold_convert (etype, high);
4038 low = fold_convert (etype, low);
4039 exp = fold_convert (etype, exp);
4040 value = const_binop (MINUS_EXPR, high, low, 0);
4041 }
4042 break;
4043 default:
4044 break;
4045 }
4046 }
4047
4048 if (value != 0 && ! TREE_OVERFLOW (value))
4049 return build_range_check (type,
4050 fold_build2 (MINUS_EXPR, etype, exp, low),
4051 1, fold_convert (etype, integer_zero_node),
4052 value);
4053
4054 return 0;
4055 }
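/* A worked example (for illustration): for signed int X, the range
   + [2, 5] falls through to the final recursive call as X - 2 in
   + [0, 3]; the zero low bound then switches to the unsigned type,
   giving (unsigned) (X - 2) <= 3, the form shown at the top of this
   group of functions.  */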
4056 \f
4057 /* Given two ranges, see if we can merge them into one. Return 1 if we
4058 can, 0 if we can't. Set the output range into the specified parameters. */
4059
4060 static int
4061 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4062 tree high0, int in1_p, tree low1, tree high1)
4063 {
4064 int no_overlap;
4065 int subset;
4066 int temp;
4067 tree tem;
4068 int in_p;
4069 tree low, high;
4070 int lowequal = ((low0 == 0 && low1 == 0)
4071 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4072 low0, 0, low1, 0)));
4073 int highequal = ((high0 == 0 && high1 == 0)
4074 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4075 high0, 1, high1, 1)));
4076
4077 /* Make range 0 be the range that starts first, or that ends last if
4078 they start at the same value. Swap the two ranges if range 0 is not
already that one. */
4079 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4080 low0, 0, low1, 0))
4081 || (lowequal
4082 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4083 high1, 1, high0, 1))))
4084 {
4085 temp = in0_p, in0_p = in1_p, in1_p = temp;
4086 tem = low0, low0 = low1, low1 = tem;
4087 tem = high0, high0 = high1, high1 = tem;
4088 }
4089
4090 /* Now flag two cases, whether the ranges are disjoint or whether the
4091 second range is totally subsumed in the first. Note that the tests
4092 below are simplified by the ones above. */
4093 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4094 high0, 1, low1, 0));
4095 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4096 high1, 1, high0, 1));
4097
4098 /* We now have four cases, depending on whether we are including or
4099 excluding the two ranges. */
4100 if (in0_p && in1_p)
4101 {
4102 /* If they don't overlap, the result is false. If the second range
4103 is a subset, it is the result. Otherwise, the range is from the start
4104 of the second to the end of the first. */
4105 if (no_overlap)
4106 in_p = 0, low = high = 0;
4107 else if (subset)
4108 in_p = 1, low = low1, high = high1;
4109 else
4110 in_p = 1, low = low1, high = high0;
4111 }
4112
4113 else if (in0_p && ! in1_p)
4114 {
4115 /* If they don't overlap, the result is the first range. If they are
4116 equal, the result is false. If the second range is a subset of the
4117 first, and the ranges begin at the same place, we go from just after
4118 the end of the first range to the end of the second. If the second
4119 range is not a subset of the first, or if it is a subset and both
4120 ranges end at the same place, the range starts at the start of the
4121 first range and ends just before the second range.
4122 Otherwise, we can't describe this as a single range. */
4123 if (no_overlap)
4124 in_p = 1, low = low0, high = high0;
4125 else if (lowequal && highequal)
4126 in_p = 0, low = high = 0;
4127 else if (subset && lowequal)
4128 {
4129 in_p = 1, high = high0;
4130 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4131 integer_one_node, 0);
4132 }
4133 else if (! subset || highequal)
4134 {
4135 in_p = 1, low = low0;
4136 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4137 integer_one_node, 0);
4138 }
4139 else
4140 return 0;
4141 }
4142
4143 else if (! in0_p && in1_p)
4144 {
4145 /* If they don't overlap, the result is the second range. If the second
4146 is a subset of the first, the result is false. Otherwise,
4147 the range starts just after the first range and ends at the
4148 end of the second. */
4149 if (no_overlap)
4150 in_p = 1, low = low1, high = high1;
4151 else if (subset || highequal)
4152 in_p = 0, low = high = 0;
4153 else
4154 {
4155 in_p = 1, high = high1;
4156 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4157 integer_one_node, 0);
4158 }
4159 }
4160
4161 else
4162 {
4163 /* The case where we are excluding both ranges. Here the complex case
4164 is if they don't overlap. In that case, the only time we have a
4165 range is if they are adjacent. If the second is a subset of the
4166 first, the result is the first. Otherwise, the range to exclude
4167 starts at the beginning of the first range and ends at the end of the
4168 second. */
4169 if (no_overlap)
4170 {
4171 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4172 range_binop (PLUS_EXPR, NULL_TREE,
4173 high0, 1,
4174 integer_one_node, 1),
4175 1, low1, 0)))
4176 in_p = 0, low = low0, high = high1;
4177 else
4178 {
4179 /* Canonicalize - [min, x] into - [-, x]. */
4180 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4181 switch (TREE_CODE (TREE_TYPE (low0)))
4182 {
4183 case ENUMERAL_TYPE:
4184 if (TYPE_PRECISION (TREE_TYPE (low0))
4185 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4186 break;
4187 /* FALLTHROUGH */
4188 case INTEGER_TYPE:
4189 case CHAR_TYPE:
4190 if (tree_int_cst_equal (low0,
4191 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4192 low0 = 0;
4193 break;
4194 case POINTER_TYPE:
4195 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4196 && integer_zerop (low0))
4197 low0 = 0;
4198 break;
4199 default:
4200 break;
4201 }
4202
4203 /* Canonicalize - [x, max] into - [x, -]. */
4204 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4205 switch (TREE_CODE (TREE_TYPE (high1)))
4206 {
4207 case ENUMERAL_TYPE:
4208 if (TYPE_PRECISION (TREE_TYPE (high1))
4209 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4210 break;
4211 /* FALLTHROUGH */
4212 case INTEGER_TYPE:
4213 case CHAR_TYPE:
4214 if (tree_int_cst_equal (high1,
4215 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4216 high1 = 0;
4217 break;
4218 case POINTER_TYPE:
4219 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4220 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4221 high1, 1,
4222 integer_one_node, 1)))
4223 high1 = 0;
4224 break;
4225 default:
4226 break;
4227 }
4228
4229 /* The ranges might also be adjacent between the maximum and
4230 minimum values of the given type. For
4231 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4232 return + [x + 1, y - 1]. */
4233 if (low0 == 0 && high1 == 0)
4234 {
4235 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4236 integer_one_node, 1);
4237 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4238 integer_one_node, 0);
4239 if (low == 0 || high == 0)
4240 return 0;
4241
4242 in_p = 1;
4243 }
4244 else
4245 return 0;
4246 }
4247 }
4248 else if (subset)
4249 in_p = 0, low = low0, high = high0;
4250 else
4251 in_p = 0, low = low0, high = high1;
4252 }
4253
4254 *pin_p = in_p, *plow = low, *phigh = high;
4255 return 1;
4256 }
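/* Two illustrative merges: ANDing + [2, 5] with + [4, 10] yields
   + [4, 5], their overlap, while ANDing + [2, 5] with + [7, 10]
   yields the always-false range - [-, -], since the two ranges are
   disjoint.  */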
4257 \f
4258
4259 /* Subroutine of fold, looking inside expressions of the form
4260 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4261 of the COND_EXPR. This function is also used to optimize
4262 A op B ? C : A, by reversing the comparison first.
4263
4264 Return a folded expression whose code is not a COND_EXPR
4265 anymore, or NULL_TREE if no folding opportunity is found. */
4266
4267 static tree
4268 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4269 {
4270 enum tree_code comp_code = TREE_CODE (arg0);
4271 tree arg00 = TREE_OPERAND (arg0, 0);
4272 tree arg01 = TREE_OPERAND (arg0, 1);
4273 tree arg1_type = TREE_TYPE (arg1);
4274 tree tem;
4275
4276 STRIP_NOPS (arg1);
4277 STRIP_NOPS (arg2);
4278
4279 /* If we have A op 0 ? A : -A, consider applying the following
4280 transformations:
4281
4282 A == 0? A : -A same as -A
4283 A != 0? A : -A same as A
4284 A >= 0? A : -A same as abs (A)
4285 A > 0? A : -A same as abs (A)
4286 A <= 0? A : -A same as -abs (A)
4287 A < 0? A : -A same as -abs (A)
4288
4289 None of these transformations work for modes with signed
4290 zeros. If A is +/-0, the first two transformations will
4291 change the sign of the result (from +0 to -0, or vice
4292 versa). The last four will fix the sign of the result,
4293 even though the original expressions could be positive or
4294 negative, depending on the sign of A.
4295
4296 Note that all these transformations are correct if A is
4297 NaN, since the two alternatives (A and -A) are also NaNs. */
4298 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4299 ? real_zerop (arg01)
4300 : integer_zerop (arg01))
4301 && ((TREE_CODE (arg2) == NEGATE_EXPR
4302 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4303 /* In the case that A is of the form X-Y, '-A' (arg2) may
4304 have already been folded to Y-X; check for that. */
4305 || (TREE_CODE (arg1) == MINUS_EXPR
4306 && TREE_CODE (arg2) == MINUS_EXPR
4307 && operand_equal_p (TREE_OPERAND (arg1, 0),
4308 TREE_OPERAND (arg2, 1), 0)
4309 && operand_equal_p (TREE_OPERAND (arg1, 1),
4310 TREE_OPERAND (arg2, 0), 0))))
4311 switch (comp_code)
4312 {
4313 case EQ_EXPR:
4314 case UNEQ_EXPR:
4315 tem = fold_convert (arg1_type, arg1);
4316 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4317 case NE_EXPR:
4318 case LTGT_EXPR:
4319 return pedantic_non_lvalue (fold_convert (type, arg1));
4320 case UNGE_EXPR:
4321 case UNGT_EXPR:
4322 if (flag_trapping_math)
4323 break;
4324 /* Fall through. */
4325 case GE_EXPR:
4326 case GT_EXPR:
4327 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4328 arg1 = fold_convert (lang_hooks.types.signed_type
4329 (TREE_TYPE (arg1)), arg1);
4330 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4331 return pedantic_non_lvalue (fold_convert (type, tem));
4332 case UNLE_EXPR:
4333 case UNLT_EXPR:
4334 if (flag_trapping_math)
4335 break;
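	  /* Fall through.  */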
4336 case LE_EXPR:
4337 case LT_EXPR:
4338 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4339 arg1 = fold_convert (lang_hooks.types.signed_type
4340 (TREE_TYPE (arg1)), arg1);
4341 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4342 return negate_expr (fold_convert (type, tem));
4343 default:
4344 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4345 break;
4346 }
4347
4348 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4349 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4350 both transformations are correct when A is NaN: A != 0
4351 is then true, and A == 0 is false. */
4352
4353 if (integer_zerop (arg01) && integer_zerop (arg2))
4354 {
4355 if (comp_code == NE_EXPR)
4356 return pedantic_non_lvalue (fold_convert (type, arg1));
4357 else if (comp_code == EQ_EXPR)
4358 return fold_convert (type, integer_zero_node);
4359 }
4360
4361 /* Try some transformations of A op B ? A : B.
4362
4363 A == B? A : B same as B
4364 A != B? A : B same as A
4365 A >= B? A : B same as max (A, B)
4366 A > B? A : B same as max (B, A)
4367 A <= B? A : B same as min (A, B)
4368 A < B? A : B same as min (B, A)
4369
4370 As above, these transformations don't work in the presence
4371 of signed zeros. For example, if A and B are zeros of
4372 opposite sign, the first two transformations will change
4373 the sign of the result. In the last four, the original
4374 expressions give different results for (A=+0, B=-0) and
4375 (A=-0, B=+0), but the transformed expressions do not.
4376
4377 The first two transformations are correct if either A or B
4378 is a NaN. In the first transformation, the condition will
4379 be false, and B will indeed be chosen. In the case of the
4380 second transformation, the condition A != B will be true,
4381 and A will be chosen.
4382
4383 The conversions to max() and min() are not correct if B is
4384 a number and A is not. The conditions in the original
4385 expressions will be false, so all four give B. The min()
4386 and max() versions would give a NaN instead. */
4387 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4388 /* Avoid these transformations if the COND_EXPR may be used
4389 as an lvalue in the C++ front-end. PR c++/19199. */
4390 && (in_gimple_form
4391 || strcmp (lang_hooks.name, "GNU C++") != 0
4392 || ! maybe_lvalue_p (arg1)
4393 || ! maybe_lvalue_p (arg2)))
4394 {
4395 tree comp_op0 = arg00;
4396 tree comp_op1 = arg01;
4397 tree comp_type = TREE_TYPE (comp_op0);
4398
4399 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4400 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4401 {
4402 comp_type = type;
4403 comp_op0 = arg1;
4404 comp_op1 = arg2;
4405 }
4406
4407 switch (comp_code)
4408 {
4409 case EQ_EXPR:
4410 return pedantic_non_lvalue (fold_convert (type, arg2));
4411 case NE_EXPR:
4412 return pedantic_non_lvalue (fold_convert (type, arg1));
4413 case LE_EXPR:
4414 case LT_EXPR:
4415 case UNLE_EXPR:
4416 case UNLT_EXPR:
4417 /* In C++ a ?: expression can be an lvalue, so put the
4418 operand which will be used if they are equal first,
4419 so that we can convert this back to the
4420 corresponding COND_EXPR. */
4421 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4422 {
4423 comp_op0 = fold_convert (comp_type, comp_op0);
4424 comp_op1 = fold_convert (comp_type, comp_op1);
4425 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4426 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4427 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4428 return pedantic_non_lvalue (fold_convert (type, tem));
4429 }
4430 break;
4431 case GE_EXPR:
4432 case GT_EXPR:
4433 case UNGE_EXPR:
4434 case UNGT_EXPR:
4435 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4436 {
4437 comp_op0 = fold_convert (comp_type, comp_op0);
4438 comp_op1 = fold_convert (comp_type, comp_op1);
4439 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4440 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4441 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4442 return pedantic_non_lvalue (fold_convert (type, tem));
4443 }
4444 break;
4445 case UNEQ_EXPR:
4446 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4447 return pedantic_non_lvalue (fold_convert (type, arg2));
4448 break;
4449 case LTGT_EXPR:
4450 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4451 return pedantic_non_lvalue (fold_convert (type, arg1));
4452 break;
4453 default:
4454 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4455 break;
4456 }
4457 }
4458
4459 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4460 we might still be able to simplify this. For example,
4461 if C1 is one less or one more than C2, this might have started
4462 out as a MIN or MAX and been transformed by this function.
4463 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
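  /* For instance (an illustrative case), A > 4 ? A : 5 has C1 == 4 and
     C2 == 5; since C1 is C2 - 1, the GT_EXPR case below rewrites it as
     max (A, 5).  */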
4464
4465 if (INTEGRAL_TYPE_P (type)
4466 && TREE_CODE (arg01) == INTEGER_CST
4467 && TREE_CODE (arg2) == INTEGER_CST)
4468 switch (comp_code)
4469 {
4470 case EQ_EXPR:
4471 /* We can replace A with C1 in this case. */
4472 arg1 = fold_convert (type, arg01);
4473 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4474
4475 case LT_EXPR:
4476 /* If C1 is C2 + 1, this is min(A, C2). */
4477 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4478 OEP_ONLY_CONST)
4479 && operand_equal_p (arg01,
4480 const_binop (PLUS_EXPR, arg2,
4481 integer_one_node, 0),
4482 OEP_ONLY_CONST))
4483 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4484 type, arg1, arg2));
4485 break;
4486
4487 case LE_EXPR:
4488 /* If C1 is C2 - 1, this is min(A, C2). */
4489 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4490 OEP_ONLY_CONST)
4491 && operand_equal_p (arg01,
4492 const_binop (MINUS_EXPR, arg2,
4493 integer_one_node, 0),
4494 OEP_ONLY_CONST))
4495 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4496 type, arg1, arg2));
4497 break;
4498
4499 case GT_EXPR:
4500 /* If C1 is C2 - 1, this is max(A, C2). */
4501 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4502 OEP_ONLY_CONST)
4503 && operand_equal_p (arg01,
4504 const_binop (MINUS_EXPR, arg2,
4505 integer_one_node, 0),
4506 OEP_ONLY_CONST))
4507 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4508 type, arg1, arg2));
4509 break;
4510
4511 case GE_EXPR:
4512 /* If C1 is C2 + 1, this is max(A, C2). */
4513 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4514 OEP_ONLY_CONST)
4515 && operand_equal_p (arg01,
4516 const_binop (PLUS_EXPR, arg2,
4517 integer_one_node, 0),
4518 OEP_ONLY_CONST))
4519 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4520 type, arg1, arg2));
4521 break;
4522 case NE_EXPR:
4523 break;
4524 default:
4525 gcc_unreachable ();
4526 }
4527
4528 return NULL_TREE;
4529 }
4530
4531
4532 \f
4533 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4534 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4535 #endif
4536
4537 /* EXP is some logical combination of boolean tests. See if we can
4538 merge it into some range test. Return the new tree if so. */
4539
4540 static tree
4541 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4542 {
4543 int or_op = (code == TRUTH_ORIF_EXPR
4544 || code == TRUTH_OR_EXPR);
4545 int in0_p, in1_p, in_p;
4546 tree low0, low1, low, high0, high1, high;
4547 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4548 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4549 tree tem;
4550
4551 /* If this is an OR operation, invert both sides; we will invert
4552 again at the end. */
4553 if (or_op)
4554 in0_p = ! in0_p, in1_p = ! in1_p;
4555
4556 /* If both expressions are the same, if we can merge the ranges, and we
4557 can build the range test, return it or it inverted. If one of the
4558 ranges is always true or always false, consider it to be the same
4559 expression as the other. */
4560 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4561 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4562 in1_p, low1, high1)
4563 && 0 != (tem = (build_range_check (type,
4564 lhs != 0 ? lhs
4565 : rhs != 0 ? rhs : integer_zero_node,
4566 in_p, low, high))))
4567 return or_op ? invert_truthvalue (tem) : tem;
4568
4569 /* On machines where the branch cost is expensive, if this is a
4570 short-circuited branch and the underlying object on both sides
4571 is the same, make a non-short-circuit operation. */
4572 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4573 && lhs != 0 && rhs != 0
4574 && (code == TRUTH_ANDIF_EXPR
4575 || code == TRUTH_ORIF_EXPR)
4576 && operand_equal_p (lhs, rhs, 0))
4577 {
4578 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4579 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4580 which cases we can't do this. */
4581 if (simple_operand_p (lhs))
4582 return build2 (code == TRUTH_ANDIF_EXPR
4583 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4584 type, op0, op1);
4585
4586 else if (lang_hooks.decls.global_bindings_p () == 0
4587 && ! CONTAINS_PLACEHOLDER_P (lhs))
4588 {
4589 tree common = save_expr (lhs);
4590
4591 if (0 != (lhs = build_range_check (type, common,
4592 or_op ? ! in0_p : in0_p,
4593 low0, high0))
4594 && (0 != (rhs = build_range_check (type, common,
4595 or_op ? ! in1_p : in1_p,
4596 low1, high1))))
4597 return build2 (code == TRUTH_ANDIF_EXPR
4598 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4599 type, lhs, rhs);
4600 }
4601 }
4602
4603 return 0;
4604 }
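/* For example, the classic test ch >= '0' && ch <= '9' is handled
   here: both operands describe ranges over ch, merge_ranges combines
   them into + ['0', '9'], and build_range_check emits the equivalent
   of (unsigned) (ch - '0') <= 9.  */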
4605 \f
4606 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4607 P-bit value. Arrange things so the extra bits will be set to zero if
4608 and only if C is sign-extended to its full width. If MASK is nonzero,
4609 it is an INTEGER_CST that should be AND'ed with the extra bits. */
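/* A hypothetical 8-bit illustration, with MODESIZE == 8, P == 4,
   UNSIGNEDP zero and no MASK: C == 0xfa is the sign extension of the
   4-bit value 0xa, and the result is 0x0a, whose extra bits are zero;
   C == 0x0a is not sign-extended (bit 3 is set but the upper bits are
   clear), and the result is 0xfa, whose extra bits are nonzero.  */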
4610
4611 static tree
4612 unextend (tree c, int p, int unsignedp, tree mask)
4613 {
4614 tree type = TREE_TYPE (c);
4615 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4616 tree temp;
4617
4618 if (p == modesize || unsignedp)
4619 return c;
4620
4621 /* We work by getting just the sign bit into the low-order bit, then
4622 into the high-order bit, then sign-extend. We then XOR that value
4623 with C. */
4624 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4625 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4626
4627 /* We must use a signed type in order to get an arithmetic right shift.
4628 However, we must also avoid introducing accidental overflows, so that
4629 a subsequent call to integer_zerop will work. Hence we must
4630 do the type conversion here. At this point, the constant is either
4631 zero or one, and the conversion to a signed type can never overflow.
4632 We could get an overflow if this conversion is done anywhere else. */
4633 if (TYPE_UNSIGNED (type))
4634 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4635
4636 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4637 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4638 if (mask != 0)
4639 temp = const_binop (BIT_AND_EXPR, temp,
4640 fold_convert (TREE_TYPE (c), mask), 0);
4641 /* If necessary, convert the type back to match the type of C. */
4642 if (TYPE_UNSIGNED (type))
4643 temp = fold_convert (type, temp);
4644
4645 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4646 }
4647 \f
4648 /* Find ways of folding logical expressions of LHS and RHS:
4649 Try to merge two comparisons to the same innermost item.
4650 Look for range tests like "ch >= '0' && ch <= '9'".
4651 Look for combinations of simple terms on machines with expensive branches
4652 and evaluate the RHS unconditionally.
4653
4654 For example, if we have p->a == 2 && p->b == 4 and we can make an
4655 object large enough to span both A and B, we can do this with a comparison
4656 against the object ANDed with a mask.
4657
4658 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4659 operations to do this with one comparison.
4660
4661 We check for both normal comparisons and the BIT_AND_EXPRs made by
4662 this function and the one above.
4663
4664 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4665 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4666
4667 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4668 two operands.
4669
4670 We return the simplified tree or 0 if no optimization is possible. */
4671
4672 static tree
4673 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4674 {
4675 /* If this is the "or" of two comparisons, we can do something if
4676 the comparisons are NE_EXPR. If this is the "and", we can do something
4677 if the comparisons are EQ_EXPR. I.e.,
4678 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4679
4680 WANTED_CODE is this operation code. For single bit fields, we can
4681 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4682 comparison for one-bit fields. */
4683
4684 enum tree_code wanted_code;
4685 enum tree_code lcode, rcode;
4686 tree ll_arg, lr_arg, rl_arg, rr_arg;
4687 tree ll_inner, lr_inner, rl_inner, rr_inner;
4688 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4689 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4690 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4691 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4692 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4693 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4694 enum machine_mode lnmode, rnmode;
4695 tree ll_mask, lr_mask, rl_mask, rr_mask;
4696 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4697 tree l_const, r_const;
4698 tree lntype, rntype, result;
4699 int first_bit, end_bit;
4700 int volatilep;
4701
4702 /* Start by getting the comparison codes. Fail if anything is volatile.
4703 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4704 it were surrounded with a NE_EXPR. */
4705
4706 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4707 return 0;
4708
4709 lcode = TREE_CODE (lhs);
4710 rcode = TREE_CODE (rhs);
4711
4712 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4713 {
4714 lhs = build2 (NE_EXPR, truth_type, lhs,
4715 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4716 lcode = NE_EXPR;
4717 }
4718
4719 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4720 {
4721 rhs = build2 (NE_EXPR, truth_type, rhs,
4722 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4723 rcode = NE_EXPR;
4724 }
4725
4726 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4727 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4728 return 0;
4729
4730 ll_arg = TREE_OPERAND (lhs, 0);
4731 lr_arg = TREE_OPERAND (lhs, 1);
4732 rl_arg = TREE_OPERAND (rhs, 0);
4733 rr_arg = TREE_OPERAND (rhs, 1);
4734
4735 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4736 if (simple_operand_p (ll_arg)
4737 && simple_operand_p (lr_arg))
4738 {
4739 tree result;
4740 if (operand_equal_p (ll_arg, rl_arg, 0)
4741 && operand_equal_p (lr_arg, rr_arg, 0))
4742 {
4743 result = combine_comparisons (code, lcode, rcode,
4744 truth_type, ll_arg, lr_arg);
4745 if (result)
4746 return result;
4747 }
4748 else if (operand_equal_p (ll_arg, rr_arg, 0)
4749 && operand_equal_p (lr_arg, rl_arg, 0))
4750 {
4751 result = combine_comparisons (code, lcode,
4752 swap_tree_comparison (rcode),
4753 truth_type, ll_arg, lr_arg);
4754 if (result)
4755 return result;
4756 }
4757 }
4758
4759 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4760 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4761
4762 /* If the RHS can be evaluated unconditionally and its operands are
4763 simple, it wins to evaluate the RHS unconditionally on machines
4764 with expensive branches. In this case, this isn't a comparison
4765 that can be merged. Avoid doing this if the RHS is a floating-point
4766 comparison since those can trap. */
4767
4768 if (BRANCH_COST >= 2
4769 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4770 && simple_operand_p (rl_arg)
4771 && simple_operand_p (rr_arg))
4772 {
4773 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4774 if (code == TRUTH_OR_EXPR
4775 && lcode == NE_EXPR && integer_zerop (lr_arg)
4776 && rcode == NE_EXPR && integer_zerop (rr_arg)
4777 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4778 return build2 (NE_EXPR, truth_type,
4779 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4780 ll_arg, rl_arg),
4781 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4782
4783 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4784 if (code == TRUTH_AND_EXPR
4785 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4786 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4787 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4788 return build2 (EQ_EXPR, truth_type,
4789 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4790 ll_arg, rl_arg),
4791 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4792
4793 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4794 return build2 (code, truth_type, lhs, rhs);
4795 }
4796
4797 /* See if the comparisons can be merged. Then get all the parameters for
4798 each side. */
4799
4800 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4801 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4802 return 0;
4803
4804 volatilep = 0;
4805 ll_inner = decode_field_reference (ll_arg,
4806 &ll_bitsize, &ll_bitpos, &ll_mode,
4807 &ll_unsignedp, &volatilep, &ll_mask,
4808 &ll_and_mask);
4809 lr_inner = decode_field_reference (lr_arg,
4810 &lr_bitsize, &lr_bitpos, &lr_mode,
4811 &lr_unsignedp, &volatilep, &lr_mask,
4812 &lr_and_mask);
4813 rl_inner = decode_field_reference (rl_arg,
4814 &rl_bitsize, &rl_bitpos, &rl_mode,
4815 &rl_unsignedp, &volatilep, &rl_mask,
4816 &rl_and_mask);
4817 rr_inner = decode_field_reference (rr_arg,
4818 &rr_bitsize, &rr_bitpos, &rr_mode,
4819 &rr_unsignedp, &volatilep, &rr_mask,
4820 &rr_and_mask);
4821
4822 /* The inner operation on the lhs of each comparison must be the
4823 same if we are to be able to do anything.
4824 Then see if we have constants. If not, the same must be true for
4825 the rhs's. */
4826 if (volatilep || ll_inner == 0 || rl_inner == 0
4827 || ! operand_equal_p (ll_inner, rl_inner, 0))
4828 return 0;
4829
4830 if (TREE_CODE (lr_arg) == INTEGER_CST
4831 && TREE_CODE (rr_arg) == INTEGER_CST)
4832 l_const = lr_arg, r_const = rr_arg;
4833 else if (lr_inner == 0 || rr_inner == 0
4834 || ! operand_equal_p (lr_inner, rr_inner, 0))
4835 return 0;
4836 else
4837 l_const = r_const = 0;
4838
4839 /* If either comparison code is not correct for our logical operation,
4840 fail. However, we can convert a one-bit comparison against zero into
4841 the opposite comparison against that bit being set in the field. */
4842
4843 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4844 if (lcode != wanted_code)
4845 {
4846 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4847 {
4848 /* Make the left operand unsigned, since we are only interested
4849 in the value of one bit. Otherwise we are doing the wrong
4850 thing below. */
4851 ll_unsignedp = 1;
4852 l_const = ll_mask;
4853 }
4854 else
4855 return 0;
4856 }
4857
4858 /* This is analogous to the code for l_const above. */
4859 if (rcode != wanted_code)
4860 {
4861 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4862 {
4863 rl_unsignedp = 1;
4864 r_const = rl_mask;
4865 }
4866 else
4867 return 0;
4868 }
4869
4870 /* After this point all optimizations will generate bit-field
4871 references, which we might not want. */
4872 if (! lang_hooks.can_use_bit_fields_p ())
4873 return 0;
4874
4875 /* See if we can find a mode that contains both fields being compared on
4876 the left. If we can't, fail. Otherwise, update all constants and masks
4877 to be relative to a field of that size. */
4878 first_bit = MIN (ll_bitpos, rl_bitpos);
4879 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4880 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4881 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4882 volatilep);
4883 if (lnmode == VOIDmode)
4884 return 0;
4885
4886 lnbitsize = GET_MODE_BITSIZE (lnmode);
4887 lnbitpos = first_bit & ~ (lnbitsize - 1);
4888 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4889 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4890
4891 if (BYTES_BIG_ENDIAN)
4892 {
4893 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4894 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4895 }
4896
4897 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4898 size_int (xll_bitpos), 0);
4899 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4900 size_int (xrl_bitpos), 0);
4901
4902 if (l_const)
4903 {
4904 l_const = fold_convert (lntype, l_const);
4905 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4906 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4907 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4908 fold_build1 (BIT_NOT_EXPR,
4909 lntype, ll_mask),
4910 0)))
4911 {
4912 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4913
4914 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4915 }
4916 }
4917 if (r_const)
4918 {
4919 r_const = fold_convert (lntype, r_const);
4920 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4921 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4922 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4923 fold_build1 (BIT_NOT_EXPR,
4924 lntype, rl_mask),
4925 0)))
4926 {
4927 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4928
4929 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4930 }
4931 }
4932
4933 /* If the right sides are not constant, do the same for them. Also,
4934 disallow this optimization if a size or signedness mismatch occurs
4935 between the left and right sides. */
4936 if (l_const == 0)
4937 {
4938 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4939 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4940 /* Make sure the two fields on the right
4941 correspond to the left without being swapped. */
4942 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4943 return 0;
4944
4945 first_bit = MIN (lr_bitpos, rr_bitpos);
4946 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4947 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4948 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4949 volatilep);
4950 if (rnmode == VOIDmode)
4951 return 0;
4952
4953 rnbitsize = GET_MODE_BITSIZE (rnmode);
4954 rnbitpos = first_bit & ~ (rnbitsize - 1);
4955 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4956 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4957
4958 if (BYTES_BIG_ENDIAN)
4959 {
4960 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4961 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4962 }
4963
4964 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4965 size_int (xlr_bitpos), 0);
4966 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4967 size_int (xrr_bitpos), 0);
4968
4969 /* Make a mask that corresponds to both fields being compared.
4970 Do this for both items being compared. If the operands are the
4971 same size and the bits being compared are in the same position
4972 then we can do this by masking both and comparing the masked
4973 results. */
4974 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4975 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4976 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4977 {
4978 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4979 ll_unsignedp || rl_unsignedp);
4980 if (! all_ones_mask_p (ll_mask, lnbitsize))
4981 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4982
4983 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4984 lr_unsignedp || rr_unsignedp);
4985 if (! all_ones_mask_p (lr_mask, rnbitsize))
4986 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4987
4988 return build2 (wanted_code, truth_type, lhs, rhs);
4989 }
4990
4991 /* There is still another way we can do something: If both pairs of
4992 fields being compared are adjacent, we may be able to make a wider
4993 field containing them both.
4994
4995 Note that we still must mask the lhs/rhs expressions. Furthermore,
4996 the mask must be shifted to account for the shift done by
4997 make_bit_field_ref. */
4998 if ((ll_bitsize + ll_bitpos == rl_bitpos
4999 && lr_bitsize + lr_bitpos == rr_bitpos)
5000 || (ll_bitpos == rl_bitpos + rl_bitsize
5001 && lr_bitpos == rr_bitpos + rr_bitsize))
5002 {
5003 tree type;
5004
5005 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5006 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5007 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5008 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5009
5010 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5011 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5012 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5013 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5014
5015 /* Convert to the smaller type before masking out unwanted bits. */
5016 type = lntype;
5017 if (lntype != rntype)
5018 {
5019 if (lnbitsize > rnbitsize)
5020 {
5021 lhs = fold_convert (rntype, lhs);
5022 ll_mask = fold_convert (rntype, ll_mask);
5023 type = rntype;
5024 }
5025 else if (lnbitsize < rnbitsize)
5026 {
5027 rhs = fold_convert (lntype, rhs);
5028 lr_mask = fold_convert (lntype, lr_mask);
5029 type = lntype;
5030 }
5031 }
5032
5033 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5034 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5035
5036 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5037 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5038
5039 return build2 (wanted_code, truth_type, lhs, rhs);
5040 }
5041
5042 return 0;
5043 }
5044
5045 /* Handle the case of comparisons with constants. If there is something in
5046 common between the masks, those bits of the constants must be the same.
5047 If not, the condition is always false. Test for this to avoid generating
5048 incorrect code below. */
5049 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5050 if (! integer_zerop (result)
5051 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5052 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5053 {
5054 if (wanted_code == NE_EXPR)
5055 {
5056 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5057 return constant_boolean_node (true, truth_type);
5058 }
5059 else
5060 {
5061 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5062 return constant_boolean_node (false, truth_type);
5063 }
5064 }
5065
5066 /* Construct the expression we will return. First get the component
5067 reference we will make. Unless the mask is all ones the width of
5068 that field, perform the mask operation. Then compare with the
5069 merged constant. */
5070 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5071 ll_unsignedp || rl_unsignedp);
5072
5073 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5074 if (! all_ones_mask_p (ll_mask, lnbitsize))
5075 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5076
5077 return build2 (wanted_code, truth_type, result,
5078 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5079 }
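/* An illustrative merge of the kind described above: for a struct
   with adjacent bit-fields a and b that fit in one word,
   p->a == 2 && p->b == 4 can become a single load of the containing
   word, a BIT_AND_EXPR with the union of the two field masks, and one
   EQ_EXPR against the merged shifted constants.  */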
5080 \f
5081 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5082 constant. */
5083
5084 static tree
5085 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5086 {
5087 tree arg0 = op0;
5088 enum tree_code op_code;
5089 tree comp_const = op1;
5090 tree minmax_const;
5091 int consts_equal, consts_lt;
5092 tree inner;
5093
5094 STRIP_SIGN_NOPS (arg0);
5095
5096 op_code = TREE_CODE (arg0);
5097 minmax_const = TREE_OPERAND (arg0, 1);
5098 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5099 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5100 inner = TREE_OPERAND (arg0, 0);
5101
5102 /* If something does not permit us to optimize, return NULL_TREE. */
5103 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5104 || TREE_CODE (comp_const) != INTEGER_CST
5105 || TREE_CONSTANT_OVERFLOW (comp_const)
5106 || TREE_CODE (minmax_const) != INTEGER_CST
5107 || TREE_CONSTANT_OVERFLOW (minmax_const))
5108 return NULL_TREE;
5109
5110 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5111 and GT_EXPR, doing the rest with recursive calls using logical
5112 simplifications. */
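  /* For example, GE_EXPR recurses as an OR of EQ_EXPR and GT_EXPR:
     MIN (X, 5) >= 5 becomes MIN (X, 5) == 5 || MIN (X, 5) > 5, that
     is X >= 5 || false, which folds to X >= 5.  */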
5113 switch (code)
5114 {
5115 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5116 {
5117 /* FIXME: We should be able to invert code without building a
5118 scratch tree node, but doing so would require us to
5119 duplicate a part of invert_truthvalue here. */
5120 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5121 tem = optimize_minmax_comparison (TREE_CODE (tem),
5122 TREE_TYPE (tem),
5123 TREE_OPERAND (tem, 0),
5124 TREE_OPERAND (tem, 1));
5125 return invert_truthvalue (tem);
5126 }
5127
5128 case GE_EXPR:
5129 return
5130 fold_build2 (TRUTH_ORIF_EXPR, type,
5131 optimize_minmax_comparison
5132 (EQ_EXPR, type, arg0, comp_const),
5133 optimize_minmax_comparison
5134 (GT_EXPR, type, arg0, comp_const));
5135
5136 case EQ_EXPR:
5137 if (op_code == MAX_EXPR && consts_equal)
5138 /* MAX (X, 0) == 0 -> X <= 0 */
5139 return fold_build2 (LE_EXPR, type, inner, comp_const);
5140
5141 else if (op_code == MAX_EXPR && consts_lt)
5142 /* MAX (X, 0) == 5 -> X == 5 */
5143 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5144
5145 else if (op_code == MAX_EXPR)
5146 /* MAX (X, 0) == -1 -> false */
5147 return omit_one_operand (type, integer_zero_node, inner);
5148
5149 else if (consts_equal)
5150 /* MIN (X, 0) == 0 -> X >= 0 */
5151 return fold_build2 (GE_EXPR, type, inner, comp_const);
5152
5153 else if (consts_lt)
5154 /* MIN (X, 0) == 5 -> false */
5155 return omit_one_operand (type, integer_zero_node, inner);
5156
5157 else
5158 /* MIN (X, 0) == -1 -> X == -1 */
5159 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5160
5161 case GT_EXPR:
5162 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5163 /* MAX (X, 0) > 0 -> X > 0
5164 MAX (X, 0) > 5 -> X > 5 */
5165 return fold_build2 (GT_EXPR, type, inner, comp_const);
5166
5167 else if (op_code == MAX_EXPR)
5168 /* MAX (X, 0) > -1 -> true */
5169 return omit_one_operand (type, integer_one_node, inner);
5170
5171 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5172 /* MIN (X, 0) > 0 -> false
5173 MIN (X, 0) > 5 -> false */
5174 return omit_one_operand (type, integer_zero_node, inner);
5175
5176 else
5177 /* MIN (X, 0) > -1 -> X > -1 */
5178 return fold_build2 (GT_EXPR, type, inner, comp_const);
5179
5180 default:
5181 return NULL_TREE;
5182 }
5183 }
5184 \f
5185 /* T is an integer expression that is being multiplied or divided by,
5186 or reduced modulo, a constant C (CODE says which operation and what
5187 kind of divide or modulus). See if we can eliminate that operation by folding it with
5188 other operations already in T. WIDE_TYPE, if non-null, is a type that
5189 should be used for the computation if wider than our type.
5190
5191 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5192 (X * 2) + (Y * 4). We must, however, be assured that either the original
5193 expression would not overflow or that overflow is undefined for the type
5194 in the language in question.
5195
5196 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5197 the machine has a multiply-accumulate insn or that this is part of an
5198 addressing calculation.
5199
5200 If we return a non-null expression, it is an equivalent form of the
5201 original computation, but need not be in the original type. */
5202
5203 static tree
5204 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5205 {
5206 /* To avoid exponential search depth, refuse to allow recursion past
5207 three levels. Beyond that (1) it's highly unlikely that we'll find
5208 something interesting and (2) we've probably processed it before
5209 when we built the inner expression. */
5210
5211 static int depth;
5212 tree ret;
5213
5214 if (depth > 3)
5215 return NULL;
5216
5217 depth++;
5218 ret = extract_muldiv_1 (t, c, code, wide_type);
5219 depth--;
5220
5221 return ret;
5222 }
5223
5224 static tree
5225 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5226 {
5227 tree type = TREE_TYPE (t);
5228 enum tree_code tcode = TREE_CODE (t);
5229 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5230 > GET_MODE_SIZE (TYPE_MODE (type)))
5231 ? wide_type : type);
5232 tree t1, t2;
5233 int same_p = tcode == code;
5234 tree op0 = NULL_TREE, op1 = NULL_TREE;
5235
5236 /* Don't deal with constants of zero here; they confuse the code below. */
5237 if (integer_zerop (c))
5238 return NULL_TREE;
5239
5240 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5241 op0 = TREE_OPERAND (t, 0);
5242
5243 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5244 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5245
5246 /* Note that we need not handle conditional operations here since fold
5247 already handles those cases. So just do arithmetic here. */
5248 switch (tcode)
5249 {
5250 case INTEGER_CST:
5251 /* For a constant, we can always simplify if we are a multiply
5252 or (for divide and modulus) if it is a multiple of our constant. */
5253 if (code == MULT_EXPR
5254 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5255 return const_binop (code, fold_convert (ctype, t),
5256 fold_convert (ctype, c), 0);
5257 break;
5258
5259 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5260 /* If op0 is an expression ... */
5261 if ((COMPARISON_CLASS_P (op0)
5262 || UNARY_CLASS_P (op0)
5263 || BINARY_CLASS_P (op0)
5264 || EXPRESSION_CLASS_P (op0))
5265 /* ... and is unsigned, and its type is smaller than ctype,
5266 then we cannot pass through as widening. */
5267 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5268 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5269 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5270 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5271 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5272 /* ... or this is a truncation (t is narrower than op0),
5273 then we cannot pass through this narrowing. */
5274 || (GET_MODE_SIZE (TYPE_MODE (type))
5275 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5276 /* ... or signedness changes for division or modulus,
5277 then we cannot pass through this conversion. */
5278 || (code != MULT_EXPR
5279 && (TYPE_UNSIGNED (ctype)
5280 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5281 break;
5282
5283 /* Pass the constant down and see if we can make a simplification. If
5284 we can, replace this expression with the inner simplification for
5285 possible later conversion to our or some other type. */
5286 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5287 && TREE_CODE (t2) == INTEGER_CST
5288 && ! TREE_CONSTANT_OVERFLOW (t2)
5289 && (0 != (t1 = extract_muldiv (op0, t2, code,
5290 code == MULT_EXPR
5291 ? ctype : NULL_TREE))))
5292 return t1;
5293 break;
5294
5295 case ABS_EXPR:
5296 /* If widening the type changes it from signed to unsigned, then we
5297 must avoid building ABS_EXPR itself as unsigned. */
5298 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5299 {
5300 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5301 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5302 {
5303 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5304 return fold_convert (ctype, t1);
5305 }
5306 break;
5307 }
5308 /* FALLTHROUGH */
5309 case NEGATE_EXPR:
5310 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5311 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5312 break;
5313
5314 case MIN_EXPR: case MAX_EXPR:
5315 /* If widening the type changes the signedness, then we can't perform
5316 this optimization as that changes the result. */
5317 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5318 break;
5319
5320 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5321 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5322 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5323 {
5324 if (tree_int_cst_sgn (c) < 0)
5325 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5326
5327 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5328 fold_convert (ctype, t2));
5329 }
5330 break;
5331
5332 case LSHIFT_EXPR: case RSHIFT_EXPR:
5333 /* If the second operand is constant, this is a multiplication
5334 or floor division by a power of two, so we can treat it that
5335 way unless the multiplier or divisor overflows. Signed
5336 left-shift overflow is implementation-defined rather than
5337 undefined in C90, so do not convert signed left shift into
5338 multiplication. */
5339 if (TREE_CODE (op1) == INTEGER_CST
5340 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5341 /* const_binop may not detect overflow correctly,
5342 so check for it explicitly here. */
5343 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5344 && TREE_INT_CST_HIGH (op1) == 0
5345 && 0 != (t1 = fold_convert (ctype,
5346 const_binop (LSHIFT_EXPR,
5347 size_one_node,
5348 op1, 0)))
5349 && ! TREE_OVERFLOW (t1))
5350 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5351 ? MULT_EXPR : FLOOR_DIV_EXPR,
5352 ctype, fold_convert (ctype, op0), t1),
5353 c, code, wide_type);
5354 break;
5355
5356 case PLUS_EXPR: case MINUS_EXPR:
5357 /* See if we can eliminate the operation on both sides. If we can, we
5358 can return a new PLUS or MINUS. If we can't, the only remaining
5359 cases where we can do anything are if the second operand is a
5360 constant. */
5361 t1 = extract_muldiv (op0, c, code, wide_type);
5362 t2 = extract_muldiv (op1, c, code, wide_type);
5363 if (t1 != 0 && t2 != 0
5364 && (code == MULT_EXPR
5365 /* If not multiplication, we can only do this if both operands
5366 are divisible by c. */
5367 || (multiple_of_p (ctype, op0, c)
5368 && multiple_of_p (ctype, op1, c))))
5369 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5370 fold_convert (ctype, t2));
5371
5372 /* If this was a subtraction, negate OP1 and set it to be an addition.
5373 This simplifies the logic below. */
5374 if (tcode == MINUS_EXPR)
5375 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5376
5377 if (TREE_CODE (op1) != INTEGER_CST)
5378 break;
5379
5380 /* If either OP1 or C is negative, this optimization is not safe for
5381 some of the division and remainder types while for others we need
5382 to change the code. */
5383 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5384 {
5385 if (code == CEIL_DIV_EXPR)
5386 code = FLOOR_DIV_EXPR;
5387 else if (code == FLOOR_DIV_EXPR)
5388 code = CEIL_DIV_EXPR;
5389 else if (code != MULT_EXPR
5390 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5391 break;
5392 }
5393
5394 /* If it's a multiply or a division/modulus operation of a multiple
5395 of our constant, do the operation and verify it doesn't overflow. */
5396 if (code == MULT_EXPR
5397 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5398 {
5399 op1 = const_binop (code, fold_convert (ctype, op1),
5400 fold_convert (ctype, c), 0);
5401 /* We allow the constant to overflow with wrapping semantics. */
5402 if (op1 == 0
5403 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5404 break;
5405 }
5406 else
5407 break;
5408
5409 /* If we have an unsigned type that is not a sizetype, we cannot widen
5410 the operation since it will change the result if the original
5411 computation overflowed. */
5412 if (TYPE_UNSIGNED (ctype)
5413 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5414 && ctype != type)
5415 break;
5416
5417 /* If we were able to eliminate our operation from the first side,
5418 apply our operation to the second side and reform the PLUS. */
5419 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5420 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5421
5422 /* The last case is if we are a multiply. In that case, we can
5423 apply the distributive law to commute the multiply and addition
5424 if the multiplication of the constants doesn't overflow. */
5425 if (code == MULT_EXPR)
5426 return fold_build2 (tcode, ctype,
5427 fold_build2 (code, ctype,
5428 fold_convert (ctype, op0),
5429 fold_convert (ctype, c)),
5430 op1);
5431
5432 break;
5433
5434 case MULT_EXPR:
5435 /* We have a special case here if we are doing something like
5436 (C * 8) % 4 since we know that's zero. */
5437 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5438 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5439 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5440 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5441 return omit_one_operand (type, integer_zero_node, op0);
5442
5443 /* ... fall through ... */
5444
5445 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5446 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5447 /* If we can extract our operation from the LHS, do so and return a
5448 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5449 do something only if the second operand is a constant. */
5450 if (same_p
5451 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5452 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5453 fold_convert (ctype, op1));
5454 else if (tcode == MULT_EXPR && code == MULT_EXPR
5455 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5456 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5457 fold_convert (ctype, t1));
5458 else if (TREE_CODE (op1) != INTEGER_CST)
5459 return 0;
5460
5461 /* If these are the same operation types, we can associate them
5462 assuming no overflow. */
5463 if (tcode == code
5464 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5465 fold_convert (ctype, c), 0))
5466 && ! TREE_OVERFLOW (t1))
5467 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5468
5469 /* If these operations "cancel" each other, we have the main
5470 optimizations of this pass, which occur when either constant is a
5471 multiple of the other, in which case we replace this with either an
5472 operation of CODE or TCODE.
5473
5474 If we have an unsigned type that is not a sizetype, we cannot do
5475 this since it will change the result if the original computation
5476 overflowed. */
5477 if ((! TYPE_UNSIGNED (ctype)
5478 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5479 && ! flag_wrapv
5480 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5481 || (tcode == MULT_EXPR
5482 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5483 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5484 {
5485 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5486 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5487 fold_convert (ctype,
5488 const_binop (TRUNC_DIV_EXPR,
5489 op1, c, 0)));
5490 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5491 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5492 fold_convert (ctype,
5493 const_binop (TRUNC_DIV_EXPR,
5494 c, op1, 0)));
5495 }
5496 break;
5497
5498 default:
5499 break;
5500 }
5501
5502 return 0;
5503 }
5504 \f
5505 /* Return a node which has the indicated constant VALUE (either 0 or
5506 1), and is of the indicated TYPE. */
5507
5508 tree
5509 constant_boolean_node (int value, tree type)
5510 {
5511 if (type == integer_type_node)
5512 return value ? integer_one_node : integer_zero_node;
5513 else if (type == boolean_type_node)
5514 return value ? boolean_true_node : boolean_false_node;
5515 else
5516 return build_int_cst (type, value);
5517 }
5518
5519
5520 /* Return true if EXPR looks like an ARRAY_REF and set *BASE and
5521 *OFFSET to the appropriate trees. If there is no offset,
5522 *OFFSET is set to NULL_TREE. *BASE will be canonicalized to
5523 something you can get the element type from using
5524 TREE_TYPE (TREE_TYPE (*BASE)). */
5525
5526 static bool
5527 extract_array_ref (tree expr, tree *base, tree *offset)
5528 {
5529 /* One canonical form is a PLUS_EXPR with the first
5530 argument being an ADDR_EXPR with a possible NOP_EXPR
5531 attached. */
5532 if (TREE_CODE (expr) == PLUS_EXPR)
5533 {
5534 tree op0 = TREE_OPERAND (expr, 0);
5535 tree inner_base, dummy1;
5536 /* Strip NOP_EXPRs here because the C front ends and/or
5537 folders may present us with (int *)&x.a + 4B. */
5538 STRIP_NOPS (op0);
5539 if (extract_array_ref (op0, &inner_base, &dummy1))
5540 {
5541 *base = inner_base;
5542 if (dummy1 == NULL_TREE)
5543 *offset = TREE_OPERAND (expr, 1);
5544 else
5545 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5546 dummy1, TREE_OPERAND (expr, 1));
5547 return true;
5548 }
5549 }
5550 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5551 which we transform into an ADDR_EXPR with appropriate
5552 offset. For other arguments to the ADDR_EXPR we assume
5553 zero offset and as such do not care about the ADDR_EXPR
5554 type and strip possible nops from it. */
5555 else if (TREE_CODE (expr) == ADDR_EXPR)
5556 {
5557 tree op0 = TREE_OPERAND (expr, 0);
5558 if (TREE_CODE (op0) == ARRAY_REF)
5559 {
5560 *base = TREE_OPERAND (op0, 0);
5561 *offset = TREE_OPERAND (op0, 1);
5562 }
5563 else
5564 {
5565 /* Handle array-to-pointer decay as &a. */
5566 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5567 *base = TREE_OPERAND (expr, 0);
5568 else
5569 *base = expr;
5570 *offset = NULL_TREE;
5571 }
5572 return true;
5573 }
5574 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5575 else if (SSA_VAR_P (expr)
5576 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5577 {
5578 *base = expr;
5579 *offset = NULL_TREE;
5580 return true;
5581 }
5582
5583 return false;
5584 }
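
/* Illustrative sketch, not part of the original source: a hypothetical
   caller of extract_array_ref checking whether two addresses name the
   same array element.  The name example_same_element_p is invented for
   illustration.  */

static bool
example_same_element_p (tree addr1, tree addr2)
{
  tree base1, off1, base2, off2;

  /* Both addresses must decompose into an array base plus offset.  */
  if (!extract_array_ref (addr1, &base1, &off1)
      || !extract_array_ref (addr2, &base2, &off2))
    return false;

  /* Identical bases and identical (or both absent) offsets name the
     same element.  */
  return operand_equal_p (base1, base2, 0)
	 && (off1 == off2
	     || (off1 != NULL_TREE && off2 != NULL_TREE
		 && operand_equal_p (off1, off2, 0)));
}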
5585
5586
5587 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5588 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5589 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5590 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5591 COND is the first argument to CODE; otherwise (as in the example
5592 given here), it is the second argument. TYPE is the type of the
5593 original expression. Return NULL_TREE if no simplification is
5594 possible. */
5595
5596 static tree
5597 fold_binary_op_with_conditional_arg (enum tree_code code,
5598 tree type, tree op0, tree op1,
5599 tree cond, tree arg, int cond_first_p)
5600 {
5601 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5602 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5603 tree test, true_value, false_value;
5604 tree lhs = NULL_TREE;
5605 tree rhs = NULL_TREE;
5606
5607 /* This transformation is only worthwhile if we don't have to wrap
5608 ARG in a SAVE_EXPR, and the operation can be simplified on at least
5609 one of the branches once it's pushed inside the COND_EXPR. */
5610 if (!TREE_CONSTANT (arg))
5611 return NULL_TREE;
5612
5613 if (TREE_CODE (cond) == COND_EXPR)
5614 {
5615 test = TREE_OPERAND (cond, 0);
5616 true_value = TREE_OPERAND (cond, 1);
5617 false_value = TREE_OPERAND (cond, 2);
5618 /* If this arm is an expression that throws (and hence has void
5619 type), it does not make sense to try to perform a logical or
5620 arithmetic operation involving it. */
5621 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5622 lhs = true_value;
5623 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5624 rhs = false_value;
5625 }
5626 else
5627 {
5628 tree testtype = TREE_TYPE (cond);
5629 test = cond;
5630 true_value = constant_boolean_node (true, testtype);
5631 false_value = constant_boolean_node (false, testtype);
5632 }
5633
5634 arg = fold_convert (arg_type, arg);
5635 if (lhs == 0)
5636 {
5637 true_value = fold_convert (cond_type, true_value);
5638 if (cond_first_p)
5639 lhs = fold_build2 (code, type, true_value, arg);
5640 else
5641 lhs = fold_build2 (code, type, arg, true_value);
5642 }
5643 if (rhs == 0)
5644 {
5645 false_value = fold_convert (cond_type, false_value);
5646 if (cond_first_p)
5647 rhs = fold_build2 (code, type, false_value, arg);
5648 else
5649 rhs = fold_build2 (code, type, arg, false_value);
5650 }
5651
5652 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5653 return fold_convert (type, test);
5654 }
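
/* Illustrative sketch, not part of the original source: the shape of
   the transformation above at the source level.  With a constant ARG,
   a + (b ? x : y) becomes b ? (a + x) : (a + y), giving fold a chance
   to simplify at least one arm.  */

static int
example_cond_arg (int b, int x, int y)
{
  /* 1 + (b ? x : y) folds to the conditional below.  */
  return b ? (1 + x) : (1 + y);
}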
5655
5656 \f
5657 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5658
5659 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5660 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5661 ADDEND is the same as X.
5662
5663 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5664 and finite. The problematic cases are when X is zero, and its mode
5665 has signed zeros. In the case of rounding towards -infinity,
5666 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5667 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5668
5669 static bool
5670 fold_real_zero_addition_p (tree type, tree addend, int negate)
5671 {
5672 if (!real_zerop (addend))
5673 return false;
5674
5675 /* Don't allow the fold with -fsignaling-nans. */
5676 if (HONOR_SNANS (TYPE_MODE (type)))
5677 return false;
5678
5679 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5680 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5681 return true;
5682
5683 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5684 if (TREE_CODE (addend) == REAL_CST
5685 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5686 negate = !negate;
5687
5688 /* The mode has signed zeros, and we have to honor their sign.
5689 In this situation, there is only one case we can return true for.
5690 X - 0 is the same as X unless rounding towards -infinity is
5691 supported. */
5692 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5693 }
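
/* Illustrative sketch, not part of the original source: why the signed
   zero checks above matter.  Under round-to-nearest, (-0.0) + 0.0
   evaluates to +0.0, so x + 0.0 cannot be folded to x when x might be
   a negative zero whose sign must be honored.  */

static double
example_zero_addition (double x)
{
  /* Foldable to x only when signed zeros need not be honored.  */
  return x + 0.0;
}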
5694
5695 /* Subroutine of fold() that checks comparisons of built-in math
5696 functions against real constants.
5697
5698 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5699 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5700 is the type of the result and ARG0 and ARG1 are the operands of the
5701 comparison. ARG1 must be a TREE_REAL_CST.
5702
5703 The function returns the constant folded tree if a simplification
5704 can be made, and NULL_TREE otherwise. */
5705
5706 static tree
5707 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5708 tree type, tree arg0, tree arg1)
5709 {
5710 REAL_VALUE_TYPE c;
5711
5712 if (BUILTIN_SQRT_P (fcode))
5713 {
5714 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5715 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5716
5717 c = TREE_REAL_CST (arg1);
5718 if (REAL_VALUE_NEGATIVE (c))
5719 {
5720 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false, if y is negative. */
5721 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5722 return omit_one_operand (type, integer_zero_node, arg);
5723
5724 /* sqrt(x) > y is always true, if y is negative and we
5725 don't care about NaNs, i.e. negative values of x. */
5726 if (code == NE_EXPR || !HONOR_NANS (mode))
5727 return omit_one_operand (type, integer_one_node, arg);
5728
5729 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5730 return fold_build2 (GE_EXPR, type, arg,
5731 build_real (TREE_TYPE (arg), dconst0));
5732 }
5733 else if (code == GT_EXPR || code == GE_EXPR)
5734 {
5735 REAL_VALUE_TYPE c2;
5736
5737 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5738 real_convert (&c2, mode, &c2);
5739
5740 if (REAL_VALUE_ISINF (c2))
5741 {
5742 /* sqrt(x) > y is x == +Inf, when y is very large. */
5743 if (HONOR_INFINITIES (mode))
5744 return fold_build2 (EQ_EXPR, type, arg,
5745 build_real (TREE_TYPE (arg), c2));
5746
5747 /* sqrt(x) > y is always false, when y is very large
5748 and we don't care about infinities. */
5749 return omit_one_operand (type, integer_zero_node, arg);
5750 }
5751
5752 /* sqrt(x) > c is the same as x > c*c. */
5753 return fold_build2 (code, type, arg,
5754 build_real (TREE_TYPE (arg), c2));
5755 }
5756 else if (code == LT_EXPR || code == LE_EXPR)
5757 {
5758 REAL_VALUE_TYPE c2;
5759
5760 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5761 real_convert (&c2, mode, &c2);
5762
5763 if (REAL_VALUE_ISINF (c2))
5764 {
5765 /* sqrt(x) < y is always true, when y is a very large
5766 value and we don't care about NaNs or Infinities. */
5767 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5768 return omit_one_operand (type, integer_one_node, arg);
5769
5770 /* sqrt(x) < y is x != +Inf when y is very large and we
5771 don't care about NaNs. */
5772 if (! HONOR_NANS (mode))
5773 return fold_build2 (NE_EXPR, type, arg,
5774 build_real (TREE_TYPE (arg), c2));
5775
5776 /* sqrt(x) < y is x >= 0 when y is very large and we
5777 don't care about Infinities. */
5778 if (! HONOR_INFINITIES (mode))
5779 return fold_build2 (GE_EXPR, type, arg,
5780 build_real (TREE_TYPE (arg), dconst0));
5781
5782 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5783 if (lang_hooks.decls.global_bindings_p () != 0
5784 || CONTAINS_PLACEHOLDER_P (arg))
5785 return NULL_TREE;
5786
5787 arg = save_expr (arg);
5788 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5789 fold_build2 (GE_EXPR, type, arg,
5790 build_real (TREE_TYPE (arg),
5791 dconst0)),
5792 fold_build2 (NE_EXPR, type, arg,
5793 build_real (TREE_TYPE (arg),
5794 c2)));
5795 }
5796
5797 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5798 if (! HONOR_NANS (mode))
5799 return fold_build2 (code, type, arg,
5800 build_real (TREE_TYPE (arg), c2));
5801
5802 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5803 if (lang_hooks.decls.global_bindings_p () == 0
5804 && ! CONTAINS_PLACEHOLDER_P (arg))
5805 {
5806 arg = save_expr (arg);
5807 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5808 fold_build2 (GE_EXPR, type, arg,
5809 build_real (TREE_TYPE (arg),
5810 dconst0)),
5811 fold_build2 (code, type, arg,
5812 build_real (TREE_TYPE (arg),
5813 c2)));
5814 }
5815 }
5816 }
5817
5818 return NULL_TREE;
5819 }
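
/* Illustrative sketch, not part of the original source: the squaring
   trick used by fold_mathfn_compare.  For a nonnegative constant c,
   sqrt (x) > c is equivalent to x > c * c (ignoring NaNs), so the
   call to sqrt can be folded away.  */

static int
example_sqrt_compare (double x, double c)
{
  /* sqrt (x) > c folds to the comparison below when 0 <= c.  */
  return x > c * c;
}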
5820
5821 /* Subroutine of fold() that optimizes comparisons against Infinities,
5822 either +Inf or -Inf.
5823
5824 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5825 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5826 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5827
5828 The function returns the constant folded tree if a simplification
5829 can be made, and NULL_TREE otherwise. */
5830
5831 static tree
5832 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5833 {
5834 enum machine_mode mode;
5835 REAL_VALUE_TYPE max;
5836 tree temp;
5837 bool neg;
5838
5839 mode = TYPE_MODE (TREE_TYPE (arg0));
5840
5841 /* For negative infinity swap the sense of the comparison. */
5842 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5843 if (neg)
5844 code = swap_tree_comparison (code);
5845
5846 switch (code)
5847 {
5848 case GT_EXPR:
5849 /* x > +Inf is always false, if we ignore sNaNs. */
5850 if (HONOR_SNANS (mode))
5851 return NULL_TREE;
5852 return omit_one_operand (type, integer_zero_node, arg0);
5853
5854 case LE_EXPR:
5855 /* x <= +Inf is always true, if we don't care about NaNs. */
5856 if (! HONOR_NANS (mode))
5857 return omit_one_operand (type, integer_one_node, arg0);
5858
5859 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5860 if (lang_hooks.decls.global_bindings_p () == 0
5861 && ! CONTAINS_PLACEHOLDER_P (arg0))
5862 {
5863 arg0 = save_expr (arg0);
5864 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5865 }
5866 break;
5867
5868 case EQ_EXPR:
5869 case GE_EXPR:
5870 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5871 real_maxval (&max, neg, mode);
5872 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5873 arg0, build_real (TREE_TYPE (arg0), max));
5874
5875 case LT_EXPR:
5876 /* x < +Inf is always equal to x <= DBL_MAX. */
5877 real_maxval (&max, neg, mode);
5878 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5879 arg0, build_real (TREE_TYPE (arg0), max));
5880
5881 case NE_EXPR:
5882 /* x != +Inf is always equal to !(x > DBL_MAX). */
5883 real_maxval (&max, neg, mode);
5884 if (! HONOR_NANS (mode))
5885 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5886 arg0, build_real (TREE_TYPE (arg0), max));
5887
5888 /* The transformation below creates non-gimple code and thus is
5889 not appropriate if we are in gimple form. */
5890 if (in_gimple_form)
5891 return NULL_TREE;
5892
5893 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5894 arg0, build_real (TREE_TYPE (arg0), max));
5895 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5896
5897 default:
5898 break;
5899 }
5900
5901 return NULL_TREE;
5902 }
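
/* Illustrative sketch, not part of the original source: the finite
   bound used by fold_inf_compare.  With FINITE_MAX the largest finite
   value of the type (DBL_MAX for double), x < +Inf holds exactly when
   x <= FINITE_MAX, NaNs excepted on both sides.  */

static int
example_inf_compare (double x, double finite_max)
{
  /* x < +Inf folds to the comparison below.  */
  return x <= finite_max;
}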
5903
5904 /* Subroutine of fold() that optimizes comparisons of a division by
5905 a nonzero integer constant against an integer constant, i.e.
5906 X/C1 op C2.
5907
5908 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5909 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5910 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5911
5912 The function returns the constant folded tree if a simplification
5913 can be made, and NULL_TREE otherwise. */
5914
5915 static tree
5916 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5917 {
5918 tree prod, tmp, hi, lo;
5919 tree arg00 = TREE_OPERAND (arg0, 0);
5920 tree arg01 = TREE_OPERAND (arg0, 1);
5921 unsigned HOST_WIDE_INT lpart;
5922 HOST_WIDE_INT hpart;
5923 int overflow;
5924
5925 /* We have to do this the hard way to detect unsigned overflow.
5926 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5927 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5928 TREE_INT_CST_HIGH (arg01),
5929 TREE_INT_CST_LOW (arg1),
5930 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5931 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5932 prod = force_fit_type (prod, -1, overflow, false);
5933
5934 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5935 {
5936 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5937 lo = prod;
5938
5939 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5940 overflow = add_double (TREE_INT_CST_LOW (prod),
5941 TREE_INT_CST_HIGH (prod),
5942 TREE_INT_CST_LOW (tmp),
5943 TREE_INT_CST_HIGH (tmp),
5944 &lpart, &hpart);
5945 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5946 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5947 TREE_CONSTANT_OVERFLOW (prod));
5948 }
5949 else if (tree_int_cst_sgn (arg01) >= 0)
5950 {
5951 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5952 switch (tree_int_cst_sgn (arg1))
5953 {
5954 case -1:
5955 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5956 hi = prod;
5957 break;
5958
5959 case 0:
5960 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5961 hi = tmp;
5962 break;
5963
5964 case 1:
5965 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5966 lo = prod;
5967 break;
5968
5969 default:
5970 gcc_unreachable ();
5971 }
5972 }
5973 else
5974 {
5975 /* A negative divisor reverses the relational operators. */
5976 code = swap_tree_comparison (code);
5977
5978 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5979 switch (tree_int_cst_sgn (arg1))
5980 {
5981 case -1:
5982 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5983 lo = prod;
5984 break;
5985
5986 case 0:
5987 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5988 lo = tmp;
5989 break;
5990
5991 case 1:
5992 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5993 hi = prod;
5994 break;
5995
5996 default:
5997 gcc_unreachable ();
5998 }
5999 }
6000
6001 switch (code)
6002 {
6003 case EQ_EXPR:
6004 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6005 return omit_one_operand (type, integer_zero_node, arg00);
6006 if (TREE_OVERFLOW (hi))
6007 return fold_build2 (GE_EXPR, type, arg00, lo);
6008 if (TREE_OVERFLOW (lo))
6009 return fold_build2 (LE_EXPR, type, arg00, hi);
6010 return build_range_check (type, arg00, 1, lo, hi);
6011
6012 case NE_EXPR:
6013 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6014 return omit_one_operand (type, integer_one_node, arg00);
6015 if (TREE_OVERFLOW (hi))
6016 return fold_build2 (LT_EXPR, type, arg00, lo);
6017 if (TREE_OVERFLOW (lo))
6018 return fold_build2 (GT_EXPR, type, arg00, hi);
6019 return build_range_check (type, arg00, 0, lo, hi);
6020
6021 case LT_EXPR:
6022 if (TREE_OVERFLOW (lo))
6023 return omit_one_operand (type, integer_zero_node, arg00);
6024 return fold_build2 (LT_EXPR, type, arg00, lo);
6025
6026 case LE_EXPR:
6027 if (TREE_OVERFLOW (hi))
6028 return omit_one_operand (type, integer_one_node, arg00);
6029 return fold_build2 (LE_EXPR, type, arg00, hi);
6030
6031 case GT_EXPR:
6032 if (TREE_OVERFLOW (hi))
6033 return omit_one_operand (type, integer_zero_node, arg00);
6034 return fold_build2 (GT_EXPR, type, arg00, hi);
6035
6036 case GE_EXPR:
6037 if (TREE_OVERFLOW (lo))
6038 return omit_one_operand (type, integer_one_node, arg00);
6039 return fold_build2 (GE_EXPR, type, arg00, lo);
6040
6041 default:
6042 break;
6043 }
6044
6045 return NULL_TREE;
6046 }
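
/* Illustrative sketch, not part of the original source: the range form
   produced by fold_div_compare for unsigned operands.  For c1 > 0,
   x / c1 == c2 holds exactly when x lies in [c1*c2, c1*c2 + c1 - 1],
   provided c1 * c2 + c1 - 1 does not wrap around.  */

static int
example_div_compare (unsigned int x, unsigned int c1, unsigned int c2)
{
  unsigned int lo = c1 * c2;		/* Lowest x with x / c1 == c2.  */
  unsigned int hi = lo + (c1 - 1);	/* Highest such x.  */

  /* x / c1 == c2 folds to the range check below.  */
  return x >= lo && x <= hi;
}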
6047
6048
6049 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6050 equality/inequality test, then return a simplified form of the test
6051 using a sign test. Otherwise return NULL_TREE. RESULT_TYPE is the desired
6052 result type. */
6053
6054 static tree
6055 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6056 tree result_type)
6057 {
6058 /* If this is testing a single bit, we can optimize the test. */
6059 if ((code == NE_EXPR || code == EQ_EXPR)
6060 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6061 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6062 {
6063 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6064 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6065 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6066
6067 if (arg00 != NULL_TREE
6068 /* This is only a win if casting to a signed type is cheap,
6069 i.e. when arg00's type is not a partial mode. */
6070 && TYPE_PRECISION (TREE_TYPE (arg00))
6071 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6072 {
6073 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6074 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6075 result_type, fold_convert (stype, arg00),
6076 fold_convert (stype, integer_zero_node));
6077 }
6078 }
6079
6080 return NULL_TREE;
6081 }
6082
6083 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6084 equality/inequality test, then return a simplified form of
6085 the test using shifts and logical operations. Otherwise return
6086 NULL_TREE. RESULT_TYPE is the desired result type. */
6087
6088 tree
6089 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6090 tree result_type)
6091 {
6092 /* If this is testing a single bit, we can optimize the test. */
6093 if ((code == NE_EXPR || code == EQ_EXPR)
6094 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6095 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6096 {
6097 tree inner = TREE_OPERAND (arg0, 0);
6098 tree type = TREE_TYPE (arg0);
6099 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6100 enum machine_mode operand_mode = TYPE_MODE (type);
6101 int ops_unsigned;
6102 tree signed_type, unsigned_type, intermediate_type;
6103 tree tem;
6104
6105 /* First, see if we can fold the single bit test into a sign-bit
6106 test. */
6107 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6108 result_type);
6109 if (tem)
6110 return tem;
6111
6112 /* Otherwise we have (A & C) != 0 where C is a single bit,
6113 convert that into ((A >> C2) & 1), where C2 = log2(C).
6114 Similarly for (A & C) == 0. */
6115
6116 /* If INNER is a right shift of a constant and it plus BITNUM does
6117 not overflow, adjust BITNUM and INNER. */
6118 if (TREE_CODE (inner) == RSHIFT_EXPR
6119 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6120 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6121 && bitnum < TYPE_PRECISION (type)
6122 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6123 bitnum - TYPE_PRECISION (type)))
6124 {
6125 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6126 inner = TREE_OPERAND (inner, 0);
6127 }
6128
6129 /* If we are going to be able to omit the AND below, we must do our
6130 operations as unsigned. If we must use the AND, we have a choice.
6131 Normally unsigned is faster, but for some machines signed is. */
6132 #ifdef LOAD_EXTEND_OP
6133 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6134 && !flag_syntax_only) ? 0 : 1;
6135 #else
6136 ops_unsigned = 1;
6137 #endif
6138
6139 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6140 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6141 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6142 inner = fold_convert (intermediate_type, inner);
6143
6144 if (bitnum != 0)
6145 inner = build2 (RSHIFT_EXPR, intermediate_type,
6146 inner, size_int (bitnum));
6147
6148 if (code == EQ_EXPR)
6149 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6150 inner, integer_one_node);
6151
6152 /* Put the AND last so it can combine with more things. */
6153 inner = build2 (BIT_AND_EXPR, intermediate_type,
6154 inner, integer_one_node);
6155
6156 /* Make sure to return the proper type. */
6157 inner = fold_convert (result_type, inner);
6158
6159 return inner;
6160 }
6161 return NULL_TREE;
6162 }
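
/* Illustrative sketch, not part of the original source: the shift form
   built by fold_single_bit_test.  For C a single bit with log2 (C) == 3,
   (a & C) != 0 becomes (a >> 3) & 1, avoiding a separate comparison
   against zero.  */

static unsigned int
example_single_bit_test (unsigned int a)
{
  /* (a & 8) != 0 folds to the shift-and-mask form below.  */
  return (a >> 3) & 1;
}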
6163
6164 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6165 such that the evaluation of ARG1 occurs before ARG0. */
6166
6167 static bool
6168 reorder_operands_p (tree arg0, tree arg1)
6169 {
6170 if (! flag_evaluation_order)
6171 return true;
6172 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6173 return true;
6174 return ! TREE_SIDE_EFFECTS (arg0)
6175 && ! TREE_SIDE_EFFECTS (arg1);
6176 }
6177
6178 /* Test whether it is preferable to swap two operands, ARG0 and
6179 ARG1, for example because ARG0 is an integer constant and ARG1
6180 isn't. If REORDER is true, only recommend swapping if we can
6181 evaluate the operands in reverse order. */
6182
6183 bool
6184 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6185 {
6186 STRIP_SIGN_NOPS (arg0);
6187 STRIP_SIGN_NOPS (arg1);
6188
6189 if (TREE_CODE (arg1) == INTEGER_CST)
6190 return 0;
6191 if (TREE_CODE (arg0) == INTEGER_CST)
6192 return 1;
6193
6194 if (TREE_CODE (arg1) == REAL_CST)
6195 return 0;
6196 if (TREE_CODE (arg0) == REAL_CST)
6197 return 1;
6198
6199 if (TREE_CODE (arg1) == COMPLEX_CST)
6200 return 0;
6201 if (TREE_CODE (arg0) == COMPLEX_CST)
6202 return 1;
6203
6204 if (TREE_CONSTANT (arg1))
6205 return 0;
6206 if (TREE_CONSTANT (arg0))
6207 return 1;
6208
6209 if (optimize_size)
6210 return 0;
6211
6212 if (reorder && flag_evaluation_order
6213 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6214 return 0;
6215
6216 if (DECL_P (arg1))
6217 return 0;
6218 if (DECL_P (arg0))
6219 return 1;
6220
6221 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6222 for commutative and comparison operators. Ensuring a canonical
6223 form allows the optimizers to find additional redundancies without
6224 having to explicitly check for both orderings. */
6225 if (TREE_CODE (arg0) == SSA_NAME
6226 && TREE_CODE (arg1) == SSA_NAME
6227 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6228 return 1;
6229
6230 return 0;
6231 }
6232
6233 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6234 ARG0 is extended to a wider type. */
6235
6236 static tree
6237 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6238 {
6239 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6240 tree arg1_unw;
6241 tree shorter_type, outer_type;
6242 tree min, max;
6243 bool above, below;
6244
6245 if (arg0_unw == arg0)
6246 return NULL_TREE;
6247 shorter_type = TREE_TYPE (arg0_unw);
6248
6249 #ifdef HAVE_canonicalize_funcptr_for_compare
6250 /* Disable this optimization if we're casting a function pointer
6251 type on targets that require function pointer canonicalization. */
6252 if (HAVE_canonicalize_funcptr_for_compare
6253 && TREE_CODE (shorter_type) == POINTER_TYPE
6254 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6255 return NULL_TREE;
6256 #endif
6257
6258 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6259 return NULL_TREE;
6260
6261 arg1_unw = get_unwidened (arg1, shorter_type);
6262
6263 /* If possible, express the comparison in the shorter mode. */
6264 if ((code == EQ_EXPR || code == NE_EXPR
6265 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6266 && (TREE_TYPE (arg1_unw) == shorter_type
6267 || (TREE_CODE (arg1_unw) == INTEGER_CST
6268 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6269 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6270 && int_fits_type_p (arg1_unw, shorter_type))))
6271 return fold_build2 (code, type, arg0_unw,
6272 fold_convert (shorter_type, arg1_unw));
6273
6274 if (TREE_CODE (arg1_unw) != INTEGER_CST
6275 || TREE_CODE (shorter_type) != INTEGER_TYPE
6276 || !int_fits_type_p (arg1_unw, shorter_type))
6277 return NULL_TREE;
6278
6279 /* If we are comparing with an integer that does not fit into the range
6280 of the shorter type, the result is known. */
6281 outer_type = TREE_TYPE (arg1_unw);
6282 min = lower_bound_in_type (outer_type, shorter_type);
6283 max = upper_bound_in_type (outer_type, shorter_type);
6284
6285 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6286 max, arg1_unw));
6287 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6288 arg1_unw, min));
6289
6290 switch (code)
6291 {
6292 case EQ_EXPR:
6293 if (above || below)
6294 return omit_one_operand (type, integer_zero_node, arg0);
6295 break;
6296
6297 case NE_EXPR:
6298 if (above || below)
6299 return omit_one_operand (type, integer_one_node, arg0);
6300 break;
6301
6302 case LT_EXPR:
6303 case LE_EXPR:
6304 if (above)
6305 return omit_one_operand (type, integer_one_node, arg0);
6306 else if (below)
6307 return omit_one_operand (type, integer_zero_node, arg0);
6308 /* Fall through. */
6309 case GT_EXPR:
6310 case GE_EXPR:
6311 if (above)
6312 return omit_one_operand (type, integer_zero_node, arg0);
6313 else if (below)
6314 return omit_one_operand (type, integer_one_node, arg0);
6315 /* Fall through. */
6316 default:
6317 break;
6318 }
6319
6320 return NULL_TREE;
6321 }
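
/* Illustrative sketch, not part of the original source: what the
   widened comparison fold achieves at the source level, assuming an
   8-bit unsigned char.  A constant that fits the narrow type lets the
   comparison be done in that type; one that does not fit, such as
   (int) c == 300, is known outright.  */

static int
example_widened_compare (unsigned char c)
{
  /* (int) c < 10 folds to a comparison done directly on c.  */
  return c < 10;
}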
6322
6323 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6324 ARG0 just the signedness is changed. */
6325
6326 static tree
6327 fold_sign_changed_comparison (enum tree_code code, tree type,
6328 tree arg0, tree arg1)
6329 {
6330 tree arg0_inner, tmp;
6331 tree inner_type, outer_type;
6332
6333 if (TREE_CODE (arg0) != NOP_EXPR
6334 && TREE_CODE (arg0) != CONVERT_EXPR)
6335 return NULL_TREE;
6336
6337 outer_type = TREE_TYPE (arg0);
6338 arg0_inner = TREE_OPERAND (arg0, 0);
6339 inner_type = TREE_TYPE (arg0_inner);
6340
6341 #ifdef HAVE_canonicalize_funcptr_for_compare
6342 /* Disable this optimization if we're casting a function pointer
6343 type on targets that require function pointer canonicalization. */
6344 if (HAVE_canonicalize_funcptr_for_compare
6345 && TREE_CODE (inner_type) == POINTER_TYPE
6346 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6347 return NULL_TREE;
6348 #endif
6349
6350 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6351 return NULL_TREE;
6352
6353 if (TREE_CODE (arg1) != INTEGER_CST
6354 && !((TREE_CODE (arg1) == NOP_EXPR
6355 || TREE_CODE (arg1) == CONVERT_EXPR)
6356 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6357 return NULL_TREE;
6358
6359 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6360 && code != NE_EXPR
6361 && code != EQ_EXPR)
6362 return NULL_TREE;
6363
6364 if (TREE_CODE (arg1) == INTEGER_CST)
6365 {
6366 tmp = build_int_cst_wide (inner_type,
6367 TREE_INT_CST_LOW (arg1),
6368 TREE_INT_CST_HIGH (arg1));
6369 arg1 = force_fit_type (tmp, 0,
6370 TREE_OVERFLOW (arg1),
6371 TREE_CONSTANT_OVERFLOW (arg1));
6372 }
6373 else
6374 arg1 = fold_convert (inner_type, arg1);
6375
6376 return fold_build2 (code, type, arg0_inner, arg1);
6377 }
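
/* Illustrative sketch, not part of the original source: the
   sign-change fold above at the source level, assuming a 32-bit int.
   With equal precision, equality is insensitive to signedness, so
   (int) u == -1 can be tested without the conversion.  */

static int
example_sign_changed_compare (unsigned int u)
{
  /* (int) u == -1 folds to the unsigned comparison below.  */
  return u == 0xffffffffu;
}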
6378
6379 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6380 the step of the array. Reconstructs s and delta in the case of s * delta
6381 being an integer constant (and thus already folded).
6382 ADDR is the address; OP1 is the multiplicative expression.
6383 If the function succeeds, the new address expression is returned.
6384 Otherwise NULL_TREE is returned. */
6385
6386 static tree
6387 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6388 {
6389 tree s, delta, step;
6390 tree ref = TREE_OPERAND (addr, 0), pref;
6391 tree ret, pos;
6392 tree itype;
6393
6394 /* Canonicalize op1 into a possibly non-constant delta
6395 and an INTEGER_CST s. */
6396 if (TREE_CODE (op1) == MULT_EXPR)
6397 {
6398 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6399
6400 STRIP_NOPS (arg0);
6401 STRIP_NOPS (arg1);
6402
6403 if (TREE_CODE (arg0) == INTEGER_CST)
6404 {
6405 s = arg0;
6406 delta = arg1;
6407 }
6408 else if (TREE_CODE (arg1) == INTEGER_CST)
6409 {
6410 s = arg1;
6411 delta = arg0;
6412 }
6413 else
6414 return NULL_TREE;
6415 }
6416 else if (TREE_CODE (op1) == INTEGER_CST)
6417 {
6418 delta = op1;
6419 s = NULL_TREE;
6420 }
6421 else
6422 {
6423 /* Act as if op1 were delta * 1. */
6424 delta = op1;
6425 s = integer_one_node;
6426 }
6427
6428 for (;; ref = TREE_OPERAND (ref, 0))
6429 {
6430 if (TREE_CODE (ref) == ARRAY_REF)
6431 {
6432 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6433 if (! itype)
6434 continue;
6435
6436 step = array_ref_element_size (ref);
6437 if (TREE_CODE (step) != INTEGER_CST)
6438 continue;
6439
6440 if (s)
6441 {
6442 if (! tree_int_cst_equal (step, s))
6443 continue;
6444 }
6445 else
6446 {
6447 /* Check whether delta is a multiple of step. */
6448 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6449 if (! tmp)
6450 continue;
6451 delta = tmp;
6452 }
6453
6454 break;
6455 }
6456
6457 if (!handled_component_p (ref))
6458 return NULL_TREE;
6459 }
6460
6461 /* We found a suitable array reference. So copy everything up to it,
6462 and replace the index. */
6463
6464 pref = TREE_OPERAND (addr, 0);
6465 ret = copy_node (pref);
6466 pos = ret;
6467
6468 while (pref != ref)
6469 {
6470 pref = TREE_OPERAND (pref, 0);
6471 TREE_OPERAND (pos, 0) = copy_node (pref);
6472 pos = TREE_OPERAND (pos, 0);
6473 }
6474
6475 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6476 fold_convert (itype,
6477 TREE_OPERAND (pos, 1)),
6478 fold_convert (itype, delta));
6479
6480 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6481 }
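
/* Illustrative sketch, not part of the original source: the pointer
   arithmetic rewrite performed by try_move_mult_to_index.  Adding
   delta * sizeof (element) bytes to &a[idx] lands on &a[idx + delta],
   so the multiplication is absorbed into the index.  */

static int *
example_move_mult_to_index (int *a, int idx, int delta)
{
  /* &a[idx] plus delta * sizeof (int) bytes folds to the form below.  */
  return &a[idx + delta];
}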
6482
6483
6484 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6485 means A >= Y && A != MAX, but in this case we know that
6486 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6487
6488 static tree
6489 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6490 {
6491 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6492
6493 if (TREE_CODE (bound) == LT_EXPR)
6494 a = TREE_OPERAND (bound, 0);
6495 else if (TREE_CODE (bound) == GT_EXPR)
6496 a = TREE_OPERAND (bound, 1);
6497 else
6498 return NULL_TREE;
6499
6500 typea = TREE_TYPE (a);
6501 if (!INTEGRAL_TYPE_P (typea)
6502 && !POINTER_TYPE_P (typea))
6503 return NULL_TREE;
6504
6505 if (TREE_CODE (ineq) == LT_EXPR)
6506 {
6507 a1 = TREE_OPERAND (ineq, 1);
6508 y = TREE_OPERAND (ineq, 0);
6509 }
6510 else if (TREE_CODE (ineq) == GT_EXPR)
6511 {
6512 a1 = TREE_OPERAND (ineq, 0);
6513 y = TREE_OPERAND (ineq, 1);
6514 }
6515 else
6516 return NULL_TREE;
6517
6518 if (TREE_TYPE (a1) != typea)
6519 return NULL_TREE;
6520
6521 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6522 if (!integer_onep (diff))
6523 return NULL_TREE;
6524
6525 return fold_build2 (GE_EXPR, type, a, y);
6526 }
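
/* Illustrative sketch, not part of the original source: the bound-based
   fold above on plain integers.  Given a < x, a cannot be the maximal
   value of its type, so a + 1 does not wrap and a + 1 > y simplifies
   to a >= y.  */

static int
example_nonsharp_ineq (int a, int x, int y)
{
  /* (a < x) && (a + 1 > y) folds to the conjunction below.  */
  return a < x && a >= y;
}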
6527
6528 /* Fold a unary expression of code CODE and type TYPE with operand
6529 OP0. Return the folded expression if folding is successful.
6530 Otherwise, return NULL_TREE. */
6531
6532 tree
6533 fold_unary (enum tree_code code, tree type, tree op0)
6534 {
6535 tree tem;
6536 tree arg0;
6537 enum tree_code_class kind = TREE_CODE_CLASS (code);
6538
6539 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6540 && TREE_CODE_LENGTH (code) == 1);
6541
6542 arg0 = op0;
6543 if (arg0)
6544 {
6545 if (code == NOP_EXPR || code == CONVERT_EXPR
6546 || code == FLOAT_EXPR || code == ABS_EXPR)
6547 {
6548 /* Don't use STRIP_NOPS, because signedness of argument type
6549 matters. */
6550 STRIP_SIGN_NOPS (arg0);
6551 }
6552 else
6553 {
6554 /* Strip any conversions that don't change the mode. This
6555 is safe for every expression, except for a comparison
6556 expression because its signedness is derived from its
6557 operands.
6558
6559 Note that this is done as an internal manipulation within
6560 the constant folder, in order to find the simplest
6561 representation of the arguments so that their form can be
6562 studied. In any case, the appropriate type conversions
6563 should be put back in the tree that will get out of the
6564 constant folder. */
6565 STRIP_NOPS (arg0);
6566 }
6567 }
6568
6569 if (TREE_CODE_CLASS (code) == tcc_unary)
6570 {
6571 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6572 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6573 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6574 else if (TREE_CODE (arg0) == COND_EXPR)
6575 {
6576 tree arg01 = TREE_OPERAND (arg0, 1);
6577 tree arg02 = TREE_OPERAND (arg0, 2);
6578 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6579 arg01 = fold_build1 (code, type, arg01);
6580 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6581 arg02 = fold_build1 (code, type, arg02);
6582 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6583 arg01, arg02);
6584
6585 /* If this was a conversion, and all we did was to move it
6586 inside the COND_EXPR, bring it back out. But leave it if
6587 it is a conversion from integer to integer and the
6588 result precision is no wider than a word since such a
6589 conversion is cheap and may be optimized away by combine,
6590 while it couldn't if it were outside the COND_EXPR. Then return
6591 so we don't get into an infinite recursion loop taking the
6592 conversion out and then back in. */
6593
6594 if ((code == NOP_EXPR || code == CONVERT_EXPR
6595 || code == NON_LVALUE_EXPR)
6596 && TREE_CODE (tem) == COND_EXPR
6597 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6598 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6599 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6600 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6601 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6602 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6603 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6604 && (INTEGRAL_TYPE_P
6605 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6606 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6607 || flag_syntax_only))
6608 tem = build1 (code, type,
6609 build3 (COND_EXPR,
6610 TREE_TYPE (TREE_OPERAND
6611 (TREE_OPERAND (tem, 1), 0)),
6612 TREE_OPERAND (tem, 0),
6613 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6614 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6615 return tem;
6616 }
6617 else if (COMPARISON_CLASS_P (arg0))
6618 {
6619 if (TREE_CODE (type) == BOOLEAN_TYPE)
6620 {
6621 arg0 = copy_node (arg0);
6622 TREE_TYPE (arg0) = type;
6623 return arg0;
6624 }
6625 else if (TREE_CODE (type) != INTEGER_TYPE)
6626 return fold_build3 (COND_EXPR, type, arg0,
6627 fold_build1 (code, type,
6628 integer_one_node),
6629 fold_build1 (code, type,
6630 integer_zero_node));
6631 }
6632 }
6633
6634 switch (code)
6635 {
6636 case NOP_EXPR:
6637 case FLOAT_EXPR:
6638 case CONVERT_EXPR:
6639 case FIX_TRUNC_EXPR:
6640 case FIX_CEIL_EXPR:
6641 case FIX_FLOOR_EXPR:
6642 case FIX_ROUND_EXPR:
6643 if (TREE_TYPE (op0) == type)
6644 return op0;
6645
6646 /* Handle cases of two conversions in a row. */
6647 if (TREE_CODE (op0) == NOP_EXPR
6648 || TREE_CODE (op0) == CONVERT_EXPR)
6649 {
6650 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6651 tree inter_type = TREE_TYPE (op0);
6652 int inside_int = INTEGRAL_TYPE_P (inside_type);
6653 int inside_ptr = POINTER_TYPE_P (inside_type);
6654 int inside_float = FLOAT_TYPE_P (inside_type);
6655 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6656 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6657 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6658 int inter_int = INTEGRAL_TYPE_P (inter_type);
6659 int inter_ptr = POINTER_TYPE_P (inter_type);
6660 int inter_float = FLOAT_TYPE_P (inter_type);
6661 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6662 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6663 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6664 int final_int = INTEGRAL_TYPE_P (type);
6665 int final_ptr = POINTER_TYPE_P (type);
6666 int final_float = FLOAT_TYPE_P (type);
6667 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6668 unsigned int final_prec = TYPE_PRECISION (type);
6669 int final_unsignedp = TYPE_UNSIGNED (type);
6670
6671 /* In addition to the cases of two conversions in a row
6672 handled below, if we are converting something to its own
6673 type via an object of identical or wider precision, neither
6674 conversion is needed. */
6675 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6676 && ((inter_int && final_int) || (inter_float && final_float))
6677 && inter_prec >= final_prec)
6678 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6679
6680 /* Likewise, if the intermediate and final types are either both
6681 float or both integer, we don't need the middle conversion if
6682 it is wider than the final type and doesn't change the signedness
6683 (for integers). Avoid this if the final type is a pointer
6684 since then we sometimes need the inner conversion. Likewise if
6685 the outer has a precision not equal to the size of its mode. */
6686 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6687 || (inter_float && inside_float)
6688 || (inter_vec && inside_vec))
6689 && inter_prec >= inside_prec
6690 && (inter_float || inter_vec
6691 || inter_unsignedp == inside_unsignedp)
6692 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6693 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6694 && ! final_ptr
6695 && (! final_vec || inter_prec == inside_prec))
6696 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6697
6698 /* If we have a sign-extension of a zero-extended value, we can
6699 replace that by a single zero-extension. */
6700 if (inside_int && inter_int && final_int
6701 && inside_prec < inter_prec && inter_prec < final_prec
6702 && inside_unsignedp && !inter_unsignedp)
6703 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
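      /* For example (illustrative, not from the original source):
	 (long long) (int) (unsigned char) x keeps only the zero
	 extension, becoming (long long) (unsigned char) x.  */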
6704
6705 /* Two conversions in a row are not needed unless:
6706 - some conversion is floating-point (overstrict for now), or
6707 - some conversion is a vector (overstrict for now), or
6708 - the intermediate type is narrower than both initial and
6709 final, or
6710 - the intermediate type and innermost type differ in signedness,
6711 and the outermost type is wider than the intermediate, or
6712 - the initial type is a pointer type and the precisions of the
6713 intermediate and final types differ, or
6714 - the final type is a pointer type and the precisions of the
6715 initial and intermediate types differ. */
6716 if (! inside_float && ! inter_float && ! final_float
6717 && ! inside_vec && ! inter_vec && ! final_vec
6718 && (inter_prec > inside_prec || inter_prec > final_prec)
6719 && ! (inside_int && inter_int
6720 && inter_unsignedp != inside_unsignedp
6721 && inter_prec < final_prec)
6722 && ((inter_unsignedp && inter_prec > inside_prec)
6723 == (final_unsignedp && final_prec > inter_prec))
6724 && ! (inside_ptr && inter_prec != final_prec)
6725 && ! (final_ptr && inside_prec != inter_prec)
6726 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6727 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6728 && ! final_ptr)
6729 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6730 }
6731
6732 /* Handle (T *)&A.B.C for A being of type T and B and C
6733 living at offset zero. This occurs frequently in
6734 C++ upcasting and then accessing the base. */
6735 if (TREE_CODE (op0) == ADDR_EXPR
6736 && POINTER_TYPE_P (type)
6737 && handled_component_p (TREE_OPERAND (op0, 0)))
6738 {
6739 HOST_WIDE_INT bitsize, bitpos;
6740 tree offset;
6741 enum machine_mode mode;
6742 int unsignedp, volatilep;
6743 tree base = TREE_OPERAND (op0, 0);
6744 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6745 &mode, &unsignedp, &volatilep, false);
6746 /* If the reference was to a (constant) zero offset, we can use
6747 the address of the base if it has the same base type
6748 as the result type. */
6749 if (! offset && bitpos == 0
6750 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6751 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6752 return fold_convert (type, build_fold_addr_expr (base));
6753 }
6754
6755 if (TREE_CODE (op0) == MODIFY_EXPR
6756 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6757 /* Detect assigning a bitfield. */
6758 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6759 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6760 {
6761 /* Don't leave an assignment inside a conversion
6762 unless assigning a bitfield. */
6763 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6764 /* First do the assignment, then return converted constant. */
6765 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6766 TREE_NO_WARNING (tem) = 1;
6767 TREE_USED (tem) = 1;
6768 return tem;
6769 }
6770
6771 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6772 constant (if x has signed type, the sign bit cannot be set
6773 in c). This folds extension into the BIT_AND_EXPR. */
6774 if (INTEGRAL_TYPE_P (type)
6775 && TREE_CODE (type) != BOOLEAN_TYPE
6776 && TREE_CODE (op0) == BIT_AND_EXPR
6777 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6778 {
6779 tree and = op0;
6780 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6781 int change = 0;
6782
6783 if (TYPE_UNSIGNED (TREE_TYPE (and))
6784 || (TYPE_PRECISION (type)
6785 <= TYPE_PRECISION (TREE_TYPE (and))))
6786 change = 1;
6787 else if (TYPE_PRECISION (TREE_TYPE (and1))
6788 <= HOST_BITS_PER_WIDE_INT
6789 && host_integerp (and1, 1))
6790 {
6791 unsigned HOST_WIDE_INT cst;
6792
6793 cst = tree_low_cst (and1, 1);
6794 cst &= (HOST_WIDE_INT) -1
6795 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6796 change = (cst == 0);
6797 #ifdef LOAD_EXTEND_OP
6798 if (change
6799 && !flag_syntax_only
6800 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6801 == ZERO_EXTEND))
6802 {
6803 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6804 and0 = fold_convert (uns, and0);
6805 and1 = fold_convert (uns, and1);
6806 }
6807 #endif
6808 }
6809 if (change)
6810 {
6811 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6812 TREE_INT_CST_HIGH (and1));
6813 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6814 TREE_CONSTANT_OVERFLOW (and1));
6815 return fold_build2 (BIT_AND_EXPR, type,
6816 fold_convert (type, and0), tem);
6817 }
6818 }
6819
6820 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6821 T2 being pointers to types of the same size. */
6822 if (POINTER_TYPE_P (type)
6823 && BINARY_CLASS_P (arg0)
6824 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6825 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6826 {
6827 tree arg00 = TREE_OPERAND (arg0, 0);
6828 tree t0 = type;
6829 tree t1 = TREE_TYPE (arg00);
6830 tree tt0 = TREE_TYPE (t0);
6831 tree tt1 = TREE_TYPE (t1);
6832 tree s0 = TYPE_SIZE (tt0);
6833 tree s1 = TYPE_SIZE (tt1);
6834
6835 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6836 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6837 TREE_OPERAND (arg0, 1));
6838 }
6839
6840 tem = fold_convert_const (code, type, arg0);
6841 return tem ? tem : NULL_TREE;
6842
6843 case VIEW_CONVERT_EXPR:
6844 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6845 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6846 return NULL_TREE;
6847
6848 case NEGATE_EXPR:
6849 if (negate_expr_p (arg0))
6850 return fold_convert (type, negate_expr (arg0));
6851 /* Convert - (~A) to A + 1. */
6852 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6853 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6854 build_int_cst (type, 1));
6855 return NULL_TREE;
6856
6857 case ABS_EXPR:
6858 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6859 return fold_abs_const (arg0, type);
6860 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6861 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6862 /* Convert fabs((double)float) into (double)fabsf(float). */
6863 else if (TREE_CODE (arg0) == NOP_EXPR
6864 && TREE_CODE (type) == REAL_TYPE)
6865 {
6866 tree targ0 = strip_float_extensions (arg0);
6867 if (targ0 != arg0)
6868 return fold_convert (type, fold_build1 (ABS_EXPR,
6869 TREE_TYPE (targ0),
6870 targ0));
6871 }
6872 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6873 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6874 return arg0;
6875
6876 /* Strip sign ops from argument. */
6877 if (TREE_CODE (type) == REAL_TYPE)
6878 {
6879 tem = fold_strip_sign_ops (arg0);
6880 if (tem)
6881 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6882 }
6883 return NULL_TREE;
6884
6885 case CONJ_EXPR:
6886 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6887 return fold_convert (type, arg0);
6888 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6889 return build2 (COMPLEX_EXPR, type,
6890 TREE_OPERAND (arg0, 0),
6891 negate_expr (TREE_OPERAND (arg0, 1)));
6892 else if (TREE_CODE (arg0) == COMPLEX_CST)
6893 return build_complex (type, TREE_REALPART (arg0),
6894 negate_expr (TREE_IMAGPART (arg0)));
6895 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6896 return fold_build2 (TREE_CODE (arg0), type,
6897 fold_build1 (CONJ_EXPR, type,
6898 TREE_OPERAND (arg0, 0)),
6899 fold_build1 (CONJ_EXPR, type,
6900 TREE_OPERAND (arg0, 1)));
6901 else if (TREE_CODE (arg0) == CONJ_EXPR)
6902 return TREE_OPERAND (arg0, 0);
6903 return NULL_TREE;
6904
6905 case BIT_NOT_EXPR:
6906 if (TREE_CODE (arg0) == INTEGER_CST)
6907 return fold_not_const (arg0, type);
6908 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6909 return TREE_OPERAND (arg0, 0);
6910 /* Convert ~ (-A) to A - 1. */
6911 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6912 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6913 build_int_cst (type, 1));
6914 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6915 else if (INTEGRAL_TYPE_P (type)
6916 && ((TREE_CODE (arg0) == MINUS_EXPR
6917 && integer_onep (TREE_OPERAND (arg0, 1)))
6918 || (TREE_CODE (arg0) == PLUS_EXPR
6919 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6920 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6921 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6922 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6923 && (tem = fold_unary (BIT_NOT_EXPR, type,
6924 fold_convert (type,
6925 TREE_OPERAND (arg0, 0)))))
6926 return fold_build2 (BIT_XOR_EXPR, type, tem,
6927 fold_convert (type, TREE_OPERAND (arg0, 1)));
6928 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6929 && (tem = fold_unary (BIT_NOT_EXPR, type,
6930 fold_convert (type,
6931 TREE_OPERAND (arg0, 1)))))
6932 return fold_build2 (BIT_XOR_EXPR, type,
6933 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6934
6935 return NULL_TREE;
6936
6937 case TRUTH_NOT_EXPR:
6938 /* The argument to invert_truthvalue must have Boolean type. */
6939 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6940 arg0 = fold_convert (boolean_type_node, arg0);
6941
      /* Note that the operand of this must be an int
	 and its value must be 0 or 1.
	 ("true" is a fixed value, perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
6946 tem = invert_truthvalue (arg0);
6947 /* Avoid infinite recursion. */
6948 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6949 return NULL_TREE;
6950 return fold_convert (type, tem);
6951
6952 case REALPART_EXPR:
6953 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6954 return NULL_TREE;
6955 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6956 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6957 TREE_OPERAND (arg0, 1));
6958 else if (TREE_CODE (arg0) == COMPLEX_CST)
6959 return TREE_REALPART (arg0);
6960 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6961 return fold_build2 (TREE_CODE (arg0), type,
6962 fold_build1 (REALPART_EXPR, type,
6963 TREE_OPERAND (arg0, 0)),
6964 fold_build1 (REALPART_EXPR, type,
6965 TREE_OPERAND (arg0, 1)));
6966 return NULL_TREE;
6967
6968 case IMAGPART_EXPR:
6969 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6970 return fold_convert (type, integer_zero_node);
6971 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6972 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6973 TREE_OPERAND (arg0, 0));
6974 else if (TREE_CODE (arg0) == COMPLEX_CST)
6975 return TREE_IMAGPART (arg0);
6976 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6977 return fold_build2 (TREE_CODE (arg0), type,
6978 fold_build1 (IMAGPART_EXPR, type,
6979 TREE_OPERAND (arg0, 0)),
6980 fold_build1 (IMAGPART_EXPR, type,
6981 TREE_OPERAND (arg0, 1)));
6982 return NULL_TREE;
6983
6984 default:
6985 return NULL_TREE;
6986 } /* switch (code) */
6987 }
6988
6989 /* Fold a binary expression of code CODE and type TYPE with operands
6990 OP0 and OP1. Return the folded expression if folding is
6991 successful. Otherwise, return NULL_TREE. */
6992
6993 tree
6994 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
6995 {
6996 tree t1 = NULL_TREE;
6997 tree tem;
6998 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6999 enum tree_code_class kind = TREE_CODE_CLASS (code);
7000
7001 /* WINS will be nonzero when the switch is done
7002 if all operands are constant. */
7003 int wins = 1;
7004
7005 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7006 && TREE_CODE_LENGTH (code) == 2);
7007
7008 arg0 = op0;
7009 arg1 = op1;
7010
7011 if (arg0)
7012 {
7013 tree subop;
7014
7015 /* Strip any conversions that don't change the mode. This is
7016 safe for every expression, except for a comparison expression
7017 because its signedness is derived from its operands. So, in
7018 the latter case, only strip conversions that don't change the
7019 signedness.
7020
7021 Note that this is done as an internal manipulation within the
7022 constant folder, in order to find the simplest representation
	 of the arguments so that their form can be studied.  In any
	 case, the appropriate type conversions should be put back in
	 the tree that is returned from the constant folder.  */
7026 if (kind == tcc_comparison)
7027 STRIP_SIGN_NOPS (arg0);
7028 else
7029 STRIP_NOPS (arg0);
7030
7031 if (TREE_CODE (arg0) == COMPLEX_CST)
7032 subop = TREE_REALPART (arg0);
7033 else
7034 subop = arg0;
7035
7036 if (TREE_CODE (subop) != INTEGER_CST
7037 && TREE_CODE (subop) != REAL_CST)
7038 /* Note that TREE_CONSTANT isn't enough:
7039 static var addresses are constant but we can't
7040 do arithmetic on them. */
7041 wins = 0;
7042 }
7043
7044 if (arg1)
7045 {
7046 tree subop;
7047
7048 /* Strip any conversions that don't change the mode. This is
7049 safe for every expression, except for a comparison expression
7050 because its signedness is derived from its operands. So, in
7051 the latter case, only strip conversions that don't change the
7052 signedness.
7053
7054 Note that this is done as an internal manipulation within the
7055 constant folder, in order to find the simplest representation
	 of the arguments so that their form can be studied.  In any
	 case, the appropriate type conversions should be put back in
	 the tree that is returned from the constant folder.  */
7059 if (kind == tcc_comparison)
7060 STRIP_SIGN_NOPS (arg1);
7061 else
7062 STRIP_NOPS (arg1);
7063
7064 if (TREE_CODE (arg1) == COMPLEX_CST)
7065 subop = TREE_REALPART (arg1);
7066 else
7067 subop = arg1;
7068
7069 if (TREE_CODE (subop) != INTEGER_CST
7070 && TREE_CODE (subop) != REAL_CST)
7071 /* Note that TREE_CONSTANT isn't enough:
7072 static var addresses are constant but we can't
7073 do arithmetic on them. */
7074 wins = 0;
7075 }
7076
7077 /* If this is a commutative operation, and ARG0 is a constant, move it
7078 to ARG1 to reduce the number of tests below. */
7079 if (commutative_tree_code (code)
7080 && tree_swap_operands_p (arg0, arg1, true))
7081 return fold_build2 (code, type, op1, op0);
7082
  /* Now WINS is set as described above,
     ARG0 is the first operand of the expression,
     and ARG1 is the second operand.
7086
7087 First check for cases where an arithmetic operation is applied to a
7088 compound, conditional, or comparison operation. Push the arithmetic
7089 operation inside the compound or conditional to see if any folding
7090 can then be done. Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
7092 expand_expr.
7093
     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one of the operands is a truth value and the other is a truth
     value or a BIT_AND_EXPR with the constant 1.  In that case, the
7097 code below would make the expression more complex. Change it to a
7098 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7099 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7100
7101 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7102 || code == EQ_EXPR || code == NE_EXPR)
7103 && ((truth_value_p (TREE_CODE (arg0))
7104 && (truth_value_p (TREE_CODE (arg1))
7105 || (TREE_CODE (arg1) == BIT_AND_EXPR
7106 && integer_onep (TREE_OPERAND (arg1, 1)))))
7107 || (truth_value_p (TREE_CODE (arg1))
7108 && (truth_value_p (TREE_CODE (arg0))
7109 || (TREE_CODE (arg0) == BIT_AND_EXPR
7110 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7111 {
7112 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7113 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7114 : TRUTH_XOR_EXPR,
7115 boolean_type_node,
7116 fold_convert (boolean_type_node, arg0),
7117 fold_convert (boolean_type_node, arg1));
7118
7119 if (code == EQ_EXPR)
7120 tem = invert_truthvalue (tem);
7121
7122 return fold_convert (type, tem);
7123 }
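  /* For instance, with hypothetical operands a, b, c and d,
     (a < b) & (c < d) -- both operands truth values -- becomes
     TRUTH_AND_EXPR (a < b, c < d), which the logical folders below
     know how to simplify further.  */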
7124
7125 if (TREE_CODE_CLASS (code) == tcc_binary
7126 || TREE_CODE_CLASS (code) == tcc_comparison)
7127 {
7128 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7129 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7130 fold_build2 (code, type,
7131 TREE_OPERAND (arg0, 1), op1));
7132 if (TREE_CODE (arg1) == COMPOUND_EXPR
7133 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7134 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7135 fold_build2 (code, type,
7136 op0, TREE_OPERAND (arg1, 1)));
7137
7138 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7139 {
7140 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7141 arg0, arg1,
7142 /*cond_first_p=*/1);
7143 if (tem != NULL_TREE)
7144 return tem;
7145 }
7146
7147 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7148 {
7149 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7150 arg1, arg0,
7151 /*cond_first_p=*/0);
7152 if (tem != NULL_TREE)
7153 return tem;
7154 }
7155 }
7156
7157 switch (code)
7158 {
7159 case PLUS_EXPR:
7160 /* A + (-B) -> A - B */
7161 if (TREE_CODE (arg1) == NEGATE_EXPR)
7162 return fold_build2 (MINUS_EXPR, type,
7163 fold_convert (type, arg0),
7164 fold_convert (type, TREE_OPERAND (arg1, 0)));
7165 /* (-A) + B -> B - A */
7166 if (TREE_CODE (arg0) == NEGATE_EXPR
7167 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7168 return fold_build2 (MINUS_EXPR, type,
7169 fold_convert (type, arg1),
7170 fold_convert (type, TREE_OPERAND (arg0, 0)));
7171 /* Convert ~A + 1 to -A. */
7172 if (INTEGRAL_TYPE_P (type)
7173 && TREE_CODE (arg0) == BIT_NOT_EXPR
7174 && integer_onep (arg1))
7175 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7176
7177 if (! FLOAT_TYPE_P (type))
7178 {
7179 if (integer_zerop (arg1))
7180 return non_lvalue (fold_convert (type, arg0));
7181
7182 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7183 with a constant, and the two constants have no bits in common,
7184 we should treat this as a BIT_IOR_EXPR since this may produce more
7185 simplifications. */
7186 if (TREE_CODE (arg0) == BIT_AND_EXPR
7187 && TREE_CODE (arg1) == BIT_AND_EXPR
7188 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7189 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7190 && integer_zerop (const_binop (BIT_AND_EXPR,
7191 TREE_OPERAND (arg0, 1),
7192 TREE_OPERAND (arg1, 1), 0)))
7193 {
7194 code = BIT_IOR_EXPR;
7195 goto bit_ior;
7196 }
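	  /* E.g. (x & 0xF0) + (y & 0x0F): the constant masks share no
	     bits, so no carries can occur and the sum is equivalent to
	     (x & 0xF0) | (y & 0x0F).  */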
7197
7198 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7199 (plus (plus (mult) (mult)) (foo)) so that we can
7200 take advantage of the factoring cases below. */
7201 if (((TREE_CODE (arg0) == PLUS_EXPR
7202 || TREE_CODE (arg0) == MINUS_EXPR)
7203 && TREE_CODE (arg1) == MULT_EXPR)
7204 || ((TREE_CODE (arg1) == PLUS_EXPR
7205 || TREE_CODE (arg1) == MINUS_EXPR)
7206 && TREE_CODE (arg0) == MULT_EXPR))
7207 {
7208 tree parg0, parg1, parg, marg;
7209 enum tree_code pcode;
7210
7211 if (TREE_CODE (arg1) == MULT_EXPR)
7212 parg = arg0, marg = arg1;
7213 else
7214 parg = arg1, marg = arg0;
7215 pcode = TREE_CODE (parg);
7216 parg0 = TREE_OPERAND (parg, 0);
7217 parg1 = TREE_OPERAND (parg, 1);
7218 STRIP_NOPS (parg0);
7219 STRIP_NOPS (parg1);
7220
7221 if (TREE_CODE (parg0) == MULT_EXPR
7222 && TREE_CODE (parg1) != MULT_EXPR)
7223 return fold_build2 (pcode, type,
7224 fold_build2 (PLUS_EXPR, type,
7225 fold_convert (type, parg0),
7226 fold_convert (type, marg)),
7227 fold_convert (type, parg1));
7228 if (TREE_CODE (parg0) != MULT_EXPR
7229 && TREE_CODE (parg1) == MULT_EXPR)
7230 return fold_build2 (PLUS_EXPR, type,
7231 fold_convert (type, parg0),
7232 fold_build2 (pcode, type,
7233 fold_convert (type, marg),
7234 fold_convert (type,
7235 parg1)));
7236 }
7237
7238 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7239 {
7240 tree arg00, arg01, arg10, arg11;
7241 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7242
7243 /* (A * C) + (B * C) -> (A+B) * C.
7244 We are most concerned about the case where C is a constant,
7245 but other combinations show up during loop reduction. Since
7246 it is not difficult, try all four possibilities. */
7247
7248 arg00 = TREE_OPERAND (arg0, 0);
7249 arg01 = TREE_OPERAND (arg0, 1);
7250 arg10 = TREE_OPERAND (arg1, 0);
7251 arg11 = TREE_OPERAND (arg1, 1);
7252 same = NULL_TREE;
7253
7254 if (operand_equal_p (arg01, arg11, 0))
7255 same = arg01, alt0 = arg00, alt1 = arg10;
7256 else if (operand_equal_p (arg00, arg10, 0))
7257 same = arg00, alt0 = arg01, alt1 = arg11;
7258 else if (operand_equal_p (arg00, arg11, 0))
7259 same = arg00, alt0 = arg01, alt1 = arg10;
7260 else if (operand_equal_p (arg01, arg10, 0))
7261 same = arg01, alt0 = arg00, alt1 = arg11;
7262
7263 /* No identical multiplicands; see if we can find a common
7264 power-of-two factor in non-power-of-two multiplies. This
7265 can help in multi-dimensional array access. */
7266 else if (TREE_CODE (arg01) == INTEGER_CST
7267 && TREE_CODE (arg11) == INTEGER_CST
7268 && TREE_INT_CST_HIGH (arg01) == 0
7269 && TREE_INT_CST_HIGH (arg11) == 0)
7270 {
7271 HOST_WIDE_INT int01, int11, tmp;
7272 int01 = TREE_INT_CST_LOW (arg01);
7273 int11 = TREE_INT_CST_LOW (arg11);
7274
7275 /* Move min of absolute values to int11. */
7276 if ((int01 >= 0 ? int01 : -int01)
7277 < (int11 >= 0 ? int11 : -int11))
7278 {
7279 tmp = int01, int01 = int11, int11 = tmp;
7280 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7281 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7282 }
7283
7284 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7285 {
7286 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7287 build_int_cst (NULL_TREE,
7288 int01 / int11));
7289 alt1 = arg10;
7290 same = arg11;
7291 }
7292 }
7293
7294 if (same)
7295 return fold_build2 (MULT_EXPR, type,
7296 fold_build2 (PLUS_EXPR, type,
7297 fold_convert (type, alt0),
7298 fold_convert (type, alt1)),
7299 fold_convert (type, same));
7300 }
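	  /* E.g. i*12 + j*4 shares no operand, but 4 is a power of two
	     dividing 12, so the last case rewrites it as (i*3 + j) * 4.  */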
7301
	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the
	     step (element size) of the array.  The loop optimizer
	     sometimes produces this type of expression.  */
7305 if (TREE_CODE (arg0) == ADDR_EXPR)
7306 {
7307 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7308 if (tem)
7309 return fold_convert (type, tem);
7310 }
7311 else if (TREE_CODE (arg1) == ADDR_EXPR)
7312 {
7313 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7314 if (tem)
7315 return fold_convert (type, tem);
7316 }
7317 }
7318 else
7319 {
7320 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7321 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7322 return non_lvalue (fold_convert (type, arg0));
7323
7324 /* Likewise if the operands are reversed. */
7325 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7326 return non_lvalue (fold_convert (type, arg1));
7327
7328 /* Convert X + -C into X - C. */
7329 if (TREE_CODE (arg1) == REAL_CST
7330 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7331 {
7332 tem = fold_negate_const (arg1, type);
7333 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7334 return fold_build2 (MINUS_EXPR, type,
7335 fold_convert (type, arg0),
7336 fold_convert (type, tem));
7337 }
7338
7339 if (flag_unsafe_math_optimizations
7340 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7341 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7342 && (tem = distribute_real_division (code, type, arg0, arg1)))
7343 return tem;
7344
7345 /* Convert x+x into x*2.0. */
7346 if (operand_equal_p (arg0, arg1, 0)
7347 && SCALAR_FLOAT_TYPE_P (type))
7348 return fold_build2 (MULT_EXPR, type, arg0,
7349 build_real (type, dconst2));
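	  /* E.g. y + y becomes y * 2.0.  Doubling is exact in binary
	     floating point, so this needs no unsafe-math flag.  */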
7350
7351 /* Convert x*c+x into x*(c+1). */
7352 if (flag_unsafe_math_optimizations
7353 && TREE_CODE (arg0) == MULT_EXPR
7354 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7355 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7356 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7357 {
7358 REAL_VALUE_TYPE c;
7359
7360 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7361 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7362 return fold_build2 (MULT_EXPR, type, arg1,
7363 build_real (type, c));
7364 }
7365
7366 /* Convert x+x*c into x*(c+1). */
7367 if (flag_unsafe_math_optimizations
7368 && TREE_CODE (arg1) == MULT_EXPR
7369 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7370 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7371 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7372 {
7373 REAL_VALUE_TYPE c;
7374
7375 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7376 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7377 return fold_build2 (MULT_EXPR, type, arg0,
7378 build_real (type, c));
7379 }
7380
7381 /* Convert x*c1+x*c2 into x*(c1+c2). */
7382 if (flag_unsafe_math_optimizations
7383 && TREE_CODE (arg0) == MULT_EXPR
7384 && TREE_CODE (arg1) == MULT_EXPR
7385 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7386 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7387 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7388 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7389 && operand_equal_p (TREE_OPERAND (arg0, 0),
7390 TREE_OPERAND (arg1, 0), 0))
7391 {
7392 REAL_VALUE_TYPE c1, c2;
7393
7394 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7395 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7396 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7397 return fold_build2 (MULT_EXPR, type,
7398 TREE_OPERAND (arg0, 0),
7399 build_real (type, c1));
7400 }
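	  /* Together these three fold, e.g., y*3.0 + y to y*4.0 and
	     y*2.0 + y*5.0 to y*7.0; they stay behind
	     -funsafe-math-optimizations because the rounding of the
	     distributed and factored forms may differ.  */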
7401 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7402 if (flag_unsafe_math_optimizations
7403 && TREE_CODE (arg1) == PLUS_EXPR
7404 && TREE_CODE (arg0) != MULT_EXPR)
7405 {
7406 tree tree10 = TREE_OPERAND (arg1, 0);
7407 tree tree11 = TREE_OPERAND (arg1, 1);
7408 if (TREE_CODE (tree11) == MULT_EXPR
7409 && TREE_CODE (tree10) == MULT_EXPR)
7410 {
7411 tree tree0;
7412 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7413 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7414 }
7415 }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
7417 if (flag_unsafe_math_optimizations
7418 && TREE_CODE (arg0) == PLUS_EXPR
7419 && TREE_CODE (arg1) != MULT_EXPR)
7420 {
7421 tree tree00 = TREE_OPERAND (arg0, 0);
7422 tree tree01 = TREE_OPERAND (arg0, 1);
7423 if (TREE_CODE (tree01) == MULT_EXPR
7424 && TREE_CODE (tree00) == MULT_EXPR)
7425 {
7426 tree tree0;
7427 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7428 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7429 }
7430 }
7431 }
7432
7433 bit_rotate:
      /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the
	 bitsize of A, is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the
	 bitsize of A, is a rotate of A by B bits.  */
7438 {
7439 enum tree_code code0, code1;
7440 code0 = TREE_CODE (arg0);
7441 code1 = TREE_CODE (arg1);
7442 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7443 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7444 && operand_equal_p (TREE_OPERAND (arg0, 0),
7445 TREE_OPERAND (arg1, 0), 0)
7446 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7447 {
7448 tree tree01, tree11;
7449 enum tree_code code01, code11;
7450
7451 tree01 = TREE_OPERAND (arg0, 1);
7452 tree11 = TREE_OPERAND (arg1, 1);
7453 STRIP_NOPS (tree01);
7454 STRIP_NOPS (tree11);
7455 code01 = TREE_CODE (tree01);
7456 code11 = TREE_CODE (tree11);
7457 if (code01 == INTEGER_CST
7458 && code11 == INTEGER_CST
7459 && TREE_INT_CST_HIGH (tree01) == 0
7460 && TREE_INT_CST_HIGH (tree11) == 0
7461 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7462 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7463 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7464 code0 == LSHIFT_EXPR ? tree01 : tree11);
7465 else if (code11 == MINUS_EXPR)
7466 {
7467 tree tree110, tree111;
7468 tree110 = TREE_OPERAND (tree11, 0);
7469 tree111 = TREE_OPERAND (tree11, 1);
7470 STRIP_NOPS (tree110);
7471 STRIP_NOPS (tree111);
7472 if (TREE_CODE (tree110) == INTEGER_CST
7473 && 0 == compare_tree_int (tree110,
7474 TYPE_PRECISION
7475 (TREE_TYPE (TREE_OPERAND
7476 (arg0, 0))))
7477 && operand_equal_p (tree01, tree111, 0))
7478 return build2 ((code0 == LSHIFT_EXPR
7479 ? LROTATE_EXPR
7480 : RROTATE_EXPR),
7481 type, TREE_OPERAND (arg0, 0), tree01);
7482 }
7483 else if (code01 == MINUS_EXPR)
7484 {
7485 tree tree010, tree011;
7486 tree010 = TREE_OPERAND (tree01, 0);
7487 tree011 = TREE_OPERAND (tree01, 1);
7488 STRIP_NOPS (tree010);
7489 STRIP_NOPS (tree011);
7490 if (TREE_CODE (tree010) == INTEGER_CST
7491 && 0 == compare_tree_int (tree010,
7492 TYPE_PRECISION
7493 (TREE_TYPE (TREE_OPERAND
7494 (arg0, 0))))
7495 && operand_equal_p (tree11, tree011, 0))
7496 return build2 ((code0 != LSHIFT_EXPR
7497 ? LROTATE_EXPR
7498 : RROTATE_EXPR),
7499 type, TREE_OPERAND (arg0, 0), tree11);
7500 }
7501 }
7502 }
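      /* E.g. for 32-bit unsigned x, (x << 3) + (x >> 29): the two
	 shifted halves cannot overlap, so the sum is x rotated left
	 by 3.  */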
7503
7504 associate:
      /* In most languages, we can't reassociate operations on floats
	 through parentheses.  Rather than remember where the parentheses
	 were, we don't associate floats at all, unless the user has
	 specified -funsafe-math-optimizations.  */
7509
7510 if (! wins
7511 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7512 {
7513 tree var0, con0, lit0, minus_lit0;
7514 tree var1, con1, lit1, minus_lit1;
7515
7516 /* Split both trees into variables, constants, and literals. Then
7517 associate each group together, the constants with literals,
7518 then the result with variables. This increases the chances of
7519 literals being recombined later and of generating relocatable
7520 expressions for the sum of a constant and literal. */
7521 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7522 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7523 code == MINUS_EXPR);
7524
7525 /* Only do something if we found more than two objects. Otherwise,
7526 nothing has changed and we risk infinite recursion. */
7527 if (2 < ((var0 != 0) + (var1 != 0)
7528 + (con0 != 0) + (con1 != 0)
7529 + (lit0 != 0) + (lit1 != 0)
7530 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7531 {
7532 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7533 if (code == MINUS_EXPR)
7534 code = PLUS_EXPR;
7535
7536 var0 = associate_trees (var0, var1, code, type);
7537 con0 = associate_trees (con0, con1, code, type);
7538 lit0 = associate_trees (lit0, lit1, code, type);
7539 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7540
7541 /* Preserve the MINUS_EXPR if the negative part of the literal is
7542 greater than the positive part. Otherwise, the multiplicative
	     folding code (i.e. extract_muldiv) may be fooled when
	     unsigned constants are subtracted, as in the following
7545 example: ((X*2 + 4) - 8U)/2. */
7546 if (minus_lit0 && lit0)
7547 {
7548 if (TREE_CODE (lit0) == INTEGER_CST
7549 && TREE_CODE (minus_lit0) == INTEGER_CST
7550 && tree_int_cst_lt (lit0, minus_lit0))
7551 {
7552 minus_lit0 = associate_trees (minus_lit0, lit0,
7553 MINUS_EXPR, type);
7554 lit0 = 0;
7555 }
7556 else
7557 {
7558 lit0 = associate_trees (lit0, minus_lit0,
7559 MINUS_EXPR, type);
7560 minus_lit0 = 0;
7561 }
7562 }
7563 if (minus_lit0)
7564 {
7565 if (con0 == 0)
7566 return fold_convert (type,
7567 associate_trees (var0, minus_lit0,
7568 MINUS_EXPR, type));
7569 else
7570 {
7571 con0 = associate_trees (con0, minus_lit0,
7572 MINUS_EXPR, type);
7573 return fold_convert (type,
7574 associate_trees (var0, con0,
7575 PLUS_EXPR, type));
7576 }
7577 }
7578
7579 con0 = associate_trees (con0, lit0, code, type);
7580 return fold_convert (type, associate_trees (var0, con0,
7581 code, type));
7582 }
7583 }
7584
7585 binary:
7586 if (wins)
7587 t1 = const_binop (code, arg0, arg1, 0);
7588 if (t1 != NULL_TREE)
7589 {
7590 /* The return value should always have
7591 the same type as the original expression. */
7592 if (TREE_TYPE (t1) != type)
7593 t1 = fold_convert (type, t1);
7594
7595 return t1;
7596 }
7597 return NULL_TREE;
7598
7599 case MINUS_EXPR:
7600 /* A - (-B) -> A + B */
7601 if (TREE_CODE (arg1) == NEGATE_EXPR)
7602 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7603 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7604 if (TREE_CODE (arg0) == NEGATE_EXPR
7605 && (FLOAT_TYPE_P (type)
7606 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7607 && negate_expr_p (arg1)
7608 && reorder_operands_p (arg0, arg1))
7609 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7610 TREE_OPERAND (arg0, 0));
7611 /* Convert -A - 1 to ~A. */
7612 if (INTEGRAL_TYPE_P (type)
7613 && TREE_CODE (arg0) == NEGATE_EXPR
7614 && integer_onep (arg1))
7615 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7616
7617 /* Convert -1 - A to ~A. */
7618 if (INTEGRAL_TYPE_P (type)
7619 && integer_all_onesp (arg0))
7620 return fold_build1 (BIT_NOT_EXPR, type, arg1);
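      /* Again from ~A == -A - 1 == -1 - A; e.g. -1 - 9 == -10 == ~9.  */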
7621
7622 if (! FLOAT_TYPE_P (type))
7623 {
7624 if (! wins && integer_zerop (arg0))
7625 return negate_expr (fold_convert (type, arg1));
7626 if (integer_zerop (arg1))
7627 return non_lvalue (fold_convert (type, arg0));
7628
7629 /* Fold A - (A & B) into ~B & A. */
7630 if (!TREE_SIDE_EFFECTS (arg0)
7631 && TREE_CODE (arg1) == BIT_AND_EXPR)
7632 {
7633 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7634 return fold_build2 (BIT_AND_EXPR, type,
7635 fold_build1 (BIT_NOT_EXPR, type,
7636 TREE_OPERAND (arg1, 0)),
7637 arg0);
7638 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7639 return fold_build2 (BIT_AND_EXPR, type,
7640 fold_build1 (BIT_NOT_EXPR, type,
7641 TREE_OPERAND (arg1, 1)),
7642 arg0);
7643 }
7644
7645 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7646 any power of 2 minus 1. */
7647 if (TREE_CODE (arg0) == BIT_AND_EXPR
7648 && TREE_CODE (arg1) == BIT_AND_EXPR
7649 && operand_equal_p (TREE_OPERAND (arg0, 0),
7650 TREE_OPERAND (arg1, 0), 0))
7651 {
7652 tree mask0 = TREE_OPERAND (arg0, 1);
7653 tree mask1 = TREE_OPERAND (arg1, 1);
7654 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7655
7656 if (operand_equal_p (tem, mask1, 0))
7657 {
7658 tem = fold_build2 (BIT_XOR_EXPR, type,
7659 TREE_OPERAND (arg0, 0), mask1);
7660 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7661 }
7662 }
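	  /* E.g. with the low-bit mask B == 3 and A == 10:
	     (10 & ~3) - (10 & 3) == 8 - 2 == 6 == (10 ^ 3) - 3.  */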
7663 }
7664
7665 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7666 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7667 return non_lvalue (fold_convert (type, arg0));
7668
7669 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7670 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7671 (-ARG1 + ARG0) reduces to -ARG1. */
7672 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7673 return negate_expr (fold_convert (type, arg1));
7674
      /* Fold A - A to zero.  The interesting case is &x - &x, which can
	 arise from &x.foo - &x.  This is unsafe for certain floats even
	 in non-IEEE formats.  In IEEE, it is unsafe because it gives the
	 wrong result for NaNs.  Also note that operand_equal_p is always
	 false if an operand is volatile.  */
7680
7681 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7682 && operand_equal_p (arg0, arg1, 0))
7683 return fold_convert (type, integer_zero_node);
7684
7685 /* A - B -> A + (-B) if B is easily negatable. */
7686 if (!wins && negate_expr_p (arg1)
7687 && ((FLOAT_TYPE_P (type)
7688 /* Avoid this transformation if B is a positive REAL_CST. */
7689 && (TREE_CODE (arg1) != REAL_CST
7690 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7691 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7692 return fold_build2 (PLUS_EXPR, type,
7693 fold_convert (type, arg0),
7694 fold_convert (type, negate_expr (arg1)));
7695
7696 /* Try folding difference of addresses. */
7697 {
7698 HOST_WIDE_INT diff;
7699
7700 if ((TREE_CODE (arg0) == ADDR_EXPR
7701 || TREE_CODE (arg1) == ADDR_EXPR)
7702 && ptr_difference_const (arg0, arg1, &diff))
7703 return build_int_cst_type (type, diff);
7704 }
7705
      /* Fold &a[i] - &a[j] to (i - j) * sizeof (a[0]).  */
7707 if (TREE_CODE (arg0) == ADDR_EXPR
7708 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7709 && TREE_CODE (arg1) == ADDR_EXPR
7710 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7711 {
7712 tree aref0 = TREE_OPERAND (arg0, 0);
7713 tree aref1 = TREE_OPERAND (arg1, 0);
7714 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7715 TREE_OPERAND (aref1, 0), 0))
7716 {
7717 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7718 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7719 tree esz = array_ref_element_size (aref0);
7720 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7721 return fold_build2 (MULT_EXPR, type, diff,
7722 fold_convert (type, esz));
7723
7724 }
7725 }
7726
      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the
	 step (element size) of the array.  The loop optimizer sometimes
	 produces this type of expression.  */
7730 if (TREE_CODE (arg0) == ADDR_EXPR)
7731 {
7732 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7733 if (tem)
7734 return fold_convert (type, tem);
7735 }
7736
7737 if (flag_unsafe_math_optimizations
7738 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7739 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7740 && (tem = distribute_real_division (code, type, arg0, arg1)))
7741 return tem;
7742
7743 if (TREE_CODE (arg0) == MULT_EXPR
7744 && TREE_CODE (arg1) == MULT_EXPR
7745 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7746 {
7747 /* (A * C) - (B * C) -> (A-B) * C. */
7748 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7749 TREE_OPERAND (arg1, 1), 0))
7750 return fold_build2 (MULT_EXPR, type,
7751 fold_build2 (MINUS_EXPR, type,
7752 TREE_OPERAND (arg0, 0),
7753 TREE_OPERAND (arg1, 0)),
7754 TREE_OPERAND (arg0, 1));
7755 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7756 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7757 TREE_OPERAND (arg1, 0), 0))
7758 return fold_build2 (MULT_EXPR, type,
7759 TREE_OPERAND (arg0, 0),
7760 fold_build2 (MINUS_EXPR, type,
7761 TREE_OPERAND (arg0, 1),
7762 TREE_OPERAND (arg1, 1)));
7763 }
7764
7765 goto associate;
7766
7767 case MULT_EXPR:
7768 /* (-A) * (-B) -> A * B */
7769 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7770 return fold_build2 (MULT_EXPR, type,
7771 TREE_OPERAND (arg0, 0),
7772 negate_expr (arg1));
7773 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7774 return fold_build2 (MULT_EXPR, type,
7775 negate_expr (arg0),
7776 TREE_OPERAND (arg1, 0));
7777
7778 if (! FLOAT_TYPE_P (type))
7779 {
7780 if (integer_zerop (arg1))
7781 return omit_one_operand (type, arg1, arg0);
7782 if (integer_onep (arg1))
7783 return non_lvalue (fold_convert (type, arg0));
7784 /* Transform x * -1 into -x. */
7785 if (integer_all_onesp (arg1))
7786 return fold_convert (type, negate_expr (arg0));
7787
7788 /* (a * (1 << b)) is (a << b) */
7789 if (TREE_CODE (arg1) == LSHIFT_EXPR
7790 && integer_onep (TREE_OPERAND (arg1, 0)))
7791 return fold_build2 (LSHIFT_EXPR, type, arg0,
7792 TREE_OPERAND (arg1, 1));
7793 if (TREE_CODE (arg0) == LSHIFT_EXPR
7794 && integer_onep (TREE_OPERAND (arg0, 0)))
7795 return fold_build2 (LSHIFT_EXPR, type, arg1,
7796 TREE_OPERAND (arg0, 1));
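	  /* E.g. n * (1 << b) and (1 << b) * n both become n << b.  */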
7797
7798 if (TREE_CODE (arg1) == INTEGER_CST
7799 && 0 != (tem = extract_muldiv (op0,
7800 fold_convert (type, arg1),
7801 code, NULL_TREE)))
7802 return fold_convert (type, tem);
7803
7804 }
7805 else
7806 {
7807 /* Maybe fold x * 0 to 0. The expressions aren't the same
7808 when x is NaN, since x * 0 is also NaN. Nor are they the
7809 same in modes with signed zeros, since multiplying a
7810 negative value by 0 gives -0, not +0. */
7811 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7812 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7813 && real_zerop (arg1))
7814 return omit_one_operand (type, arg1, arg0);
7815 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7816 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7817 && real_onep (arg1))
7818 return non_lvalue (fold_convert (type, arg0));
7819
7820 /* Transform x * -1.0 into -x. */
7821 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7822 && real_minus_onep (arg1))
7823 return fold_convert (type, negate_expr (arg0));
7824
7825 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7826 if (flag_unsafe_math_optimizations
7827 && TREE_CODE (arg0) == RDIV_EXPR
7828 && TREE_CODE (arg1) == REAL_CST
7829 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7830 {
7831 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7832 arg1, 0);
7833 if (tem)
7834 return fold_build2 (RDIV_EXPR, type, tem,
7835 TREE_OPERAND (arg0, 1));
7836 }
7837
7838 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7839 if (operand_equal_p (arg0, arg1, 0))
7840 {
7841 tree tem = fold_strip_sign_ops (arg0);
7842 if (tem != NULL_TREE)
7843 {
7844 tem = fold_convert (type, tem);
7845 return fold_build2 (MULT_EXPR, type, tem, tem);
7846 }
7847 }
7848
7849 if (flag_unsafe_math_optimizations)
7850 {
7851 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7852 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7853
7854 /* Optimizations of root(...)*root(...). */
7855 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7856 {
7857 tree rootfn, arg, arglist;
7858 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7859 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7860
7861 /* Optimize sqrt(x)*sqrt(x) as x. */
7862 if (BUILTIN_SQRT_P (fcode0)
7863 && operand_equal_p (arg00, arg10, 0)
7864 && ! HONOR_SNANS (TYPE_MODE (type)))
7865 return arg00;
7866
7867 /* Optimize root(x)*root(y) as root(x*y). */
7868 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7869 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7870 arglist = build_tree_list (NULL_TREE, arg);
7871 return build_function_call_expr (rootfn, arglist);
7872 }
7873
7874 /* Optimize expN(x)*expN(y) as expN(x+y). */
7875 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7876 {
7877 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7878 tree arg = fold_build2 (PLUS_EXPR, type,
7879 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7880 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7881 tree arglist = build_tree_list (NULL_TREE, arg);
7882 return build_function_call_expr (expfn, arglist);
7883 }
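	      /* E.g. exp (x) * exp (y) becomes exp (x + y), mirroring
		 e**x * e**y == e**(x+y); rounding may differ, hence the
		 unsafe-math guard above.  */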
7884
7885 /* Optimizations of pow(...)*pow(...). */
7886 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7887 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7888 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7889 {
7890 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7891 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7892 1)));
7893 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7894 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7895 1)));
7896
7897 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7898 if (operand_equal_p (arg01, arg11, 0))
7899 {
7900 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7901 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7902 tree arglist = tree_cons (NULL_TREE, arg,
7903 build_tree_list (NULL_TREE,
7904 arg01));
7905 return build_function_call_expr (powfn, arglist);
7906 }
7907
7908 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7909 if (operand_equal_p (arg00, arg10, 0))
7910 {
7911 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7912 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7913 tree arglist = tree_cons (NULL_TREE, arg00,
7914 build_tree_list (NULL_TREE,
7915 arg));
7916 return build_function_call_expr (powfn, arglist);
7917 }
7918 }
7919
7920 /* Optimize tan(x)*cos(x) as sin(x). */
7921 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7922 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7923 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7924 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7925 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7926 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7927 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7928 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7929 {
7930 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7931
7932 if (sinfn != NULL_TREE)
7933 return build_function_call_expr (sinfn,
7934 TREE_OPERAND (arg0, 1));
7935 }
7936
7937 /* Optimize x*pow(x,c) as pow(x,c+1). */
7938 if (fcode1 == BUILT_IN_POW
7939 || fcode1 == BUILT_IN_POWF
7940 || fcode1 == BUILT_IN_POWL)
7941 {
7942 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7943 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7944 1)));
7945 if (TREE_CODE (arg11) == REAL_CST
7946 && ! TREE_CONSTANT_OVERFLOW (arg11)
7947 && operand_equal_p (arg0, arg10, 0))
7948 {
7949 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7950 REAL_VALUE_TYPE c;
7951 tree arg, arglist;
7952
7953 c = TREE_REAL_CST (arg11);
7954 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7955 arg = build_real (type, c);
7956 arglist = build_tree_list (NULL_TREE, arg);
7957 arglist = tree_cons (NULL_TREE, arg0, arglist);
7958 return build_function_call_expr (powfn, arglist);
7959 }
7960 }
7961
7962 /* Optimize pow(x,c)*x as pow(x,c+1). */
7963 if (fcode0 == BUILT_IN_POW
7964 || fcode0 == BUILT_IN_POWF
7965 || fcode0 == BUILT_IN_POWL)
7966 {
7967 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7968 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7969 1)));
7970 if (TREE_CODE (arg01) == REAL_CST
7971 && ! TREE_CONSTANT_OVERFLOW (arg01)
7972 && operand_equal_p (arg1, arg00, 0))
7973 {
7974 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7975 REAL_VALUE_TYPE c;
7976 tree arg, arglist;
7977
7978 c = TREE_REAL_CST (arg01);
7979 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7980 arg = build_real (type, c);
7981 arglist = build_tree_list (NULL_TREE, arg);
7982 arglist = tree_cons (NULL_TREE, arg1, arglist);
7983 return build_function_call_expr (powfn, arglist);
7984 }
7985 }
7986
7987 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7988 if (! optimize_size
7989 && operand_equal_p (arg0, arg1, 0))
7990 {
7991 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7992
7993 if (powfn)
7994 {
7995 tree arg = build_real (type, dconst2);
7996 tree arglist = build_tree_list (NULL_TREE, arg);
7997 arglist = tree_cons (NULL_TREE, arg0, arglist);
7998 return build_function_call_expr (powfn, arglist);
7999 }
8000 }
8001 }
8002 }
8003 goto associate;
8004
8005 case BIT_IOR_EXPR:
8006 bit_ior:
8007 if (integer_all_onesp (arg1))
8008 return omit_one_operand (type, arg1, arg0);
8009 if (integer_zerop (arg1))
8010 return non_lvalue (fold_convert (type, arg0));
8011 if (operand_equal_p (arg0, arg1, 0))
8012 return non_lvalue (fold_convert (type, arg0));
8013
8014 /* ~X | X is -1. */
8015 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8016 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8017 {
8018 t1 = build_int_cst (type, -1);
8019 t1 = force_fit_type (t1, 0, false, false);
8020 return omit_one_operand (type, t1, arg1);
8021 }
8022
8023 /* X | ~X is -1. */
8024 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8025 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8026 {
8027 t1 = build_int_cst (type, -1);
8028 t1 = force_fit_type (t1, 0, false, false);
8029 return omit_one_operand (type, t1, arg0);
8030 }
8031
8032 t1 = distribute_bit_expr (code, type, arg0, arg1);
8033 if (t1 != NULL_TREE)
8034 return t1;
8035
8036 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8037
8038 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form,
	 which will allow use of NAND instructions provided by the
8041 backend if they exist. */
8042 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8043 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8044 {
8045 return fold_build1 (BIT_NOT_EXPR, type,
8046 build2 (BIT_AND_EXPR, type,
8047 TREE_OPERAND (arg0, 0),
8048 TREE_OPERAND (arg1, 0)));
8049 }
8050
8051 /* See if this can be simplified into a rotate first. If that
8052 is unsuccessful continue in the association code. */
8053 goto bit_rotate;
8054
8055 case BIT_XOR_EXPR:
8056 if (integer_zerop (arg1))
8057 return non_lvalue (fold_convert (type, arg0));
8058 if (integer_all_onesp (arg1))
8059 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8060 if (operand_equal_p (arg0, arg1, 0))
8061 return omit_one_operand (type, integer_zero_node, arg0);
8062
8063 /* ~X ^ X is -1. */
8064 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8065 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8066 {
8067 t1 = build_int_cst (type, -1);
8068 t1 = force_fit_type (t1, 0, false, false);
8069 return omit_one_operand (type, t1, arg1);
8070 }
8071
8072 /* X ^ ~X is -1. */
8073 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8074 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8075 {
8076 t1 = build_int_cst (type, -1);
8077 t1 = force_fit_type (t1, 0, false, false);
8078 return omit_one_operand (type, t1, arg0);
8079 }
8080
8081 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8082 with a constant, and the two constants have no bits in common,
8083 we should treat this as a BIT_IOR_EXPR since this may produce more
8084 simplifications. */
8085 if (TREE_CODE (arg0) == BIT_AND_EXPR
8086 && TREE_CODE (arg1) == BIT_AND_EXPR
8087 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8088 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8089 && integer_zerop (const_binop (BIT_AND_EXPR,
8090 TREE_OPERAND (arg0, 1),
8091 TREE_OPERAND (arg1, 1), 0)))
8092 {
8093 code = BIT_IOR_EXPR;
8094 goto bit_ior;
8095 }
8096
      /* (X | Y) ^ X -> Y & ~X.  */
8098 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8099 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8100 {
8101 tree t2 = TREE_OPERAND (arg0, 1);
8102 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8103 arg1);
8104 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8105 fold_convert (type, t1));
8106 return t1;
8107 }
8108
      /* (Y | X) ^ X -> Y & ~X.  */
8110 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8111 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8112 {
8113 tree t2 = TREE_OPERAND (arg0, 0);
8114 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8115 arg1);
8116 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8117 fold_convert (type, t1));
8118 return t1;
8119 }
8120
      /* X ^ (X | Y) -> Y & ~X.  */
8122 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8123 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8124 {
8125 tree t2 = TREE_OPERAND (arg1, 1);
8126 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8127 arg0);
8128 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8129 fold_convert (type, t1));
8130 return t1;
8131 }
8132
      /* X ^ (Y | X) -> Y & ~X.  */
8134 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8135 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8136 {
8137 tree t2 = TREE_OPERAND (arg1, 0);
8138 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8139 arg0);
8140 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8141 fold_convert (type, t1));
8142 return t1;
8143 }
8144
8145 /* Convert ~X ^ ~Y to X ^ Y. */
8146 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8147 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8148 return fold_build2 (code, type,
8149 fold_convert (type, TREE_OPERAND (arg0, 0)),
8150 fold_convert (type, TREE_OPERAND (arg1, 0)));
8151
8152 /* See if this can be simplified into a rotate first. If that
8153 is unsuccessful continue in the association code. */
8154 goto bit_rotate;
8155
8156 case BIT_AND_EXPR:
8157 if (integer_all_onesp (arg1))
8158 return non_lvalue (fold_convert (type, arg0));
8159 if (integer_zerop (arg1))
8160 return omit_one_operand (type, arg1, arg0);
8161 if (operand_equal_p (arg0, arg1, 0))
8162 return non_lvalue (fold_convert (type, arg0));
8163
8164 /* ~X & X is always zero. */
8165 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8166 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8167 return omit_one_operand (type, integer_zero_node, arg1);
8168
8169 /* X & ~X is always zero. */
8170 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8171 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8172 return omit_one_operand (type, integer_zero_node, arg0);
8173
8174 t1 = distribute_bit_expr (code, type, arg0, arg1);
8175 if (t1 != NULL_TREE)
8176 return t1;
8177 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8178 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8179 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8180 {
8181 unsigned int prec
8182 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8183
8184 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8185 && (~TREE_INT_CST_LOW (arg1)
8186 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8187 return fold_convert (type, TREE_OPERAND (arg0, 0));
8188 }
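      /* E.g. for unsigned char c, ((int) c & 0377): the widening
	 conversion zero-extends, so the high bits are already clear
	 and the result is just (int) c.  */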
8189
8190 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8191
8192 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form,
	 which will allow use of NOR instructions provided by the
8195 backend if they exist. */
8196 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8197 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8198 {
8199 return fold_build1 (BIT_NOT_EXPR, type,
8200 build2 (BIT_IOR_EXPR, type,
8201 TREE_OPERAND (arg0, 0),
8202 TREE_OPERAND (arg1, 0)));
8203 }
8204
8205 goto associate;
8206
8207 case RDIV_EXPR:
8208 /* Don't touch a floating-point divide by zero unless the mode
8209 of the constant can represent infinity. */
8210 if (TREE_CODE (arg1) == REAL_CST
8211 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8212 && real_zerop (arg1))
8213 return NULL_TREE;
8214
8215 /* (-A) / (-B) -> A / B */
8216 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8217 return fold_build2 (RDIV_EXPR, type,
8218 TREE_OPERAND (arg0, 0),
8219 negate_expr (arg1));
8220 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8221 return fold_build2 (RDIV_EXPR, type,
8222 negate_expr (arg0),
8223 TREE_OPERAND (arg1, 0));
8224
8225 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8226 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8227 && real_onep (arg1))
8228 return non_lvalue (fold_convert (type, arg0));
8229
8230 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8231 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8232 && real_minus_onep (arg1))
8233 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8234
8235 /* If ARG1 is a constant, we can convert this to a multiply by the
8236 reciprocal. This does not have the same rounding properties,
8237 so only do this if -funsafe-math-optimizations. We can actually
8238 always safely do it if ARG1 is a power of two, but it's hard to
8239 tell if it is or not in a portable manner. */
8240 if (TREE_CODE (arg1) == REAL_CST)
8241 {
8242 if (flag_unsafe_math_optimizations
8243 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8244 arg1, 0)))
8245 return fold_build2 (MULT_EXPR, type, arg0, tem);
8246 /* Find the reciprocal if optimizing and the result is exact. */
8247 if (optimize)
8248 {
8249 REAL_VALUE_TYPE r;
8250 r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8252 {
8253 tem = build_real (type, r);
8254 return fold_build2 (MULT_EXPR, type,
8255 fold_convert (type, arg0), tem);
8256 }
8257 }
8258 }
8259 /* Convert A/B/C to A/(B*C). */
8260 if (flag_unsafe_math_optimizations
8261 && TREE_CODE (arg0) == RDIV_EXPR)
8262 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8263 fold_build2 (MULT_EXPR, type,
8264 TREE_OPERAND (arg0, 1), arg1));
8265
8266 /* Convert A/(B/C) to (A/B)*C. */
8267 if (flag_unsafe_math_optimizations
8268 && TREE_CODE (arg1) == RDIV_EXPR)
8269 return fold_build2 (MULT_EXPR, type,
8270 fold_build2 (RDIV_EXPR, type, arg0,
8271 TREE_OPERAND (arg1, 0)),
8272 TREE_OPERAND (arg1, 1));
8273
8274 /* Convert C1/(X*C2) into (C1/C2)/X. */
8275 if (flag_unsafe_math_optimizations
8276 && TREE_CODE (arg1) == MULT_EXPR
8277 && TREE_CODE (arg0) == REAL_CST
8278 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8279 {
8280 tree tem = const_binop (RDIV_EXPR, arg0,
8281 TREE_OPERAND (arg1, 1), 0);
8282 if (tem)
8283 return fold_build2 (RDIV_EXPR, type, tem,
8284 TREE_OPERAND (arg1, 0));
8285 }
8286
8287 if (flag_unsafe_math_optimizations)
8288 {
8289 enum built_in_function fcode = builtin_mathfn_code (arg1);
8290 /* Optimize x/expN(y) into x*expN(-y). */
8291 if (BUILTIN_EXPONENT_P (fcode))
8292 {
8293 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8294 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8295 tree arglist = build_tree_list (NULL_TREE,
8296 fold_convert (type, arg));
8297 arg1 = build_function_call_expr (expfn, arglist);
8298 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8299 }
8300
8301 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8302 if (fcode == BUILT_IN_POW
8303 || fcode == BUILT_IN_POWF
8304 || fcode == BUILT_IN_POWL)
8305 {
8306 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8307 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8308 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8309 tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
8311 build_tree_list (NULL_TREE, neg11));
8312 arg1 = build_function_call_expr (powfn, arglist);
8313 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8314 }
8315 }
8316
8317 if (flag_unsafe_math_optimizations)
8318 {
8319 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8320 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8321
8322 /* Optimize sin(x)/cos(x) as tan(x). */
8323 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8324 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8325 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8326 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8327 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8328 {
8329 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8330
8331 if (tanfn != NULL_TREE)
8332 return build_function_call_expr (tanfn,
8333 TREE_OPERAND (arg0, 1));
8334 }
8335
8336 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8337 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8338 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8339 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8340 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8341 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8342 {
8343 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8344
8345 if (tanfn != NULL_TREE)
8346 {
8347 tree tmp = TREE_OPERAND (arg0, 1);
8348 tmp = build_function_call_expr (tanfn, tmp);
8349 return fold_build2 (RDIV_EXPR, type,
8350 build_real (type, dconst1), tmp);
8351 }
8352 }
8353
8354 /* Optimize pow(x,c)/x as pow(x,c-1). */
8355 if (fcode0 == BUILT_IN_POW
8356 || fcode0 == BUILT_IN_POWF
8357 || fcode0 == BUILT_IN_POWL)
8358 {
8359 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8360 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8361 if (TREE_CODE (arg01) == REAL_CST
8362 && ! TREE_CONSTANT_OVERFLOW (arg01)
8363 && operand_equal_p (arg1, arg00, 0))
8364 {
8365 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8366 REAL_VALUE_TYPE c;
8367 tree arg, arglist;
8368
8369 c = TREE_REAL_CST (arg01);
8370 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8371 arg = build_real (type, c);
8372 arglist = build_tree_list (NULL_TREE, arg);
8373 arglist = tree_cons (NULL_TREE, arg1, arglist);
8374 return build_function_call_expr (powfn, arglist);
8375 }
8376 }
8377 }
8378 goto binary;
8379
8380 case TRUNC_DIV_EXPR:
8381 case ROUND_DIV_EXPR:
8382 case FLOOR_DIV_EXPR:
8383 case CEIL_DIV_EXPR:
8384 case EXACT_DIV_EXPR:
8385 if (integer_onep (arg1))
8386 return non_lvalue (fold_convert (type, arg0));
8387 if (integer_zerop (arg1))
8388 return NULL_TREE;
8389 /* X / -1 is -X. */
8390 if (!TYPE_UNSIGNED (type)
8391 && TREE_CODE (arg1) == INTEGER_CST
8392 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8393 && TREE_INT_CST_HIGH (arg1) == -1)
8394 return fold_convert (type, negate_expr (arg0));
8395
8396 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8397 operation, EXACT_DIV_EXPR.
8398
8399 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, but it's not clear
	 whether they do after the last round of changes to the DIV code
	 in expmed.c.  */
8402 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8403 && multiple_of_p (type, arg0, arg1))
8404 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8405
8406 if (TREE_CODE (arg1) == INTEGER_CST
8407 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8408 return fold_convert (type, tem);
8409
8410 goto binary;
8411
8412 case CEIL_MOD_EXPR:
8413 case FLOOR_MOD_EXPR:
8414 case ROUND_MOD_EXPR:
8415 case TRUNC_MOD_EXPR:
8416 /* X % 1 is always zero, but be sure to preserve any side
8417 effects in X. */
8418 if (integer_onep (arg1))
8419 return omit_one_operand (type, integer_zero_node, arg0);
8420
      /* For X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
8423 if (integer_zerop (arg1))
8424 return NULL_TREE;
8425
8426 /* 0 % X is always zero, but be sure to preserve any side
8427 effects in X. Place this after checking for X == 0. */
8428 if (integer_zerop (arg0))
8429 return omit_one_operand (type, integer_zero_node, arg1);
8430
8431 /* X % -1 is zero. */
8432 if (!TYPE_UNSIGNED (type)
8433 && TREE_CODE (arg1) == INTEGER_CST
8434 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8435 && TREE_INT_CST_HIGH (arg1) == -1)
8436 return omit_one_operand (type, integer_zero_node, arg0);
8437
8438 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8439 i.e. "X % C" into "X & C2", if X and C are positive. */
8440 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8441 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8442 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8443 {
8444 unsigned HOST_WIDE_INT high, low;
8445 tree mask;
8446 int l;
8447
8448 l = tree_log2 (arg1);
8449 if (l >= HOST_BITS_PER_WIDE_INT)
8450 {
8451 high = ((unsigned HOST_WIDE_INT) 1
8452 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8453 low = -1;
8454 }
8455 else
8456 {
8457 high = 0;
8458 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8459 }
8460
8461 mask = build_int_cst_wide (type, low, high);
8462 return fold_build2 (BIT_AND_EXPR, type,
8463 fold_convert (type, arg0), mask);
8464 }
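      /* E.g. for nonnegative x, x % 8 becomes x & 7.  */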
8465
8466 /* X % -C is the same as X % C. */
8467 if (code == TRUNC_MOD_EXPR
8468 && !TYPE_UNSIGNED (type)
8469 && TREE_CODE (arg1) == INTEGER_CST
8470 && !TREE_CONSTANT_OVERFLOW (arg1)
8471 && TREE_INT_CST_HIGH (arg1) < 0
8472 && !flag_trapv
8473 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8474 && !sign_bit_p (arg1, arg1))
8475 return fold_build2 (code, type, fold_convert (type, arg0),
8476 fold_convert (type, negate_expr (arg1)));
8477
8478 /* X % -Y is the same as X % Y. */
8479 if (code == TRUNC_MOD_EXPR
8480 && !TYPE_UNSIGNED (type)
8481 && TREE_CODE (arg1) == NEGATE_EXPR
8482 && !flag_trapv)
8483 return fold_build2 (code, type, fold_convert (type, arg0),
8484 fold_convert (type, TREE_OPERAND (arg1, 0)));
8485
8486 if (TREE_CODE (arg1) == INTEGER_CST
8487 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8488 return fold_convert (type, tem);
8489
8490 goto binary;
8491
8492 case LROTATE_EXPR:
8493 case RROTATE_EXPR:
8494 if (integer_all_onesp (arg0))
8495 return omit_one_operand (type, arg0, arg1);
8496 goto shift;
8497
8498 case RSHIFT_EXPR:
8499 /* Optimize -1 >> x for arithmetic right shifts. */
8500 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8501 return omit_one_operand (type, arg0, arg1);
8502 /* ... fall through ... */
8503
8504 case LSHIFT_EXPR:
8505 shift:
8506 if (integer_zerop (arg1))
8507 return non_lvalue (fold_convert (type, arg0));
8508 if (integer_zerop (arg0))
8509 return omit_one_operand (type, arg0, arg1);
8510
      /* Since a negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
8513 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8514 return NULL_TREE;
8515
8516 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8517 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8518 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8519 && host_integerp (TREE_OPERAND (arg0, 1), false)
8520 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8521 {
8522 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8523 + TREE_INT_CST_LOW (arg1));
8524
8525 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8526 being well defined. */
8527 if (low >= TYPE_PRECISION (type))
8528 {
8529 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8530 low = low % TYPE_PRECISION (type);
8531 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8532 return build_int_cst (type, 0);
8533 else
8534 low = TYPE_PRECISION (type) - 1;
8535 }
8536
8537 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8538 build_int_cst (type, low));
8539 }
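      /* E.g. (x << 2) << 3 becomes x << 5; and for 32-bit unsigned x,
	 (x >> 16) >> 16 becomes 0, since the combined count reaches
	 the type's precision.  */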
8540
8541 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8542 into x & ((unsigned)-1 >> c) for unsigned types. */
8543 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8544 || (TYPE_UNSIGNED (type)
8545 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8546 && host_integerp (arg1, false)
8547 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8548 && host_integerp (TREE_OPERAND (arg0, 1), false)
8549 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8550 {
8551 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8552 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8553 tree lshift;
8554 tree arg00;
8555
8556 if (low0 == low1)
8557 {
8558 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8559
8560 lshift = build_int_cst (type, -1);
8561 lshift = int_const_binop (code, lshift, arg1, 0);
8562
8563 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8564 }
8565 }
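      /* Worked example (illustrative): for a 32-bit unsigned x,
	 (x >> 4) << 4 folds to x & 0xfffffff0, and (x << 4) >> 4
	 folds to x & 0x0fffffff.  */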
8566
8567 /* Rewrite an LROTATE_EXPR by a constant into an
8568 RROTATE_EXPR by a new constant. */
8569 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8570 {
8571 tree tem = build_int_cst (NULL_TREE,
8572 GET_MODE_BITSIZE (TYPE_MODE (type)));
8573 tem = fold_convert (TREE_TYPE (arg1), tem);
8574 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8575 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8576 }
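      /* Illustration: on a 32-bit type, a left-rotate by 3 is
	 canonicalized to a right-rotate by 32 - 3 == 29.  */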
8577
8578 /* If we have a rotate of a bit operation with the rotate count and
8579 the second operand of the bit operation both constant,
8580 permute the two operations. */
8581 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8582 && (TREE_CODE (arg0) == BIT_AND_EXPR
8583 || TREE_CODE (arg0) == BIT_IOR_EXPR
8584 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8585 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8586 return fold_build2 (TREE_CODE (arg0), type,
8587 fold_build2 (code, type,
8588 TREE_OPERAND (arg0, 0), arg1),
8589 fold_build2 (code, type,
8590 TREE_OPERAND (arg0, 1), arg1));
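      /* Illustration: rotating (x & 0x0000ff00) right by 8 on a
	 32-bit type becomes (x rotated right by 8) & 0x000000ff;
	 the constant mask is rotated at compile time.  */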
8591
8592 /* Two consecutive rotates adding up to the width of the mode can
8593 be ignored. */
8594 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8595 && TREE_CODE (arg0) == RROTATE_EXPR
8596 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8597 && TREE_INT_CST_HIGH (arg1) == 0
8598 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8599 && ((TREE_INT_CST_LOW (arg1)
8600 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8601 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8602 return TREE_OPERAND (arg0, 0);
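      /* Illustration: on a 32-bit type, rotating x right by 10 and
	 then by 22 yields x again, as the counts sum to 32.  */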
8603
8604 goto binary;
8605
8606 case MIN_EXPR:
8607 if (operand_equal_p (arg0, arg1, 0))
8608 return omit_one_operand (type, arg0, arg1);
8609 if (INTEGRAL_TYPE_P (type)
8610 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8611 return omit_one_operand (type, arg1, arg0);
8612 goto associate;
8613
8614 case MAX_EXPR:
8615 if (operand_equal_p (arg0, arg1, 0))
8616 return omit_one_operand (type, arg0, arg1);
8617 if (INTEGRAL_TYPE_P (type)
8618 && TYPE_MAX_VALUE (type)
8619 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8620 return omit_one_operand (type, arg1, arg0);
8621 goto associate;
8622
8623 case TRUTH_ANDIF_EXPR:
8624 /* Note that the operands of this must be ints
8625 and their values must be 0 or 1.
8626 ("true" is a fixed value perhaps depending on the language.) */
8627 /* If first arg is constant zero, return it. */
8628 if (integer_zerop (arg0))
8629 return fold_convert (type, arg0);
8630 case TRUTH_AND_EXPR:
8631 /* If either arg is constant true, drop it. */
8632 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8633 return non_lvalue (fold_convert (type, arg1));
8634 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8635 /* Preserve sequence points. */
8636 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8637 return non_lvalue (fold_convert (type, arg0));
8638 /* If second arg is constant zero, result is zero, but first arg
8639 must be evaluated. */
8640 if (integer_zerop (arg1))
8641 return omit_one_operand (type, arg1, arg0);
8642 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8643 case will be handled here. */
8644 if (integer_zerop (arg0))
8645 return omit_one_operand (type, arg0, arg1);
8646
8647 /* !X && X is always false. */
8648 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8649 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8650 return omit_one_operand (type, integer_zero_node, arg1);
8651 /* X && !X is always false. */
8652 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8653 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8654 return omit_one_operand (type, integer_zero_node, arg0);
8655
8656 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8657 means A >= Y && A != MAX, but in this case we know that
8658 A < X <= MAX. */
8659
8660 if (!TREE_SIDE_EFFECTS (arg0)
8661 && !TREE_SIDE_EFFECTS (arg1))
8662 {
8663 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8664 if (tem && !operand_equal_p (tem, arg0, 0))
8665 return fold_build2 (code, type, tem, arg1);
8666
8667 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8668 if (tem && !operand_equal_p (tem, arg1, 0))
8669 return fold_build2 (code, type, arg0, tem);
8670 }
8671
8672 truth_andor:
8673 /* We only do these simplifications if we are optimizing. */
8674 if (!optimize)
8675 return NULL_TREE;
8676
8677 /* Check for things like (A || B) && (A || C). We can convert this
8678 to A || (B && C). Note that either operator can be any of the four
8679 truth and/or operations and the transformation will still be
8680 valid. Also note that we only care about order for the
8681 ANDIF and ORIF operators. If B contains side effects, this
8682 might change the truth-value of A. */
8683 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8684 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8685 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8686 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8687 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8688 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8689 {
8690 tree a00 = TREE_OPERAND (arg0, 0);
8691 tree a01 = TREE_OPERAND (arg0, 1);
8692 tree a10 = TREE_OPERAND (arg1, 0);
8693 tree a11 = TREE_OPERAND (arg1, 1);
8694 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8695 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8696 && (code == TRUTH_AND_EXPR
8697 || code == TRUTH_OR_EXPR));
8698
8699 if (operand_equal_p (a00, a10, 0))
8700 return fold_build2 (TREE_CODE (arg0), type, a00,
8701 fold_build2 (code, type, a01, a11));
8702 else if (commutative && operand_equal_p (a00, a11, 0))
8703 return fold_build2 (TREE_CODE (arg0), type, a00,
8704 fold_build2 (code, type, a01, a10));
8705 else if (commutative && operand_equal_p (a01, a10, 0))
8706 return fold_build2 (TREE_CODE (arg0), type, a01,
8707 fold_build2 (code, type, a00, a11));
8708
8709 /* This case is tricky because we must either have commutative
8710 operators or else A10 must not have side-effects. */
8711
8712 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8713 && operand_equal_p (a01, a11, 0))
8714 return fold_build2 (TREE_CODE (arg0), type,
8715 fold_build2 (code, type, a00, a10),
8716 a01);
8717 }
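      /* Worked example (illustrative): (a || b) && (a || c) folds to
	 a || (b && c), so A is tested only once; the analogous
	 factorings apply to the other and/or combinations above.  */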
8718
8719 /* See if we can build a range comparison. */
8720 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8721 return tem;
8722
8723 /* Check for the possibility of merging component references. If our
8724 lhs is another similar operation, try to merge its rhs with our
8725 rhs. Then try to merge our lhs and rhs. */
8726 if (TREE_CODE (arg0) == code
8727 && 0 != (tem = fold_truthop (code, type,
8728 TREE_OPERAND (arg0, 1), arg1)))
8729 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8730
8731 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8732 return tem;
8733
8734 return NULL_TREE;
8735
8736 case TRUTH_ORIF_EXPR:
8737 /* Note that the operands of this must be ints
8738 and their values must be 0 or 1.
8739 ("true" is a fixed value perhaps depending on the language.) */
8740 /* If first arg is constant true, return it. */
8741 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8742 return fold_convert (type, arg0);
8743 case TRUTH_OR_EXPR:
8744 /* If either arg is constant zero, drop it. */
8745 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8746 return non_lvalue (fold_convert (type, arg1));
8747 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8748 /* Preserve sequence points. */
8749 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8750 return non_lvalue (fold_convert (type, arg0));
8751 /* If second arg is constant true, result is true, but we must
8752 evaluate first arg. */
8753 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8754 return omit_one_operand (type, arg1, arg0);
8755 /* Likewise for first arg, but note this only occurs here for
8756 TRUTH_OR_EXPR. */
8757 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8758 return omit_one_operand (type, arg0, arg1);
8759
8760 /* !X || X is always true. */
8761 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8762 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8763 return omit_one_operand (type, integer_one_node, arg1);
8764 /* X || !X is always true. */
8765 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8767 return omit_one_operand (type, integer_one_node, arg0);
8768
8769 goto truth_andor;
8770
8771 case TRUTH_XOR_EXPR:
8772 /* If the second arg is constant zero, drop it. */
8773 if (integer_zerop (arg1))
8774 return non_lvalue (fold_convert (type, arg0));
8775 /* If the second arg is constant true, this is a logical inversion. */
8776 if (integer_onep (arg1))
8777 {
8778 /* Only call invert_truthvalue if operand is a truth value. */
8779 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8780 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8781 else
8782 tem = invert_truthvalue (arg0);
8783 return non_lvalue (fold_convert (type, tem));
8784 }
8785 /* Identical arguments cancel to zero. */
8786 if (operand_equal_p (arg0, arg1, 0))
8787 return omit_one_operand (type, integer_zero_node, arg0);
8788
8789 /* !X ^ X is always true. */
8790 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8791 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8792 return omit_one_operand (type, integer_one_node, arg1);
8793
8794 /* X ^ !X is always true. */
8795 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8796 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8797 return omit_one_operand (type, integer_one_node, arg0);
8798
8799 return NULL_TREE;
8800
8801 case EQ_EXPR:
8802 case NE_EXPR:
8803 case LT_EXPR:
8804 case GT_EXPR:
8805 case LE_EXPR:
8806 case GE_EXPR:
8807 /* If one arg is a real or integer constant, put it last. */
8808 if (tree_swap_operands_p (arg0, arg1, true))
8809 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8810
8811 /* bool_var != 0 becomes bool_var. */
8812 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8813 && code == NE_EXPR)
8814 return non_lvalue (fold_convert (type, arg0));
8815
8816 /* bool_var == 1 becomes bool_var. */
8817 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8818 && code == EQ_EXPR)
8819 return non_lvalue (fold_convert (type, arg0));
8820
8821 /* If this is an equality comparison of the address of a non-weak
8822 object against zero, then we know the result. */
8823 if ((code == EQ_EXPR || code == NE_EXPR)
8824 && TREE_CODE (arg0) == ADDR_EXPR
8825 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8826 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8827 && integer_zerop (arg1))
8828 return constant_boolean_node (code != EQ_EXPR, type);
8829
8830 /* If this is an equality comparison of the address of two non-weak,
8831 unaliased symbols neither of which are extern (since we do not
8832 have access to attributes for externs), then we know the result. */
8833 if ((code == EQ_EXPR || code == NE_EXPR)
8834 && TREE_CODE (arg0) == ADDR_EXPR
8835 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8836 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8837 && ! lookup_attribute ("alias",
8838 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8839 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8840 && TREE_CODE (arg1) == ADDR_EXPR
8841 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8842 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8843 && ! lookup_attribute ("alias",
8844 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8845 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8846 {
8847 /* We know that we're looking at the address of two
8848 non-weak, unaliased, static _DECL nodes.
8849
8850 It is both wasteful and incorrect to call operand_equal_p
8851 to compare the two ADDR_EXPR nodes. It is wasteful in that
8852 all we need to do is test pointer equality for the arguments
8853 to the two ADDR_EXPR nodes. It is incorrect to use
8854 operand_equal_p as that function is NOT equivalent to a
8855 C equality test. It can in fact return false for two
8856 objects which would test as equal using the C equality
8857 operator. */
8858 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8859 return constant_boolean_node (equal
8860 ? code == EQ_EXPR : code != EQ_EXPR,
8861 type);
8862 }
8863
8864 /* If this is a comparison of two exprs that look like an
8865 ARRAY_REF of the same object, then we can fold this to a
8866 comparison of the two offsets. */
8867 if (TREE_CODE_CLASS (code) == tcc_comparison)
8868 {
8869 tree base0, offset0, base1, offset1;
8870
8871 if (extract_array_ref (arg0, &base0, &offset0)
8872 && extract_array_ref (arg1, &base1, &offset1)
8873 && operand_equal_p (base0, base1, 0))
8874 {
8875 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
8876 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
8877 offset0 = NULL_TREE;
8878 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
8879 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
8880 offset1 = NULL_TREE;
8881 if (offset0 == NULL_TREE
8882 && offset1 == NULL_TREE)
8883 {
8884 offset0 = integer_zero_node;
8885 offset1 = integer_zero_node;
8886 }
8887 else if (offset0 == NULL_TREE)
8888 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8889 else if (offset1 == NULL_TREE)
8890 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8891
8892 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8893 return fold_build2 (code, type, offset0, offset1);
8894 }
8895 }
8896
8897 /* Transform comparisons of the form X +- C CMP X. */
8898 if ((code != EQ_EXPR && code != NE_EXPR)
8899 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8900 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8901 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8902 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8903 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8904 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8905 && !(flag_wrapv || flag_trapv))))
8906 {
8907 tree arg01 = TREE_OPERAND (arg0, 1);
8908 enum tree_code code0 = TREE_CODE (arg0);
8909 int is_positive;
8910
8911 if (TREE_CODE (arg01) == REAL_CST)
8912 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8913 else
8914 is_positive = tree_int_cst_sgn (arg01);
8915
8916 /* (X - c) > X becomes false. */
8917 if (code == GT_EXPR
8918 && ((code0 == MINUS_EXPR && is_positive >= 0)
8919 || (code0 == PLUS_EXPR && is_positive <= 0)))
8920 return constant_boolean_node (0, type);
8921
8922 /* Likewise (X + c) < X becomes false. */
8923 if (code == LT_EXPR
8924 && ((code0 == PLUS_EXPR && is_positive >= 0)
8925 || (code0 == MINUS_EXPR && is_positive <= 0)))
8926 return constant_boolean_node (0, type);
8927
8928 /* Convert (X - c) <= X to true. */
8929 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8930 && code == LE_EXPR
8931 && ((code0 == MINUS_EXPR && is_positive >= 0)
8932 || (code0 == PLUS_EXPR && is_positive <= 0)))
8933 return constant_boolean_node (1, type);
8934
8935 /* Convert (X + c) >= X to true. */
8936 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8937 && code == GE_EXPR
8938 && ((code0 == PLUS_EXPR && is_positive >= 0)
8939 || (code0 == MINUS_EXPR && is_positive <= 0)))
8940 return constant_boolean_node (1, type);
8941
8942 if (TREE_CODE (arg01) == INTEGER_CST)
8943 {
8944 /* Convert X + c > X and X - c < X to true for integers. */
8945 if (code == GT_EXPR
8946 && ((code0 == PLUS_EXPR && is_positive > 0)
8947 || (code0 == MINUS_EXPR && is_positive < 0)))
8948 return constant_boolean_node (1, type);
8949
8950 if (code == LT_EXPR
8951 && ((code0 == MINUS_EXPR && is_positive > 0)
8952 || (code0 == PLUS_EXPR && is_positive < 0)))
8953 return constant_boolean_node (1, type);
8954
8955 /* Convert X + c <= X and X - c >= X to false for integers. */
8956 if (code == LE_EXPR
8957 && ((code0 == PLUS_EXPR && is_positive > 0)
8958 || (code0 == MINUS_EXPR && is_positive < 0)))
8959 return constant_boolean_node (0, type);
8960
8961 if (code == GE_EXPR
8962 && ((code0 == MINUS_EXPR && is_positive > 0)
8963 || (code0 == PLUS_EXPR && is_positive < 0)))
8964 return constant_boolean_node (0, type);
8965 }
8966 }
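	  /* Worked examples (illustrative, signed integers without
	     -fwrapv/-ftrapv): (x - 1) > x folds to false, (x + 1) > x
	     folds to true, and (x - 1) <= x folds to true.  */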
8967
8968 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8969 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8970 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8971 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8972 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8973 && !(flag_wrapv || flag_trapv))
8974 && (TREE_CODE (arg1) == INTEGER_CST
8975 && !TREE_OVERFLOW (arg1)))
8976 {
8977 tree const1 = TREE_OPERAND (arg0, 1);
8978 tree const2 = arg1;
8979 tree variable = TREE_OPERAND (arg0, 0);
8980 tree lhs;
8981 int lhs_add;
8982 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8983
8984 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8985 TREE_TYPE (arg1), const2, const1);
8986 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8987 && (TREE_CODE (lhs) != INTEGER_CST
8988 || !TREE_OVERFLOW (lhs)))
8989 return fold_build2 (code, type, variable, lhs);
8990 }
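	  /* Worked example (illustrative): for signed x without
	     -fwrapv/-ftrapv, "x + 3 < 10" folds to "x < 7" because
	     10 - 3 can be computed without overflow.  */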
8991
8992 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8993 {
8994 tree targ0 = strip_float_extensions (arg0);
8995 tree targ1 = strip_float_extensions (arg1);
8996 tree newtype = TREE_TYPE (targ0);
8997
8998 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8999 newtype = TREE_TYPE (targ1);
9000
9001 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9002 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9003 return fold_build2 (code, type, fold_convert (newtype, targ0),
9004 fold_convert (newtype, targ1));
9005
9006 /* (-a) CMP (-b) -> b CMP a */
9007 if (TREE_CODE (arg0) == NEGATE_EXPR
9008 && TREE_CODE (arg1) == NEGATE_EXPR)
9009 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9010 TREE_OPERAND (arg0, 0));
9011
9012 if (TREE_CODE (arg1) == REAL_CST)
9013 {
9014 REAL_VALUE_TYPE cst;
9015 cst = TREE_REAL_CST (arg1);
9016
9017 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9018 if (TREE_CODE (arg0) == NEGATE_EXPR)
9019 return
9020 fold_build2 (swap_tree_comparison (code), type,
9021 TREE_OPERAND (arg0, 0),
9022 build_real (TREE_TYPE (arg1),
9023 REAL_VALUE_NEGATE (cst)));
9024
9025 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9026 /* a CMP (-0) -> a CMP 0 */
9027 if (REAL_VALUE_MINUS_ZERO (cst))
9028 return fold_build2 (code, type, arg0,
9029 build_real (TREE_TYPE (arg1), dconst0));
9030
9031 /* x != NaN is always true, other ops are always false. */
9032 if (REAL_VALUE_ISNAN (cst)
9033 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9034 {
9035 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9036 return omit_one_operand (type, tem, arg0);
9037 }
9038
9039 /* Fold comparisons against infinity. */
9040 if (REAL_VALUE_ISINF (cst))
9041 {
9042 tem = fold_inf_compare (code, type, arg0, arg1);
9043 if (tem != NULL_TREE)
9044 return tem;
9045 }
9046 }
9047
9048 /* If this is a comparison of a real constant with a PLUS_EXPR
9049 or a MINUS_EXPR of a real constant, we can convert it into a
9050 comparison with a revised real constant as long as no overflow
9051 occurs when unsafe_math_optimizations are enabled. */
9052 if (flag_unsafe_math_optimizations
9053 && TREE_CODE (arg1) == REAL_CST
9054 && (TREE_CODE (arg0) == PLUS_EXPR
9055 || TREE_CODE (arg0) == MINUS_EXPR)
9056 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9057 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9058 ? MINUS_EXPR : PLUS_EXPR,
9059 arg1, TREE_OPERAND (arg0, 1), 0))
9060 && ! TREE_CONSTANT_OVERFLOW (tem))
9061 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9062
9063 /* Likewise, we can simplify a comparison of a real constant with
9064 a MINUS_EXPR whose first operand is also a real constant, i.e.
9065 (c1 - x) < c2 becomes x > c1-c2. */
9066 if (flag_unsafe_math_optimizations
9067 && TREE_CODE (arg1) == REAL_CST
9068 && TREE_CODE (arg0) == MINUS_EXPR
9069 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9070 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9071 arg1, 0))
9072 && ! TREE_CONSTANT_OVERFLOW (tem))
9073 return fold_build2 (swap_tree_comparison (code), type,
9074 TREE_OPERAND (arg0, 1), tem);
9075
9076 /* Fold comparisons against built-in math functions. */
9077 if (TREE_CODE (arg1) == REAL_CST
9078 && flag_unsafe_math_optimizations
9079 && ! flag_errno_math)
9080 {
9081 enum built_in_function fcode = builtin_mathfn_code (arg0);
9082
9083 if (fcode != END_BUILTINS)
9084 {
9085 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9086 if (tem != NULL_TREE)
9087 return tem;
9088 }
9089 }
9090 }
9091
9092 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9093 if (TREE_CONSTANT (arg1)
9094 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9095 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9096 /* This optimization is invalid for ordered comparisons
9097 if CONST+INCR overflows or if foo+incr might overflow.
9098 This optimization is invalid for floating point due to rounding.
9099 For pointer types we assume overflow doesn't happen. */
9100 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9101 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9102 && (code == EQ_EXPR || code == NE_EXPR))))
9103 {
9104 tree varop, newconst;
9105
9106 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9107 {
9108 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9109 arg1, TREE_OPERAND (arg0, 1));
9110 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9111 TREE_OPERAND (arg0, 0),
9112 TREE_OPERAND (arg0, 1));
9113 }
9114 else
9115 {
9116 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9117 arg1, TREE_OPERAND (arg0, 1));
9118 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9119 TREE_OPERAND (arg0, 0),
9120 TREE_OPERAND (arg0, 1));
9121 }
9122
9123
9124 /* If VAROP is a reference to a bitfield, we must mask
9125 the constant by the width of the field. */
9126 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9127 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9128 && host_integerp (DECL_SIZE (TREE_OPERAND
9129 (TREE_OPERAND (varop, 0), 1)), 1))
9130 {
9131 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9132 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9133 tree folded_compare, shift;
9134
9135 /* First check whether the comparison would always come
9136 out the same. If we did not do this, the masking
9137 below could change the meaning of the comparison. */
9138 folded_compare = fold_build2 (code, type,
9139 TREE_OPERAND (varop, 0), arg1);
9140 if (integer_zerop (folded_compare)
9141 || integer_onep (folded_compare))
9142 return omit_one_operand (type, folded_compare, varop);
9143
9144 shift = build_int_cst (NULL_TREE,
9145 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9146 shift = fold_convert (TREE_TYPE (varop), shift);
9147 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9148 newconst, shift);
9149 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9150 newconst, shift);
9151 }
9152
9153 return fold_build2 (code, type, varop, newconst);
9154 }
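	  /* Worked example (illustrative): "x++ == 5" becomes
	     "++x == 6"; the constant absorbs the increment so the
	     updated variable can be compared directly.  */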
9155
9156 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9157 This transformation affects the cases which are handled in later
9158 optimizations involving comparisons with non-negative constants. */
9159 if (TREE_CODE (arg1) == INTEGER_CST
9160 && TREE_CODE (arg0) != INTEGER_CST
9161 && tree_int_cst_sgn (arg1) > 0)
9162 {
9163 switch (code)
9164 {
9165 case GE_EXPR:
9166 arg1 = const_binop (MINUS_EXPR, arg1,
9167 build_int_cst (TREE_TYPE (arg1), 1), 0);
9168 return fold_build2 (GT_EXPR, type, arg0,
9169 fold_convert (TREE_TYPE (arg0), arg1));
9170
9171 case LT_EXPR:
9172 arg1 = const_binop (MINUS_EXPR, arg1,
9173 build_int_cst (TREE_TYPE (arg1), 1), 0);
9174 return fold_build2 (LE_EXPR, type, arg0,
9175 fold_convert (TREE_TYPE (arg0), arg1));
9176
9177 default:
9178 break;
9179 }
9180 }
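      /* Illustration: "x >= 5" becomes "x > 4" and "x < 5" becomes
	 "x <= 4", the canonical forms that the transforms below
	 expect.  */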
9181
9182 /* Comparisons with the highest or lowest possible integer of
9183 the specified size will have known values. */
9184 {
9185 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9186
9187 if (TREE_CODE (arg1) == INTEGER_CST
9188 && ! TREE_CONSTANT_OVERFLOW (arg1)
9189 && width <= 2 * HOST_BITS_PER_WIDE_INT
9190 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9191 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9192 {
9193 HOST_WIDE_INT signed_max_hi;
9194 unsigned HOST_WIDE_INT signed_max_lo;
9195 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9196
9197 if (width <= HOST_BITS_PER_WIDE_INT)
9198 {
9199 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9200 - 1;
9201 signed_max_hi = 0;
9202 max_hi = 0;
9203
9204 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9205 {
9206 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9207 min_lo = 0;
9208 min_hi = 0;
9209 }
9210 else
9211 {
9212 max_lo = signed_max_lo;
9213 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9214 min_hi = -1;
9215 }
9216 }
9217 else
9218 {
9219 width -= HOST_BITS_PER_WIDE_INT;
9220 signed_max_lo = -1;
9221 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9222 - 1;
9223 max_lo = -1;
9224 min_lo = 0;
9225
9226 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9227 {
9228 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9229 min_hi = 0;
9230 }
9231 else
9232 {
9233 max_hi = signed_max_hi;
9234 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9235 }
9236 }
9237
9238 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9239 && TREE_INT_CST_LOW (arg1) == max_lo)
9240 switch (code)
9241 {
9242 case GT_EXPR:
9243 return omit_one_operand (type, integer_zero_node, arg0);
9244
9245 case GE_EXPR:
9246 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9247
9248 case LE_EXPR:
9249 return omit_one_operand (type, integer_one_node, arg0);
9250
9251 case LT_EXPR:
9252 return fold_build2 (NE_EXPR, type, arg0, arg1);
9253
9254 /* The GE_EXPR and LT_EXPR cases above are not normally
9255 reached because of previous transformations. */
9256
9257 default:
9258 break;
9259 }
9260 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9261 == max_hi
9262 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9263 switch (code)
9264 {
9265 case GT_EXPR:
9266 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9267 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9268 case LE_EXPR:
9269 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9270 return fold_build2 (NE_EXPR, type, arg0, arg1);
9271 default:
9272 break;
9273 }
9274 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9275 == min_hi
9276 && TREE_INT_CST_LOW (arg1) == min_lo)
9277 switch (code)
9278 {
9279 case LT_EXPR:
9280 return omit_one_operand (type, integer_zero_node, arg0);
9281
9282 case LE_EXPR:
9283 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9284
9285 case GE_EXPR:
9286 return omit_one_operand (type, integer_one_node, arg0);
9287
9288 case GT_EXPR:
9289 return fold_build2 (NE_EXPR, type, op0, op1);
9290
9291 default:
9292 break;
9293 }
9294 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9295 == min_hi
9296 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9297 switch (code)
9298 {
9299 case GE_EXPR:
9300 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9301 return fold_build2 (NE_EXPR, type, arg0, arg1);
9302 case LT_EXPR:
9303 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9304 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9305 default:
9306 break;
9307 }
9308
9309 else if (!in_gimple_form
9310 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9311 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9312 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9313 /* signed_type does not work on pointer types. */
9314 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9315 {
9316 /* The following case also applies to X < signed_max+1
9317 and X >= signed_max+1 because of previous transformations. */
9318 if (code == LE_EXPR || code == GT_EXPR)
9319 {
9320 tree st0, st1;
9321 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9322 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9323 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9324 type, fold_convert (st0, arg0),
9325 build_int_cst (st1, 0));
9326 }
9327 }
9328 }
9329 }
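      /* Worked examples (illustrative): if the comparison is performed
	 in an 8-bit unsigned type, x > 255 folds to false, x <= 255 to
	 true, x >= 255 to x == 255, and x < 255 to x != 255.  */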
9330
9331 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9332 a MINUS_EXPR of a constant, we can convert it into a comparison with
9333 a revised constant as long as no overflow occurs. */
9334 if ((code == EQ_EXPR || code == NE_EXPR)
9335 && TREE_CODE (arg1) == INTEGER_CST
9336 && (TREE_CODE (arg0) == PLUS_EXPR
9337 || TREE_CODE (arg0) == MINUS_EXPR)
9338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9339 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9340 ? MINUS_EXPR : PLUS_EXPR,
9341 arg1, TREE_OPERAND (arg0, 1), 0))
9342 && ! TREE_CONSTANT_OVERFLOW (tem))
9343 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9344
9345 /* Similarly for a NEGATE_EXPR. */
9346 else if ((code == EQ_EXPR || code == NE_EXPR)
9347 && TREE_CODE (arg0) == NEGATE_EXPR
9348 && TREE_CODE (arg1) == INTEGER_CST
9349 && 0 != (tem = negate_expr (arg1))
9350 && TREE_CODE (tem) == INTEGER_CST
9351 && ! TREE_CONSTANT_OVERFLOW (tem))
9352 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9353
9354 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9355 for !=. Don't do this for ordered comparisons due to overflow. */
9356 else if ((code == NE_EXPR || code == EQ_EXPR)
9357 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9358 return fold_build2 (code, type,
9359 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9360
9361 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9362 && (TREE_CODE (arg0) == NOP_EXPR
9363 || TREE_CODE (arg0) == CONVERT_EXPR))
9364 {
9365 /* If we are widening one operand of an integer comparison,
9366 see if the other operand is similarly being widened. Perhaps we
9367 can do the comparison in the narrower type. */
9368 tem = fold_widened_comparison (code, type, arg0, arg1);
9369 if (tem)
9370 return tem;
9371
9372 /* Or if we are changing signedness. */
9373 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9374 if (tem)
9375 return tem;
9376 }
9377
9378 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9379 constant, we can simplify it. */
9380 else if (TREE_CODE (arg1) == INTEGER_CST
9381 && (TREE_CODE (arg0) == MIN_EXPR
9382 || TREE_CODE (arg0) == MAX_EXPR)
9383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9384 {
9385 tem = optimize_minmax_comparison (code, type, op0, op1);
9386 if (tem)
9387 return tem;
9388
9389 return NULL_TREE;
9390 }
9391
9392 /* If we are comparing an ABS_EXPR with a constant, we can
9393 convert all the cases into explicit comparisons, but they may
9394 well not be faster than doing the ABS and one comparison.
9395 But ABS (X) <= C is a range comparison, which becomes a subtraction
9396 and a comparison, and is probably faster. */
9397 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9398 && TREE_CODE (arg0) == ABS_EXPR
9399 && ! TREE_SIDE_EFFECTS (arg0)
9400 && (0 != (tem = negate_expr (arg1)))
9401 && TREE_CODE (tem) == INTEGER_CST
9402 && ! TREE_CONSTANT_OVERFLOW (tem))
9403 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9404 build2 (GE_EXPR, type,
9405 TREE_OPERAND (arg0, 0), tem),
9406 build2 (LE_EXPR, type,
9407 TREE_OPERAND (arg0, 0), arg1));
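      /* Illustration: "abs(x) <= 5" folds to "x >= -5 && x <= 5",
	 replacing the ABS by a range test.  */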
9408
9409 /* Convert ABS_EXPR<x> >= 0 to true. */
9410 else if (code == GE_EXPR
9411 && tree_expr_nonnegative_p (arg0)
9412 && (integer_zerop (arg1)
9413 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9414 && real_zerop (arg1))))
9415 return omit_one_operand (type, integer_one_node, arg0);
9416
9417 /* Convert ABS_EXPR<x> < 0 to false. */
9418 else if (code == LT_EXPR
9419 && tree_expr_nonnegative_p (arg0)
9420 && (integer_zerop (arg1) || real_zerop (arg1)))
9421 return omit_one_operand (type, integer_zero_node, arg0);
9422
9423 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9424 else if ((code == EQ_EXPR || code == NE_EXPR)
9425 && TREE_CODE (arg0) == ABS_EXPR
9426 && (integer_zerop (arg1) || real_zerop (arg1)))
9427 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9428
9429 /* If this is an EQ or NE comparison with zero and ARG0 is
9430 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9431 two operations, but the latter can be done in one less insn
9432 on machines that have only two-operand insns or on which a
9433 constant cannot be the first operand. */
9434 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9435 && TREE_CODE (arg0) == BIT_AND_EXPR)
9436 {
9437 tree arg00 = TREE_OPERAND (arg0, 0);
9438 tree arg01 = TREE_OPERAND (arg0, 1);
9439 if (TREE_CODE (arg00) == LSHIFT_EXPR
9440 && integer_onep (TREE_OPERAND (arg00, 0)))
9441 return
9442 fold_build2 (code, type,
9443 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9444 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9445 arg01, TREE_OPERAND (arg00, 1)),
9446 fold_convert (TREE_TYPE (arg0),
9447 integer_one_node)),
9448 arg1);
9449 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9450 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9451 return
9452 fold_build2 (code, type,
9453 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9454 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9455 arg00, TREE_OPERAND (arg01, 1)),
9456 fold_convert (TREE_TYPE (arg0),
9457 integer_one_node)),
9458 arg1);
9459 }
9460
9461 /* If this is an NE or EQ comparison of zero against the result of a
9462 signed MOD operation whose second operand is a power of 2, make
9463 the MOD operation unsigned since it is simpler and equivalent. */
9464 if ((code == NE_EXPR || code == EQ_EXPR)
9465 && integer_zerop (arg1)
9466 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9467 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9468 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9469 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9470 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9471 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9472 {
9473 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9474 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9475 fold_convert (newtype,
9476 TREE_OPERAND (arg0, 0)),
9477 fold_convert (newtype,
9478 TREE_OPERAND (arg0, 1)));
9479
9480 return fold_build2 (code, type, newmod,
9481 fold_convert (newtype, arg1));
9482 }
9483
9484 /* If this is an NE comparison of zero with an AND of one, remove the
9485 comparison since the AND will give the correct value. */
9486 if (code == NE_EXPR && integer_zerop (arg1)
9487 && TREE_CODE (arg0) == BIT_AND_EXPR
9488 && integer_onep (TREE_OPERAND (arg0, 1)))
9489 return fold_convert (type, arg0);
9490
9491 /* If we have (A & C) == C where C is a power of 2, convert this into
9492 (A & C) != 0. Similarly for NE_EXPR. */
9493 if ((code == EQ_EXPR || code == NE_EXPR)
9494 && TREE_CODE (arg0) == BIT_AND_EXPR
9495 && integer_pow2p (TREE_OPERAND (arg0, 1))
9496 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9497 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9498 arg0, fold_convert (TREE_TYPE (arg0),
9499 integer_zero_node));
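      /* Illustration: with C a power of two, "(x & 8) == 8" folds to
	 "(x & 8) != 0", and "(x & 8) != 8" to "(x & 8) == 0".  */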
9500
9501 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9502 bit, then fold the expression into A < 0 or A >= 0. */
9503 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9504 if (tem)
9505 return tem;
9506
9507 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9508 Similarly for NE_EXPR. */
9509 if ((code == EQ_EXPR || code == NE_EXPR)
9510 && TREE_CODE (arg0) == BIT_AND_EXPR
9511 && TREE_CODE (arg1) == INTEGER_CST
9512 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9513 {
9514 tree notc = fold_build1 (BIT_NOT_EXPR,
9515 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9516 TREE_OPERAND (arg0, 1));
9517 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9518 arg1, notc);
9519 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9520 if (integer_nonzerop (dandnotc))
9521 return omit_one_operand (type, rslt, arg0);
9522 }
9523
9524 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9525 Similarly for NE_EXPR. */
9526 if ((code == EQ_EXPR || code == NE_EXPR)
9527 && TREE_CODE (arg0) == BIT_IOR_EXPR
9528 && TREE_CODE (arg1) == INTEGER_CST
9529 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9530 {
9531 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9532 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9533 TREE_OPERAND (arg0, 1), notd);
9534 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9535 if (integer_nonzerop (candnotd))
9536 return omit_one_operand (type, rslt, arg0);
9537 }
9538
9539 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9540 and similarly for >= into !=. */
9541 if ((code == LT_EXPR || code == GE_EXPR)
9542 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9543 && TREE_CODE (arg1) == LSHIFT_EXPR
9544 && integer_onep (TREE_OPERAND (arg1, 0)))
9545 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9546 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9547 TREE_OPERAND (arg1, 1)),
9548 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9549
9550 else if ((code == LT_EXPR || code == GE_EXPR)
9551 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9552 && (TREE_CODE (arg1) == NOP_EXPR
9553 || TREE_CODE (arg1) == CONVERT_EXPR)
9554 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9555 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9556 return
9557 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9558 fold_convert (TREE_TYPE (arg0),
9559 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9560 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9561 1))),
9562 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9563
9564 /* Simplify comparison of something with itself. (For IEEE
9565 floating-point, we can only do some of these simplifications.) */
9566 if (operand_equal_p (arg0, arg1, 0))
9567 {
9568 switch (code)
9569 {
9570 case EQ_EXPR:
9571 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9572 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9573 return constant_boolean_node (1, type);
9574 break;
9575
9576 case GE_EXPR:
9577 case LE_EXPR:
9578 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9579 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9580 return constant_boolean_node (1, type);
9581 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9582
9583 case NE_EXPR:
9584 /* For NE, we can only do this simplification if integer
9585 or we don't honor IEEE floating point NaNs. */
9586 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9587 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9588 break;
9589 /* ... fall through ... */
9590 case GT_EXPR:
9591 case LT_EXPR:
9592 return constant_boolean_node (0, type);
9593 default:
9594 gcc_unreachable ();
9595 }
9596 }
9597
9598 /* If we are comparing an expression that just has comparisons
9599 of two integer values, arithmetic expressions of those comparisons,
9600 and constants, we can simplify it. There are only three cases
9601 to check: the two values can either be equal, the first can be
9602 greater, or the second can be greater. Fold the expression for
9603 those three values. Since each value must be 0 or 1, we have
9604 eight possibilities, each of which corresponds to the constant 0
9605 or 1 or one of the six possible comparisons.
9606
9607 This handles common cases like (a > b) == 0 but also handles
9608 expressions like ((x > y) - (y > x)) > 0, which supposedly
9609 occur in macroized code. */
9610
9611 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9612 {
9613 tree cval1 = 0, cval2 = 0;
9614 int save_p = 0;
9615
9616 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9617 /* Don't handle degenerate cases here; they should already
9618 have been handled anyway. */
9619 && cval1 != 0 && cval2 != 0
9620 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9621 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9622 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9623 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9624 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9625 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9626 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9627 {
9628 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9629 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9630
9631 /* We can't just pass T to eval_subst in case cval1 or cval2
9632 was the same as ARG1. */
9633
9634 tree high_result
9635 = fold_build2 (code, type,
9636 eval_subst (arg0, cval1, maxval,
9637 cval2, minval),
9638 arg1);
9639 tree equal_result
9640 = fold_build2 (code, type,
9641 eval_subst (arg0, cval1, maxval,
9642 cval2, maxval),
9643 arg1);
9644 tree low_result
9645 = fold_build2 (code, type,
9646 eval_subst (arg0, cval1, minval,
9647 cval2, maxval),
9648 arg1);
9649
9650 /* All three of these results should be 0 or 1. Confirm they
9651 are. Then use those values to select the proper code
9652 to use. */
9653
9654 if ((integer_zerop (high_result)
9655 || integer_onep (high_result))
9656 && (integer_zerop (equal_result)
9657 || integer_onep (equal_result))
9658 && (integer_zerop (low_result)
9659 || integer_onep (low_result)))
9660 {
9661 /* Make a 3-bit mask with the high-order bit being the
9662 value for `>', the next for `=', and the low for `<'. */
9663 switch ((integer_onep (high_result) * 4)
9664 + (integer_onep (equal_result) * 2)
9665 + integer_onep (low_result))
9666 {
9667 case 0:
9668 /* Always false. */
9669 return omit_one_operand (type, integer_zero_node, arg0);
9670 case 1:
9671 code = LT_EXPR;
9672 break;
9673 case 2:
9674 code = EQ_EXPR;
9675 break;
9676 case 3:
9677 code = LE_EXPR;
9678 break;
9679 case 4:
9680 code = GT_EXPR;
9681 break;
9682 case 5:
9683 code = NE_EXPR;
9684 break;
9685 case 6:
9686 code = GE_EXPR;
9687 break;
9688 case 7:
9689 /* Always true. */
9690 return omit_one_operand (type, integer_one_node, arg0);
9691 }
9692
9693 if (save_p)
9694 return save_expr (build2 (code, type, cval1, cval2));
9695 else
9696 return fold_build2 (code, type, cval1, cval2);
9697 }
9698 }
9699 }
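      /* Worked example (illustrative): for ((x > y) - (y > x)) > 0 the
	 three trial evaluations yield 1, 0 and 0 for the `>', `=' and
	 `<' cases, giving mask 4, so the expression folds to x > y.  */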
9700
9701 /* If this is a comparison of a field, we may be able to simplify it. */
9702 if (((TREE_CODE (arg0) == COMPONENT_REF
9703 && lang_hooks.can_use_bit_fields_p ())
9704 || TREE_CODE (arg0) == BIT_FIELD_REF)
9705 && (code == EQ_EXPR || code == NE_EXPR)
9706 /* Handle the constant case even without -O
9707 to make sure the warnings are given. */
9708 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9709 {
9710 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9711 if (t1)
9712 return t1;
9713 }
9714
9715 /* Fold a comparison of the address of COMPONENT_REFs with the same
9716 type and component to a comparison of the address of the base
9717 object. In short, &x->a OP &y->a to x OP y and
9718 &x->a OP &y.a to x OP &y */
9719 if (TREE_CODE (arg0) == ADDR_EXPR
9720 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9721 && TREE_CODE (arg1) == ADDR_EXPR
9722 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9723 {
9724 tree cref0 = TREE_OPERAND (arg0, 0);
9725 tree cref1 = TREE_OPERAND (arg1, 0);
9726 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9727 {
9728 tree op0 = TREE_OPERAND (cref0, 0);
9729 tree op1 = TREE_OPERAND (cref1, 0);
9730 return fold_build2 (code, type,
9731 build_fold_addr_expr (op0),
9732 build_fold_addr_expr (op1));
9733 }
9734 }
9735
9736 /* Optimize comparisons of strlen vs zero to a compare of the
9737 first character of the string vs zero. To wit,
9738 strlen(ptr) == 0 => *ptr == 0
9739 strlen(ptr) != 0 => *ptr != 0
9740 Other cases should reduce to one of these two (or a constant)
9741 due to the return value of strlen being unsigned. */
9742 if ((code == EQ_EXPR || code == NE_EXPR)
9743 && integer_zerop (arg1)
9744 && TREE_CODE (arg0) == CALL_EXPR)
9745 {
9746 tree fndecl = get_callee_fndecl (arg0);
9747 tree arglist;
9748
9749 if (fndecl
9750 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9751 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9752 && (arglist = TREE_OPERAND (arg0, 1))
9753 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9754 && ! TREE_CHAIN (arglist))
9755 {
9756 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9757 return fold_build2 (code, type, iref,
9758 build_int_cst (TREE_TYPE (iref), 0));
9759 }
9760 }
9761
9762 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9763 into a single range test. */
9764 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9765 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9766 && TREE_CODE (arg1) == INTEGER_CST
9767 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9768 && !integer_zerop (TREE_OPERAND (arg0, 1))
9769 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9770 && !TREE_OVERFLOW (arg1))
9771 {
9772 t1 = fold_div_compare (code, type, arg0, arg1);
9773 if (t1 != NULL_TREE)
9774 return t1;
9775 }
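      /* Illustration: with truncating division, "x / 4 == 3" holds
	 exactly when 12 <= x <= 15, so the comparison can fold into a
	 single range test.  */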
9776
9777 if ((code == EQ_EXPR || code == NE_EXPR)
9778 && integer_zerop (arg1)
9779 && tree_expr_nonzero_p (arg0))
9780 {
9781 tree res = constant_boolean_node (code==NE_EXPR, type);
9782 return omit_one_operand (type, res, arg0);
9783 }
9784
9785 t1 = fold_relational_const (code, type, arg0, arg1);
9786 return t1 == NULL_TREE ? NULL_TREE : t1;
9787
9788 case UNORDERED_EXPR:
9789 case ORDERED_EXPR:
9790 case UNLT_EXPR:
9791 case UNLE_EXPR:
9792 case UNGT_EXPR:
9793 case UNGE_EXPR:
9794 case UNEQ_EXPR:
9795 case LTGT_EXPR:
9796 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9797 {
9798 t1 = fold_relational_const (code, type, arg0, arg1);
9799 if (t1 != NULL_TREE)
9800 return t1;
9801 }
9802
9803 /* If the first operand is NaN, the result is constant. */
9804 if (TREE_CODE (arg0) == REAL_CST
9805 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9806 && (code != LTGT_EXPR || ! flag_trapping_math))
9807 {
9808 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9809 ? integer_zero_node
9810 : integer_one_node;
9811 return omit_one_operand (type, t1, arg1);
9812 }
9813
9814 /* If the second operand is NaN, the result is constant. */
9815 if (TREE_CODE (arg1) == REAL_CST
9816 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9817 && (code != LTGT_EXPR || ! flag_trapping_math))
9818 {
9819 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9820 ? integer_zero_node
9821 : integer_one_node;
9822 return omit_one_operand (type, t1, arg0);
9823 }
9824
9825 /* Simplify unordered comparison of something with itself. */
9826 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9827 && operand_equal_p (arg0, arg1, 0))
9828 return constant_boolean_node (1, type);
9829
9830 if (code == LTGT_EXPR
9831 && !flag_trapping_math
9832 && operand_equal_p (arg0, arg1, 0))
9833 return constant_boolean_node (0, type);
9834
9835 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9836 {
9837 tree targ0 = strip_float_extensions (arg0);
9838 tree targ1 = strip_float_extensions (arg1);
9839 tree newtype = TREE_TYPE (targ0);
9840
9841 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9842 newtype = TREE_TYPE (targ1);
9843
9844 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9845 return fold_build2 (code, type, fold_convert (newtype, targ0),
9846 fold_convert (newtype, targ1));
9847 }
9848
9849 return NULL_TREE;
9850
9851 case COMPOUND_EXPR:
9852 /* When pedantic, a compound expression can be neither an lvalue
9853 nor an integer constant expression. */
9854 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9855 return NULL_TREE;
9856 /* Don't let (0, 0) be a null pointer constant. */
9857 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9858 : fold_convert (type, arg1);
9859 return pedantic_non_lvalue (tem);
9860
9861 case COMPLEX_EXPR:
9862 if (wins)
9863 return build_complex (type, arg0, arg1);
9864 return NULL_TREE;
9865
9866 case ASSERT_EXPR:
9867 /* An ASSERT_EXPR should never be passed to fold_binary. */
9868 gcc_unreachable ();
9869
9870 default:
9871 return NULL_TREE;
9872 } /* switch (code) */
9873 }
9874
9875 /* Callback for walk_tree, looking for LABEL_EXPR.
9876 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
9877 Do not check the sub-tree of GOTO_EXPR. */
9878
9879 static tree
9880 contains_label_1 (tree *tp,
9881 int *walk_subtrees,
9882 void *data ATTRIBUTE_UNUSED)
9883 {
9884 switch (TREE_CODE (*tp))
9885 {
9886 case LABEL_EXPR:
9887 return *tp;
9888 case GOTO_EXPR:
9889 *walk_subtrees = 0;
9890 /* no break */
9891 default:
9892 return NULL_TREE;
9893 }
9894 }
9895
9896 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
9897 accessible from outside the sub-tree. Returns true if such a label
9898 is found, false otherwise. */
9899
9900 static bool
9901 contains_label_p (tree st)
9902 {
9903 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9904 }
9905
9906 /* Fold a ternary expression of code CODE and type TYPE with operands
9907 OP0, OP1, and OP2. Return the folded expression if folding is
9908 successful. Otherwise, return NULL_TREE. */
9909
9910 tree
9911 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9912 {
9913 tree tem;
9914 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9915 enum tree_code_class kind = TREE_CODE_CLASS (code);
9916
9917 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9918 && TREE_CODE_LENGTH (code) == 3);
9919
9920 /* Strip any conversions that don't change the mode. This is safe
9921 for every expression, except for a comparison expression because
9922 its signedness is derived from its operands. So, in the latter
9923 case, only strip conversions that don't change the signedness.
9924
9925 Note that this is done as an internal manipulation within the
9926 constant folder, in order to find the simplest representation of
9927 the arguments so that their form can be studied. In any case,
9928 the appropriate type conversions should be put back in the tree
9929 that will get out of the constant folder. */
9930 if (op0)
9931 {
9932 arg0 = op0;
9933 STRIP_NOPS (arg0);
9934 }
9935
9936 if (op1)
9937 {
9938 arg1 = op1;
9939 STRIP_NOPS (arg1);
9940 }
9941
9942 switch (code)
9943 {
9944 case COMPONENT_REF:
9945 if (TREE_CODE (arg0) == CONSTRUCTOR
9946 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9947 {
9948 unsigned HOST_WIDE_INT idx;
9949 tree field, value;
9950 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9951 if (field == arg1)
9952 return value;
9953 }
9954 return NULL_TREE;
9955
9956 case COND_EXPR:
9957 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9958 so all simple results must be passed through pedantic_non_lvalue. */
9959 if (TREE_CODE (arg0) == INTEGER_CST)
9960 {
9961 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9962 tem = integer_zerop (arg0) ? op2 : op1;
9963 /* Only optimize constant conditions when the selected branch
9964 has the same type as the COND_EXPR. This avoids optimizing
9965 away "c ? x : throw", where the throw has a void type.
9966 Also avoid throwing away an unused operand that contains a label. */
9967 if ((!TREE_SIDE_EFFECTS (unused_op)
9968 || !contains_label_p (unused_op))
9969 && (! VOID_TYPE_P (TREE_TYPE (tem))
9970 || VOID_TYPE_P (type)))
9971 return pedantic_non_lvalue (tem);
9972 return NULL_TREE;
9973 }
9974 if (operand_equal_p (arg1, op2, 0))
9975 return pedantic_omit_one_operand (type, arg1, arg0);
9976
9977 /* If we have A op B ? A : C, we may be able to convert this to a
9978 simpler expression, depending on the operation and the values
9979 of B and C. Signed zeros prevent all of these transformations,
9980 for reasons given above each one.
9981
9982 Also try swapping the arguments and inverting the conditional. */
9983 if (COMPARISON_CLASS_P (arg0)
9984 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9985 arg1, TREE_OPERAND (arg0, 1))
9986 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9987 {
9988 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9989 if (tem)
9990 return tem;
9991 }
9992
9993 if (COMPARISON_CLASS_P (arg0)
9994 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9995 op2,
9996 TREE_OPERAND (arg0, 1))
9997 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9998 {
9999 tem = invert_truthvalue (arg0);
10000 if (COMPARISON_CLASS_P (tem))
10001 {
10002 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10003 if (tem)
10004 return tem;
10005 }
10006 }
10007
10008 /* If the second operand is simpler than the third, swap them
10009 since that produces better jump optimization results. */
10010 if (truth_value_p (TREE_CODE (arg0))
10011 && tree_swap_operands_p (op1, op2, false))
10012 {
10013 /* See if this can be inverted. If it can't, possibly because
10014 it was a floating-point inequality comparison, don't do
10015 anything. */
10016 tem = invert_truthvalue (arg0);
10017
10018 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10019 return fold_build3 (code, type, tem, op2, op1);
10020 }
10021
10022 /* Convert A ? 1 : 0 to simply A. */
10023 if (integer_onep (op1)
10024 && integer_zerop (op2)
10025 /* If we try to convert OP0 to our type, the
10026 call to fold will try to move the conversion inside
10027 a COND, which will recurse. In that case, the COND_EXPR
10028 is probably the best choice, so leave it alone. */
10029 && type == TREE_TYPE (arg0))
10030 return pedantic_non_lvalue (arg0);
10031
10032 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10033 over COND_EXPR in cases such as floating point comparisons. */
10034 if (integer_zerop (op1)
10035 && integer_onep (op2)
10036 && truth_value_p (TREE_CODE (arg0)))
10037 return pedantic_non_lvalue (fold_convert (type,
10038 invert_truthvalue (arg0)));
10039
10040 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10041 if (TREE_CODE (arg0) == LT_EXPR
10042 && integer_zerop (TREE_OPERAND (arg0, 1))
10043 && integer_zerop (op2)
10044 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10045 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10046 TREE_TYPE (tem), tem, arg1));
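	  /* Illustration (schematic): for a 32-bit int a,
	     "a < 0 ? sign_bit : 0" with sign_bit == 0x80000000 folds
	     to "a & 0x80000000".  */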
10047
10048 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10049 already handled above. */
10050 if (TREE_CODE (arg0) == BIT_AND_EXPR
10051 && integer_onep (TREE_OPERAND (arg0, 1))
10052 && integer_zerop (op2)
10053 && integer_pow2p (arg1))
10054 {
10055 tree tem = TREE_OPERAND (arg0, 0);
10056 STRIP_NOPS (tem);
10057 if (TREE_CODE (tem) == RSHIFT_EXPR
10058 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10059 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10060 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10061 return fold_build2 (BIT_AND_EXPR, type,
10062 TREE_OPERAND (tem, 0), arg1);
10063 }
10064
10065 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10066 is probably obsolete because the first operand should be a
10067 truth value (that's why we have the two cases above), but let's
10068 leave it in until we can confirm this for all front-ends. */
10069 if (integer_zerop (op2)
10070 && TREE_CODE (arg0) == NE_EXPR
10071 && integer_zerop (TREE_OPERAND (arg0, 1))
10072 && integer_pow2p (arg1)
10073 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10074 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10075 arg1, OEP_ONLY_CONST))
10076 return pedantic_non_lvalue (fold_convert (type,
10077 TREE_OPERAND (arg0, 0)));
10078
10079 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10080 if (integer_zerop (op2)
10081 && truth_value_p (TREE_CODE (arg0))
10082 && truth_value_p (TREE_CODE (arg1)))
10083 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10084
10085 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10086 if (integer_onep (op2)
10087 && truth_value_p (TREE_CODE (arg0))
10088 && truth_value_p (TREE_CODE (arg1)))
10089 {
10090 /* Only perform transformation if ARG0 is easily inverted. */
10091 tem = invert_truthvalue (arg0);
10092 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10093 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10094 }
10095
10096 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10097 if (integer_zerop (arg1)
10098 && truth_value_p (TREE_CODE (arg0))
10099 && truth_value_p (TREE_CODE (op2)))
10100 {
10101 /* Only perform transformation if ARG0 is easily inverted. */
10102 tem = invert_truthvalue (arg0);
10103 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10104 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10105 }
10106
10107 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10108 if (integer_onep (arg1)
10109 && truth_value_p (TREE_CODE (arg0))
10110 && truth_value_p (TREE_CODE (op2)))
10111 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10112
10113 return NULL_TREE;
10114
10115 case CALL_EXPR:
10116 /* Check for a built-in function. */
10117 if (TREE_CODE (op0) == ADDR_EXPR
10118 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10119 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10120 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10121 return NULL_TREE;
10122
10123 case BIT_FIELD_REF:
10124 if (TREE_CODE (arg0) == VECTOR_CST
10125 && type == TREE_TYPE (TREE_TYPE (arg0))
10126 && host_integerp (arg1, 1)
10127 && host_integerp (op2, 1))
10128 {
10129 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10130 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10131
10132 if (width != 0
10133 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10134 && (idx % width) == 0
10135 && (idx = idx / width)
10136 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10137 {
10138 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10139 while (idx-- > 0 && elements)
10140 elements = TREE_CHAIN (elements);
10141 if (elements)
10142 return TREE_VALUE (elements);
10143 else
10144 return fold_convert (type, integer_zero_node);
10145 }
10146 }
10147 return NULL_TREE;
10148
10149 default:
10150 return NULL_TREE;
10151 } /* switch (code) */
10152 }
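
/* Two illustrations of the COND_EXPR cases above (sketches; assume a
   32-bit int target).  The C expression

     a < 0 ? (int) 0x80000000 : 0

   matches the "A < 0 ? <sign bit of A> : 0" pattern and is rewritten
   to a single BIT_AND_EXPR, and

     (x > 0) ? (y > 0) : 0

   has truth values in all the right places, so it becomes
   (x > 0) && (y > 0) via TRUTH_ANDIF_EXPR.  */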
10153
10154 /* Perform constant folding and related simplification of EXPR.
10155 The related simplifications include x*1 => x, x*0 => 0, etc.,
10156 and application of the associative law.
10157 NOP_EXPR conversions may be removed freely (as long as we
10158 are careful not to change the type of the overall expression).
10159 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10160 but we can constant-fold them if they have constant operands. */
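
/* A minimal usage sketch for fold (hypothetical caller; VAR stands
   for some previously built operand tree):

     tree t = build2 (PLUS_EXPR, integer_type_node,
                      var, integer_zero_node);
     t = fold (t);

   fold recognizes x + 0 and hands back VAR (possibly wrapped in a
   NON_LVALUE_EXPR so the result is not mistaken for an lvalue).  It
   never modifies EXPR in place: the result is either the original
   tree or a newly built, simplified one.  */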
10161
10162 #ifdef ENABLE_FOLD_CHECKING
10163 # define fold(x) fold_1 (x)
10164 static tree fold_1 (tree);
10165 static
10166 #endif
10167 tree
10168 fold (tree expr)
10169 {
10170 const tree t = expr;
10171 enum tree_code code = TREE_CODE (t);
10172 enum tree_code_class kind = TREE_CODE_CLASS (code);
10173 tree tem;
10174
10175 /* Return right away if a constant. */
10176 if (kind == tcc_constant)
10177 return t;
10178
10179 if (IS_EXPR_CODE_CLASS (kind))
10180 {
10181 tree type = TREE_TYPE (t);
10182 tree op0, op1, op2;
10183
10184 switch (TREE_CODE_LENGTH (code))
10185 {
10186 case 1:
10187 op0 = TREE_OPERAND (t, 0);
10188 tem = fold_unary (code, type, op0);
10189 return tem ? tem : expr;
10190 case 2:
10191 op0 = TREE_OPERAND (t, 0);
10192 op1 = TREE_OPERAND (t, 1);
10193 tem = fold_binary (code, type, op0, op1);
10194 return tem ? tem : expr;
10195 case 3:
10196 op0 = TREE_OPERAND (t, 0);
10197 op1 = TREE_OPERAND (t, 1);
10198 op2 = TREE_OPERAND (t, 2);
10199 tem = fold_ternary (code, type, op0, op1, op2);
10200 return tem ? tem : expr;
10201 default:
10202 break;
10203 }
10204 }
10205
10206 switch (code)
10207 {
10208 case CONST_DECL:
10209 return fold (DECL_INITIAL (t));
10210
10211 default:
10212 return t;
10213 } /* switch (code) */
10214 }
10215
10216 #ifdef ENABLE_FOLD_CHECKING
10217 #undef fold
10218
10219 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10220 static void fold_check_failed (tree, tree);
10221 void print_fold_checksum (tree);
10222
10223 /* When --enable-checking=fold is in effect, compute a digest of EXPR
10224 before and after the actual fold call, to verify that fold did not
10225 accidentally change the original EXPR. */
10226
10227 tree
10228 fold (tree expr)
10229 {
10230 tree ret;
10231 struct md5_ctx ctx;
10232 unsigned char checksum_before[16], checksum_after[16];
10233 htab_t ht;
10234
10235 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10236 md5_init_ctx (&ctx);
10237 fold_checksum_tree (expr, &ctx, ht);
10238 md5_finish_ctx (&ctx, checksum_before);
10239 htab_empty (ht);
10240
10241 ret = fold_1 (expr);
10242
10243 md5_init_ctx (&ctx);
10244 fold_checksum_tree (expr, &ctx, ht);
10245 md5_finish_ctx (&ctx, checksum_after);
10246 htab_delete (ht);
10247
10248 if (memcmp (checksum_before, checksum_after, 16))
10249 fold_check_failed (expr, ret);
10250
10251 return ret;
10252 }
10253
10254 void
10255 print_fold_checksum (tree expr)
10256 {
10257 struct md5_ctx ctx;
10258 unsigned char checksum[16], cnt;
10259 htab_t ht;
10260
10261 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10262 md5_init_ctx (&ctx);
10263 fold_checksum_tree (expr, &ctx, ht);
10264 md5_finish_ctx (&ctx, checksum);
10265 htab_delete (ht);
10266 for (cnt = 0; cnt < 16; ++cnt)
10267 fprintf (stderr, "%02x", checksum[cnt]);
10268 putc ('\n', stderr);
10269 }
10270
10271 static void
10272 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10273 {
10274 internal_error ("fold check: original tree changed by fold");
10275 }
10276
10277 static void
10278 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10279 {
10280 void **slot;
10281 enum tree_code code;
10282 char buf[sizeof (struct tree_function_decl)];
10283 int i, len;
10284
10285 recursive_label:
10286
10287 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10288 <= sizeof (struct tree_function_decl))
10289 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10290 if (expr == NULL)
10291 return;
10292 slot = htab_find_slot (ht, expr, INSERT);
10293 if (*slot != NULL)
10294 return;
10295 *slot = expr;
10296 code = TREE_CODE (expr);
10297 if (TREE_CODE_CLASS (code) == tcc_declaration
10298 && DECL_ASSEMBLER_NAME_SET_P (expr))
10299 {
10300 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10301 memcpy (buf, expr, tree_size (expr));
10302 expr = (tree) buf;
10303 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10304 }
10305 else if (TREE_CODE_CLASS (code) == tcc_type
10306 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10307 || TYPE_CACHED_VALUES_P (expr)
10308 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10309 {
10310 /* Allow these fields to be modified. */
10311 memcpy (buf, expr, tree_size (expr));
10312 expr = (tree) buf;
10313 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10314 TYPE_POINTER_TO (expr) = NULL;
10315 TYPE_REFERENCE_TO (expr) = NULL;
10316 if (TYPE_CACHED_VALUES_P (expr))
10317 {
10318 TYPE_CACHED_VALUES_P (expr) = 0;
10319 TYPE_CACHED_VALUES (expr) = NULL;
10320 }
10321 }
10322 md5_process_bytes (expr, tree_size (expr), ctx);
10323 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10324 if (TREE_CODE_CLASS (code) != tcc_type
10325 && TREE_CODE_CLASS (code) != tcc_declaration
10326 && code != TREE_LIST)
10327 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10328 switch (TREE_CODE_CLASS (code))
10329 {
10330 case tcc_constant:
10331 switch (code)
10332 {
10333 case STRING_CST:
10334 md5_process_bytes (TREE_STRING_POINTER (expr),
10335 TREE_STRING_LENGTH (expr), ctx);
10336 break;
10337 case COMPLEX_CST:
10338 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10339 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10340 break;
10341 case VECTOR_CST:
10342 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10343 break;
10344 default:
10345 break;
10346 }
10347 break;
10348 case tcc_exceptional:
10349 switch (code)
10350 {
10351 case TREE_LIST:
10352 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10353 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10354 expr = TREE_CHAIN (expr);
10355 goto recursive_label;
10356 break;
10357 case TREE_VEC:
10358 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10359 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10360 break;
10361 default:
10362 break;
10363 }
10364 break;
10365 case tcc_expression:
10366 case tcc_reference:
10367 case tcc_comparison:
10368 case tcc_unary:
10369 case tcc_binary:
10370 case tcc_statement:
10371 len = TREE_CODE_LENGTH (code);
10372 for (i = 0; i < len; ++i)
10373 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10374 break;
10375 case tcc_declaration:
10376 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10377 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10378 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10379 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10380 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10381 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10382 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10383 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10384 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10385
10386 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10387 {
10388 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10389 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10390 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10391 }
10392 break;
10393 case tcc_type:
10394 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10395 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10396 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10397 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10398 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10399 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10400 if (INTEGRAL_TYPE_P (expr)
10401 || SCALAR_FLOAT_TYPE_P (expr))
10402 {
10403 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10404 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10405 }
10406 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10407 if (TREE_CODE (expr) == RECORD_TYPE
10408 || TREE_CODE (expr) == UNION_TYPE
10409 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10410 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10411 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10412 break;
10413 default:
10414 break;
10415 }
10416 }
10417
10418 #endif
10419
10420 /* Fold a unary tree expression with code CODE of type TYPE with an
10421 operand OP0. Return a folded expression if successful. Otherwise,
10422 return a tree expression with code CODE of type TYPE with an
10423 operand OP0. */
10424
10425 tree
10426 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10427 {
10428 tree tem;
10429 #ifdef ENABLE_FOLD_CHECKING
10430 unsigned char checksum_before[16], checksum_after[16];
10431 struct md5_ctx ctx;
10432 htab_t ht;
10433
10434 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10435 md5_init_ctx (&ctx);
10436 fold_checksum_tree (op0, &ctx, ht);
10437 md5_finish_ctx (&ctx, checksum_before);
10438 htab_empty (ht);
10439 #endif
10440
10441 tem = fold_unary (code, type, op0);
10442 if (!tem)
10443 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10444
10445 #ifdef ENABLE_FOLD_CHECKING
10446 md5_init_ctx (&ctx);
10447 fold_checksum_tree (op0, &ctx, ht);
10448 md5_finish_ctx (&ctx, checksum_after);
10449 htab_delete (ht);
10450
10451 if (memcmp (checksum_before, checksum_after, 16))
10452 fold_check_failed (op0, tem);
10453 #endif
10454 return tem;
10455 }
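
/* Callers normally reach the function above through the fold_build1
   macro.  A sketch:

     tree neg = fold_build1 (NEGATE_EXPR, type, op);

   If OP is, say, an INTEGER_CST, NEG is the folded negated constant;
   otherwise a fresh NEGATE_EXPR node is built, so the caller always
   gets a usable tree back.  */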
10456
10457 /* Fold a binary tree expression with code CODE of type TYPE with
10458 operands OP0 and OP1. Return a folded expression if successful.
10459 Otherwise, return a tree expression with code CODE of type TYPE
10460 with operands OP0 and OP1. */
10461
10462 tree
10463 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10464 MEM_STAT_DECL)
10465 {
10466 tree tem;
10467 #ifdef ENABLE_FOLD_CHECKING
10468 unsigned char checksum_before_op0[16],
10469 checksum_before_op1[16],
10470 checksum_after_op0[16],
10471 checksum_after_op1[16];
10472 struct md5_ctx ctx;
10473 htab_t ht;
10474
10475 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10476 md5_init_ctx (&ctx);
10477 fold_checksum_tree (op0, &ctx, ht);
10478 md5_finish_ctx (&ctx, checksum_before_op0);
10479 htab_empty (ht);
10480
10481 md5_init_ctx (&ctx);
10482 fold_checksum_tree (op1, &ctx, ht);
10483 md5_finish_ctx (&ctx, checksum_before_op1);
10484 htab_empty (ht);
10485 #endif
10486
10487 tem = fold_binary (code, type, op0, op1);
10488 if (!tem)
10489 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10490
10491 #ifdef ENABLE_FOLD_CHECKING
10492 md5_init_ctx (&ctx);
10493 fold_checksum_tree (op0, &ctx, ht);
10494 md5_finish_ctx (&ctx, checksum_after_op0);
10495 htab_empty (ht);
10496
10497 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10498 fold_check_failed (op0, tem);
10499
10500 md5_init_ctx (&ctx);
10501 fold_checksum_tree (op1, &ctx, ht);
10502 md5_finish_ctx (&ctx, checksum_after_op1);
10503 htab_delete (ht);
10504
10505 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10506 fold_check_failed (op1, tem);
10507 #endif
10508 return tem;
10509 }
10510
10511 /* Fold a ternary tree expression with code CODE of type TYPE with
10512 operands OP0, OP1, and OP2. Return a folded expression if
10513 successful. Otherwise, return a tree expression with code CODE of
10514 type TYPE with operands OP0, OP1, and OP2. */
10515
10516 tree
10517 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10518 MEM_STAT_DECL)
10519 {
10520 tree tem;
10521 #ifdef ENABLE_FOLD_CHECKING
10522 unsigned char checksum_before_op0[16],
10523 checksum_before_op1[16],
10524 checksum_before_op2[16],
10525 checksum_after_op0[16],
10526 checksum_after_op1[16],
10527 checksum_after_op2[16];
10528 struct md5_ctx ctx;
10529 htab_t ht;
10530
10531 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10532 md5_init_ctx (&ctx);
10533 fold_checksum_tree (op0, &ctx, ht);
10534 md5_finish_ctx (&ctx, checksum_before_op0);
10535 htab_empty (ht);
10536
10537 md5_init_ctx (&ctx);
10538 fold_checksum_tree (op1, &ctx, ht);
10539 md5_finish_ctx (&ctx, checksum_before_op1);
10540 htab_empty (ht);
10541
10542 md5_init_ctx (&ctx);
10543 fold_checksum_tree (op2, &ctx, ht);
10544 md5_finish_ctx (&ctx, checksum_before_op2);
10545 htab_empty (ht);
10546 #endif
10547
10548 tem = fold_ternary (code, type, op0, op1, op2);
10549 if (!tem)
10550 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10551
10552 #ifdef ENABLE_FOLD_CHECKING
10553 md5_init_ctx (&ctx);
10554 fold_checksum_tree (op0, &ctx, ht);
10555 md5_finish_ctx (&ctx, checksum_after_op0);
10556 htab_empty (ht);
10557
10558 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10559 fold_check_failed (op0, tem);
10560
10561 md5_init_ctx (&ctx);
10562 fold_checksum_tree (op1, &ctx, ht);
10563 md5_finish_ctx (&ctx, checksum_after_op1);
10564 htab_empty (ht);
10565
10566 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10567 fold_check_failed (op1, tem);
10568
10569 md5_init_ctx (&ctx);
10570 fold_checksum_tree (op2, &ctx, ht);
10571 md5_finish_ctx (&ctx, checksum_after_op2);
10572 htab_delete (ht);
10573
10574 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10575 fold_check_failed (op2, tem);
10576 #endif
10577 return tem;
10578 }
10579
10580 /* Perform constant folding and related simplification of an initializer
10581 expression. These routines behave identically to "fold_buildN" but
10582 ignore potential run-time traps and exceptions that fold must preserve. */
10583
10584 #define START_FOLD_INIT \
10585 int saved_signaling_nans = flag_signaling_nans;\
10586 int saved_trapping_math = flag_trapping_math;\
10587 int saved_rounding_math = flag_rounding_math;\
10588 int saved_trapv = flag_trapv;\
10589 flag_signaling_nans = 0;\
10590 flag_trapping_math = 0;\
10591 flag_rounding_math = 0;\
10592 flag_trapv = 0
10593
10594 #define END_FOLD_INIT \
10595 flag_signaling_nans = saved_signaling_nans;\
10596 flag_trapping_math = saved_trapping_math;\
10597 flag_rounding_math = saved_rounding_math;\
10598 flag_trapv = saved_trapv
10599
10600 tree
10601 fold_build1_initializer (enum tree_code code, tree type, tree op)
10602 {
10603 tree result;
10604 START_FOLD_INIT;
10605
10606 result = fold_build1 (code, type, op);
10607
10608 END_FOLD_INIT;
10609 return result;
10610 }
10611
10612 tree
10613 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10614 {
10615 tree result;
10616 START_FOLD_INIT;
10617
10618 result = fold_build2 (code, type, op0, op1);
10619
10620 END_FOLD_INIT;
10621 return result;
10622 }
10623
10624 tree
10625 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10626 tree op2)
10627 {
10628 tree result;
10629 START_FOLD_INIT;
10630
10631 result = fold_build3 (code, type, op0, op1, op2);
10632
10633 END_FOLD_INIT;
10634 return result;
10635 }
10636
10637 #undef START_FOLD_INIT
10638 #undef END_FOLD_INIT
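
/* For example, under -frounding-math fold must not simplify
   "1.0 / 3.0", because the result depends on the run-time rounding
   mode; but in a static initializer such as

     static const double d = 1.0 / 3.0;

   the division is evaluated at translation time anyway, so a front
   end can use (a sketch; ONE and THREE stand for the REAL_CST
   operands)

     fold_build2_initializer (RDIV_EXPR, double_type_node, one, three);

   and obtain the folded constant.  */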
10639
10640 /* Determine if first argument is a multiple of second argument. Return 0 if
10641 it is not, or if we cannot easily determine it to be.
10642
10643 An example of the sort of thing we care about (at this point; this routine
10644 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10645 fold cases do now) is discovering that
10646
10647 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10648
10649 is a multiple of
10650
10651 SAVE_EXPR (J * 8)
10652
10653 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10654
10655 This code also handles discovering that
10656
10657 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10658
10659 is a multiple of 8 so we don't have to worry about dealing with a
10660 possible remainder.
10661
10662 Note that we *look* inside a SAVE_EXPR only to determine how it was
10663 calculated; it is not safe for fold to do much of anything else with the
10664 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10665 at run time. For example, the latter example above *cannot* be implemented
10666 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10667 evaluation time of the original SAVE_EXPR is not necessarily the same at
10668 the time the new expression is evaluated. The only optimization of this
10669 sort that would be valid is changing
10670
10671 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10672
10673 divided by 8 to
10674
10675 SAVE_EXPR (I) * SAVE_EXPR (J)
10676
10677 (where the same SAVE_EXPR (J) is used in the original and the
10678 transformed version). */
10679
10680 static int
10681 multiple_of_p (tree type, tree top, tree bottom)
10682 {
10683 if (operand_equal_p (top, bottom, 0))
10684 return 1;
10685
10686 if (TREE_CODE (type) != INTEGER_TYPE)
10687 return 0;
10688
10689 switch (TREE_CODE (top))
10690 {
10691 case BIT_AND_EXPR:
10692 /* Bitwise and provides a power of two multiple. If the mask is
10693 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10694 if (!integer_pow2p (bottom))
10695 return 0;
10696 /* FALLTHRU */
10697
10698 case MULT_EXPR:
10699 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10700 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10701
10702 case PLUS_EXPR:
10703 case MINUS_EXPR:
10704 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10705 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10706
10707 case LSHIFT_EXPR:
10708 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10709 {
10710 tree op1, t1;
10711
10712 op1 = TREE_OPERAND (top, 1);
10713 /* const_binop may not detect overflow correctly,
10714 so check for it explicitly here. */
10715 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10716 > TREE_INT_CST_LOW (op1)
10717 && TREE_INT_CST_HIGH (op1) == 0
10718 && 0 != (t1 = fold_convert (type,
10719 const_binop (LSHIFT_EXPR,
10720 size_one_node,
10721 op1, 0)))
10722 && ! TREE_OVERFLOW (t1))
10723 return multiple_of_p (type, t1, bottom);
10724 }
10725 return 0;
10726
10727 case NOP_EXPR:
10728 /* Can't handle conversions from non-integral or wider integral type. */
10729 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10730 || (TYPE_PRECISION (type)
10731 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10732 return 0;
10733
10734 /* ... fall through ... */
10735
10736 case SAVE_EXPR:
10737 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10738
10739 case INTEGER_CST:
10740 if (TREE_CODE (bottom) != INTEGER_CST
10741 || (TYPE_UNSIGNED (type)
10742 && (tree_int_cst_sgn (top) < 0
10743 || tree_int_cst_sgn (bottom) < 0)))
10744 return 0;
10745 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10746 top, bottom, 0));
10747
10748 default:
10749 return 0;
10750 }
10751 }
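
/* For instance, multiple_of_p can prove that (i & -16) + 32 is a
   multiple of 16.  A hypothetical caller (sketch; I stands for some
   sizetype operand tree):

     tree mask = build_int_cst (sizetype, -16);
     tree t = build2 (PLUS_EXPR, sizetype,
                      build2 (BIT_AND_EXPR, sizetype, i, mask),
                      build_int_cst (sizetype, 32));

   Then multiple_of_p (sizetype, t, build_int_cst (sizetype, 16))
   returns 1: the BIT_AND_EXPR case accepts the power-of-two BOTTOM
   via the constant mask operand, and the PLUS_EXPR case requires
   both operands to qualify.  */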
10752
10753 /* Return true if `t' is known to be non-negative. */
10754
10755 int
10756 tree_expr_nonnegative_p (tree t)
10757 {
10758 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10759 return 1;
10760
10761 switch (TREE_CODE (t))
10762 {
10763 case ABS_EXPR:
10764 /* We can't return 1 if flag_wrapv is set because
10765 ABS_EXPR<INT_MIN> = INT_MIN. */
10766 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10767 return 1;
10768 break;
10769
10770 case INTEGER_CST:
10771 return tree_int_cst_sgn (t) >= 0;
10772
10773 case REAL_CST:
10774 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10775
10776 case PLUS_EXPR:
10777 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10778 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10779 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10780
10781 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10782 both unsigned and at least 2 bits shorter than the result. */
10783 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10784 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10785 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10786 {
10787 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10788 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10789 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10790 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10791 {
10792 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10793 TYPE_PRECISION (inner2)) + 1;
10794 return prec < TYPE_PRECISION (TREE_TYPE (t));
10795 }
10796 }
10797 break;
10798
10799 case MULT_EXPR:
10800 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10801 {
10802 /* x * x for floating point x is always non-negative. */
10803 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10804 return 1;
10805 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10806 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10807 }
10808
10809 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10810 both unsigned and their total bits is shorter than the result. */
10811 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10812 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10813 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10814 {
10815 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10816 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10817 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10818 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10819 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10820 < TYPE_PRECISION (TREE_TYPE (t));
10821 }
10822 return 0;
10823
10824 case BIT_AND_EXPR:
10825 case MAX_EXPR:
10826 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10827 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10828
10829 case BIT_IOR_EXPR:
10830 case BIT_XOR_EXPR:
10831 case MIN_EXPR:
10832 case RDIV_EXPR:
10833 case TRUNC_DIV_EXPR:
10834 case CEIL_DIV_EXPR:
10835 case FLOOR_DIV_EXPR:
10836 case ROUND_DIV_EXPR:
10837 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10838 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10839
10840 case TRUNC_MOD_EXPR:
10841 case CEIL_MOD_EXPR:
10842 case FLOOR_MOD_EXPR:
10843 case ROUND_MOD_EXPR:
10844 case SAVE_EXPR:
10845 case NON_LVALUE_EXPR:
10846 case FLOAT_EXPR:
10847 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10848
10849 case COMPOUND_EXPR:
10850 case MODIFY_EXPR:
10851 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10852
10853 case BIND_EXPR:
10854 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10855
10856 case COND_EXPR:
10857 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10858 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10859
10860 case NOP_EXPR:
10861 {
10862 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10863 tree outer_type = TREE_TYPE (t);
10864
10865 if (TREE_CODE (outer_type) == REAL_TYPE)
10866 {
10867 if (TREE_CODE (inner_type) == REAL_TYPE)
10868 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10869 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10870 {
10871 if (TYPE_UNSIGNED (inner_type))
10872 return 1;
10873 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10874 }
10875 }
10876 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10877 {
10878 if (TREE_CODE (inner_type) == REAL_TYPE)
10879 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10880 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10881 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10882 && TYPE_UNSIGNED (inner_type);
10883 }
10884 }
10885 break;
10886
10887 case TARGET_EXPR:
10888 {
10889 tree temp = TARGET_EXPR_SLOT (t);
10890 t = TARGET_EXPR_INITIAL (t);
10891
10892 /* If the initializer is non-void, then it's a normal expression
10893 that will be assigned to the slot. */
10894 if (!VOID_TYPE_P (t))
10895 return tree_expr_nonnegative_p (t);
10896
10897 /* Otherwise, the initializer sets the slot in some way. One common
10898 way is an assignment statement at the end of the initializer. */
10899 while (1)
10900 {
10901 if (TREE_CODE (t) == BIND_EXPR)
10902 t = expr_last (BIND_EXPR_BODY (t));
10903 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10904 || TREE_CODE (t) == TRY_CATCH_EXPR)
10905 t = expr_last (TREE_OPERAND (t, 0));
10906 else if (TREE_CODE (t) == STATEMENT_LIST)
10907 t = expr_last (t);
10908 else
10909 break;
10910 }
10911 if (TREE_CODE (t) == MODIFY_EXPR
10912 && TREE_OPERAND (t, 0) == temp)
10913 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10914
10915 return 0;
10916 }
10917
10918 case CALL_EXPR:
10919 {
10920 tree fndecl = get_callee_fndecl (t);
10921 tree arglist = TREE_OPERAND (t, 1);
10922 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10923 switch (DECL_FUNCTION_CODE (fndecl))
10924 {
10925 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10926 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10927 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10928 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10929
10930 CASE_BUILTIN_F (BUILT_IN_ACOS)
10931 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10932 CASE_BUILTIN_F (BUILT_IN_CABS)
10933 CASE_BUILTIN_F (BUILT_IN_COSH)
10934 CASE_BUILTIN_F (BUILT_IN_ERFC)
10935 CASE_BUILTIN_F (BUILT_IN_EXP)
10936 CASE_BUILTIN_F (BUILT_IN_EXP10)
10937 CASE_BUILTIN_F (BUILT_IN_EXP2)
10938 CASE_BUILTIN_F (BUILT_IN_FABS)
10939 CASE_BUILTIN_F (BUILT_IN_FDIM)
10940 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10941 CASE_BUILTIN_F (BUILT_IN_POW10)
10942 CASE_BUILTIN_I (BUILT_IN_FFS)
10943 CASE_BUILTIN_I (BUILT_IN_PARITY)
10944 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10945 /* Always true. */
10946 return 1;
10947
10948 CASE_BUILTIN_F (BUILT_IN_SQRT)
10949 /* sqrt(-0.0) is -0.0. */
10950 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10951 return 1;
10952 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10953
10954 CASE_BUILTIN_F (BUILT_IN_ASINH)
10955 CASE_BUILTIN_F (BUILT_IN_ATAN)
10956 CASE_BUILTIN_F (BUILT_IN_ATANH)
10957 CASE_BUILTIN_F (BUILT_IN_CBRT)
10958 CASE_BUILTIN_F (BUILT_IN_CEIL)
10959 CASE_BUILTIN_F (BUILT_IN_ERF)
10960 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10961 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10962 CASE_BUILTIN_F (BUILT_IN_FMOD)
10963 CASE_BUILTIN_F (BUILT_IN_FREXP)
10964 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10965 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10966 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10967 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10968 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10969 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10970 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10971 CASE_BUILTIN_F (BUILT_IN_LRINT)
10972 CASE_BUILTIN_F (BUILT_IN_LROUND)
10973 CASE_BUILTIN_F (BUILT_IN_MODF)
10974 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10975 CASE_BUILTIN_F (BUILT_IN_POW)
10976 CASE_BUILTIN_F (BUILT_IN_RINT)
10977 CASE_BUILTIN_F (BUILT_IN_ROUND)
10978 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10979 CASE_BUILTIN_F (BUILT_IN_SINH)
10980 CASE_BUILTIN_F (BUILT_IN_TANH)
10981 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10982 /* True if the 1st argument is nonnegative. */
10983 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10984
10985 CASE_BUILTIN_F (BUILT_IN_FMAX)
10986 /* True if the 1st OR 2nd arguments are nonnegative. */
10987 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10988 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10989
10990 CASE_BUILTIN_F (BUILT_IN_FMIN)
10991 /* True if the 1st AND 2nd arguments are nonnegative. */
10992 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10993 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10994
10995 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10996 /* True if the 2nd argument is nonnegative. */
10997 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10998
10999 default:
11000 break;
11001 #undef CASE_BUILTIN_F
11002 #undef CASE_BUILTIN_I
11003 }
11004 }
11005
11006 /* ... fall through ... */
11007
11008 default:
11009 if (truth_value_p (TREE_CODE (t)))
11010 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11011 return 1;
11012 }
11013
11014 /* We don't know the sign of `t', so be conservative and return false. */
11015 return 0;
11016 }
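
/* Two examples of what the routine above can prove (sketches): for
   double X it returns 1 for "x * x", because the MULT_EXPR case
   recognizes the equal operands; and with 32-bit int it returns 1 for

     (int) us1 + (int) us2

   where US1 and US2 are unsigned short, because the two zero-extended
   operands need at most 17 bits, so the sum cannot wrap around to a
   negative value.  */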
11017
11018 /* Return true when T is an address and is known to be nonzero.
11019 For floating point we further ensure that T is not denormal.
11020 Similar logic is present in nonzero_address in rtlanal.c. */
11021
11022 bool
11023 tree_expr_nonzero_p (tree t)
11024 {
11025 tree type = TREE_TYPE (t);
11026
11027 /* Doing something useful for floating point would need more work. */
11028 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11029 return false;
11030
11031 switch (TREE_CODE (t))
11032 {
11033 case ABS_EXPR:
11034 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11035
11036 case INTEGER_CST:
11037 /* We used to test for !integer_zerop here. This does not work correctly
11038 if TREE_CONSTANT_OVERFLOW (t). */
11039 return (TREE_INT_CST_LOW (t) != 0
11040 || TREE_INT_CST_HIGH (t) != 0);
11041
11042 case PLUS_EXPR:
11043 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11044 {
11045 /* In the presence of negative values it is hard
11046 to say anything definite. */
11047 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11048 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11049 return false;
11050 /* One of the operands must be positive and the other non-negative. */
11051 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11052 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11053 }
11054 break;
11055
11056 case MULT_EXPR:
11057 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11058 {
11059 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11060 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11061 }
11062 break;
11063
11064 case NOP_EXPR:
11065 {
11066 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11067 tree outer_type = TREE_TYPE (t);
11068
11069 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11070 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11071 }
11072 break;
11073
11074 case ADDR_EXPR:
11075 {
11076 tree base = get_base_address (TREE_OPERAND (t, 0));
11077
11078 if (!base)
11079 return false;
11080
11081 /* Weak declarations may link to NULL. */
11082 if (VAR_OR_FUNCTION_DECL_P (base))
11083 return !DECL_WEAK (base);
11084
11085 /* Constants are never weak. */
11086 if (CONSTANT_CLASS_P (base))
11087 return true;
11088
11089 return false;
11090 }
11091
11092 case COND_EXPR:
11093 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11094 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11095
11096 case MIN_EXPR:
11097 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11098 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11099
11100 case MAX_EXPR:
11101 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11102 {
11103 /* When both operands are nonzero, MAX must be too. */
11104 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11105 return true;
11106
11107 /* MAX where operand 0 is positive is positive. */
11108 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11109 }
11110 /* MAX where operand 1 is positive is positive. */
11111 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11112 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11113 return true;
11114 break;
11115
11116 case COMPOUND_EXPR:
11117 case MODIFY_EXPR:
11118 case BIND_EXPR:
11119 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11120
11121 case SAVE_EXPR:
11122 case NON_LVALUE_EXPR:
11123 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11124
11125 case BIT_IOR_EXPR:
11126 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11127 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11128
11129 case CALL_EXPR:
11130 return alloca_call_p (t);
11131
11132 default:
11133 break;
11134 }
11135 return false;
11136 }
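
/* For example, tree_expr_nonzero_p returns true for "&x" where X is
   an ordinary (non-weak) variable, and for "x ? 3 : 5", since both
   arms of the COND_EXPR are nonzero constants; it returns false for
   "x + 1", because nothing is known about the sign of X.  */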
11137
11138 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11139 attempt to fold the expression to a constant without modifying TYPE,
11140 OP0 or OP1.
11141
11142 If the expression can be simplified to a constant, then return
11143 the constant. If the expression cannot be simplified to a
11144 constant, then return NULL_TREE. */
11145
11146 tree
11147 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11148 {
11149 tree tem = fold_binary (code, type, op0, op1);
11150 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11151 }
11152
11153 /* Given the components of a unary expression CODE, TYPE and OP0,
11154 attempt to fold the expression to a constant without modifying
11155 TYPE or OP0.
11156
11157 If the expression can be simplified to a constant, then return
11158 the constant. If the expression cannot be simplified to a
11159 constant, then return NULL_TREE. */
11160
11161 tree
11162 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11163 {
11164 tree tem = fold_unary (code, type, op0);
11165 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11166 }
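
/* Usage sketch for the two helpers above: a caller that only wants a
   compile-time answer can write

     tree cst = fold_binary_to_constant (LT_EXPR, boolean_type_node,
                                         op0, op1);

   and test CST against NULL_TREE, without the risk of getting back a
   merely partially simplified expression.  */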
11167
11168 /* If EXP represents referencing an element in a constant string
11169 (either via pointer arithmetic or array indexing), return the
11170 tree representing the value accessed, otherwise return NULL. */
11171
11172 tree
11173 fold_read_from_constant_string (tree exp)
11174 {
11175 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11176 {
11177 tree exp1 = TREE_OPERAND (exp, 0);
11178 tree index;
11179 tree string;
11180
11181 if (TREE_CODE (exp) == INDIRECT_REF)
11182 string = string_constant (exp1, &index);
11183 else
11184 {
11185 tree low_bound = array_ref_low_bound (exp);
11186 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11187
11188 /* Optimize the special case of a zero lower bound.
11189
11190 We convert the low_bound to sizetype to avoid some problems
11191 with constant folding. (E.g. suppose the lower bound is 1,
11192 and its mode is QI. Without the conversion, (ARRAY
11193 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11194 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11195 if (! integer_zerop (low_bound))
11196 index = size_diffop (index, fold_convert (sizetype, low_bound));
11197
11198 string = exp1;
11199 }
11200
11201 if (string
11202 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11203 && TREE_CODE (string) == STRING_CST
11204 && TREE_CODE (index) == INTEGER_CST
11205 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11206 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11207 == MODE_INT)
11208 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11209 return fold_convert (TREE_TYPE (exp),
11210 build_int_cst (NULL_TREE,
11211 (TREE_STRING_POINTER (string)
11212 [TREE_INT_CST_LOW (index)])));
11213 }
11214 return NULL;
11215 }
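
/* For example, given

     static const char digits[] = "0123456789";

   both the ARRAY_REF tree for digits[3] and the INDIRECT_REF tree
   for *(digits + 3) are folded by the routine above into the
   character constant '3'.  */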
11216
11217 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11218 an integer constant or real constant.
11219
11220 TYPE is the type of the result. */
11221
11222 static tree
11223 fold_negate_const (tree arg0, tree type)
11224 {
11225 tree t = NULL_TREE;
11226
11227 switch (TREE_CODE (arg0))
11228 {
11229 case INTEGER_CST:
11230 {
11231 unsigned HOST_WIDE_INT low;
11232 HOST_WIDE_INT high;
11233 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11234 TREE_INT_CST_HIGH (arg0),
11235 &low, &high);
11236 t = build_int_cst_wide (type, low, high);
11237 t = force_fit_type (t, 1,
11238 (overflow | TREE_OVERFLOW (arg0))
11239 && !TYPE_UNSIGNED (type),
11240 TREE_CONSTANT_OVERFLOW (arg0));
11241 break;
11242 }
11243
11244 case REAL_CST:
11245 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11246 break;
11247
11248 default:
11249 gcc_unreachable ();
11250 }
11251
11252 return t;
11253 }
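
/* Note that the INTEGER_CST case above funnels overflow through
   force_fit_type: negating the most negative value of a signed
   32-bit type yields the same value back with TREE_OVERFLOW set,
   rather than silently producing a wrong constant.  */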
11254
11255 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11256 an integer constant or real constant.
11257
11258 TYPE is the type of the result. */
11259
11260 tree
11261 fold_abs_const (tree arg0, tree type)
11262 {
11263 tree t = NULL_TREE;
11264
11265 switch (TREE_CODE (arg0))
11266 {
11267 case INTEGER_CST:
11268 /* If the value is unsigned, then the absolute value is
11269 the same as the ordinary value. */
11270 if (TYPE_UNSIGNED (type))
11271 t = arg0;
11272 /* Similarly, if the value is non-negative. */
11273 else if (INT_CST_LT (integer_minus_one_node, arg0))
11274 t = arg0;
11275 /* If the value is negative, then the absolute value is
11276 its negation. */
11277 else
11278 {
11279 unsigned HOST_WIDE_INT low;
11280 HOST_WIDE_INT high;
11281 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11282 TREE_INT_CST_HIGH (arg0),
11283 &low, &high);
11284 t = build_int_cst_wide (type, low, high);
11285 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11286 TREE_CONSTANT_OVERFLOW (arg0));
11287 }
11288 break;
11289
11290 case REAL_CST:
11291 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11292 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11293 else
11294 t = arg0;
11295 break;
11296
11297 default:
11298 gcc_unreachable ();
11299 }
11300
11301 return t;
11302 }
11303
11304 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11305 constant. TYPE is the type of the result. */
11306
11307 static tree
11308 fold_not_const (tree arg0, tree type)
11309 {
11310 tree t = NULL_TREE;
11311
11312 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11313
11314 t = build_int_cst_wide (type,
11315 ~ TREE_INT_CST_LOW (arg0),
11316 ~ TREE_INT_CST_HIGH (arg0));
11317 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11318 TREE_CONSTANT_OVERFLOW (arg0));
11319
11320 return t;
11321 }
11322
11323 /* Given CODE, a relational operator, the target type, TYPE and two
11324 constant operands OP0 and OP1, return the result of the
11325 relational operation. If the result is not a compile time
11326 constant, then return NULL_TREE. */
11327
11328 static tree
11329 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11330 {
11331 int result, invert;
11332
11333 /* From here on, the only cases we handle are when the result is
11334 known to be a constant. */
11335
11336 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11337 {
11338 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11339 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11340
11341 /* Handle the cases where either operand is a NaN. */
11342 if (real_isnan (c0) || real_isnan (c1))
11343 {
11344 switch (code)
11345 {
11346 case EQ_EXPR:
11347 case ORDERED_EXPR:
11348 result = 0;
11349 break;
11350
11351 case NE_EXPR:
11352 case UNORDERED_EXPR:
11353 case UNLT_EXPR:
11354 case UNLE_EXPR:
11355 case UNGT_EXPR:
11356 case UNGE_EXPR:
11357 case UNEQ_EXPR:
11358 result = 1;
11359 break;
11360
11361 case LT_EXPR:
11362 case LE_EXPR:
11363 case GT_EXPR:
11364 case GE_EXPR:
11365 case LTGT_EXPR:
11366 if (flag_trapping_math)
11367 return NULL_TREE;
11368 result = 0;
11369 break;
11370
11371 default:
11372 gcc_unreachable ();
11373 }
11374
11375 return constant_boolean_node (result, type);
11376 }
11377
11378 return constant_boolean_node (real_compare (code, c0, c1), type);
11379 }
11380
11381 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11382
11383 To compute GT, swap the arguments and do LT.
11384 To compute GE, do LT and invert the result.
11385 To compute LE, swap the arguments, do LT and invert the result.
11386 To compute NE, do EQ and invert the result.
11387
11388 Therefore, the code below must handle only EQ and LT. */
11389
11390 if (code == LE_EXPR || code == GT_EXPR)
11391 {
11392 tree tem = op0;
11393 op0 = op1;
11394 op1 = tem;
11395 code = swap_tree_comparison (code);
11396 }
11397
11398 /* Note that it is safe to invert for real values here because we
11399 have already handled the one case where it matters. */
11400
11401 invert = 0;
11402 if (code == NE_EXPR || code == GE_EXPR)
11403 {
11404 invert = 1;
11405 code = invert_tree_comparison (code, false);
11406 }
11407
11408 /* Compute a result for LT or EQ if args permit;
11409 otherwise return NULL_TREE. */
11410 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11411 {
11412 if (code == EQ_EXPR)
11413 result = tree_int_cst_equal (op0, op1);
11414 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11415 result = INT_CST_LT_UNSIGNED (op0, op1);
11416 else
11417 result = INT_CST_LT (op0, op1);
11418 }
11419 else
11420 return NULL_TREE;
11421
11422 if (invert)
11423 result ^= 1;
11424 return constant_boolean_node (result, type);
11425 }
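
/* A worked example of the code above (sketch): for INTEGER_CST
   operands 7 and 5,

     fold_relational_const (GE_EXPR, boolean_type_node, op0, op1)

   rewrites GE to LT with INVERT set, computes INT_CST_LT (7, 5) == 0,
   and after the final inversion returns boolean_true_node.  */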
11426
11427 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11428 Don't build a cleanup point expression for an EXPR that doesn't have
11429 side effects. */
11430
11431 tree
11432 fold_build_cleanup_point_expr (tree type, tree expr)
11433 {
11434 /* If the expression does not have side effects then we don't have to wrap
11435 it with a cleanup point expression. */
11436 if (!TREE_SIDE_EFFECTS (expr))
11437 return expr;
11438
11439 /* If the expression is a return, check whether the expression inside
11440 the return, or the right-hand side of the modify expression inside the
11441 return, has side effects. If either has none, we don't need to wrap
11442 the expression in a cleanup point expression. Note that we don't check
11443 the left-hand side of the modify because it should always be a return decl. */
11444 if (TREE_CODE (expr) == RETURN_EXPR)
11445 {
11446 tree op = TREE_OPERAND (expr, 0);
11447 if (!op || !TREE_SIDE_EFFECTS (op))
11448 return expr;
11449 op = TREE_OPERAND (op, 1);
11450 if (!TREE_SIDE_EFFECTS (op))
11451 return expr;
11452 }
11453
11454 return build1 (CLEANUP_POINT_EXPR, type, expr);
11455 }
11456
11457 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11458 avoid confusing the gimplify process. */
11459
11460 tree
11461 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11462 {
11463 /* The size of the object is not relevant when talking about its address. */
11464 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11465 t = TREE_OPERAND (t, 0);
11466
11467 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11468 if (TREE_CODE (t) == INDIRECT_REF
11469 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11470 {
11471 t = TREE_OPERAND (t, 0);
11472 if (TREE_TYPE (t) != ptrtype)
11473 t = build1 (NOP_EXPR, ptrtype, t);
11474 }
11475 else
11476 {
11477 tree base = t;
11478
11479 while (handled_component_p (base))
11480 base = TREE_OPERAND (base, 0);
11481 if (DECL_P (base))
11482 TREE_ADDRESSABLE (base) = 1;
11483
11484 t = build1 (ADDR_EXPR, ptrtype, t);
11485 }
11486
11487 return t;
11488 }
11489
11490 tree
11491 build_fold_addr_expr (tree t)
11492 {
11493 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11494 }
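
/* For example, applying build_fold_addr_expr to the INDIRECT_REF
   tree for "*p" simply returns P (adding a NOP_EXPR cast if the
   pointer types differ) instead of building the redundant "&*p".  */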
11495
11496 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11497 of an indirection through OP0, or NULL_TREE if no simplification is
11498 possible. */
11499
11500 tree
11501 fold_indirect_ref_1 (tree type, tree op0)
11502 {
11503 tree sub = op0;
11504 tree subtype;
11505
11506 STRIP_NOPS (sub);
11507 subtype = TREE_TYPE (sub);
11508 if (!POINTER_TYPE_P (subtype))
11509 return NULL_TREE;
11510
11511 if (TREE_CODE (sub) == ADDR_EXPR)
11512 {
11513 tree op = TREE_OPERAND (sub, 0);
11514 tree optype = TREE_TYPE (op);
11515 /* *&p => p */
11516 if (type == optype)
11517 return op;
11518 /* *(foo *)&fooarray => fooarray[0] */
11519 else if (TREE_CODE (optype) == ARRAY_TYPE
11520 && type == TREE_TYPE (optype))
11521 {
11522 tree type_domain = TYPE_DOMAIN (optype);
11523 tree min_val = size_zero_node;
11524 if (type_domain && TYPE_MIN_VALUE (type_domain))
11525 min_val = TYPE_MIN_VALUE (type_domain);
11526 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11527 }
11528 }
11529
11530 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11531 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11532 && type == TREE_TYPE (TREE_TYPE (subtype)))
11533 {
11534 tree type_domain;
11535 tree min_val = size_zero_node;
11536 sub = build_fold_indirect_ref (sub);
11537 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11538 if (type_domain && TYPE_MIN_VALUE (type_domain))
11539 min_val = TYPE_MIN_VALUE (type_domain);
11540 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11541 }
11542
11543 return NULL_TREE;
11544 }
11545
11546 /* Builds an expression for an indirection through T, simplifying some
11547 cases. */
11548
11549 tree
11550 build_fold_indirect_ref (tree t)
11551 {
11552 tree type = TREE_TYPE (TREE_TYPE (t));
11553 tree sub = fold_indirect_ref_1 (type, t);
11554
11555 if (sub)
11556 return sub;
11557 else
11558 return build1 (INDIRECT_REF, type, t);
11559 }
11560
11561 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11562
11563 tree
11564 fold_indirect_ref (tree t)
11565 {
11566 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11567
11568 if (sub)
11569 return sub;
11570 else
11571 return t;
11572 }
11573
11574 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11575 whose result is ignored. The type of the returned tree need not be
11576 the same as the original expression. */
11577
11578 tree
11579 fold_ignored_result (tree t)
11580 {
11581 if (!TREE_SIDE_EFFECTS (t))
11582 return integer_zero_node;
11583
11584 for (;;)
11585 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11586 {
11587 case tcc_unary:
11588 t = TREE_OPERAND (t, 0);
11589 break;
11590
11591 case tcc_binary:
11592 case tcc_comparison:
11593 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11594 t = TREE_OPERAND (t, 0);
11595 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11596 t = TREE_OPERAND (t, 1);
11597 else
11598 return t;
11599 break;
11600
11601 case tcc_expression:
11602 switch (TREE_CODE (t))
11603 {
11604 case COMPOUND_EXPR:
11605 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11606 return t;
11607 t = TREE_OPERAND (t, 0);
11608 break;
11609
11610 case COND_EXPR:
11611 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11612 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11613 return t;
11614 t = TREE_OPERAND (t, 0);
11615 break;
11616
11617 default:
11618 return t;
11619 }
11620 break;
11621
11622 default:
11623 return t;
11624 }
11625 }
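
/* For example, when the value of "(f (), x + y)" is ignored, the
   routine above strips the side-effect-free "x + y" operand of the
   COMPOUND_EXPR and returns just the call to f; an expression with
   no side effects at all collapses to integer_zero_node.  */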
11626
11627 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11628 This can only be applied to objects of a sizetype. */
11629
11630 tree
11631 round_up (tree value, int divisor)
11632 {
11633 tree div = NULL_TREE;
11634
11635 gcc_assert (divisor > 0);
11636 if (divisor == 1)
11637 return value;
11638
11639 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11640 have to do anything. Only do this when we are not given a
11641 constant, because in that case this check is more expensive than
11642 just doing the rounding. */
11643 if (TREE_CODE (value) != INTEGER_CST)
11644 {
11645 div = build_int_cst (TREE_TYPE (value), divisor);
11646
11647 if (multiple_of_p (TREE_TYPE (value), value, div))
11648 return value;
11649 }
11650
11651 /* If divisor is a power of two, simplify this to bit manipulation. */
11652 if (divisor == (divisor & -divisor))
11653 {
11654 tree t;
11655
11656 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11657 value = size_binop (PLUS_EXPR, value, t);
11658 t = build_int_cst (TREE_TYPE (value), -divisor);
11659 value = size_binop (BIT_AND_EXPR, value, t);
11660 }
11661 else
11662 {
11663 if (!div)
11664 div = build_int_cst (TREE_TYPE (value), divisor);
11665 value = size_binop (CEIL_DIV_EXPR, value, div);
11666 value = size_binop (MULT_EXPR, value, div);
11667 }
11668
11669 return value;
11670 }
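
/* A quick worked example of the power-of-two path above: rounding up
   to a multiple of 8 computes (VALUE + 7) & -8, so round_up applied
   to the constant 13 yields (13 + 7) & -8 == 16, using one addition
   and one mask instead of a division and a multiplication.  */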
11671
11672 /* Likewise, but round down. */
11673
11674 tree
11675 round_down (tree value, int divisor)
11676 {
11677 tree div = NULL_TREE;
11678
11679 gcc_assert (divisor > 0);
11680 if (divisor == 1)
11681 return value;
11682
11683 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11684 have to do anything. Only do this when we are not given a
11685 constant, because in that case this check is more expensive than
11686 just doing the rounding. */
11687 if (TREE_CODE (value) != INTEGER_CST)
11688 {
11689 div = build_int_cst (TREE_TYPE (value), divisor);
11690
11691 if (multiple_of_p (TREE_TYPE (value), value, div))
11692 return value;
11693 }
11694
11695 /* If divisor is a power of two, simplify this to bit manipulation. */
11696 if (divisor == (divisor & -divisor))
11697 {
11698 tree t;
11699
11700 t = build_int_cst (TREE_TYPE (value), -divisor);
11701 value = size_binop (BIT_AND_EXPR, value, t);
11702 }
11703 else
11704 {
11705 if (!div)
11706 div = build_int_cst (TREE_TYPE (value), divisor);
11707 value = size_binop (FLOOR_DIV_EXPR, value, div);
11708 value = size_binop (MULT_EXPR, value, div);
11709 }
11710
11711 return value;
11712 }
11713
11714 /* Returns a pointer to the base of the object addressed by EXP and
11715 extracts information about the offset of the access, storing it
11716 in *PBITPOS and *POFFSET. */
11717
11718 static tree
11719 split_address_to_core_and_offset (tree exp,
11720 HOST_WIDE_INT *pbitpos, tree *poffset)
11721 {
11722 tree core;
11723 enum machine_mode mode;
11724 int unsignedp, volatilep;
11725 HOST_WIDE_INT bitsize;
11726
11727 if (TREE_CODE (exp) == ADDR_EXPR)
11728 {
11729 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11730 poffset, &mode, &unsignedp, &volatilep,
11731 false);
11732 core = build_fold_addr_expr (core);
11733 }
11734 else
11735 {
11736 core = exp;
11737 *pbitpos = 0;
11738 *poffset = NULL_TREE;
11739 }
11740
11741 return core;
11742 }
11743
11744 /* Returns true if addresses of E1 and E2 differ by a constant, false
11745 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11746
11747 bool
11748 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11749 {
11750 tree core1, core2;
11751 HOST_WIDE_INT bitpos1, bitpos2;
11752 tree toffset1, toffset2, tdiff, type;
11753
11754 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11755 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11756
11757 if (bitpos1 % BITS_PER_UNIT != 0
11758 || bitpos2 % BITS_PER_UNIT != 0
11759 || !operand_equal_p (core1, core2, 0))
11760 return false;
11761
11762 if (toffset1 && toffset2)
11763 {
11764 type = TREE_TYPE (toffset1);
11765 if (type != TREE_TYPE (toffset2))
11766 toffset2 = fold_convert (type, toffset2);
11767
11768 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11769 if (!cst_and_fits_in_hwi (tdiff))
11770 return false;
11771
11772 *diff = int_cst_value (tdiff);
11773 }
11774 else if (toffset1 || toffset2)
11775 {
11776 /* If only one of the offsets is non-constant, the difference cannot
11777 be a constant. */
11778 return false;
11779 }
11780 else
11781 *diff = 0;
11782
11783 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11784 return true;
11785 }
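
/* For instance, with "char a[16]" the addresses &a[10] and &a[2]
   share the core &a and have constant byte offsets, so
   ptr_difference_const returns true and stores 8 in *DIFF.  */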
11786
11787 /* Simplify the floating point expression EXP when the sign of the
11788 result is not significant. Return NULL_TREE if no simplification
11789 is possible. */
11790
11791 tree
11792 fold_strip_sign_ops (tree exp)
11793 {
11794 tree arg0, arg1;
11795
11796 switch (TREE_CODE (exp))
11797 {
11798 case ABS_EXPR:
11799 case NEGATE_EXPR:
11800 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11801 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11802
11803 case MULT_EXPR:
11804 case RDIV_EXPR:
11805 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11806 return NULL_TREE;
11807 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11808 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11809 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11810 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11811 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11812 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11813 break;
11814
11815 default:
11816 break;
11817 }
11818 return NULL_TREE;
11819 }
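
/* For example, when only the magnitude of the result matters (say
   the expression feeds fabs), fold_strip_sign_ops rewrites "-x * y"
   into "x * y": the NEGATE_EXPR is stripped and the MULT_EXPR
   rebuilt.  NULL_TREE is returned when there is nothing to strip.  */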
11820