1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
45
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
61
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
66 COMPCODE_FALSE = 0,
67 COMPCODE_LT = 1,
68 COMPCODE_EQ = 2,
69 COMPCODE_LE = 3,
70 COMPCODE_GT = 4,
71 COMPCODE_LTGT = 5,
72 COMPCODE_GE = 6,
73 COMPCODE_ORD = 7,
74 COMPCODE_UNORD = 8,
75 COMPCODE_UNLT = 9,
76 COMPCODE_UNEQ = 10,
77 COMPCODE_UNLE = 11,
78 COMPCODE_UNGT = 12,
79 COMPCODE_NE = 13,
80 COMPCODE_UNGE = 14,
81 COMPCODE_TRUE = 15
82 };
83
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum tree_code invert_tree_comparison (enum tree_code, bool);
93 static enum comparison_code comparison_to_compcode (enum tree_code);
94 static enum tree_code compcode_to_comparison (enum comparison_code);
95 static tree combine_comparisons (enum tree_code, enum tree_code,
96 enum tree_code, tree, tree, tree);
97 static int truth_value_p (enum tree_code);
98 static int operand_equal_for_comparison_p (tree, tree, tree);
99 static int twoval_comparison_p (tree, tree *, tree *, int *);
100 static tree eval_subst (tree, tree, tree, tree, tree);
101 static tree pedantic_omit_one_operand (tree, tree, tree);
102 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
103 static tree make_bit_field_ref (tree, tree, int, int, int);
104 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
105 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
106 enum machine_mode *, int *, int *,
107 tree *, tree *);
108 static int all_ones_mask_p (tree, int);
109 static tree sign_bit_p (tree, tree);
110 static int simple_operand_p (tree);
111 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
112 static tree make_range (tree, int *, tree *, tree *);
113 static tree build_range_check (tree, tree, int, tree, tree);
114 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 tree);
116 static tree fold_range_test (enum tree_code, tree, tree, tree);
117 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
118 static tree unextend (tree, int, int, tree);
119 static tree fold_truthop (enum tree_code, tree, tree, tree);
120 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
121 static tree extract_muldiv (tree, tree, enum tree_code, tree);
122 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
123 static int multiple_of_p (tree, tree, tree);
124 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
125 tree, tree,
126 tree, tree, int);
127 static bool fold_real_zero_addition_p (tree, tree, int);
128 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 tree, tree, tree);
130 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
131 static tree fold_div_compare (enum tree_code, tree, tree, tree);
132 static bool reorder_operands_p (tree, tree);
133 static tree fold_negate_const (tree, tree);
134 static tree fold_not_const (tree, tree);
135 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 static tree fold_relational_hi_lo (enum tree_code *, const tree,
137 tree *, tree *);
138 static bool tree_expr_nonzero_p (tree);
139
140 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
141 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
142 and SUM1. Then this yields nonzero if overflow occurred during the
143 addition.
144
145 Overflow occurs if A and B have the same sign, but A and SUM differ in
146 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
147 sign. */
148 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
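/* Illustrative sketch (an editorial aside, not from the GCC sources):
   a minimal standalone check of OVERFLOW_SUM_SIGN, using 64-bit long
   as a stand-in for HOST_WIDE_INT.  Guarded by #if 0 so it stays out
   of any build.  */
#if 0
#include <assert.h>
#include <limits.h>

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

int
main (void)
{
  long a = LONG_MAX, b = 1;
  /* Wrapping addition done in unsigned arithmetic, as the folders do.  */
  long sum = (long) ((unsigned long) a + (unsigned long) b);

  /* Same signs in, different sign out: overflow is detected.  */
  assert (OVERFLOW_SUM_SIGN (a, b, sum));

  /* -1 + 1 == 0 does not overflow, and the macro agrees.  */
  assert (!OVERFLOW_SUM_SIGN (-1L, 1L, 0L));
  return 0;
}
#endif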
149 \f
150 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
151 We do that by representing the two-word integer in 4 words, with only
152 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
153 number. The value of the word is LOWPART + HIGHPART * BASE. */
154
155 #define LOWPART(x) \
156 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
157 #define HIGHPART(x) \
158 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
159 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
160
161 /* Unpack a two-word integer into 4 words.
162 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
163 WORDS points to the array of HOST_WIDE_INTs. */
164
165 static void
166 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
167 {
168 words[0] = LOWPART (low);
169 words[1] = HIGHPART (low);
170 words[2] = LOWPART (hi);
171 words[3] = HIGHPART (hi);
172 }
173
174 /* Pack an array of 4 words into a two-word integer.
175 WORDS points to the array of words.
176 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
177
178 static void
179 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
180 HOST_WIDE_INT *hi)
181 {
182 *low = words[0] + words[1] * BASE;
183 *hi = words[2] + words[3] * BASE;
184 }
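/* Illustrative sketch (an editorial aside, not from the GCC sources):
   the encode/decode round trip above, assuming a host where
   HOST_WIDE_INT is a 64-bit long, so each of the four words holds a
   32-bit "digit" and BASE is 2^32.  */
#if 0
#include <assert.h>

typedef long hwi;               /* stand-in for HOST_WIDE_INT */
typedef unsigned long uhwi;     /* stand-in for unsigned HOST_WIDE_INT */

#define LOWPART(x)  ((x) & (((uhwi) 1 << 32) - 1))
#define HIGHPART(x) ((uhwi) (x) >> 32)
#define BASE        ((uhwi) 1 << 32)

int
main (void)
{
  hwi words[4];
  uhwi low = 0x123456789abcdef0UL;
  hwi hi = (hwi) 0x0fedcba987654321L;

  /* Unpack into four 32-bit digits, as encode does.  */
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);

  /* Repacking as LOWPART + HIGHPART * BASE recovers both pieces.  */
  assert ((uhwi) words[0] + (uhwi) words[1] * BASE == low);
  assert (words[2] + words[3] * (hwi) BASE == hi);
  return 0;
}
#endif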
185 \f
186 /* T is an INTEGER_CST node. OVERFLOWABLE indicates if we are interested
187 in overflow of the value, when >0 we are only interested in signed
188 overflow, for <0 we are interested in any overflow. OVERFLOWED
189 indicates whether overflow has already occurred. CONST_OVERFLOWED
190 indicates whether constant overflow has already occurred. We force
191 T's value to be within range of T's type (by setting to 0 or 1 all
192 the bits outside the type's range). We set TREE_OVERFLOW if
193 OVERFLOWED is nonzero,
194 or OVERFLOWABLE is >0 and signed overflow occurs,
195 or OVERFLOWABLE is <0 and any overflow occurs.
196 We set TREE_CONSTANT_OVERFLOW if
197 CONST_OVERFLOWED is nonzero,
198 or we set TREE_OVERFLOW.
199 We return either the original T, or a copy. */
200
201 tree
202 force_fit_type (tree t, int overflowable,
203 bool overflowed, bool overflowed_const)
204 {
205 unsigned HOST_WIDE_INT low;
206 HOST_WIDE_INT high;
207 unsigned int prec;
208 int sign_extended_type;
209
210 gcc_assert (TREE_CODE (t) == INTEGER_CST);
211
212 low = TREE_INT_CST_LOW (t);
213 high = TREE_INT_CST_HIGH (t);
214
215 if (POINTER_TYPE_P (TREE_TYPE (t))
216 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
217 prec = POINTER_SIZE;
218 else
219 prec = TYPE_PRECISION (TREE_TYPE (t));
220 /* Size types *are* sign extended. */
221 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
222 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
223 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
224
225 /* First clear all bits that are beyond the type's precision. */
226
227 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
228 ;
229 else if (prec > HOST_BITS_PER_WIDE_INT)
230 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
231 else
232 {
233 high = 0;
234 if (prec < HOST_BITS_PER_WIDE_INT)
235 low &= ~((HOST_WIDE_INT) (-1) << prec);
236 }
237
238 if (!sign_extended_type)
239 /* No sign extension */;
240 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
241 /* Correct width already. */;
242 else if (prec > HOST_BITS_PER_WIDE_INT)
243 {
244 /* Sign extend top half? */
245 if (high & ((unsigned HOST_WIDE_INT)1
246 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
247 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
248 }
249 else if (prec == HOST_BITS_PER_WIDE_INT)
250 {
251 if ((HOST_WIDE_INT)low < 0)
252 high = -1;
253 }
254 else
255 {
256 /* Sign extend bottom half? */
257 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
258 {
259 high = -1;
260 low |= (HOST_WIDE_INT)(-1) << prec;
261 }
262 }
263
264 /* If the value changed, return a new node. */
265 if (overflowed || overflowed_const
266 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
267 {
268 t = build_int_cst_wide (TREE_TYPE (t), low, high);
269
270 if (overflowed
271 || overflowable < 0
272 || (overflowable > 0 && sign_extended_type))
273 {
274 t = copy_node (t);
275 TREE_OVERFLOW (t) = 1;
276 TREE_CONSTANT_OVERFLOW (t) = 1;
277 }
278 else if (overflowed_const)
279 {
280 t = copy_node (t);
281 TREE_CONSTANT_OVERFLOW (t) = 1;
282 }
283 }
284
285 return t;
286 }
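/* Illustrative sketch (an editorial aside, not from the GCC sources):
   the same truncate-then-sign-extend dance for a single 64-bit word,
   assuming an 8-bit signed target type (prec == 8).  It mirrors the
   shifts used above, including the shift of (-1).  */
#if 0
#include <assert.h>

/* Force VAL to an 8-bit signed value the way force_fit_type does for
   types narrower than one host word: clear the bits beyond the
   precision, then copy the new sign bit upward.  */
static long
fit_signed_8 (long val)
{
  unsigned int prec = 8;

  val &= ~((long) -1 << prec);          /* keep the low 8 bits */
  if (val & ((unsigned long) 1 << (prec - 1)))
    val |= (long) -1 << prec;           /* sign extend bit 7   */
  return val;
}

int
main (void)
{
  assert (fit_signed_8 (0x1FF) == -1);  /* 0x1FF -> 0xFF -> -1 */
  assert (fit_signed_8 (0x080) == -128);
  assert (fit_signed_8 (0x07F) == 127);
  return 0;
}
#endif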
287 \f
288 /* Add two doubleword integers with doubleword result.
289 Each argument is given as two `HOST_WIDE_INT' pieces.
290 One argument is L1 and H1; the other, L2 and H2.
291 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
292
293 int
294 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
295 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
296 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
297 {
298 unsigned HOST_WIDE_INT l;
299 HOST_WIDE_INT h;
300
301 l = l1 + l2;
302 h = h1 + h2 + (l < l1);
303
304 *lv = l;
305 *hv = h;
306 return OVERFLOW_SUM_SIGN (h1, h2, h);
307 }
308
309 /* Negate a doubleword integer with doubleword result.
310 Return nonzero if the operation overflows, assuming it's signed.
311 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
312 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
313
314 int
315 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
316 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
317 {
318 if (l1 == 0)
319 {
320 *lv = 0;
321 *hv = - h1;
322 return (*hv & h1) < 0;
323 }
324 else
325 {
326 *lv = -l1;
327 *hv = ~h1;
328 return 0;
329 }
330 }
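/* Editorial aside (not from the GCC sources): why neg_double splits on
   L1 == 0.  In two's complement -x == ~x + 1; the +1 carries into the
   high word only when the low word is zero, so the high word is ~h1
   when l1 != 0 and -h1 when l1 == 0.  A quick check with 64-bit
   halves:  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned long l1 = 0x5UL, h1 = 0x1UL, lv, hv;

  /* l1 != 0: no carry reaches the high word.  */
  lv = -l1;
  hv = ~h1;
  assert (lv == 0xFFFFFFFFFFFFFFFBUL && hv == 0xFFFFFFFFFFFFFFFEUL);

  /* l1 == 0: the +1 carries all the way into the high word.  */
  l1 = 0;
  lv = 0;
  hv = -h1;
  assert (lv == 0 && hv == 0xFFFFFFFFFFFFFFFFUL);
  return 0;
}
#endif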
331 \f
332 /* Multiply two doubleword integers with doubleword result.
333 Return nonzero if the operation overflows, assuming it's signed.
334 Each argument is given as two `HOST_WIDE_INT' pieces.
335 One argument is L1 and H1; the other, L2 and H2.
336 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
337
338 int
339 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
340 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
341 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
342 {
343 HOST_WIDE_INT arg1[4];
344 HOST_WIDE_INT arg2[4];
345 HOST_WIDE_INT prod[4 * 2];
346 unsigned HOST_WIDE_INT carry;
347 int i, j, k;
348 unsigned HOST_WIDE_INT toplow, neglow;
349 HOST_WIDE_INT tophigh, neghigh;
350
351 encode (arg1, l1, h1);
352 encode (arg2, l2, h2);
353
354 memset (prod, 0, sizeof prod);
355
356 for (i = 0; i < 4; i++)
357 {
358 carry = 0;
359 for (j = 0; j < 4; j++)
360 {
361 k = i + j;
362 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
363 carry += arg1[i] * arg2[j];
364 /* Since prod[k] <= 0xFFFF, this sum <= 0xFFFFFFFF. */
365 carry += prod[k];
366 prod[k] = LOWPART (carry);
367 carry = HIGHPART (carry);
368 }
369 prod[i + 4] = carry;
370 }
371
372 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
373
374 /* Check for overflow by calculating the top half of the answer in full;
375 it should agree with the low half's sign bit. */
376 decode (prod + 4, &toplow, &tophigh);
377 if (h1 < 0)
378 {
379 neg_double (l2, h2, &neglow, &neghigh);
380 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
381 }
382 if (h2 < 0)
383 {
384 neg_double (l1, h1, &neglow, &neghigh);
385 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
386 }
387 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
388 }
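/* Illustrative sketch (an editorial aside, not from the GCC sources):
   the same schoolbook digit loop, shrunk to multiplying two 32-bit
   numbers via 16-bit digits so every partial product and carry fits
   in 32 bits, exactly as the bound comments above claim.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint64_t
mul32_by_digits (uint32_t x, uint32_t y)
{
  uint32_t xd[2] = { x & 0xFFFF, x >> 16 };   /* base-2^16 digits */
  uint32_t yd[2] = { y & 0xFFFF, y >> 16 };
  uint32_t prod[4] = { 0, 0, 0, 0 };
  uint64_t result = 0;
  int i, j;

  for (i = 0; i < 2; i++)
    {
      uint32_t carry = 0;
      for (j = 0; j < 2; j++)
        {
          carry += xd[i] * yd[j];       /* product <= 0xFFFE0001 */
          carry += prod[i + j];         /* sum still <= 0xFFFFFFFF */
          prod[i + j] = carry & 0xFFFF;
          carry >>= 16;
        }
      prod[i + 2] = carry;
    }

  for (i = 3; i >= 0; i--)
    result = (result << 16) | prod[i];
  return result;
}

int
main (void)
{
  assert (mul32_by_digits (0xFFFFFFFFu, 0xFFFFFFFFu)
          == 0xFFFFFFFE00000001ull);
  assert (mul32_by_digits (12345u, 67890u) == 838102050ull);
  return 0;
}
#endif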
389 \f
390 /* Shift the doubleword integer in L1, H1 left by COUNT places
391 keeping only PREC bits of result.
392 Shift right if COUNT is negative.
393 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
394 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
395
396 void
397 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
398 HOST_WIDE_INT count, unsigned int prec,
399 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
400 {
401 unsigned HOST_WIDE_INT signmask;
402
403 if (count < 0)
404 {
405 rshift_double (l1, h1, -count, prec, lv, hv, arith);
406 return;
407 }
408
409 if (SHIFT_COUNT_TRUNCATED)
410 count %= prec;
411
412 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
413 {
414 /* Shifting by the host word size is undefined according to the
415 ANSI standard, so we must handle this as a special case. */
416 *hv = 0;
417 *lv = 0;
418 }
419 else if (count >= HOST_BITS_PER_WIDE_INT)
420 {
421 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
422 *lv = 0;
423 }
424 else
425 {
426 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
427 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
428 *lv = l1 << count;
429 }
430
431 /* Sign extend all bits that are beyond the precision. */
432
433 signmask = -((prec > HOST_BITS_PER_WIDE_INT
434 ? ((unsigned HOST_WIDE_INT) *hv
435 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
436 : (*lv >> (prec - 1))) & 1);
437
438 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
439 ;
440 else if (prec >= HOST_BITS_PER_WIDE_INT)
441 {
442 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
443 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
444 }
445 else
446 {
447 *hv = signmask;
448 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
449 *lv |= signmask << prec;
450 }
451 }
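/* Editorial aside (not from the GCC sources): the double-shift idiom
   used above.  "l1 >> (W - count)" would be undefined when count == 0,
   so the code shifts by (W - count - 1) and then by 1 more; the second
   shift is always by a valid amount and yields 0 when count == 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint64_t l1 = 0x8000000000000000ull;
  int count = 0;

  /* Bits of L1 that spill into the high word under a COUNT-bit left
     shift; must be 0 when count == 0.  */
  uint64_t spill = l1 >> (64 - count - 1) >> 1;
  assert (spill == 0);

  count = 1;
  spill = l1 >> (64 - count - 1) >> 1;
  assert (spill == 1);                  /* bit 63 becomes high bit 0 */
  return 0;
}
#endif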
452
453 /* Shift the doubleword integer in L1, H1 right by COUNT places
454 keeping only PREC bits of result. COUNT must be positive.
455 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
456 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
457
458 void
459 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
460 HOST_WIDE_INT count, unsigned int prec,
461 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
462 int arith)
463 {
464 unsigned HOST_WIDE_INT signmask;
465
466 signmask = (arith
467 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
468 : 0);
469
470 if (SHIFT_COUNT_TRUNCATED)
471 count %= prec;
472
473 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
474 {
475 /* Shifting by the host word size is undefined according to the
476 ANSI standard, so we must handle this as a special case. */
477 *hv = 0;
478 *lv = 0;
479 }
480 else if (count >= HOST_BITS_PER_WIDE_INT)
481 {
482 *hv = 0;
483 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
484 }
485 else
486 {
487 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
488 *lv = ((l1 >> count)
489 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
490 }
491
492 /* Zero / sign extend all bits that are beyond the precision. */
493
494 if (count >= (HOST_WIDE_INT)prec)
495 {
496 *hv = signmask;
497 *lv = signmask;
498 }
499 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
500 ;
501 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
502 {
503 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
504 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
505 }
506 else
507 {
508 *hv = signmask;
509 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
510 *lv |= signmask << (prec - count);
511 }
512 }
513 \f
514 /* Rotate the doubleword integer in L1, H1 left by COUNT places
515 keeping only PREC bits of result.
516 Rotate right if COUNT is negative.
517 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
518
519 void
520 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
521 HOST_WIDE_INT count, unsigned int prec,
522 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
523 {
524 unsigned HOST_WIDE_INT s1l, s2l;
525 HOST_WIDE_INT s1h, s2h;
526
527 count %= prec;
528 if (count < 0)
529 count += prec;
530
531 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
532 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
533 *lv = s1l | s2l;
534 *hv = s1h | s2h;
535 }
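/* Editorial aside (not from the GCC sources): a rotate is just the OR
   of two opposite logical shifts, which is all lrotate_double and
   rrotate_double do.  The same identity on a single 64-bit word:  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint64_t
rotl64 (uint64_t x, unsigned count)     /* assumes 0 < count < 64 */
{
  return (x << count) | (x >> (64 - count));
}

int
main (void)
{
  assert (rotl64 (0x8000000000000001ull, 1) == 0x3ull);
  assert (rotl64 (0x0123456789abcdefull, 16) == 0x456789abcdef0123ull);
  return 0;
}
#endif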
536
537 /* Rotate the doubleword integer in L1, H1 right by COUNT places
538 keeping only PREC bits of result. COUNT must be positive.
539 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
540
541 void
542 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
543 HOST_WIDE_INT count, unsigned int prec,
544 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
545 {
546 unsigned HOST_WIDE_INT s1l, s2l;
547 HOST_WIDE_INT s1h, s2h;
548
549 count %= prec;
550 if (count < 0)
551 count += prec;
552
553 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
554 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 *lv = s1l | s2l;
556 *hv = s1h | s2h;
557 }
558 \f
559 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
560 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
561 CODE is a tree code for a kind of division, one of
562 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
563 or EXACT_DIV_EXPR
564 It controls how the quotient is rounded to an integer.
565 Return nonzero if the operation overflows.
566 UNS nonzero says do unsigned division. */
567
568 int
569 div_and_round_double (enum tree_code code, int uns,
570 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
571 HOST_WIDE_INT hnum_orig,
572 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
573 HOST_WIDE_INT hden_orig,
574 unsigned HOST_WIDE_INT *lquo,
575 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
576 HOST_WIDE_INT *hrem)
577 {
578 int quo_neg = 0;
579 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
580 HOST_WIDE_INT den[4], quo[4];
581 int i, j;
582 unsigned HOST_WIDE_INT work;
583 unsigned HOST_WIDE_INT carry = 0;
584 unsigned HOST_WIDE_INT lnum = lnum_orig;
585 HOST_WIDE_INT hnum = hnum_orig;
586 unsigned HOST_WIDE_INT lden = lden_orig;
587 HOST_WIDE_INT hden = hden_orig;
588 int overflow = 0;
589
590 if (hden == 0 && lden == 0)
591 overflow = 1, lden = 1;
592
593 /* Calculate quotient sign and convert operands to unsigned. */
594 if (!uns)
595 {
596 if (hnum < 0)
597 {
598 quo_neg = ~ quo_neg;
599 /* (minimum integer) / (-1) is the only overflow case. */
600 if (neg_double (lnum, hnum, &lnum, &hnum)
601 && ((HOST_WIDE_INT) lden & hden) == -1)
602 overflow = 1;
603 }
604 if (hden < 0)
605 {
606 quo_neg = ~ quo_neg;
607 neg_double (lden, hden, &lden, &hden);
608 }
609 }
610
611 if (hnum == 0 && hden == 0)
612 { /* single precision */
613 *hquo = *hrem = 0;
614 /* This unsigned division rounds toward zero. */
615 *lquo = lnum / lden;
616 goto finish_up;
617 }
618
619 if (hnum == 0)
620 { /* trivial case: dividend < divisor */
621 /* hden != 0 already checked. */
622 *hquo = *lquo = 0;
623 *hrem = hnum;
624 *lrem = lnum;
625 goto finish_up;
626 }
627
628 memset (quo, 0, sizeof quo);
629
630 memset (num, 0, sizeof num); /* to zero 9th element */
631 memset (den, 0, sizeof den);
632
633 encode (num, lnum, hnum);
634 encode (den, lden, hden);
635
636 /* Special code for when the divisor < BASE. */
637 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
638 {
639 /* hnum != 0 already checked. */
640 for (i = 4 - 1; i >= 0; i--)
641 {
642 work = num[i] + carry * BASE;
643 quo[i] = work / lden;
644 carry = work % lden;
645 }
646 }
647 else
648 {
649 /* Full double precision division,
650 with thanks to Don Knuth's "Seminumerical Algorithms". */
651 int num_hi_sig, den_hi_sig;
652 unsigned HOST_WIDE_INT quo_est, scale;
653
654 /* Find the highest nonzero divisor digit. */
655 for (i = 4 - 1;; i--)
656 if (den[i] != 0)
657 {
658 den_hi_sig = i;
659 break;
660 }
661
662 /* Ensure that the first digit of the divisor is at least BASE/2.
663 This is required by the quotient digit estimation algorithm. */
664
665 scale = BASE / (den[den_hi_sig] + 1);
666 if (scale > 1)
667 { /* scale divisor and dividend */
668 carry = 0;
669 for (i = 0; i <= 4 - 1; i++)
670 {
671 work = (num[i] * scale) + carry;
672 num[i] = LOWPART (work);
673 carry = HIGHPART (work);
674 }
675
676 num[4] = carry;
677 carry = 0;
678 for (i = 0; i <= 4 - 1; i++)
679 {
680 work = (den[i] * scale) + carry;
681 den[i] = LOWPART (work);
682 carry = HIGHPART (work);
683 if (den[i] != 0) den_hi_sig = i;
684 }
685 }
686
687 num_hi_sig = 4;
688
689 /* Main loop */
690 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
691 {
692 /* Guess the next quotient digit, quo_est, by dividing the first
693 two remaining dividend digits by the high order quotient digit.
694 quo_est is never low and is at most 2 high. */
695 unsigned HOST_WIDE_INT tmp;
696
697 num_hi_sig = i + den_hi_sig + 1;
698 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
699 if (num[num_hi_sig] != den[den_hi_sig])
700 quo_est = work / den[den_hi_sig];
701 else
702 quo_est = BASE - 1;
703
704 /* Refine quo_est so it's usually correct, and at most one high. */
705 tmp = work - quo_est * den[den_hi_sig];
706 if (tmp < BASE
707 && (den[den_hi_sig - 1] * quo_est
708 > (tmp * BASE + num[num_hi_sig - 2])))
709 quo_est--;
710
711 /* Try QUO_EST as the quotient digit, by multiplying the
712 divisor by QUO_EST and subtracting from the remaining dividend.
713 Keep in mind that QUO_EST is the I - 1st digit. */
714
715 carry = 0;
716 for (j = 0; j <= den_hi_sig; j++)
717 {
718 work = quo_est * den[j] + carry;
719 carry = HIGHPART (work);
720 work = num[i + j] - LOWPART (work);
721 num[i + j] = LOWPART (work);
722 carry += HIGHPART (work) != 0;
723 }
724
725 /* If quo_est was high by one, then num[i] went negative and
726 we need to correct things. */
727 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
728 {
729 quo_est--;
730 carry = 0; /* add divisor back in */
731 for (j = 0; j <= den_hi_sig; j++)
732 {
733 work = num[i + j] + den[j] + carry;
734 carry = HIGHPART (work);
735 num[i + j] = LOWPART (work);
736 }
737
738 num [num_hi_sig] += carry;
739 }
740
741 /* Store the quotient digit. */
742 quo[i] = quo_est;
743 }
744 }
745
746 decode (quo, lquo, hquo);
747
748 finish_up:
749 /* If result is negative, make it so. */
750 if (quo_neg)
751 neg_double (*lquo, *hquo, lquo, hquo);
752
753 /* Compute trial remainder: rem = num - (quo * den) */
754 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
755 neg_double (*lrem, *hrem, lrem, hrem);
756 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
757
758 switch (code)
759 {
760 case TRUNC_DIV_EXPR:
761 case TRUNC_MOD_EXPR: /* round toward zero */
762 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
763 return overflow;
764
765 case FLOOR_DIV_EXPR:
766 case FLOOR_MOD_EXPR: /* round toward negative infinity */
767 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
768 {
769 /* quo = quo - 1; */
770 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
771 lquo, hquo);
772 }
773 else
774 return overflow;
775 break;
776
777 case CEIL_DIV_EXPR:
778 case CEIL_MOD_EXPR: /* round toward positive infinity */
779 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
780 {
781 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
782 lquo, hquo);
783 }
784 else
785 return overflow;
786 break;
787
788 case ROUND_DIV_EXPR:
789 case ROUND_MOD_EXPR: /* round to closest integer */
790 {
791 unsigned HOST_WIDE_INT labs_rem = *lrem;
792 HOST_WIDE_INT habs_rem = *hrem;
793 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
794 HOST_WIDE_INT habs_den = hden, htwice;
795
796 /* Get absolute values. */
797 if (*hrem < 0)
798 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
799 if (hden < 0)
800 neg_double (lden, hden, &labs_den, &habs_den);
801
802 /* If (2 * abs (lrem) >= abs (lden)) */
803 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
804 labs_rem, habs_rem, &ltwice, &htwice);
805
806 if (((unsigned HOST_WIDE_INT) habs_den
807 < (unsigned HOST_WIDE_INT) htwice)
808 || (((unsigned HOST_WIDE_INT) habs_den
809 == (unsigned HOST_WIDE_INT) htwice)
810 && (labs_den < ltwice)))
811 {
812 if (*hquo < 0)
813 /* quo = quo - 1; */
814 add_double (*lquo, *hquo,
815 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
816 else
817 /* quo = quo + 1; */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
819 lquo, hquo);
820 }
821 else
822 return overflow;
823 }
824 break;
825
826 default:
827 gcc_unreachable ();
828 }
829
830 /* Compute true remainder: rem = num - (quo * den) */
831 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
832 neg_double (*lrem, *hrem, lrem, hrem);
833 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
834 return overflow;
835 }
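/* A worked example (editorial aside, not from the GCC sources): what
   the rounding adjustments above do to -7 / 2, whose exact quotient is
   -3.5.  C99 division truncates toward zero, which is exactly
   TRUNC_DIV_EXPR; the other modes are corrections applied to that.  */
#if 0
#include <assert.h>

int
main (void)
{
  int q = -7 / 2, r = -7 % 2;   /* C99: q == -3, r == -1 */

  assert (q == -3 && r == -1);  /* TRUNC_DIV_EXPR: toward zero        */
  assert (q - 1 == -4);         /* FLOOR_DIV_EXPR: quotient negative
                                   and remainder nonzero, so q - 1    */
  /* CEIL_DIV_EXPR only adjusts positive quotients, so it keeps -3.
     ROUND_DIV_EXPR sees 2 * |r| >= |den| (2 >= 2) and moves away from
     zero, also giving -4.  */
  return 0;
}
#endif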
836 \f
837 /* Return true if built-in mathematical function specified by CODE
838 preserves the sign of its argument, i.e. -f(x) == f(-x). */
839
840 static bool
841 negate_mathfn_p (enum built_in_function code)
842 {
843 switch (code)
844 {
845 case BUILT_IN_ASIN:
846 case BUILT_IN_ASINF:
847 case BUILT_IN_ASINL:
848 case BUILT_IN_ATAN:
849 case BUILT_IN_ATANF:
850 case BUILT_IN_ATANL:
851 case BUILT_IN_SIN:
852 case BUILT_IN_SINF:
853 case BUILT_IN_SINL:
854 case BUILT_IN_TAN:
855 case BUILT_IN_TANF:
856 case BUILT_IN_TANL:
857 return true;
858
859 default:
860 break;
861 }
862 return false;
863 }
864
865 /* Check whether we may negate an integer constant T without causing
866 overflow. */
867
868 bool
869 may_negate_without_overflow_p (tree t)
870 {
871 unsigned HOST_WIDE_INT val;
872 unsigned int prec;
873 tree type;
874
875 gcc_assert (TREE_CODE (t) == INTEGER_CST);
876
877 type = TREE_TYPE (t);
878 if (TYPE_UNSIGNED (type))
879 return false;
880
881 prec = TYPE_PRECISION (type);
882 if (prec > HOST_BITS_PER_WIDE_INT)
883 {
884 if (TREE_INT_CST_LOW (t) != 0)
885 return true;
886 prec -= HOST_BITS_PER_WIDE_INT;
887 val = TREE_INT_CST_HIGH (t);
888 }
889 else
890 val = TREE_INT_CST_LOW (t);
891 if (prec < HOST_BITS_PER_WIDE_INT)
892 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
893 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
894 }
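/* Editorial aside (not from the GCC sources): the one signed value the
   test above rejects is the type's minimum, whose bit pattern is
   exactly 1 << (prec - 1).  Shown here for a 32-bit precision:  */
#if 0
#include <assert.h>
#include <stdint.h>

static int
may_negate_32 (int32_t v)
{
  /* Negation overflows only for INT32_MIN == (uint32_t) 1 << 31.  */
  return (uint32_t) v != (uint32_t) 1 << 31;
}

int
main (void)
{
  assert (!may_negate_32 (INT32_MIN));
  assert (may_negate_32 (INT32_MAX));
  assert (may_negate_32 (0));
  return 0;
}
#endif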
895
896 /* Determine whether an expression T can be cheaply negated using
897 the function negate_expr. */
898
899 static bool
900 negate_expr_p (tree t)
901 {
902 tree type;
903
904 if (t == 0)
905 return false;
906
907 type = TREE_TYPE (t);
908
909 STRIP_SIGN_NOPS (t);
910 switch (TREE_CODE (t))
911 {
912 case INTEGER_CST:
913 if (TYPE_UNSIGNED (type) || ! flag_trapv)
914 return true;
915
916 /* Check that -CST will not overflow type. */
917 return may_negate_without_overflow_p (t);
918
919 case REAL_CST:
920 case NEGATE_EXPR:
921 return true;
922
923 case COMPLEX_CST:
924 return negate_expr_p (TREE_REALPART (t))
925 && negate_expr_p (TREE_IMAGPART (t));
926
927 case PLUS_EXPR:
928 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
929 return false;
930 /* -(A + B) -> (-B) - A. */
931 if (negate_expr_p (TREE_OPERAND (t, 1))
932 && reorder_operands_p (TREE_OPERAND (t, 0),
933 TREE_OPERAND (t, 1)))
934 return true;
935 /* -(A + B) -> (-A) - B. */
936 return negate_expr_p (TREE_OPERAND (t, 0));
937
938 case MINUS_EXPR:
939 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
940 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
941 && reorder_operands_p (TREE_OPERAND (t, 0),
942 TREE_OPERAND (t, 1));
943
944 case MULT_EXPR:
945 if (TYPE_UNSIGNED (TREE_TYPE (t)))
946 break;
947
948 /* Fall through. */
949
950 case RDIV_EXPR:
951 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
952 return negate_expr_p (TREE_OPERAND (t, 1))
953 || negate_expr_p (TREE_OPERAND (t, 0));
954 break;
955
956 case NOP_EXPR:
957 /* Negate -((double)float) as (double)(-float). */
958 if (TREE_CODE (type) == REAL_TYPE)
959 {
960 tree tem = strip_float_extensions (t);
961 if (tem != t)
962 return negate_expr_p (tem);
963 }
964 break;
965
966 case CALL_EXPR:
967 /* Negate -f(x) as f(-x). */
968 if (negate_mathfn_p (builtin_mathfn_code (t)))
969 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
970 break;
971
972 case RSHIFT_EXPR:
973 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
974 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
975 {
976 tree op1 = TREE_OPERAND (t, 1);
977 if (TREE_INT_CST_HIGH (op1) == 0
978 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
979 == TREE_INT_CST_LOW (op1))
980 return true;
981 }
982 break;
983
984 default:
985 break;
986 }
987 return false;
988 }
989
990 /* Given T, an expression, return the negation of T. Allow for T to be
991 null, in which case return null. */
992
993 static tree
994 negate_expr (tree t)
995 {
996 tree type;
997 tree tem;
998
999 if (t == 0)
1000 return 0;
1001
1002 type = TREE_TYPE (t);
1003 STRIP_SIGN_NOPS (t);
1004
1005 switch (TREE_CODE (t))
1006 {
1007 case INTEGER_CST:
1008 tem = fold_negate_const (t, type);
1009 if (! TREE_OVERFLOW (tem)
1010 || TYPE_UNSIGNED (type)
1011 || ! flag_trapv)
1012 return tem;
1013 break;
1014
1015 case REAL_CST:
1016 tem = fold_negate_const (t, type);
1017 /* Two's complement FP formats, such as c4x, may overflow. */
1018 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1019 return fold_convert (type, tem);
1020 break;
1021
1022 case COMPLEX_CST:
1023 {
1024 tree rpart = negate_expr (TREE_REALPART (t));
1025 tree ipart = negate_expr (TREE_IMAGPART (t));
1026
1027 if ((TREE_CODE (rpart) == REAL_CST
1028 && TREE_CODE (ipart) == REAL_CST)
1029 || (TREE_CODE (rpart) == INTEGER_CST
1030 && TREE_CODE (ipart) == INTEGER_CST))
1031 return build_complex (type, rpart, ipart);
1032 }
1033 break;
1034
1035 case NEGATE_EXPR:
1036 return fold_convert (type, TREE_OPERAND (t, 0));
1037
1038 case PLUS_EXPR:
1039 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1040 {
1041 /* -(A + B) -> (-B) - A. */
1042 if (negate_expr_p (TREE_OPERAND (t, 1))
1043 && reorder_operands_p (TREE_OPERAND (t, 0),
1044 TREE_OPERAND (t, 1)))
1045 {
1046 tem = negate_expr (TREE_OPERAND (t, 1));
1047 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1048 tem, TREE_OPERAND (t, 0));
1049 return fold_convert (type, tem);
1050 }
1051
1052 /* -(A + B) -> (-A) - B. */
1053 if (negate_expr_p (TREE_OPERAND (t, 0)))
1054 {
1055 tem = negate_expr (TREE_OPERAND (t, 0));
1056 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1057 tem, TREE_OPERAND (t, 1));
1058 return fold_convert (type, tem);
1059 }
1060 }
1061 break;
1062
1063 case MINUS_EXPR:
1064 /* - (A - B) -> B - A */
1065 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1066 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1067 return fold_convert (type,
1068 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1069 TREE_OPERAND (t, 1),
1070 TREE_OPERAND (t, 0)));
1071 break;
1072
1073 case MULT_EXPR:
1074 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1075 break;
1076
1077 /* Fall through. */
1078
1079 case RDIV_EXPR:
1080 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1081 {
1082 tem = TREE_OPERAND (t, 1);
1083 if (negate_expr_p (tem))
1084 return fold_convert (type,
1085 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1086 TREE_OPERAND (t, 0),
1087 negate_expr (tem)));
1088 tem = TREE_OPERAND (t, 0);
1089 if (negate_expr_p (tem))
1090 return fold_convert (type,
1091 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1092 negate_expr (tem),
1093 TREE_OPERAND (t, 1)));
1094 }
1095 break;
1096
1097 case NOP_EXPR:
1098 /* Convert -((double)float) into (double)(-float). */
1099 if (TREE_CODE (type) == REAL_TYPE)
1100 {
1101 tem = strip_float_extensions (t);
1102 if (tem != t && negate_expr_p (tem))
1103 return fold_convert (type, negate_expr (tem));
1104 }
1105 break;
1106
1107 case CALL_EXPR:
1108 /* Negate -f(x) as f(-x). */
1109 if (negate_mathfn_p (builtin_mathfn_code (t))
1110 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1111 {
1112 tree fndecl, arg, arglist;
1113
1114 fndecl = get_callee_fndecl (t);
1115 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1116 arglist = build_tree_list (NULL_TREE, arg);
1117 return build_function_call_expr (fndecl, arglist);
1118 }
1119 break;
1120
1121 case RSHIFT_EXPR:
1122 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1123 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1124 {
1125 tree op1 = TREE_OPERAND (t, 1);
1126 if (TREE_INT_CST_HIGH (op1) == 0
1127 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1128 == TREE_INT_CST_LOW (op1))
1129 {
1130 tree ntype = TYPE_UNSIGNED (type)
1131 ? lang_hooks.types.signed_type (type)
1132 : lang_hooks.types.unsigned_type (type);
1133 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1134 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1135 return fold_convert (type, temp);
1136 }
1137 }
1138 break;
1139
1140 default:
1141 break;
1142 }
1143
1144 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1145 return fold_convert (type, tem);
1146 }
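/* Editorial aside (not from the GCC sources): the RSHIFT_EXPR case
   above depends on the identity -((int) x >> 31) == (unsigned) x >> 31
   for 32-bit x, assuming the usual arithmetic right shift of negative
   values: the signed shift smears the sign bit into 0 or -1, and
   negating that is the same as extracting the sign bit logically.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t x;

  for (x = -3; x <= 3; x++)
    assert (-(x >> 31) == (int32_t) ((uint32_t) x >> 31));
  assert (-(INT32_MIN >> 31) == 1);
  return 0;
}
#endif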
1147 \f
1148 /* Split a tree IN into constant, literal, and variable parts that could be
1149 combined with CODE to make IN. "constant" means an expression with
1150 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1151 commutative arithmetic operation. Store the constant part into *CONP,
1152 the literal in *LITP and return the variable part. If a part isn't
1153 present, set it to null. If the tree does not decompose in this way,
1154 return the entire tree as the variable part and the other parts as null.
1155
1156 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1157 case, we negate an operand that was subtracted. Except if it is a
1158 literal for which we use *MINUS_LITP instead.
1159
1160 If NEGATE_P is true, we are negating all of IN, again except a literal
1161 for which we use *MINUS_LITP instead.
1162
1163 If IN is itself a literal or constant, return it as appropriate.
1164
1165 Note that we do not guarantee that any of the three values will be the
1166 same type as IN, but they will have the same signedness and mode. */
1167
1168 static tree
1169 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1170 tree *minus_litp, int negate_p)
1171 {
1172 tree var = 0;
1173
1174 *conp = 0;
1175 *litp = 0;
1176 *minus_litp = 0;
1177
1178 /* Strip any conversions that don't change the machine mode or signedness. */
1179 STRIP_SIGN_NOPS (in);
1180
1181 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1182 *litp = in;
1183 else if (TREE_CODE (in) == code
1184 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1185 /* We can associate addition and subtraction together (even
1186 though the C standard doesn't say so) for integers because
1187 the value is not affected. For reals, the value might be
1188 affected, so we can't. */
1189 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1190 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1191 {
1192 tree op0 = TREE_OPERAND (in, 0);
1193 tree op1 = TREE_OPERAND (in, 1);
1194 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1195 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1196
1197 /* First see if either of the operands is a literal, then a constant. */
1198 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1199 *litp = op0, op0 = 0;
1200 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1201 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1202
1203 if (op0 != 0 && TREE_CONSTANT (op0))
1204 *conp = op0, op0 = 0;
1205 else if (op1 != 0 && TREE_CONSTANT (op1))
1206 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1207
1208 /* If we haven't dealt with either operand, this is not a case we can
1209 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1210 if (op0 != 0 && op1 != 0)
1211 var = in;
1212 else if (op0 != 0)
1213 var = op0;
1214 else
1215 var = op1, neg_var_p = neg1_p;
1216
1217 /* Now do any needed negations. */
1218 if (neg_litp_p)
1219 *minus_litp = *litp, *litp = 0;
1220 if (neg_conp_p)
1221 *conp = negate_expr (*conp);
1222 if (neg_var_p)
1223 var = negate_expr (var);
1224 }
1225 else if (TREE_CONSTANT (in))
1226 *conp = in;
1227 else
1228 var = in;
1229
1230 if (negate_p)
1231 {
1232 if (*litp)
1233 *minus_litp = *litp, *litp = 0;
1234 else if (*minus_litp)
1235 *litp = *minus_litp, *minus_litp = 0;
1236 *conp = negate_expr (*conp);
1237 var = negate_expr (var);
1238 }
1239
1240 return var;
1241 }
1242
1243 /* Re-associate trees split by the above function. T1 and T2 are either
1244 expressions to associate or null. Return the new expression, if any. If
1245 we build an operation, do it in TYPE and with CODE. */
1246
1247 static tree
1248 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1249 {
1250 if (t1 == 0)
1251 return t2;
1252 else if (t2 == 0)
1253 return t1;
1254
1255 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1256 try to fold this since we will have infinite recursion. But do
1257 deal with any NEGATE_EXPRs. */
1258 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1259 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1260 {
1261 if (code == PLUS_EXPR)
1262 {
1263 if (TREE_CODE (t1) == NEGATE_EXPR)
1264 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1265 fold_convert (type, TREE_OPERAND (t1, 0)));
1266 else if (TREE_CODE (t2) == NEGATE_EXPR)
1267 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1268 fold_convert (type, TREE_OPERAND (t2, 0)));
1269 else if (integer_zerop (t2))
1270 return fold_convert (type, t1);
1271 }
1272 else if (code == MINUS_EXPR)
1273 {
1274 if (integer_zerop (t2))
1275 return fold_convert (type, t1);
1276 }
1277
1278 return build2 (code, type, fold_convert (type, t1),
1279 fold_convert (type, t2));
1280 }
1281
1282 return fold_build2 (code, type, fold_convert (type, t1),
1283 fold_convert (type, t2));
1284 }
1285 \f
1286 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1287 to produce a new constant.
1288
1289 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1290
1291 tree
1292 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1293 {
1294 unsigned HOST_WIDE_INT int1l, int2l;
1295 HOST_WIDE_INT int1h, int2h;
1296 unsigned HOST_WIDE_INT low;
1297 HOST_WIDE_INT hi;
1298 unsigned HOST_WIDE_INT garbagel;
1299 HOST_WIDE_INT garbageh;
1300 tree t;
1301 tree type = TREE_TYPE (arg1);
1302 int uns = TYPE_UNSIGNED (type);
1303 int is_sizetype
1304 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1305 int overflow = 0;
1306
1307 int1l = TREE_INT_CST_LOW (arg1);
1308 int1h = TREE_INT_CST_HIGH (arg1);
1309 int2l = TREE_INT_CST_LOW (arg2);
1310 int2h = TREE_INT_CST_HIGH (arg2);
1311
1312 switch (code)
1313 {
1314 case BIT_IOR_EXPR:
1315 low = int1l | int2l, hi = int1h | int2h;
1316 break;
1317
1318 case BIT_XOR_EXPR:
1319 low = int1l ^ int2l, hi = int1h ^ int2h;
1320 break;
1321
1322 case BIT_AND_EXPR:
1323 low = int1l & int2l, hi = int1h & int2h;
1324 break;
1325
1326 case RSHIFT_EXPR:
1327 int2l = -int2l;
1328 case LSHIFT_EXPR:
1329 /* It's unclear from the C standard whether shifts can overflow.
1330 The following code ignores overflow; perhaps a C standard
1331 interpretation ruling is needed. */
1332 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1333 &low, &hi, !uns);
1334 break;
1335
1336 case RROTATE_EXPR:
1337 int2l = - int2l;
1338 case LROTATE_EXPR:
1339 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1340 &low, &hi);
1341 break;
1342
1343 case PLUS_EXPR:
1344 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1345 break;
1346
1347 case MINUS_EXPR:
1348 neg_double (int2l, int2h, &low, &hi);
1349 add_double (int1l, int1h, low, hi, &low, &hi);
1350 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1351 break;
1352
1353 case MULT_EXPR:
1354 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1355 break;
1356
1357 case TRUNC_DIV_EXPR:
1358 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1359 case EXACT_DIV_EXPR:
1360 /* This is a shortcut for a common special case. */
1361 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1362 && ! TREE_CONSTANT_OVERFLOW (arg1)
1363 && ! TREE_CONSTANT_OVERFLOW (arg2)
1364 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1365 {
1366 if (code == CEIL_DIV_EXPR)
1367 int1l += int2l - 1;
1368
1369 low = int1l / int2l, hi = 0;
1370 break;
1371 }
1372
1373 /* ... fall through ... */
1374
1375 case ROUND_DIV_EXPR:
1376 if (int2h == 0 && int2l == 1)
1377 {
1378 low = int1l, hi = int1h;
1379 break;
1380 }
1381 if (int1l == int2l && int1h == int2h
1382 && ! (int1l == 0 && int1h == 0))
1383 {
1384 low = 1, hi = 0;
1385 break;
1386 }
1387 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1388 &low, &hi, &garbagel, &garbageh);
1389 break;
1390
1391 case TRUNC_MOD_EXPR:
1392 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1393 /* This is a shortcut for a common special case. */
1394 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1395 && ! TREE_CONSTANT_OVERFLOW (arg1)
1396 && ! TREE_CONSTANT_OVERFLOW (arg2)
1397 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1398 {
1399 if (code == CEIL_MOD_EXPR)
1400 int1l += int2l - 1;
1401 low = int1l % int2l, hi = 0;
1402 break;
1403 }
1404
1405 /* ... fall through ... */
1406
1407 case ROUND_MOD_EXPR:
1408 overflow = div_and_round_double (code, uns,
1409 int1l, int1h, int2l, int2h,
1410 &garbagel, &garbageh, &low, &hi);
1411 break;
1412
1413 case MIN_EXPR:
1414 case MAX_EXPR:
1415 if (uns)
1416 low = (((unsigned HOST_WIDE_INT) int1h
1417 < (unsigned HOST_WIDE_INT) int2h)
1418 || (((unsigned HOST_WIDE_INT) int1h
1419 == (unsigned HOST_WIDE_INT) int2h)
1420 && int1l < int2l));
1421 else
1422 low = (int1h < int2h
1423 || (int1h == int2h && int1l < int2l));
1424
1425 if (low == (code == MIN_EXPR))
1426 low = int1l, hi = int1h;
1427 else
1428 low = int2l, hi = int2h;
1429 break;
1430
1431 default:
1432 gcc_unreachable ();
1433 }
1434
1435 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1436
1437 if (notrunc)
1438 {
1439 /* Propagate overflow flags ourselves. */
1440 if (((!uns || is_sizetype) && overflow)
1441 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1442 {
1443 t = copy_node (t);
1444 TREE_OVERFLOW (t) = 1;
1445 TREE_CONSTANT_OVERFLOW (t) = 1;
1446 }
1447 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1448 {
1449 t = copy_node (t);
1450 TREE_CONSTANT_OVERFLOW (t) = 1;
1451 }
1452 }
1453 else
1454 t = force_fit_type (t, 1,
1455 ((!uns || is_sizetype) && overflow)
1456 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1457 TREE_CONSTANT_OVERFLOW (arg1)
1458 | TREE_CONSTANT_OVERFLOW (arg2));
1459
1460 return t;
1461 }
1462
1463 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1464 constant. We assume ARG1 and ARG2 have the same data type, or at least
1465 are the same kind of constant and the same machine mode.
1466
1467 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1468
1469 static tree
1470 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1471 {
1472 STRIP_NOPS (arg1);
1473 STRIP_NOPS (arg2);
1474
1475 if (TREE_CODE (arg1) == INTEGER_CST)
1476 return int_const_binop (code, arg1, arg2, notrunc);
1477
1478 if (TREE_CODE (arg1) == REAL_CST)
1479 {
1480 enum machine_mode mode;
1481 REAL_VALUE_TYPE d1;
1482 REAL_VALUE_TYPE d2;
1483 REAL_VALUE_TYPE value;
1484 REAL_VALUE_TYPE result;
1485 bool inexact;
1486 tree t, type;
1487
1488 d1 = TREE_REAL_CST (arg1);
1489 d2 = TREE_REAL_CST (arg2);
1490
1491 type = TREE_TYPE (arg1);
1492 mode = TYPE_MODE (type);
1493
1494 /* Don't perform operation if we honor signaling NaNs and
1495 either operand is a NaN. */
1496 if (HONOR_SNANS (mode)
1497 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1498 return NULL_TREE;
1499
1500 /* Don't perform operation if it would raise a division
1501 by zero exception. */
1502 if (code == RDIV_EXPR
1503 && REAL_VALUES_EQUAL (d2, dconst0)
1504 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1505 return NULL_TREE;
1506
1507 /* If either operand is a NaN, just return it. Otherwise, set up
1508 for floating-point trap; we return an overflow. */
1509 if (REAL_VALUE_ISNAN (d1))
1510 return arg1;
1511 else if (REAL_VALUE_ISNAN (d2))
1512 return arg2;
1513
1514 inexact = real_arithmetic (&value, code, &d1, &d2);
1515 real_convert (&result, mode, &value);
1516
1517 /* Don't constant fold this floating point operation if the
1518 result may depend upon the run-time rounding mode and
1519 flag_rounding_math is set, or if GCC's software emulation
1520 is unable to accurately represent the result. */
1521
1522 if ((flag_rounding_math
1523 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1524 && !flag_unsafe_math_optimizations))
1525 && (inexact || !real_identical (&result, &value)))
1526 return NULL_TREE;
1527
1528 t = build_real (type, result);
1529
1530 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1531 TREE_CONSTANT_OVERFLOW (t)
1532 = TREE_OVERFLOW (t)
1533 | TREE_CONSTANT_OVERFLOW (arg1)
1534 | TREE_CONSTANT_OVERFLOW (arg2);
1535 return t;
1536 }
1537 if (TREE_CODE (arg1) == COMPLEX_CST)
1538 {
1539 tree type = TREE_TYPE (arg1);
1540 tree r1 = TREE_REALPART (arg1);
1541 tree i1 = TREE_IMAGPART (arg1);
1542 tree r2 = TREE_REALPART (arg2);
1543 tree i2 = TREE_IMAGPART (arg2);
1544 tree t;
1545
1546 switch (code)
1547 {
1548 case PLUS_EXPR:
1549 t = build_complex (type,
1550 const_binop (PLUS_EXPR, r1, r2, notrunc),
1551 const_binop (PLUS_EXPR, i1, i2, notrunc));
1552 break;
1553
1554 case MINUS_EXPR:
1555 t = build_complex (type,
1556 const_binop (MINUS_EXPR, r1, r2, notrunc),
1557 const_binop (MINUS_EXPR, i1, i2, notrunc));
1558 break;
1559
1560 case MULT_EXPR:
1561 t = build_complex (type,
1562 const_binop (MINUS_EXPR,
1563 const_binop (MULT_EXPR,
1564 r1, r2, notrunc),
1565 const_binop (MULT_EXPR,
1566 i1, i2, notrunc),
1567 notrunc),
1568 const_binop (PLUS_EXPR,
1569 const_binop (MULT_EXPR,
1570 r1, i2, notrunc),
1571 const_binop (MULT_EXPR,
1572 i1, r2, notrunc),
1573 notrunc));
1574 break;
1575
1576 case RDIV_EXPR:
1577 {
1578 tree magsquared
1579 = const_binop (PLUS_EXPR,
1580 const_binop (MULT_EXPR, r2, r2, notrunc),
1581 const_binop (MULT_EXPR, i2, i2, notrunc),
1582 notrunc);
1583
1584 t = build_complex (type,
1585 const_binop
1586 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1587 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1588 const_binop (PLUS_EXPR,
1589 const_binop (MULT_EXPR, r1, r2,
1590 notrunc),
1591 const_binop (MULT_EXPR, i1, i2,
1592 notrunc),
1593 notrunc),
1594 magsquared, notrunc),
1595 const_binop
1596 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1597 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1598 const_binop (MINUS_EXPR,
1599 const_binop (MULT_EXPR, i1, r2,
1600 notrunc),
1601 const_binop (MULT_EXPR, r1, i2,
1602 notrunc),
1603 notrunc),
1604 magsquared, notrunc));
1605 }
1606 break;
1607
1608 default:
1609 gcc_unreachable ();
1610 }
1611 return t;
1612 }
1613 return 0;
1614 }
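/* Editorial aside (not from the GCC sources): the complex MULT_EXPR
   and RDIV_EXPR cases above are the textbook formulas
     (a+bi)(c+di) = (ac - bd) + (ad + bc)i
     (a+bi)/(c+di) = ((ac + bd) + (bc - ad)i) / (c^2 + d^2)
   spelled out with const_binop on the parts.  A double-precision
   check of both, using values where the arithmetic is exact:  */
#if 0
#include <assert.h>

int
main (void)
{
  double a = 1, b = 2, c = 3, d = 4;
  double magsq = c * c + d * d;                 /* 25 */

  /* (1+2i)(3+4i) = -5 + 10i  */
  assert (a * c - b * d == -5 && a * d + b * c == 10);

  /* (1+2i)/(3+4i) = (11 + 2i)/25 = 0.44 + 0.08i  */
  assert ((a * c + b * d) / magsq == 0.44
          && (b * c - a * d) / magsq == 0.08);
  return 0;
}
#endif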
1615
1616 /* Create a size type INTEGER_CST node with NUMBER sign extended. KIND
1617 indicates which particular sizetype to create. */
1618
1619 tree
1620 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1621 {
1622 return build_int_cst (sizetype_tab[(int) kind], number);
1623 }
1624 \f
1625 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1626 is a tree code. The type of the result is taken from the operands.
1627 Both must be of the same integer type, and it must be a size type.
1628 If the operands are constant, so is the result. */
1629
1630 tree
1631 size_binop (enum tree_code code, tree arg0, tree arg1)
1632 {
1633 tree type = TREE_TYPE (arg0);
1634
1635 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1636 && type == TREE_TYPE (arg1));
1637
1638 /* Handle the special case of two integer constants faster. */
1639 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1640 {
1641 /* And some specific cases even faster than that. */
1642 if (code == PLUS_EXPR && integer_zerop (arg0))
1643 return arg1;
1644 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1645 && integer_zerop (arg1))
1646 return arg0;
1647 else if (code == MULT_EXPR && integer_onep (arg0))
1648 return arg1;
1649
1650 /* Handle general case of two integer constants. */
1651 return int_const_binop (code, arg0, arg1, 0);
1652 }
1653
1654 if (arg0 == error_mark_node || arg1 == error_mark_node)
1655 return error_mark_node;
1656
1657 return fold_build2 (code, type, arg0, arg1);
1658 }
1659
1660 /* Given two values, either both of sizetype or both of bitsizetype,
1661 compute the difference between the two values. Return the value
1662 in a signed type corresponding to the type of the operands. */
1663
1664 tree
1665 size_diffop (tree arg0, tree arg1)
1666 {
1667 tree type = TREE_TYPE (arg0);
1668 tree ctype;
1669
1670 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1671 && type == TREE_TYPE (arg1));
1672
1673 /* If the type is already signed, just do the simple thing. */
1674 if (!TYPE_UNSIGNED (type))
1675 return size_binop (MINUS_EXPR, arg0, arg1);
1676
1677 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1678
1679 /* If either operand is not a constant, do the conversions to the signed
1680 type and subtract. The hardware will do the right thing with any
1681 overflow in the subtraction. */
1682 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1683 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1684 fold_convert (ctype, arg1));
1685
1686 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1687 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1688 overflow) and negate (which can't either). Special-case a result
1689 of zero while we're here. */
1690 if (tree_int_cst_equal (arg0, arg1))
1691 return fold_convert (ctype, integer_zero_node);
1692 else if (tree_int_cst_lt (arg1, arg0))
1693 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1694 else
1695 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1696 fold_convert (ctype, size_binop (MINUS_EXPR,
1697 arg1, arg0)));
1698 }
1699 \f
1700 /* A subroutine of fold_convert_const handling conversions of an
1701 INTEGER_CST to another integer type. */
1702
1703 static tree
1704 fold_convert_const_int_from_int (tree type, tree arg1)
1705 {
1706 tree t;
1707
1708 /* Given an integer constant, make new constant with new type,
1709 appropriately sign-extended or truncated. */
1710 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1711 TREE_INT_CST_HIGH (arg1));
1712
1713 t = force_fit_type (t,
1714 /* Don't set the overflow when
1715 converting a pointer */
1716 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1717 (TREE_INT_CST_HIGH (arg1) < 0
1718 && (TYPE_UNSIGNED (type)
1719 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1720 | TREE_OVERFLOW (arg1),
1721 TREE_CONSTANT_OVERFLOW (arg1));
1722
1723 return t;
1724 }
1725
1726 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1727 to an integer type. */
1728
1729 static tree
1730 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1731 {
1732 int overflow = 0;
1733 tree t;
1734
1735 /* The following code implements the floating point to integer
1736 conversion rules required by the Java Language Specification,
1737 that IEEE NaNs are mapped to zero and values that overflow
1738 the target precision saturate, i.e. values greater than
1739 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1740 are mapped to INT_MIN. These semantics are allowed by the
1741 C and C++ standards that simply state that the behavior of
1742 FP-to-integer conversion is unspecified upon overflow. */
1743
1744 HOST_WIDE_INT high, low;
1745 REAL_VALUE_TYPE r;
1746 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1747
1748 switch (code)
1749 {
1750 case FIX_TRUNC_EXPR:
1751 real_trunc (&r, VOIDmode, &x);
1752 break;
1753
1754 case FIX_CEIL_EXPR:
1755 real_ceil (&r, VOIDmode, &x);
1756 break;
1757
1758 case FIX_FLOOR_EXPR:
1759 real_floor (&r, VOIDmode, &x);
1760 break;
1761
1762 case FIX_ROUND_EXPR:
1763 real_round (&r, VOIDmode, &x);
1764 break;
1765
1766 default:
1767 gcc_unreachable ();
1768 }
1769
1770 /* If R is NaN, return zero and show we have an overflow. */
1771 if (REAL_VALUE_ISNAN (r))
1772 {
1773 overflow = 1;
1774 high = 0;
1775 low = 0;
1776 }
1777
1778 /* See if R is less than the lower bound or greater than the
1779 upper bound. */
1780
1781 if (! overflow)
1782 {
1783 tree lt = TYPE_MIN_VALUE (type);
1784 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1785 if (REAL_VALUES_LESS (r, l))
1786 {
1787 overflow = 1;
1788 high = TREE_INT_CST_HIGH (lt);
1789 low = TREE_INT_CST_LOW (lt);
1790 }
1791 }
1792
1793 if (! overflow)
1794 {
1795 tree ut = TYPE_MAX_VALUE (type);
1796 if (ut)
1797 {
1798 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1799 if (REAL_VALUES_LESS (u, r))
1800 {
1801 overflow = 1;
1802 high = TREE_INT_CST_HIGH (ut);
1803 low = TREE_INT_CST_LOW (ut);
1804 }
1805 }
1806 }
1807
1808 if (! overflow)
1809 REAL_VALUE_TO_INT (&low, &high, r);
1810
1811 t = build_int_cst_wide (type, low, high);
1812
1813 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1814 TREE_CONSTANT_OVERFLOW (arg1));
1815 return t;
1816 }
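/* Illustrative sketch (an editorial aside, not from the GCC sources):
   the Java-style saturating conversion described above, for
   double -> int32 with truncation as the rounding step.  */
#if 0
#include <assert.h>
#include <math.h>
#include <stdint.h>

static int32_t
saturating_dtoi (double r)
{
  if (isnan (r))
    return 0;                       /* NaN maps to zero       */
  if (r < (double) INT32_MIN)
    return INT32_MIN;               /* saturate at the bottom */
  if (r > (double) INT32_MAX)
    return INT32_MAX;               /* saturate at the top    */
  return (int32_t) r;               /* in range: truncate     */
}

int
main (void)
{
  assert (saturating_dtoi (nan ("")) == 0);
  assert (saturating_dtoi (1e30) == INT32_MAX);
  assert (saturating_dtoi (-1e30) == INT32_MIN);
  assert (saturating_dtoi (-2.7) == -2);
  return 0;
}
#endif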
1817
1818 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1819 to another floating point type. */
1820
1821 static tree
1822 fold_convert_const_real_from_real (tree type, tree arg1)
1823 {
1824 REAL_VALUE_TYPE value;
1825 tree t;
1826
1827 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1828 t = build_real (type, value);
1829
1830 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1831 TREE_CONSTANT_OVERFLOW (t)
1832 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1833 return t;
1834 }
1835
1836 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1837 type TYPE. If no simplification can be done return NULL_TREE. */
1838
1839 static tree
1840 fold_convert_const (enum tree_code code, tree type, tree arg1)
1841 {
1842 if (TREE_TYPE (arg1) == type)
1843 return arg1;
1844
1845 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1846 {
1847 if (TREE_CODE (arg1) == INTEGER_CST)
1848 return fold_convert_const_int_from_int (type, arg1);
1849 else if (TREE_CODE (arg1) == REAL_CST)
1850 return fold_convert_const_int_from_real (code, type, arg1);
1851 }
1852 else if (TREE_CODE (type) == REAL_TYPE)
1853 {
1854 if (TREE_CODE (arg1) == INTEGER_CST)
1855 return build_real_from_int_cst (type, arg1);
1856 if (TREE_CODE (arg1) == REAL_CST)
1857 return fold_convert_const_real_from_real (type, arg1);
1858 }
1859 return NULL_TREE;
1860 }
1861
1862 /* Construct a vector of zero elements of vector type TYPE. */
1863
1864 static tree
1865 build_zero_vector (tree type)
1866 {
1867 tree elem, list;
1868 int i, units;
1869
1870 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1871 units = TYPE_VECTOR_SUBPARTS (type);
1872
1873 list = NULL_TREE;
1874 for (i = 0; i < units; i++)
1875 list = tree_cons (NULL_TREE, elem, list);
1876 return build_vector (type, list);
1877 }
1878
1879 /* Convert expression ARG to type TYPE. Used by the middle-end for
1880 simple conversions in preference to calling the front-end's convert. */
1881
1882 tree
1883 fold_convert (tree type, tree arg)
1884 {
1885 tree orig = TREE_TYPE (arg);
1886 tree tem;
1887
1888 if (type == orig)
1889 return arg;
1890
1891 if (TREE_CODE (arg) == ERROR_MARK
1892 || TREE_CODE (type) == ERROR_MARK
1893 || TREE_CODE (orig) == ERROR_MARK)
1894 return error_mark_node;
1895
1896 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1897 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1898 TYPE_MAIN_VARIANT (orig)))
1899 return fold_build1 (NOP_EXPR, type, arg);
1900
1901 switch (TREE_CODE (type))
1902 {
1903 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1904 case POINTER_TYPE: case REFERENCE_TYPE:
1905 case OFFSET_TYPE:
1906 if (TREE_CODE (arg) == INTEGER_CST)
1907 {
1908 tem = fold_convert_const (NOP_EXPR, type, arg);
1909 if (tem != NULL_TREE)
1910 return tem;
1911 }
1912 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1913 || TREE_CODE (orig) == OFFSET_TYPE)
1914 return fold_build1 (NOP_EXPR, type, arg);
1915 if (TREE_CODE (orig) == COMPLEX_TYPE)
1916 {
1917 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1918 return fold_convert (type, tem);
1919 }
1920 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1921 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1922 return fold_build1 (NOP_EXPR, type, arg);
1923
1924 case REAL_TYPE:
1925 if (TREE_CODE (arg) == INTEGER_CST)
1926 {
1927 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1929 return tem;
1930 }
1931 else if (TREE_CODE (arg) == REAL_CST)
1932 {
1933 tem = fold_convert_const (NOP_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1935 return tem;
1936 }
1937
1938 switch (TREE_CODE (orig))
1939 {
1940 case INTEGER_TYPE: case CHAR_TYPE:
1941 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1942 case POINTER_TYPE: case REFERENCE_TYPE:
1943 return fold_build1 (FLOAT_EXPR, type, arg);
1944
1945 case REAL_TYPE:
1946 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1947 type, arg);
1948
1949 case COMPLEX_TYPE:
1950 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1951 return fold_convert (type, tem);
1952
1953 default:
1954 gcc_unreachable ();
1955 }
1956
1957 case COMPLEX_TYPE:
1958 switch (TREE_CODE (orig))
1959 {
1960 case INTEGER_TYPE: case CHAR_TYPE:
1961 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1962 case POINTER_TYPE: case REFERENCE_TYPE:
1963 case REAL_TYPE:
1964 return build2 (COMPLEX_EXPR, type,
1965 fold_convert (TREE_TYPE (type), arg),
1966 fold_convert (TREE_TYPE (type), integer_zero_node));
1967 case COMPLEX_TYPE:
1968 {
1969 tree rpart, ipart;
1970
1971 if (TREE_CODE (arg) == COMPLEX_EXPR)
1972 {
1973 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1974 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1975 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
1976 }
1977
1978 arg = save_expr (arg);
1979 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1980 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
1981 rpart = fold_convert (TREE_TYPE (type), rpart);
1982 ipart = fold_convert (TREE_TYPE (type), ipart);
1983 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
1984 }
1985
1986 default:
1987 gcc_unreachable ();
1988 }
1989
1990 case VECTOR_TYPE:
1991 if (integer_zerop (arg))
1992 return build_zero_vector (type);
1993 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1994 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1995 || TREE_CODE (orig) == VECTOR_TYPE);
1996 return fold_build1 (NOP_EXPR, type, arg);
1997
1998 case VOID_TYPE:
1999 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2000
2001 default:
2002 gcc_unreachable ();
2003 }
2004 }
2005 \f
2006 /* Return false if expr can be assumed not to be an lvalue, true
2007 otherwise. */
2008
2009 static bool
2010 maybe_lvalue_p (tree x)
2011 {
2012 /* We only need to wrap lvalue tree codes. */
2013 switch (TREE_CODE (x))
2014 {
2015 case VAR_DECL:
2016 case PARM_DECL:
2017 case RESULT_DECL:
2018 case LABEL_DECL:
2019 case FUNCTION_DECL:
2020 case SSA_NAME:
2021
2022 case COMPONENT_REF:
2023 case INDIRECT_REF:
2024 case ALIGN_INDIRECT_REF:
2025 case MISALIGNED_INDIRECT_REF:
2026 case ARRAY_REF:
2027 case ARRAY_RANGE_REF:
2028 case BIT_FIELD_REF:
2029 case OBJ_TYPE_REF:
2030
2031 case REALPART_EXPR:
2032 case IMAGPART_EXPR:
2033 case PREINCREMENT_EXPR:
2034 case PREDECREMENT_EXPR:
2035 case SAVE_EXPR:
2036 case TRY_CATCH_EXPR:
2037 case WITH_CLEANUP_EXPR:
2038 case COMPOUND_EXPR:
2039 case MODIFY_EXPR:
2040 case TARGET_EXPR:
2041 case COND_EXPR:
2042 case BIND_EXPR:
2043 case MIN_EXPR:
2044 case MAX_EXPR:
2045 break;
2046
2047 default:
2048 /* Assume the worst for front-end tree codes. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2050 break;
2051 return false;
2052 }
2053
2054 return true;
2055 }
2056
2057 /* Return an expr equal to X but certainly not valid as an lvalue. */
2058
2059 tree
2060 non_lvalue (tree x)
2061 {
2062 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2063 us. */
2064 if (in_gimple_form)
2065 return x;
2066
2067 if (! maybe_lvalue_p (x))
2068 return x;
2069 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2070 }
2071
2072 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2073 Zero means allow extended lvalues. */
2074
2075 int pedantic_lvalues;
2076
2077 /* When pedantic, return an expr equal to X but certainly not valid as a
2078 pedantic lvalue. Otherwise, return X. */
2079
2080 static tree
2081 pedantic_non_lvalue (tree x)
2082 {
2083 if (pedantic_lvalues)
2084 return non_lvalue (x);
2085 else
2086 return x;
2087 }
2088 \f
2089 /* Given a tree comparison code, return the code that is the logical inverse
2090 of the given code. It is not safe to do this for floating-point
2091 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2092 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
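/* For example, with NaNs honored but -ftrapping-math disabled, LT_EXPR
   inverts to UNGE_EXPR rather than GE_EXPR, since the inverse of x < y
   must also be true when either operand is a NaN. */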
2093
2094 static enum tree_code
2095 invert_tree_comparison (enum tree_code code, bool honor_nans)
2096 {
2097 if (honor_nans && flag_trapping_math)
2098 return ERROR_MARK;
2099
2100 switch (code)
2101 {
2102 case EQ_EXPR:
2103 return NE_EXPR;
2104 case NE_EXPR:
2105 return EQ_EXPR;
2106 case GT_EXPR:
2107 return honor_nans ? UNLE_EXPR : LE_EXPR;
2108 case GE_EXPR:
2109 return honor_nans ? UNLT_EXPR : LT_EXPR;
2110 case LT_EXPR:
2111 return honor_nans ? UNGE_EXPR : GE_EXPR;
2112 case LE_EXPR:
2113 return honor_nans ? UNGT_EXPR : GT_EXPR;
2114 case LTGT_EXPR:
2115 return UNEQ_EXPR;
2116 case UNEQ_EXPR:
2117 return LTGT_EXPR;
2118 case UNGT_EXPR:
2119 return LE_EXPR;
2120 case UNGE_EXPR:
2121 return LT_EXPR;
2122 case UNLT_EXPR:
2123 return GE_EXPR;
2124 case UNLE_EXPR:
2125 return GT_EXPR;
2126 case ORDERED_EXPR:
2127 return UNORDERED_EXPR;
2128 case UNORDERED_EXPR:
2129 return ORDERED_EXPR;
2130 default:
2131 gcc_unreachable ();
2132 }
2133 }
2134
2135 /* Similar, but return the comparison that results if the operands are
2136 swapped. This is safe for floating-point. */
2137
2138 enum tree_code
2139 swap_tree_comparison (enum tree_code code)
2140 {
2141 switch (code)
2142 {
2143 case EQ_EXPR:
2144 case NE_EXPR:
2145 return code;
2146 case GT_EXPR:
2147 return LT_EXPR;
2148 case GE_EXPR:
2149 return LE_EXPR;
2150 case LT_EXPR:
2151 return GT_EXPR;
2152 case LE_EXPR:
2153 return GE_EXPR;
2154 default:
2155 gcc_unreachable ();
2156 }
2157 }
2158
2159
2160 /* Convert a comparison tree code from an enum tree_code representation
2161 into a compcode bit-based encoding. This function is the inverse of
2162 compcode_to_comparison. */
2163
2164 static enum comparison_code
2165 comparison_to_compcode (enum tree_code code)
2166 {
2167 switch (code)
2168 {
2169 case LT_EXPR:
2170 return COMPCODE_LT;
2171 case EQ_EXPR:
2172 return COMPCODE_EQ;
2173 case LE_EXPR:
2174 return COMPCODE_LE;
2175 case GT_EXPR:
2176 return COMPCODE_GT;
2177 case NE_EXPR:
2178 return COMPCODE_NE;
2179 case GE_EXPR:
2180 return COMPCODE_GE;
2181 case ORDERED_EXPR:
2182 return COMPCODE_ORD;
2183 case UNORDERED_EXPR:
2184 return COMPCODE_UNORD;
2185 case UNLT_EXPR:
2186 return COMPCODE_UNLT;
2187 case UNEQ_EXPR:
2188 return COMPCODE_UNEQ;
2189 case UNLE_EXPR:
2190 return COMPCODE_UNLE;
2191 case UNGT_EXPR:
2192 return COMPCODE_UNGT;
2193 case LTGT_EXPR:
2194 return COMPCODE_LTGT;
2195 case UNGE_EXPR:
2196 return COMPCODE_UNGE;
2197 default:
2198 gcc_unreachable ();
2199 }
2200 }
2201
2202 /* Convert a compcode bit-based encoding of a comparison operator back
2203 to GCC's enum tree_code representation. This function is the
2204 inverse of comparison_to_compcode. */
2205
2206 static enum tree_code
2207 compcode_to_comparison (enum comparison_code code)
2208 {
2209 switch (code)
2210 {
2211 case COMPCODE_LT:
2212 return LT_EXPR;
2213 case COMPCODE_EQ:
2214 return EQ_EXPR;
2215 case COMPCODE_LE:
2216 return LE_EXPR;
2217 case COMPCODE_GT:
2218 return GT_EXPR;
2219 case COMPCODE_NE:
2220 return NE_EXPR;
2221 case COMPCODE_GE:
2222 return GE_EXPR;
2223 case COMPCODE_ORD:
2224 return ORDERED_EXPR;
2225 case COMPCODE_UNORD:
2226 return UNORDERED_EXPR;
2227 case COMPCODE_UNLT:
2228 return UNLT_EXPR;
2229 case COMPCODE_UNEQ:
2230 return UNEQ_EXPR;
2231 case COMPCODE_UNLE:
2232 return UNLE_EXPR;
2233 case COMPCODE_UNGT:
2234 return UNGT_EXPR;
2235 case COMPCODE_LTGT:
2236 return LTGT_EXPR;
2237 case COMPCODE_UNGE:
2238 return UNGE_EXPR;
2239 default:
2240 gcc_unreachable ();
2241 }
2242 }
2243
2244 /* Return a tree for the comparison which is the combination of
2245 doing the AND or OR (depending on CODE) of the two operations LCODE
2246 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2247 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2248 if this makes the transformation invalid. */
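/* As an illustration of the bit-based encoding, folding
   (x < y) || (x == y) ORs COMPCODE_LT (1) with COMPCODE_EQ (2), giving
   COMPCODE_LE (3), so the combination is simply x <= y whenever the
   checks below show that to be safe. */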
2249
2250 tree
2251 combine_comparisons (enum tree_code code, enum tree_code lcode,
2252 enum tree_code rcode, tree truth_type,
2253 tree ll_arg, tree lr_arg)
2254 {
2255 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2256 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2257 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2258 enum comparison_code compcode;
2259
2260 switch (code)
2261 {
2262 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2263 compcode = lcompcode & rcompcode;
2264 break;
2265
2266 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2267 compcode = lcompcode | rcompcode;
2268 break;
2269
2270 default:
2271 return NULL_TREE;
2272 }
2273
2274 if (!honor_nans)
2275 {
2276 /* Eliminate unordered comparisons, as well as LTGT and ORD
2277 which are not used unless the mode has NaNs. */
2278 compcode &= ~COMPCODE_UNORD;
2279 if (compcode == COMPCODE_LTGT)
2280 compcode = COMPCODE_NE;
2281 else if (compcode == COMPCODE_ORD)
2282 compcode = COMPCODE_TRUE;
2283 }
2284 else if (flag_trapping_math)
2285 {
2286 /* Check that the original operation and the optimized ones will trap
2287 under the same condition. */
2288 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2289 && (lcompcode != COMPCODE_EQ)
2290 && (lcompcode != COMPCODE_ORD);
2291 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2292 && (rcompcode != COMPCODE_EQ)
2293 && (rcompcode != COMPCODE_ORD);
2294 bool trap = (compcode & COMPCODE_UNORD) == 0
2295 && (compcode != COMPCODE_EQ)
2296 && (compcode != COMPCODE_ORD);
2297
2298 /* In a short-circuited boolean expression the LHS might be
2299 such that the RHS, if evaluated, will never trap. For
2300 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2301 if neither x nor y is NaN. (This is a mixed blessing: for
2302 example, the expression above will never trap, hence
2303 optimizing it to x < y would be invalid). */
2304 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2305 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2306 rtrap = false;
2307
2308 /* If the comparison was short-circuited, and only the RHS
2309 trapped, we may now generate a spurious trap. */
2310 if (rtrap && !ltrap
2311 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2312 return NULL_TREE;
2313
2314 /* If we changed the conditions that cause a trap, we lose. */
2315 if ((ltrap || rtrap) != trap)
2316 return NULL_TREE;
2317 }
2318
2319 if (compcode == COMPCODE_TRUE)
2320 return constant_boolean_node (true, truth_type);
2321 else if (compcode == COMPCODE_FALSE)
2322 return constant_boolean_node (false, truth_type);
2323 else
2324 return fold_build2 (compcode_to_comparison (compcode),
2325 truth_type, ll_arg, lr_arg);
2326 }
2327
2328 /* Return nonzero if CODE is a tree code that represents a truth value. */
2329
2330 static int
2331 truth_value_p (enum tree_code code)
2332 {
2333 return (TREE_CODE_CLASS (code) == tcc_comparison
2334 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2335 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2336 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2337 }
2338 \f
2339 /* Return nonzero if two operands (typically of the same tree node)
2340 are necessarily equal. If either argument has side-effects this
2341 function returns zero. FLAGS modifies behavior as follows:
2342
2343 If OEP_ONLY_CONST is set, only return nonzero for constants.
2344 This function tests whether the operands are indistinguishable;
2345 it does not test whether they are equal using C's == operation.
2346 The distinction is important for IEEE floating point, because
2347 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2348 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2349
2350 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2351 even though it may hold multiple values during a function.
2352 This is because a GCC tree node guarantees that nothing else is
2353 executed between the evaluation of its "operands" (which may often
2354 be evaluated in arbitrary order). Hence if the operands themselves
2355 have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2356 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2357 unset means assuming isochronic (or instantaneous) tree equivalence.
2358 Unless comparing arbitrary expression trees, such as from different
2359 statements, this flag can usually be left unset.
2360
2361 If OEP_PURE_SAME is set, then pure functions with identical arguments
2362 are considered the same. It is used when the caller has other ways
2363 to ensure that global memory is unchanged in between. */
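/* For example, a + b and b + a compare equal through the commutative
   cases below, while two calls to the same function are never equal
   unless the function is const (or pure, with OEP_PURE_SAME). */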
2364
2365 int
2366 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2367 {
2368 /* If either is ERROR_MARK, they aren't equal. */
2369 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2370 return 0;
2371
2372 /* If both types don't have the same signedness, then we can't consider
2373 them equal. We must check this before the STRIP_NOPS calls
2374 because they may change the signedness of the arguments. */
2375 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2376 return 0;
2377
2378 STRIP_NOPS (arg0);
2379 STRIP_NOPS (arg1);
2380
2381 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2382 /* This is needed for conversions and for COMPONENT_REF.
2383 Might as well play it safe and always test this. */
2384 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2385 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2386 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2387 return 0;
2388
2389 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2390 We don't care about side effects in that case because the SAVE_EXPR
2391 takes care of that for us. In all other cases, two expressions are
2392 equal if they have no side effects. If we have two identical
2393 expressions with side effects that should be treated the same due
2394 to the only side effects being identical SAVE_EXPR's, that will
2395 be detected in the recursive calls below. */
2396 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2397 && (TREE_CODE (arg0) == SAVE_EXPR
2398 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2399 return 1;
2400
2401 /* Next handle constant cases, those for which we can return 1 even
2402 if ONLY_CONST is set. */
2403 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2404 switch (TREE_CODE (arg0))
2405 {
2406 case INTEGER_CST:
2407 return (! TREE_CONSTANT_OVERFLOW (arg0)
2408 && ! TREE_CONSTANT_OVERFLOW (arg1)
2409 && tree_int_cst_equal (arg0, arg1));
2410
2411 case REAL_CST:
2412 return (! TREE_CONSTANT_OVERFLOW (arg0)
2413 && ! TREE_CONSTANT_OVERFLOW (arg1)
2414 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2415 TREE_REAL_CST (arg1)));
2416
2417 case VECTOR_CST:
2418 {
2419 tree v1, v2;
2420
2421 if (TREE_CONSTANT_OVERFLOW (arg0)
2422 || TREE_CONSTANT_OVERFLOW (arg1))
2423 return 0;
2424
2425 v1 = TREE_VECTOR_CST_ELTS (arg0);
2426 v2 = TREE_VECTOR_CST_ELTS (arg1);
2427 while (v1 && v2)
2428 {
2429 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2430 flags))
2431 return 0;
2432 v1 = TREE_CHAIN (v1);
2433 v2 = TREE_CHAIN (v2);
2434 }
2435
2436 return 1;
2437 }
2438
2439 case COMPLEX_CST:
2440 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2441 flags)
2442 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2443 flags));
2444
2445 case STRING_CST:
2446 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2447 && ! memcmp (TREE_STRING_POINTER (arg0),
2448 TREE_STRING_POINTER (arg1),
2449 TREE_STRING_LENGTH (arg0)));
2450
2451 case ADDR_EXPR:
2452 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2453 0);
2454 default:
2455 break;
2456 }
2457
2458 if (flags & OEP_ONLY_CONST)
2459 return 0;
2460
2461 /* Define macros to test an operand from arg0 and arg1 for equality and a
2462 variant that allows null and views null as being different from any
2463 non-null value. In the latter case, if either is null, then both
2464 must be; otherwise, do the normal comparison. */
2465 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2466 TREE_OPERAND (arg1, N), flags)
2467
2468 #define OP_SAME_WITH_NULL(N) \
2469 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2470 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2471
2472 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2473 {
2474 case tcc_unary:
2475 /* Two conversions are equal only if signedness and modes match. */
2476 switch (TREE_CODE (arg0))
2477 {
2478 case NOP_EXPR:
2479 case CONVERT_EXPR:
2480 case FIX_CEIL_EXPR:
2481 case FIX_TRUNC_EXPR:
2482 case FIX_FLOOR_EXPR:
2483 case FIX_ROUND_EXPR:
2484 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2485 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2486 return 0;
2487 break;
2488 default:
2489 break;
2490 }
2491
2492 return OP_SAME (0);
2493
2494
2495 case tcc_comparison:
2496 case tcc_binary:
2497 if (OP_SAME (0) && OP_SAME (1))
2498 return 1;
2499
2500 /* For commutative ops, allow the other order. */
2501 return (commutative_tree_code (TREE_CODE (arg0))
2502 && operand_equal_p (TREE_OPERAND (arg0, 0),
2503 TREE_OPERAND (arg1, 1), flags)
2504 && operand_equal_p (TREE_OPERAND (arg0, 1),
2505 TREE_OPERAND (arg1, 0), flags));
2506
2507 case tcc_reference:
2508 /* If either of the pointer (or reference) expressions we are
2509 dereferencing contains a side effect, these cannot be equal. */
2510 if (TREE_SIDE_EFFECTS (arg0)
2511 || TREE_SIDE_EFFECTS (arg1))
2512 return 0;
2513
2514 switch (TREE_CODE (arg0))
2515 {
2516 case INDIRECT_REF:
2517 case ALIGN_INDIRECT_REF:
2518 case MISALIGNED_INDIRECT_REF:
2519 case REALPART_EXPR:
2520 case IMAGPART_EXPR:
2521 return OP_SAME (0);
2522
2523 case ARRAY_REF:
2524 case ARRAY_RANGE_REF:
2525 /* Operands 2 and 3 may be null. */
2526 return (OP_SAME (0)
2527 && OP_SAME (1)
2528 && OP_SAME_WITH_NULL (2)
2529 && OP_SAME_WITH_NULL (3));
2530
2531 case COMPONENT_REF:
2532 /* Handle operand 2 the same as for ARRAY_REF. */
2533 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2534
2535 case BIT_FIELD_REF:
2536 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2537
2538 default:
2539 return 0;
2540 }
2541
2542 case tcc_expression:
2543 switch (TREE_CODE (arg0))
2544 {
2545 case ADDR_EXPR:
2546 case TRUTH_NOT_EXPR:
2547 return OP_SAME (0);
2548
2549 case TRUTH_ANDIF_EXPR:
2550 case TRUTH_ORIF_EXPR:
2551 return OP_SAME (0) && OP_SAME (1);
2552
2553 case TRUTH_AND_EXPR:
2554 case TRUTH_OR_EXPR:
2555 case TRUTH_XOR_EXPR:
2556 if (OP_SAME (0) && OP_SAME (1))
2557 return 1;
2558
2559 /* Otherwise take into account this is a commutative operation. */
2560 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2561 TREE_OPERAND (arg1, 1), flags)
2562 && operand_equal_p (TREE_OPERAND (arg0, 1),
2563 TREE_OPERAND (arg1, 0), flags));
2564
2565 case CALL_EXPR:
2566 /* If the CALL_EXPRs call different functions, then they
2567 clearly cannot be equal. */
2568 if (!OP_SAME (0))
2569 return 0;
2570
2571 {
2572 unsigned int cef = call_expr_flags (arg0);
2573 if (flags & OEP_PURE_SAME)
2574 cef &= ECF_CONST | ECF_PURE;
2575 else
2576 cef &= ECF_CONST;
2577 if (!cef)
2578 return 0;
2579 }
2580
2581 /* Now see if all the arguments are the same. operand_equal_p
2582 does not handle TREE_LIST, so we walk the operands here
2583 feeding them to operand_equal_p. */
2584 arg0 = TREE_OPERAND (arg0, 1);
2585 arg1 = TREE_OPERAND (arg1, 1);
2586 while (arg0 && arg1)
2587 {
2588 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2589 flags))
2590 return 0;
2591
2592 arg0 = TREE_CHAIN (arg0);
2593 arg1 = TREE_CHAIN (arg1);
2594 }
2595
2596 /* If we get here and both argument lists are exhausted
2597 then the CALL_EXPRs are equal. */
2598 return ! (arg0 || arg1);
2599
2600 default:
2601 return 0;
2602 }
2603
2604 case tcc_declaration:
2605 /* Consider __builtin_sqrt equal to sqrt. */
2606 return (TREE_CODE (arg0) == FUNCTION_DECL
2607 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2608 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2609 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2610
2611 default:
2612 return 0;
2613 }
2614
2615 #undef OP_SAME
2616 #undef OP_SAME_WITH_NULL
2617 }
2618 \f
2619 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2620 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2621
2622 When in doubt, return 0. */
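/* For instance, if ARG1 is (int) c for some char variable c and OTHER
   is a constant that fits in char, shorten_compare may have narrowed
   the comparison to operate on c directly; ARG0 equal to c (suitably
   converted) is then recognized as matching. */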
2623
2624 static int
2625 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2626 {
2627 int unsignedp1, unsignedpo;
2628 tree primarg0, primarg1, primother;
2629 unsigned int correct_width;
2630
2631 if (operand_equal_p (arg0, arg1, 0))
2632 return 1;
2633
2634 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2635 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2636 return 0;
2637
2638 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2639 and see if the inner values are the same. This removes any
2640 signedness comparison, which doesn't matter here. */
2641 primarg0 = arg0, primarg1 = arg1;
2642 STRIP_NOPS (primarg0);
2643 STRIP_NOPS (primarg1);
2644 if (operand_equal_p (primarg0, primarg1, 0))
2645 return 1;
2646
2647 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2648 actual comparison operand, ARG0.
2649
2650 First throw away any conversions to wider types
2651 already present in the operands. */
2652
2653 primarg1 = get_narrower (arg1, &unsignedp1);
2654 primother = get_narrower (other, &unsignedpo);
2655
2656 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2657 if (unsignedp1 == unsignedpo
2658 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2659 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2660 {
2661 tree type = TREE_TYPE (arg0);
2662
2663 /* Make sure shorter operand is extended the right way
2664 to match the longer operand. */
2665 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2666 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2667
2668 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2669 return 1;
2670 }
2671
2672 return 0;
2673 }
2674 \f
2675 /* See if ARG is an expression that is either a comparison or is performing
2676 arithmetic on comparisons. The comparisons must only be comparing
2677 two different values, which will be stored in *CVAL1 and *CVAL2; if
2678 they are nonzero it means that some operands have already been found.
2679 No variables may be used anywhere else in the expression except in the
2680 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2681 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2682
2683 If this is true, return 1. Otherwise, return zero. */
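/* For example, (a < b) && (a == b) qualifies, setting *CVAL1 to a and
   *CVAL2 to b, whereas (a < b) && (c == d) fails because it compares
   more than two different values. */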
2684
2685 static int
2686 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2687 {
2688 enum tree_code code = TREE_CODE (arg);
2689 enum tree_code_class class = TREE_CODE_CLASS (code);
2690
2691 /* We can handle some of the tcc_expression cases here. */
2692 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2693 class = tcc_unary;
2694 else if (class == tcc_expression
2695 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2696 || code == COMPOUND_EXPR))
2697 class = tcc_binary;
2698
2699 else if (class == tcc_expression && code == SAVE_EXPR
2700 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2701 {
2702 /* If we've already found a CVAL1 or CVAL2, this expression is
2703 too complex to handle. */
2704 if (*cval1 || *cval2)
2705 return 0;
2706
2707 class = tcc_unary;
2708 *save_p = 1;
2709 }
2710
2711 switch (class)
2712 {
2713 case tcc_unary:
2714 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2715
2716 case tcc_binary:
2717 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2718 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2719 cval1, cval2, save_p));
2720
2721 case tcc_constant:
2722 return 1;
2723
2724 case tcc_expression:
2725 if (code == COND_EXPR)
2726 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2727 cval1, cval2, save_p)
2728 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2729 cval1, cval2, save_p)
2730 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2731 cval1, cval2, save_p));
2732 return 0;
2733
2734 case tcc_comparison:
2735 /* First see if we can handle the first operand, then the second. For
2736 the second operand, we know *CVAL1 can't be zero. Each of the
2737 two values must appear on one side of the comparison; test for the
2738 case where this isn't true by failing if the two operands
2739 are the same.
2740
2741 if (operand_equal_p (TREE_OPERAND (arg, 0),
2742 TREE_OPERAND (arg, 1), 0))
2743 return 0;
2744
2745 if (*cval1 == 0)
2746 *cval1 = TREE_OPERAND (arg, 0);
2747 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2748 ;
2749 else if (*cval2 == 0)
2750 *cval2 = TREE_OPERAND (arg, 0);
2751 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2752 ;
2753 else
2754 return 0;
2755
2756 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2757 ;
2758 else if (*cval2 == 0)
2759 *cval2 = TREE_OPERAND (arg, 1);
2760 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2761 ;
2762 else
2763 return 0;
2764
2765 return 1;
2766
2767 default:
2768 return 0;
2769 }
2770 }
2771 \f
2772 /* ARG is a tree that is known to contain just arithmetic operations and
2773 comparisons. Evaluate the operations in the tree substituting NEW0 for
2774 any occurrence of OLD0 as an operand of a comparison and likewise for
2775 NEW1 and OLD1. */
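/* For example, substituting x for a and y for b in (a < b) && (a == b)
   yields (x < y) && (x == y), with each comparison rebuilt via
   fold_build2. */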
2776
2777 static tree
2778 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2779 {
2780 tree type = TREE_TYPE (arg);
2781 enum tree_code code = TREE_CODE (arg);
2782 enum tree_code_class class = TREE_CODE_CLASS (code);
2783
2784 /* We can handle some of the tcc_expression cases here. */
2785 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2786 class = tcc_unary;
2787 else if (class == tcc_expression
2788 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2789 class = tcc_binary;
2790
2791 switch (class)
2792 {
2793 case tcc_unary:
2794 return fold_build1 (code, type,
2795 eval_subst (TREE_OPERAND (arg, 0),
2796 old0, new0, old1, new1));
2797
2798 case tcc_binary:
2799 return fold_build2 (code, type,
2800 eval_subst (TREE_OPERAND (arg, 0),
2801 old0, new0, old1, new1),
2802 eval_subst (TREE_OPERAND (arg, 1),
2803 old0, new0, old1, new1));
2804
2805 case tcc_expression:
2806 switch (code)
2807 {
2808 case SAVE_EXPR:
2809 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2810
2811 case COMPOUND_EXPR:
2812 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2813
2814 case COND_EXPR:
2815 return fold_build3 (code, type,
2816 eval_subst (TREE_OPERAND (arg, 0),
2817 old0, new0, old1, new1),
2818 eval_subst (TREE_OPERAND (arg, 1),
2819 old0, new0, old1, new1),
2820 eval_subst (TREE_OPERAND (arg, 2),
2821 old0, new0, old1, new1));
2822 default:
2823 break;
2824 }
2825 /* Fall through - ??? */
2826
2827 case tcc_comparison:
2828 {
2829 tree arg0 = TREE_OPERAND (arg, 0);
2830 tree arg1 = TREE_OPERAND (arg, 1);
2831
2832 /* We need to check both for exact equality and tree equality. The
2833 former will be true if the operand has a side-effect. In that
2834 case, we know the operand occurred exactly once. */
2835
2836 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2837 arg0 = new0;
2838 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2839 arg0 = new1;
2840
2841 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2842 arg1 = new0;
2843 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2844 arg1 = new1;
2845
2846 return fold_build2 (code, type, arg0, arg1);
2847 }
2848
2849 default:
2850 return arg;
2851 }
2852 }
2853 \f
2854 /* Return a tree for the case when the result of an expression is RESULT
2855 converted to TYPE and OMITTED was previously an operand of the expression
2856 but is now not needed (e.g., we folded OMITTED * 0).
2857
2858 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2859 the conversion of RESULT to TYPE. */
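/* For example, when x * 0 is folded to 0 but x is a call with side
   effects, the result is built as the COMPOUND_EXPR (x, 0) so that the
   call is still evaluated. */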
2860
2861 tree
2862 omit_one_operand (tree type, tree result, tree omitted)
2863 {
2864 tree t = fold_convert (type, result);
2865
2866 if (TREE_SIDE_EFFECTS (omitted))
2867 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2868
2869 return non_lvalue (t);
2870 }
2871
2872 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2873
2874 static tree
2875 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2876 {
2877 tree t = fold_convert (type, result);
2878
2879 if (TREE_SIDE_EFFECTS (omitted))
2880 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2881
2882 return pedantic_non_lvalue (t);
2883 }
2884
2885 /* Return a tree for the case when the result of an expression is RESULT
2886 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2887 of the expression but are now not needed.
2888
2889 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2890 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2891 evaluated before OMITTED2. Otherwise, if neither has side effects,
2892 just do the conversion of RESULT to TYPE. */
2893
2894 tree
2895 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2896 {
2897 tree t = fold_convert (type, result);
2898
2899 if (TREE_SIDE_EFFECTS (omitted2))
2900 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2901 if (TREE_SIDE_EFFECTS (omitted1))
2902 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2903
2904 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2905 }
2906
2907 \f
2908 /* Return a simplified tree node for the truth-negation of ARG. This
2909 never alters ARG itself. We assume that ARG is an operation that
2910 returns a truth value (0 or 1).
2911
2912 FIXME: one would think we would fold the result, but it causes
2913 problems with the dominator optimizer. */
2914 tree
2915 invert_truthvalue (tree arg)
2916 {
2917 tree type = TREE_TYPE (arg);
2918 enum tree_code code = TREE_CODE (arg);
2919
2920 if (code == ERROR_MARK)
2921 return arg;
2922
2923 /* If this is a comparison, we can simply invert it, except for
2924 floating-point non-equality comparisons, in which case we just
2925 enclose a TRUTH_NOT_EXPR around what we have. */
2926
2927 if (TREE_CODE_CLASS (code) == tcc_comparison)
2928 {
2929 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2930 if (FLOAT_TYPE_P (op_type)
2931 && flag_trapping_math
2932 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2933 && code != NE_EXPR && code != EQ_EXPR)
2934 return build1 (TRUTH_NOT_EXPR, type, arg);
2935 else
2936 {
2937 code = invert_tree_comparison (code,
2938 HONOR_NANS (TYPE_MODE (op_type)));
2939 if (code == ERROR_MARK)
2940 return build1 (TRUTH_NOT_EXPR, type, arg);
2941 else
2942 return build2 (code, type,
2943 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2944 }
2945 }
2946
2947 switch (code)
2948 {
2949 case INTEGER_CST:
2950 return constant_boolean_node (integer_zerop (arg), type);
2951
2952 case TRUTH_AND_EXPR:
2953 return build2 (TRUTH_OR_EXPR, type,
2954 invert_truthvalue (TREE_OPERAND (arg, 0)),
2955 invert_truthvalue (TREE_OPERAND (arg, 1)));
2956
2957 case TRUTH_OR_EXPR:
2958 return build2 (TRUTH_AND_EXPR, type,
2959 invert_truthvalue (TREE_OPERAND (arg, 0)),
2960 invert_truthvalue (TREE_OPERAND (arg, 1)));
2961
2962 case TRUTH_XOR_EXPR:
2963 /* Here we can invert either operand. We invert the first operand
2964 unless the second operand is a TRUTH_NOT_EXPR in which case our
2965 result is the XOR of the first operand with the inside of the
2966 negation of the second operand. */
2967
2968 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2969 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2970 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2971 else
2972 return build2 (TRUTH_XOR_EXPR, type,
2973 invert_truthvalue (TREE_OPERAND (arg, 0)),
2974 TREE_OPERAND (arg, 1));
2975
2976 case TRUTH_ANDIF_EXPR:
2977 return build2 (TRUTH_ORIF_EXPR, type,
2978 invert_truthvalue (TREE_OPERAND (arg, 0)),
2979 invert_truthvalue (TREE_OPERAND (arg, 1)));
2980
2981 case TRUTH_ORIF_EXPR:
2982 return build2 (TRUTH_ANDIF_EXPR, type,
2983 invert_truthvalue (TREE_OPERAND (arg, 0)),
2984 invert_truthvalue (TREE_OPERAND (arg, 1)));
2985
2986 case TRUTH_NOT_EXPR:
2987 return TREE_OPERAND (arg, 0);
2988
2989 case COND_EXPR:
2990 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2991 invert_truthvalue (TREE_OPERAND (arg, 1)),
2992 invert_truthvalue (TREE_OPERAND (arg, 2)));
2993
2994 case COMPOUND_EXPR:
2995 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2996 invert_truthvalue (TREE_OPERAND (arg, 1)));
2997
2998 case NON_LVALUE_EXPR:
2999 return invert_truthvalue (TREE_OPERAND (arg, 0));
3000
3001 case NOP_EXPR:
3002 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3003 break;
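      /* For non-boolean conversions, fall through and distribute the
	 inversion into the operand. */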
3004
3005 case CONVERT_EXPR:
3006 case FLOAT_EXPR:
3007 return build1 (TREE_CODE (arg), type,
3008 invert_truthvalue (TREE_OPERAND (arg, 0)));
3009
3010 case BIT_AND_EXPR:
3011 if (!integer_onep (TREE_OPERAND (arg, 1)))
3012 break;
3013 return build2 (EQ_EXPR, type, arg,
3014 fold_convert (type, integer_zero_node));
3015
3016 case SAVE_EXPR:
3017 return build1 (TRUTH_NOT_EXPR, type, arg);
3018
3019 case CLEANUP_POINT_EXPR:
3020 return build1 (CLEANUP_POINT_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)));
3022
3023 default:
3024 break;
3025 }
3026 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3027 return build1 (TRUTH_NOT_EXPR, type, arg);
3028 }
3029
3030 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3031 operands are another bit-wise operation with a common input. If so,
3032 distribute the bit operations to save an operation and possibly two if
3033 constants are involved. For example, convert
3034 (A | B) & (A | C) into A | (B & C)
3035 Further simplification will occur if B and C are constants.
3036
3037 If this optimization cannot be done, 0 will be returned. */
3038
3039 static tree
3040 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3041 {
3042 tree common;
3043 tree left, right;
3044
3045 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3046 || TREE_CODE (arg0) == code
3047 || (TREE_CODE (arg0) != BIT_AND_EXPR
3048 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3049 return 0;
3050
3051 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3052 {
3053 common = TREE_OPERAND (arg0, 0);
3054 left = TREE_OPERAND (arg0, 1);
3055 right = TREE_OPERAND (arg1, 1);
3056 }
3057 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3058 {
3059 common = TREE_OPERAND (arg0, 0);
3060 left = TREE_OPERAND (arg0, 1);
3061 right = TREE_OPERAND (arg1, 0);
3062 }
3063 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3064 {
3065 common = TREE_OPERAND (arg0, 1);
3066 left = TREE_OPERAND (arg0, 0);
3067 right = TREE_OPERAND (arg1, 1);
3068 }
3069 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3070 {
3071 common = TREE_OPERAND (arg0, 1);
3072 left = TREE_OPERAND (arg0, 0);
3073 right = TREE_OPERAND (arg1, 0);
3074 }
3075 else
3076 return 0;
3077
3078 return fold_build2 (TREE_CODE (arg0), type, common,
3079 fold_build2 (code, type, left, right));
3080 }
3081 \f
3082 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3083 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3084
3085 static tree
3086 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3087 int unsignedp)
3088 {
3089 tree result;
3090
3091 if (bitpos == 0)
3092 {
3093 tree size = TYPE_SIZE (TREE_TYPE (inner));
3094 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3095 || POINTER_TYPE_P (TREE_TYPE (inner)))
3096 && host_integerp (size, 0)
3097 && tree_low_cst (size, 0) == bitsize)
3098 return fold_convert (type, inner);
3099 }
3100
3101 result = build3 (BIT_FIELD_REF, type, inner,
3102 size_int (bitsize), bitsize_int (bitpos));
3103
3104 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3105
3106 return result;
3107 }
3108
3109 /* Optimize a bit-field compare.
3110
3111 There are two cases: First is a compare against a constant and the
3112 second is a comparison of two items where the fields are at the same
3113 bit position relative to the start of a chunk (byte, halfword, word)
3114 large enough to contain it. In these cases we can avoid the shift
3115 implicit in bitfield extractions.
3116
3117 For constants, we emit a compare of the shifted constant with the
3118 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3119 compared. For two fields at the same position, we do the ANDs with the
3120 similar mask and compare the result of the ANDs.
3121
3122 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3123 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3124 are the left and right operands of the comparison, respectively.
3125
3126 If the optimization described above can be done, we return the resulting
3127 tree. Otherwise we return zero. */
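/* As a target-dependent illustration, comparing a 3-bit bitfield
   against the constant 5 can become a test of WORD & MASK against
   5 shifted into the field's position, avoiding the extraction shift
   (WORD and MASK here are hypothetical names for the containing chunk
   and the computed mask). */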
3128
3129 static tree
3130 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3131 tree lhs, tree rhs)
3132 {
3133 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3134 tree type = TREE_TYPE (lhs);
3135 tree signed_type, unsigned_type;
3136 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3137 enum machine_mode lmode, rmode, nmode;
3138 int lunsignedp, runsignedp;
3139 int lvolatilep = 0, rvolatilep = 0;
3140 tree linner, rinner = NULL_TREE;
3141 tree mask;
3142 tree offset;
3143
3144 /* Get all the information about the extractions being done. If the bit size
3145 is the same as the size of the underlying object, we aren't doing an
3146 extraction at all and so can do nothing. We also don't want to
3147 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3148 then will no longer be able to replace it. */
3149 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3150 &lunsignedp, &lvolatilep, false);
3151 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3152 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3153 return 0;
3154
3155 if (!const_p)
3156 {
3157 /* If this is not a constant, we can only do something if bit positions,
3158 sizes, and signedness are the same. */
3159 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3160 &runsignedp, &rvolatilep, false);
3161
3162 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3163 || lunsignedp != runsignedp || offset != 0
3164 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3165 return 0;
3166 }
3167
3168 /* See if we can find a mode to refer to this field. We should be able to,
3169 but fail if we can't. */
3170 nmode = get_best_mode (lbitsize, lbitpos,
3171 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3172 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3173 TYPE_ALIGN (TREE_TYPE (rinner))),
3174 word_mode, lvolatilep || rvolatilep);
3175 if (nmode == VOIDmode)
3176 return 0;
3177
3178 /* Set signed and unsigned types of the precision of this mode for the
3179 shifts below. */
3180 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3181 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3182
3183 /* Compute the bit position and size for the new reference and our offset
3184 within it. If the new reference is the same size as the original, we
3185 won't optimize anything, so return zero. */
3186 nbitsize = GET_MODE_BITSIZE (nmode);
3187 nbitpos = lbitpos & ~ (nbitsize - 1);
3188 lbitpos -= nbitpos;
3189 if (nbitsize == lbitsize)
3190 return 0;
3191
3192 if (BYTES_BIG_ENDIAN)
3193 lbitpos = nbitsize - lbitsize - lbitpos;
3194
3195 /* Make the mask to be used against the extracted field. */
3196 mask = build_int_cst (unsigned_type, -1);
3197 mask = force_fit_type (mask, 0, false, false);
3198 mask = fold_convert (unsigned_type, mask);
3199 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3200 mask = const_binop (RSHIFT_EXPR, mask,
3201 size_int (nbitsize - lbitsize - lbitpos), 0);
3202
3203 if (! const_p)
3204 /* If not comparing with constant, just rework the comparison
3205 and return. */
3206 return build2 (code, compare_type,
3207 build2 (BIT_AND_EXPR, unsigned_type,
3208 make_bit_field_ref (linner, unsigned_type,
3209 nbitsize, nbitpos, 1),
3210 mask),
3211 build2 (BIT_AND_EXPR, unsigned_type,
3212 make_bit_field_ref (rinner, unsigned_type,
3213 nbitsize, nbitpos, 1),
3214 mask));
3215
3216 /* Otherwise, we are handling the constant case. See if the constant is too
3217 big for the field. Warn and return a tree for 0 (false) if so. We do
3218 this not only for its own sake, but to avoid having to test for this
3219 error case below. If we didn't, we might generate wrong code.
3220
3221 For unsigned fields, the constant shifted right by the field length should
3222 be all zero. For signed fields, the high-order bits should agree with
3223 the sign bit. */
3224
3225 if (lunsignedp)
3226 {
3227 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3228 fold_convert (unsigned_type, rhs),
3229 size_int (lbitsize), 0)))
3230 {
3231 warning ("comparison is always %d due to width of bit-field",
3232 code == NE_EXPR);
3233 return constant_boolean_node (code == NE_EXPR, compare_type);
3234 }
3235 }
3236 else
3237 {
3238 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3239 size_int (lbitsize - 1), 0);
3240 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3241 {
3242 warning ("comparison is always %d due to width of bit-field",
3243 code == NE_EXPR);
3244 return constant_boolean_node (code == NE_EXPR, compare_type);
3245 }
3246 }
3247
3248 /* Single-bit compares should always be against zero. */
3249 if (lbitsize == 1 && ! integer_zerop (rhs))
3250 {
3251 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3252 rhs = fold_convert (type, integer_zero_node);
3253 }
3254
3255 /* Make a new bitfield reference, shift the constant over the
3256 appropriate number of bits and mask it with the computed mask
3257 (in case this was a signed field). If we changed it, make a new one. */
3258 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3259 if (lvolatilep)
3260 {
3261 TREE_SIDE_EFFECTS (lhs) = 1;
3262 TREE_THIS_VOLATILE (lhs) = 1;
3263 }
3264
3265 rhs = fold (const_binop (BIT_AND_EXPR,
3266 const_binop (LSHIFT_EXPR,
3267 fold_convert (unsigned_type, rhs),
3268 size_int (lbitpos), 0),
3269 mask, 0));
3270
3271 return build2 (code, compare_type,
3272 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3273 rhs);
3274 }
3275 \f
3276 /* Subroutine for fold_truthop: decode a field reference.
3277
3278 If EXP is a comparison reference, we return the innermost reference.
3279
3280 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3281 set to the starting bit number.
3282
3283 If the innermost field can be completely contained in a mode-sized
3284 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3285
3286 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3287 otherwise it is not changed.
3288
3289 *PUNSIGNEDP is set to the signedness of the field.
3290
3291 *PMASK is set to the mask used. This is either contained in a
3292 BIT_AND_EXPR or derived from the width of the field.
3293
3294 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3295
3296 Return 0 if this is not a component reference or is one that we can't
3297 do anything with. */
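/* For example, for s.f & 3 with f an 8-bit unsigned bitfield, this
   returns the innermost reference, sets *PAND_MASK to 3, and sets
   *PMASK to the 8-bit field mask ANDed with 3 (s and f here are
   hypothetical). */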
3298
3299 static tree
3300 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3301 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3302 int *punsignedp, int *pvolatilep,
3303 tree *pmask, tree *pand_mask)
3304 {
3305 tree outer_type = 0;
3306 tree and_mask = 0;
3307 tree mask, inner, offset;
3308 tree unsigned_type;
3309 unsigned int precision;
3310
3311 /* All the optimizations using this function assume integer fields.
3312 There are problems with FP fields since the type_for_size call
3313 below can fail for, e.g., XFmode. */
3314 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3315 return 0;
3316
3317 /* We are interested in the bare arrangement of bits, so strip everything
3318 that doesn't affect the machine mode. However, record the type of the
3319 outermost expression if it may matter below. */
3320 if (TREE_CODE (exp) == NOP_EXPR
3321 || TREE_CODE (exp) == CONVERT_EXPR
3322 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3323 outer_type = TREE_TYPE (exp);
3324 STRIP_NOPS (exp);
3325
3326 if (TREE_CODE (exp) == BIT_AND_EXPR)
3327 {
3328 and_mask = TREE_OPERAND (exp, 1);
3329 exp = TREE_OPERAND (exp, 0);
3330 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3331 if (TREE_CODE (and_mask) != INTEGER_CST)
3332 return 0;
3333 }
3334
3335 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3336 punsignedp, pvolatilep, false);
3337 if ((inner == exp && and_mask == 0)
3338 || *pbitsize < 0 || offset != 0
3339 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3340 return 0;
3341
3342 /* If the number of bits in the reference is the same as the bitsize of
3343 the outer type, then the outer type gives the signedness. Otherwise
3344 (in case of a small bitfield) the signedness is unchanged. */
3345 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3346 *punsignedp = TYPE_UNSIGNED (outer_type);
3347
3348 /* Compute the mask to access the bitfield. */
3349 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3350 precision = TYPE_PRECISION (unsigned_type);
3351
3352 mask = build_int_cst (unsigned_type, -1);
3353 mask = force_fit_type (mask, 0, false, false);
3354
3355 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3356 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3357
3358 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3359 if (and_mask != 0)
3360 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3361 fold_convert (unsigned_type, and_mask), mask);
3362
3363 *pmask = mask;
3364 *pand_mask = and_mask;
3365 return inner;
3366 }
3367
3368 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3369 bit positions. */
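/* E.g. for SIZE == 5 this checks whether MASK is 0x1f. */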
3370
3371 static int
3372 all_ones_mask_p (tree mask, int size)
3373 {
3374 tree type = TREE_TYPE (mask);
3375 unsigned int precision = TYPE_PRECISION (type);
3376 tree tmask;
3377
3378 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3379 tmask = force_fit_type (tmask, 0, false, false);
3380
3381 return
3382 tree_int_cst_equal (mask,
3383 const_binop (RSHIFT_EXPR,
3384 const_binop (LSHIFT_EXPR, tmask,
3385 size_int (precision - size),
3386 0),
3387 size_int (precision - size), 0));
3388 }
3389
3390 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3391 represents the sign bit of EXP's type. If EXP represents a sign
3392 or zero extension, also test VAL against the unextended type.
3393 The return value is the (sub)expression whose sign bit is VAL,
3394 or NULL_TREE otherwise. */
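/* For example, for a 32-bit EXP, VAL must have the bit pattern
   0x80000000; if EXP is an extension from a 16-bit type, 0x8000
   matches the unextended operand instead. */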
3395
3396 static tree
3397 sign_bit_p (tree exp, tree val)
3398 {
3399 unsigned HOST_WIDE_INT mask_lo, lo;
3400 HOST_WIDE_INT mask_hi, hi;
3401 int width;
3402 tree t;
3403
3404 /* Tree EXP must have an integral type. */
3405 t = TREE_TYPE (exp);
3406 if (! INTEGRAL_TYPE_P (t))
3407 return NULL_TREE;
3408
3409 /* Tree VAL must be an integer constant. */
3410 if (TREE_CODE (val) != INTEGER_CST
3411 || TREE_CONSTANT_OVERFLOW (val))
3412 return NULL_TREE;
3413
3414 width = TYPE_PRECISION (t);
3415 if (width > HOST_BITS_PER_WIDE_INT)
3416 {
3417 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3418 lo = 0;
3419
3420 mask_hi = ((unsigned HOST_WIDE_INT) -1
3421 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3422 mask_lo = -1;
3423 }
3424 else
3425 {
3426 hi = 0;
3427 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3428
3429 mask_hi = 0;
3430 mask_lo = ((unsigned HOST_WIDE_INT) -1
3431 >> (HOST_BITS_PER_WIDE_INT - width));
3432 }
3433
3434 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3435 treat VAL as if it were unsigned. */
3436 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3437 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3438 return exp;
3439
3440 /* Handle extension from a narrower type. */
3441 if (TREE_CODE (exp) == NOP_EXPR
3442 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3443 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3444
3445 return NULL_TREE;
3446 }
3447
3448 /* Subroutine for fold_truthop: determine if an operand is simple enough
3449 to be evaluated unconditionally. */
3450
3451 static int
3452 simple_operand_p (tree exp)
3453 {
3454 /* Strip any conversions that don't change the machine mode. */
3455 STRIP_NOPS (exp);
3456
3457 return (CONSTANT_CLASS_P (exp)
3458 || TREE_CODE (exp) == SSA_NAME
3459 || (DECL_P (exp)
3460 && ! TREE_ADDRESSABLE (exp)
3461 && ! TREE_THIS_VOLATILE (exp)
3462 && ! DECL_NONLOCAL (exp)
3463 /* Don't regard global variables as simple. They may be
3464 allocated in ways unknown to the compiler (shared memory,
3465 #pragma weak, etc). */
3466 && ! TREE_PUBLIC (exp)
3467 && ! DECL_EXTERNAL (exp)
3468 /* Loading a static variable is unduly expensive, but global
3469 registers aren't expensive. */
3470 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3471 }
3472 \f
3473 /* The following functions are subroutines to fold_range_test and allow it to
3474 try to change a logical combination of comparisons into a range test.
3475
3476 For example, both
3477 X == 2 || X == 3 || X == 4 || X == 5
3478 and
3479 X >= 2 && X <= 5
3480 are converted to
3481 (unsigned) (X - 2) <= 3
3482
3483 We describe each set of comparisons as being either inside or outside
3484 a range, using a variable named like IN_P, and then describe the
3485 range with a lower and upper bound. If one of the bounds is omitted,
3486 it represents either the highest or lowest value of the type.
3487
3488 In the comments below, we represent a range by two numbers in brackets
3489 preceded by a "+" to designate being inside that range, or a "-" to
3490 designate being outside that range, so the condition can be inverted by
3491 flipping the prefix. An omitted bound is represented by a "-". For
3492 example, "- [-, 10]" means being outside the range starting at the lowest
3493 possible value and ending at 10, in other words, being greater than 10.
3494 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3495 always false.
3496
3497 We set up things so that the missing bounds are handled in a consistent
3498 manner so neither a missing bound nor "true" and "false" need to be
3499 handled using a special case. */
3500
3501 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3502 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3503 and UPPER1_P are nonzero if the respective argument is an upper bound
3504 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3505 must be specified for a comparison. ARG1 will be converted to ARG0's
3506 type if both are specified. */
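/* For example, with ARG0 omitted as a lower bound (conceptually minus
   infinity) and ARG1 present, LT_EXPR yields true: SGN0 == -1,
   SGN1 == 0, and -1 < 0. */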
3507
3508 static tree
3509 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3510 tree arg1, int upper1_p)
3511 {
3512 tree tem;
3513 int result;
3514 int sgn0, sgn1;
3515
3516 /* If neither arg represents infinity, do the normal operation.
3517 Else, if not a comparison, return infinity. Else handle the special
3518 comparison rules. Note that most of the cases below won't occur, but
3519 are handled for consistency. */
3520
3521 if (arg0 != 0 && arg1 != 0)
3522 {
3523 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3524 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3525 STRIP_NOPS (tem);
3526 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3527 }
3528
3529 if (TREE_CODE_CLASS (code) != tcc_comparison)
3530 return 0;
3531
3532 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3533 for neither. In real maths, we cannot assume open ended ranges are
3534 the same. But this is computer arithmetic, where numbers are finite.
3535 We can therefore identify each unbounded bound with a value Z, Z being
3536 greater in magnitude than any representable number. This permits
3537 us to treat unbounded ranges as equal. */
3538 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3539 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3540 switch (code)
3541 {
3542 case EQ_EXPR:
3543 result = sgn0 == sgn1;
3544 break;
3545 case NE_EXPR:
3546 result = sgn0 != sgn1;
3547 break;
3548 case LT_EXPR:
3549 result = sgn0 < sgn1;
3550 break;
3551 case LE_EXPR:
3552 result = sgn0 <= sgn1;
3553 break;
3554 case GT_EXPR:
3555 result = sgn0 > sgn1;
3556 break;
3557 case GE_EXPR:
3558 result = sgn0 >= sgn1;
3559 break;
3560 default:
3561 gcc_unreachable ();
3562 }
3563
3564 return constant_boolean_node (result, type);
3565 }
3566 \f
3567 /* Given EXP, a logical expression, set the range it is testing into
3568 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3569 actually being tested. *PLOW and *PHIGH will be made of the same type
3570 as the returned expression. If EXP is not a comparison, we will most
3571 likely not be returning a useful value and range. */
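/* For example, for a signed x, the test x > 10 returns x and sets
   *PIN_P to 0, *PLOW to an omitted lower bound and *PHIGH to 10,
   i.e. the range - [-, 10] in the notation above. */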
3572
3573 static tree
3574 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3575 {
3576 enum tree_code code;
3577 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3578 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3579 int in_p, n_in_p;
3580 tree low, high, n_low, n_high;
3581
3582 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3583 and see if we can refine the range. Some of the cases below may not
3584 happen, but it doesn't seem worth worrying about this. We "continue"
3585 the outer loop when we've changed something; otherwise we "break"
3586 the switch, which will "break" the while. */
3587
3588 in_p = 0;
3589 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3590
3591 while (1)
3592 {
3593 code = TREE_CODE (exp);
3594 exp_type = TREE_TYPE (exp);
3595
3596 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3597 {
3598 if (TREE_CODE_LENGTH (code) > 0)
3599 arg0 = TREE_OPERAND (exp, 0);
3600 if (TREE_CODE_CLASS (code) == tcc_comparison
3601 || TREE_CODE_CLASS (code) == tcc_unary
3602 || TREE_CODE_CLASS (code) == tcc_binary)
3603 arg0_type = TREE_TYPE (arg0);
3604 if (TREE_CODE_CLASS (code) == tcc_binary
3605 || TREE_CODE_CLASS (code) == tcc_comparison
3606 || (TREE_CODE_CLASS (code) == tcc_expression
3607 && TREE_CODE_LENGTH (code) > 1))
3608 arg1 = TREE_OPERAND (exp, 1);
3609 }
3610
3611 switch (code)
3612 {
3613 case TRUTH_NOT_EXPR:
3614 in_p = ! in_p, exp = arg0;
3615 continue;
3616
3617 case EQ_EXPR: case NE_EXPR:
3618 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3619 /* We can only do something if the range is testing for zero
3620 and if the second operand is an integer constant. Note that
3621 saying something is "in" the range we make is done by
3622 complementing IN_P, since it will be set in the initial case of
3623 being not equal to zero; "out" is leaving it alone. */
3624 if (low == 0 || high == 0
3625 || ! integer_zerop (low) || ! integer_zerop (high)
3626 || TREE_CODE (arg1) != INTEGER_CST)
3627 break;
3628
3629 switch (code)
3630 {
3631 case NE_EXPR: /* - [c, c] */
3632 low = high = arg1;
3633 break;
3634 case EQ_EXPR: /* + [c, c] */
3635 in_p = ! in_p, low = high = arg1;
3636 break;
3637 case GT_EXPR: /* - [-, c] */
3638 low = 0, high = arg1;
3639 break;
3640 case GE_EXPR: /* + [c, -] */
3641 in_p = ! in_p, low = arg1, high = 0;
3642 break;
3643 case LT_EXPR: /* - [c, -] */
3644 low = arg1, high = 0;
3645 break;
3646 case LE_EXPR: /* + [-, c] */
3647 in_p = ! in_p, low = 0, high = arg1;
3648 break;
3649 default:
3650 gcc_unreachable ();
3651 }
3652
3653 /* If this is an unsigned comparison, we also know that EXP is
3654 greater than or equal to zero. We base the range tests we make
3655 on that fact, so we record it here to let us parse existing
3656 range tests. We test arg0_type since often the return type
3657 of, e.g. EQ_EXPR, is boolean. */
3658 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3659 {
3660 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3661 in_p, low, high, 1,
3662 fold_convert (arg0_type, integer_zero_node),
3663 NULL_TREE))
3664 break;
3665
3666 in_p = n_in_p, low = n_low, high = n_high;
3667
3668 /* If the high bound is missing, but we have a nonzero low
3669 bound, reverse the range so it goes from zero to the low bound
3670 minus 1. */
3671 if (high == 0 && low && ! integer_zerop (low))
3672 {
3673 in_p = ! in_p;
3674 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3675 integer_one_node, 0);
3676 low = fold_convert (arg0_type, integer_zero_node);
3677 }
3678 }
3679
3680 exp = arg0;
3681 continue;
3682
3683 case NEGATE_EXPR:
3684 /* (-x) IN [a,b] -> x in [-b, -a] */
3685 n_low = range_binop (MINUS_EXPR, exp_type,
3686 fold_convert (exp_type, integer_zero_node),
3687 0, high, 1);
3688 n_high = range_binop (MINUS_EXPR, exp_type,
3689 fold_convert (exp_type, integer_zero_node),
3690 0, low, 0);
3691 low = n_low, high = n_high;
3692 exp = arg0;
3693 continue;
3694
3695 case BIT_NOT_EXPR:
3696 /* ~ X -> -X - 1 */
3697 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3698 fold_convert (exp_type, integer_one_node));
3699 continue;
3700
3701 case PLUS_EXPR: case MINUS_EXPR:
3702 if (TREE_CODE (arg1) != INTEGER_CST)
3703 break;
3704
3705 /* If EXP is signed, any overflow in the computation is undefined,
3706 so we don't worry about it so long as our computations on
3707 the bounds don't overflow. For unsigned, overflow is defined
3708 and this is exactly the right thing. */
3709 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3710 arg0_type, low, 0, arg1, 0);
3711 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3712 arg0_type, high, 1, arg1, 0);
3713 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3714 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3715 break;
3716
3717 /* Check for an unsigned range which has wrapped around the maximum
3718 value thus making n_high < n_low, and normalize it. */
3719 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3720 {
3721 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3722 integer_one_node, 0);
3723 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3724 integer_one_node, 0);
3725
3726 /* If the range is of the form +/- [ x+1, x ], we won't
3727 be able to normalize it. But then, it represents the
3728 whole range or the empty set, so make it
3729 +/- [ -, - ]. */
3730 if (tree_int_cst_equal (n_low, low)
3731 && tree_int_cst_equal (n_high, high))
3732 low = high = 0;
3733 else
3734 in_p = ! in_p;
3735 }
3736 else
3737 low = n_low, high = n_high;
3738
3739 exp = arg0;
3740 continue;
3741
3742 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3743 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3744 break;
3745
3746 if (! INTEGRAL_TYPE_P (arg0_type)
3747 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3748 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3749 break;
3750
3751 n_low = low, n_high = high;
3752
3753 if (n_low != 0)
3754 n_low = fold_convert (arg0_type, n_low);
3755
3756 if (n_high != 0)
3757 n_high = fold_convert (arg0_type, n_high);
3758
3759
3760 /* If we're converting arg0, which has an unsigned type, to the
3761 signed type of exp, we will be doing the comparison as unsigned.
3762 The tests above have already verified that LOW and HIGH
3763 are both positive.
3764
3765 So we have to ensure that we will handle large unsigned
3766 values the same way that the current signed bounds treat
3767 negative values. */
3768
3769 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3770 {
3771 tree high_positive;
3772 tree equiv_type = lang_hooks.types.type_for_mode
3773 (TYPE_MODE (arg0_type), 1);
3774
3775 /* A range without an upper bound is, naturally, unbounded.
3776 Since convert would have cropped a very large value, use
3777 the max value for the destination type. */
3778 high_positive
3779 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3780 : TYPE_MAX_VALUE (arg0_type);
3781
3782 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3783 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3784 fold_convert (arg0_type,
3785 high_positive),
3786 fold_convert (arg0_type,
3787 integer_one_node));
3788
3789 /* If the low bound is specified, "and" the range with the
3790 range for which the original unsigned value will be
3791 positive. */
3792 if (low != 0)
3793 {
3794 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3795 1, n_low, n_high, 1,
3796 fold_convert (arg0_type,
3797 integer_zero_node),
3798 high_positive))
3799 break;
3800
3801 in_p = (n_in_p == in_p);
3802 }
3803 else
3804 {
3805 /* Otherwise, "or" the range with the range of the input
3806 that will be interpreted as negative. */
3807 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3808 0, n_low, n_high, 1,
3809 fold_convert (arg0_type,
3810 integer_zero_node),
3811 high_positive))
3812 break;
3813
3814 in_p = (in_p != n_in_p);
3815 }
3816 }
3817
3818 exp = arg0;
3819 low = n_low, high = n_high;
3820 continue;
3821
3822 default:
3823 break;
3824 }
3825
3826 break;
3827 }
3828
3829 /* If EXP is a constant, we can evaluate whether this is true or false. */
3830 if (TREE_CODE (exp) == INTEGER_CST)
3831 {
3832 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3833 exp, 0, low, 0))
3834 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3835 exp, 1, high, 1)));
3836 low = high = 0;
3837 exp = 0;
3838 }
3839
3840 *pin_p = in_p, *plow = low, *phigh = high;
3841 return exp;
3842 }
3843 \f
3844 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3845 type, TYPE, return an expression to test if EXP is in (or out of, depending
3846 on IN_P) the range. Return 0 if the test couldn't be created. */
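/* A worked example (hypothetical inputs, assuming an 8-bit char): asked
   for the range ['0', '9'] with IN_P set, this routine subtracts the
   low bound and recurses, so the pair of tests
   "c >= '0' && c <= '9'" becomes the single unsigned comparison
   "(unsigned char) (c - '0') <= 9".  */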
3847
3848 static tree
3849 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3850 {
3851 tree etype = TREE_TYPE (exp);
3852 tree value;
3853
3854 if (! in_p)
3855 {
3856 value = build_range_check (type, exp, 1, low, high);
3857 if (value != 0)
3858 return invert_truthvalue (value);
3859
3860 return 0;
3861 }
3862
3863 if (low == 0 && high == 0)
3864 return fold_convert (type, integer_one_node);
3865
3866 if (low == 0)
3867 return fold_build2 (LE_EXPR, type, exp, high);
3868
3869 if (high == 0)
3870 return fold_build2 (GE_EXPR, type, exp, low);
3871
3872 if (operand_equal_p (low, high, 0))
3873 return fold_build2 (EQ_EXPR, type, exp, low);
3874
3875 if (integer_zerop (low))
3876 {
3877 if (! TYPE_UNSIGNED (etype))
3878 {
3879 etype = lang_hooks.types.unsigned_type (etype);
3880 high = fold_convert (etype, high);
3881 exp = fold_convert (etype, exp);
3882 }
3883 return build_range_check (type, exp, 1, 0, high);
3884 }
3885
3886 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3887 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3888 {
3889 unsigned HOST_WIDE_INT lo;
3890 HOST_WIDE_INT hi;
3891 int prec;
3892
3893 prec = TYPE_PRECISION (etype);
3894 if (prec <= HOST_BITS_PER_WIDE_INT)
3895 {
3896 hi = 0;
3897 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3898 }
3899 else
3900 {
3901 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3902 lo = (unsigned HOST_WIDE_INT) -1;
3903 }
3904
3905 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3906 {
3907 if (TYPE_UNSIGNED (etype))
3908 {
3909 etype = lang_hooks.types.signed_type (etype);
3910 exp = fold_convert (etype, exp);
3911 }
3912 return fold_build2 (GT_EXPR, type, exp,
3913 fold_convert (etype, integer_zero_node));
3914 }
3915 }
3916
3917 value = const_binop (MINUS_EXPR, high, low, 0);
3918 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3919 {
3920 tree utype, minv, maxv;
3921
3922 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3923 for the type in question, as we rely on this here. */
3924 switch (TREE_CODE (etype))
3925 {
3926 case INTEGER_TYPE:
3927 case ENUMERAL_TYPE:
3928 case CHAR_TYPE:
3929 utype = lang_hooks.types.unsigned_type (etype);
3930 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3931 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3932 integer_one_node, 1);
3933 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3934 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3935 minv, 1, maxv, 1)))
3936 {
3937 etype = utype;
3938 high = fold_convert (etype, high);
3939 low = fold_convert (etype, low);
3940 exp = fold_convert (etype, exp);
3941 value = const_binop (MINUS_EXPR, high, low, 0);
3942 }
3943 break;
3944 default:
3945 break;
3946 }
3947 }
3948
3949 if (value != 0 && ! TREE_OVERFLOW (value))
3950 return build_range_check (type,
3951 fold_build2 (MINUS_EXPR, etype, exp, low),
3952 1, fold_convert (etype, integer_zero_node),
3953 value);
3954
3955 return 0;
3956 }
3957 \f
3958 /* Given two ranges, see if we can merge them into one. Return 1 if we
3959 can, 0 if we can't. Set the output range into the specified parameters. */
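/* For instance (an illustrative pair, not from a real test case):
   merging + [0, 4] with + [2, 9] takes the "in0_p && in1_p" arm below;
   the ranges overlap and neither subsumes the other, so the result is
   the intersection + [2, 4].  Excluded ranges combine analogously.  */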
3960
3961 static int
3962 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3963 tree high0, int in1_p, tree low1, tree high1)
3964 {
3965 int no_overlap;
3966 int subset;
3967 int temp;
3968 tree tem;
3969 int in_p;
3970 tree low, high;
3971 int lowequal = ((low0 == 0 && low1 == 0)
3972 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3973 low0, 0, low1, 0)));
3974 int highequal = ((high0 == 0 && high1 == 0)
3975 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3976 high0, 1, high1, 1)));
3977
3978 /* Make range 0 be the range that starts first, or that ends last if
3979 they start at the same value; swap them if that is not already so. */
3980 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3981 low0, 0, low1, 0))
3982 || (lowequal
3983 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3984 high1, 1, high0, 1))))
3985 {
3986 temp = in0_p, in0_p = in1_p, in1_p = temp;
3987 tem = low0, low0 = low1, low1 = tem;
3988 tem = high0, high0 = high1, high1 = tem;
3989 }
3990
3991 /* Now flag two cases, whether the ranges are disjoint or whether the
3992 second range is totally subsumed in the first. Note that the tests
3993 below are simplified by the ones above. */
3994 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3995 high0, 1, low1, 0));
3996 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3997 high1, 1, high0, 1));
3998
3999 /* We now have four cases, depending on whether we are including or
4000 excluding the two ranges. */
4001 if (in0_p && in1_p)
4002 {
4003 /* If they don't overlap, the result is false. If the second range
4004 is a subset it is the result. Otherwise, the range is from the start
4005 of the second to the end of the first. */
4006 if (no_overlap)
4007 in_p = 0, low = high = 0;
4008 else if (subset)
4009 in_p = 1, low = low1, high = high1;
4010 else
4011 in_p = 1, low = low1, high = high0;
4012 }
4013
4014 else if (in0_p && ! in1_p)
4015 {
4016 /* If they don't overlap, the result is the first range. If they are
4017 equal, the result is false. If the second range is a subset of the
4018 first, and the ranges begin at the same place, we go from just after
4019 the end of the first range to the end of the second. If the second
4020 range is not a subset of the first, or if it is a subset and both
4021 ranges end at the same place, the range starts at the start of the
4022 first range and ends just before the second range.
4023 Otherwise, we can't describe this as a single range. */
4024 if (no_overlap)
4025 in_p = 1, low = low0, high = high0;
4026 else if (lowequal && highequal)
4027 in_p = 0, low = high = 0;
4028 else if (subset && lowequal)
4029 {
4030 in_p = 1, high = high0;
4031 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4032 integer_one_node, 0);
4033 }
4034 else if (! subset || highequal)
4035 {
4036 in_p = 1, low = low0;
4037 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4038 integer_one_node, 0);
4039 }
4040 else
4041 return 0;
4042 }
4043
4044 else if (! in0_p && in1_p)
4045 {
4046 /* If they don't overlap, the result is the second range. If the second
4047 is a subset of the first, the result is false. Otherwise,
4048 the range starts just after the first range and ends at the
4049 end of the second. */
4050 if (no_overlap)
4051 in_p = 1, low = low1, high = high1;
4052 else if (subset || highequal)
4053 in_p = 0, low = high = 0;
4054 else
4055 {
4056 in_p = 1, high = high1;
4057 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4058 integer_one_node, 0);
4059 }
4060 }
4061
4062 else
4063 {
4064 /* The case where we are excluding both ranges. Here the complex case
4065 is if they don't overlap. In that case, the only time we have a
4066 range is if they are adjacent. If the second is a subset of the
4067 first, the result is the first. Otherwise, the range to exclude
4068 starts at the beginning of the first range and ends at the end of the
4069 second. */
4070 if (no_overlap)
4071 {
4072 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4073 range_binop (PLUS_EXPR, NULL_TREE,
4074 high0, 1,
4075 integer_one_node, 1),
4076 1, low1, 0)))
4077 in_p = 0, low = low0, high = high1;
4078 else
4079 {
4080 /* Canonicalize - [min, x] into - [-, x]. */
4081 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4082 switch (TREE_CODE (TREE_TYPE (low0)))
4083 {
4084 case ENUMERAL_TYPE:
4085 if (TYPE_PRECISION (TREE_TYPE (low0))
4086 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4087 break;
4088 /* FALLTHROUGH */
4089 case INTEGER_TYPE:
4090 case CHAR_TYPE:
4091 if (tree_int_cst_equal (low0,
4092 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4093 low0 = 0;
4094 break;
4095 case POINTER_TYPE:
4096 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4097 && integer_zerop (low0))
4098 low0 = 0;
4099 break;
4100 default:
4101 break;
4102 }
4103
4104 /* Canonicalize - [x, max] into - [x, -]. */
4105 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4106 switch (TREE_CODE (TREE_TYPE (high1)))
4107 {
4108 case ENUMERAL_TYPE:
4109 if (TYPE_PRECISION (TREE_TYPE (high1))
4110 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4111 break;
4112 /* FALLTHROUGH */
4113 case INTEGER_TYPE:
4114 case CHAR_TYPE:
4115 if (tree_int_cst_equal (high1,
4116 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4117 high1 = 0;
4118 break;
4119 case POINTER_TYPE:
4120 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4121 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4122 high1, 1,
4123 integer_one_node, 1)))
4124 high1 = 0;
4125 break;
4126 default:
4127 break;
4128 }
4129
4130 /* The ranges might be also adjacent between the maximum and
4131 minimum values of the given type. For
4132 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4133 return + [x + 1, y - 1]. */
4134 if (low0 == 0 && high1 == 0)
4135 {
4136 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4137 integer_one_node, 1);
4138 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4139 integer_one_node, 0);
4140 if (low == 0 || high == 0)
4141 return 0;
4142
4143 in_p = 1;
4144 }
4145 else
4146 return 0;
4147 }
4148 }
4149 else if (subset)
4150 in_p = 0, low = low0, high = high0;
4151 else
4152 in_p = 0, low = low0, high = high1;
4153 }
4154
4155 *pin_p = in_p, *plow = low, *phigh = high;
4156 return 1;
4157 }
4158 \f
4159
4160 /* Subroutine of fold, looking inside expressions of the form
4161 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4162 of the COND_EXPR. This function is also used to optimize
4163 A op B ? C : A, by reversing the comparison first.
4164
4165 Return a folded expression whose code is not a COND_EXPR
4166 anymore, or NULL_TREE if no folding opportunity is found. */
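/* Two representative foldings performed here (sketches, assuming the
   usual flag settings): "X > 0 ? X : -X" becomes ABS_EXPR <X>, and
   "X < Y ? X : Y" becomes a MIN_EXPR of X and Y when NaNs need not be
   honored for the operands' mode.  */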
4167
4168 static tree
4169 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4170 {
4171 enum tree_code comp_code = TREE_CODE (arg0);
4172 tree arg00 = TREE_OPERAND (arg0, 0);
4173 tree arg01 = TREE_OPERAND (arg0, 1);
4174 tree arg1_type = TREE_TYPE (arg1);
4175 tree tem;
4176
4177 STRIP_NOPS (arg1);
4178 STRIP_NOPS (arg2);
4179
4180 /* If we have A op 0 ? A : -A, consider applying the following
4181 transformations:
4182
4183 A == 0? A : -A same as -A
4184 A != 0? A : -A same as A
4185 A >= 0? A : -A same as abs (A)
4186 A > 0? A : -A same as abs (A)
4187 A <= 0? A : -A same as -abs (A)
4188 A < 0? A : -A same as -abs (A)
4189
4190 None of these transformations work for modes with signed
4191 zeros. If A is +/-0, the first two transformations will
4192 change the sign of the result (from +0 to -0, or vice
4193 versa). The last four will fix the sign of the result,
4194 even though the original expressions could be positive or
4195 negative, depending on the sign of A.
4196
4197 Note that all these transformations are correct if A is
4198 NaN, since the two alternatives (A and -A) are also NaNs. */
4199 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4200 ? real_zerop (arg01)
4201 : integer_zerop (arg01))
4202 && ((TREE_CODE (arg2) == NEGATE_EXPR
4203 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4204 /* In the case that A is of the form X-Y, '-A' (arg2) may
4205 have already been folded to Y-X, check for that. */
4206 || (TREE_CODE (arg1) == MINUS_EXPR
4207 && TREE_CODE (arg2) == MINUS_EXPR
4208 && operand_equal_p (TREE_OPERAND (arg1, 0),
4209 TREE_OPERAND (arg2, 1), 0)
4210 && operand_equal_p (TREE_OPERAND (arg1, 1),
4211 TREE_OPERAND (arg2, 0), 0))))
4212 switch (comp_code)
4213 {
4214 case EQ_EXPR:
4215 case UNEQ_EXPR:
4216 tem = fold_convert (arg1_type, arg1);
4217 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4218 case NE_EXPR:
4219 case LTGT_EXPR:
4220 return pedantic_non_lvalue (fold_convert (type, arg1));
4221 case UNGE_EXPR:
4222 case UNGT_EXPR:
4223 if (flag_trapping_math)
4224 break;
4225 /* Fall through. */
4226 case GE_EXPR:
4227 case GT_EXPR:
4228 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4229 arg1 = fold_convert (lang_hooks.types.signed_type
4230 (TREE_TYPE (arg1)), arg1);
4231 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4232 return pedantic_non_lvalue (fold_convert (type, tem));
4233 case UNLE_EXPR:
4234 case UNLT_EXPR:
4235 if (flag_trapping_math)
4236 break;
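/* Fall through. */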
4237 case LE_EXPR:
4238 case LT_EXPR:
4239 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4240 arg1 = fold_convert (lang_hooks.types.signed_type
4241 (TREE_TYPE (arg1)), arg1);
4242 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4243 return negate_expr (fold_convert (type, tem));
4244 default:
4245 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4246 break;
4247 }
4248
4249 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4250 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4251 both transformations are correct when A is NaN: A != 0
4252 is then true, and A == 0 is false. */
4253
4254 if (integer_zerop (arg01) && integer_zerop (arg2))
4255 {
4256 if (comp_code == NE_EXPR)
4257 return pedantic_non_lvalue (fold_convert (type, arg1));
4258 else if (comp_code == EQ_EXPR)
4259 return fold_convert (type, integer_zero_node);
4260 }
4261
4262 /* Try some transformations of A op B ? A : B.
4263
4264 A == B? A : B same as B
4265 A != B? A : B same as A
4266 A >= B? A : B same as max (A, B)
4267 A > B? A : B same as max (B, A)
4268 A <= B? A : B same as min (A, B)
4269 A < B? A : B same as min (B, A)
4270
4271 As above, these transformations don't work in the presence
4272 of signed zeros. For example, if A and B are zeros of
4273 opposite sign, the first two transformations will change
4274 the sign of the result. In the last four, the original
4275 expressions give different results for (A=+0, B=-0) and
4276 (A=-0, B=+0), but the transformed expressions do not.
4277
4278 The first two transformations are correct if either A or B
4279 is a NaN. In the first transformation, the condition will
4280 be false, and B will indeed be chosen. In the case of the
4281 second transformation, the condition A != B will be true,
4282 and A will be chosen.
4283
4284 The conversions to max() and min() are not correct if B is
4285 a number and A is not. The conditions in the original
4286 expressions will be false, so all four give B. The min()
4287 and max() versions would give a NaN instead. */
4288 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4289 /* Avoid these transformations if the COND_EXPR may be used
4290 as an lvalue in the C++ front-end. PR c++/19199. */
4291 && (in_gimple_form
4292 || strcmp (lang_hooks.name, "GNU C++") != 0
4293 || ! maybe_lvalue_p (arg1)
4294 || ! maybe_lvalue_p (arg2)))
4295 {
4296 tree comp_op0 = arg00;
4297 tree comp_op1 = arg01;
4298 tree comp_type = TREE_TYPE (comp_op0);
4299
4300 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4301 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4302 {
4303 comp_type = type;
4304 comp_op0 = arg1;
4305 comp_op1 = arg2;
4306 }
4307
4308 switch (comp_code)
4309 {
4310 case EQ_EXPR:
4311 return pedantic_non_lvalue (fold_convert (type, arg2));
4312 case NE_EXPR:
4313 return pedantic_non_lvalue (fold_convert (type, arg1));
4314 case LE_EXPR:
4315 case LT_EXPR:
4316 case UNLE_EXPR:
4317 case UNLT_EXPR:
4318 /* In C++ a ?: expression can be an lvalue, so put the
4319 operand which will be used if they are equal first
4320 so that we can convert this back to the
4321 corresponding COND_EXPR. */
4322 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4323 {
4324 comp_op0 = fold_convert (comp_type, comp_op0);
4325 comp_op1 = fold_convert (comp_type, comp_op1);
4326 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4327 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4328 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4329 return pedantic_non_lvalue (fold_convert (type, tem));
4330 }
4331 break;
4332 case GE_EXPR:
4333 case GT_EXPR:
4334 case UNGE_EXPR:
4335 case UNGT_EXPR:
4336 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4337 {
4338 comp_op0 = fold_convert (comp_type, comp_op0);
4339 comp_op1 = fold_convert (comp_type, comp_op1);
4340 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4341 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4342 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4343 return pedantic_non_lvalue (fold_convert (type, tem));
4344 }
4345 break;
4346 case UNEQ_EXPR:
4347 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4348 return pedantic_non_lvalue (fold_convert (type, arg2));
4349 break;
4350 case LTGT_EXPR:
4351 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4352 return pedantic_non_lvalue (fold_convert (type, arg1));
4353 break;
4354 default:
4355 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4356 break;
4357 }
4358 }
4359
4360 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4361 we might still be able to simplify this. For example,
4362 if C1 is one less or one more than C2, this might have started
4363 out as a MIN or MAX and been transformed by this function.
4364 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4365
4366 if (INTEGRAL_TYPE_P (type)
4367 && TREE_CODE (arg01) == INTEGER_CST
4368 && TREE_CODE (arg2) == INTEGER_CST)
4369 switch (comp_code)
4370 {
4371 case EQ_EXPR:
4372 /* We can replace A with C1 in this case. */
4373 arg1 = fold_convert (type, arg01);
4374 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4375
4376 case LT_EXPR:
4377 /* If C1 is C2 + 1, this is min(A, C2). */
4378 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4379 OEP_ONLY_CONST)
4380 && operand_equal_p (arg01,
4381 const_binop (PLUS_EXPR, arg2,
4382 integer_one_node, 0),
4383 OEP_ONLY_CONST))
4384 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4385 type, arg1, arg2));
4386 break;
4387
4388 case LE_EXPR:
4389 /* If C1 is C2 - 1, this is min(A, C2). */
4390 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4391 OEP_ONLY_CONST)
4392 && operand_equal_p (arg01,
4393 const_binop (MINUS_EXPR, arg2,
4394 integer_one_node, 0),
4395 OEP_ONLY_CONST))
4396 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4397 type, arg1, arg2));
4398 break;
4399
4400 case GT_EXPR:
4401 /* If C1 is C2 - 1, this is max(A, C2). */
4402 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4403 OEP_ONLY_CONST)
4404 && operand_equal_p (arg01,
4405 const_binop (MINUS_EXPR, arg2,
4406 integer_one_node, 0),
4407 OEP_ONLY_CONST))
4408 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4409 type, arg1, arg2));
4410 break;
4411
4412 case GE_EXPR:
4413 /* If C1 is C2 + 1, this is max(A, C2). */
4414 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4415 OEP_ONLY_CONST)
4416 && operand_equal_p (arg01,
4417 const_binop (PLUS_EXPR, arg2,
4418 integer_one_node, 0),
4419 OEP_ONLY_CONST))
4420 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4421 type, arg1, arg2));
4422 break;
4423 case NE_EXPR:
4424 break;
4425 default:
4426 gcc_unreachable ();
4427 }
4428
4429 return NULL_TREE;
4430 }
4431
4432
4433 \f
4434 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4435 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4436 #endif
4437
4438 /* EXP is some logical combination of boolean tests. See if we can
4439 merge it into some range test. Return the new tree if so. */
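/* A hedged sketch of the merge (hypothetical input): for
   "c >= '0' && c <= '9'", make_range gives the two single-ended ranges
   + ['0', -] and + [-, '9']; merge_ranges intersects them into
   + ['0', '9'], and build_range_check then emits one unsigned
   comparison in place of the pair of tests.  */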
4440
4441 static tree
4442 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4443 {
4444 int or_op = (code == TRUTH_ORIF_EXPR
4445 || code == TRUTH_OR_EXPR);
4446 int in0_p, in1_p, in_p;
4447 tree low0, low1, low, high0, high1, high;
4448 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4449 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4450 tree tem;
4451
4452 /* If this is an OR operation, invert both sides; we will invert
4453 again at the end. */
4454 if (or_op)
4455 in0_p = ! in0_p, in1_p = ! in1_p;
4456
4457 /* If both expressions are the same, if we can merge the ranges, and we
4458 can build the range test, return it or it inverted. If one of the
4459 ranges is always true or always false, consider it to be the same
4460 expression as the other. */
4461 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4462 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4463 in1_p, low1, high1)
4464 && 0 != (tem = (build_range_check (type,
4465 lhs != 0 ? lhs
4466 : rhs != 0 ? rhs : integer_zero_node,
4467 in_p, low, high))))
4468 return or_op ? invert_truthvalue (tem) : tem;
4469
4470 /* On machines where branches are expensive, if this is a
4471 short-circuited branch and the underlying object on both sides
4472 is the same, make a non-short-circuit operation. */
4473 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4474 && lhs != 0 && rhs != 0
4475 && (code == TRUTH_ANDIF_EXPR
4476 || code == TRUTH_ORIF_EXPR)
4477 && operand_equal_p (lhs, rhs, 0))
4478 {
4479 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4480 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4481 which cases we can't do this. */
4482 if (simple_operand_p (lhs))
4483 return build2 (code == TRUTH_ANDIF_EXPR
4484 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4485 type, op0, op1);
4486
4487 else if (lang_hooks.decls.global_bindings_p () == 0
4488 && ! CONTAINS_PLACEHOLDER_P (lhs))
4489 {
4490 tree common = save_expr (lhs);
4491
4492 if (0 != (lhs = build_range_check (type, common,
4493 or_op ? ! in0_p : in0_p,
4494 low0, high0))
4495 && (0 != (rhs = build_range_check (type, common,
4496 or_op ? ! in1_p : in1_p,
4497 low1, high1))))
4498 return build2 (code == TRUTH_ANDIF_EXPR
4499 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4500 type, lhs, rhs);
4501 }
4502 }
4503
4504 return 0;
4505 }
4506 \f
4507 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4508 bit value. Arrange things so the extra bits will be set to zero if and
4509 only if C is sign-extended to its full width. If MASK is nonzero,
4510 it is an INTEGER_CST that should be AND'ed with the extra bits. */
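/* A worked example under assumed parameters (P == 8, a 32-bit mode,
   MASK null, UNSIGNEDP zero): for C == 0xffffffff, i.e. -1 already
   sign-extended from 8 bits, TEMP below computes to 0xffffff00 and
   C ^ TEMP == 0x000000ff, so the extra bits come out zero.  For
   C == 0x000000ff, which is not sign-extended, the same XOR sets all
   the extra bits, as the comment above requires.  */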
4511
4512 static tree
4513 unextend (tree c, int p, int unsignedp, tree mask)
4514 {
4515 tree type = TREE_TYPE (c);
4516 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4517 tree temp;
4518
4519 if (p == modesize || unsignedp)
4520 return c;
4521
4522 /* We work by getting just the sign bit into the low-order bit, then
4523 into the high-order bit, then sign-extend. We then XOR that value
4524 with C. */
4525 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4526 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4527
4528 /* We must use a signed type in order to get an arithmetic right shift.
4529 However, we must also avoid introducing accidental overflows, so that
4530 a subsequent call to integer_zerop will work. Hence we must
4531 do the type conversion here. At this point, the constant is either
4532 zero or one, and the conversion to a signed type can never overflow.
4533 We could get an overflow if this conversion is done anywhere else. */
4534 if (TYPE_UNSIGNED (type))
4535 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4536
4537 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4538 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4539 if (mask != 0)
4540 temp = const_binop (BIT_AND_EXPR, temp,
4541 fold_convert (TREE_TYPE (c), mask), 0);
4542 /* If necessary, convert the type back to match the type of C. */
4543 if (TYPE_UNSIGNED (type))
4544 temp = fold_convert (type, temp);
4545
4546 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4547 }
4548 \f
4549 /* Find ways of folding logical expressions of LHS and RHS:
4550 Try to merge two comparisons to the same innermost item.
4551 Look for range tests like "ch >= '0' && ch <= '9'".
4552 Look for combinations of simple terms on machines with expensive branches
4553 and evaluate the RHS unconditionally.
4554
4555 For example, if we have p->a == 2 && p->b == 4 and we can make an
4556 object large enough to span both A and B, we can do this with a comparison
4557 against the object ANDed with a mask.
4558
4559 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4560 operations to do this with one comparison.
4561
4562 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4563 function and the one above.
4564
4565 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4566 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4567
4568 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4569 two operands.
4570
4571 We return the simplified tree or 0 if no optimization is possible. */
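/* As an illustration of the field-merging path (a sketch; the exact
   constant depends on endianness and field layout): if a->b and a->c
   are adjacent 8-bit fields, "a->b == 2 && a->c == 4" can be rewritten
   as a single 16-bit load of the underlying word compared against a
   constant holding 2 and 4 in the corresponding byte positions
   (0x0402 on a little-endian target, 0x0204 on big-endian).  */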
4572
4573 static tree
4574 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4575 {
4576 /* If this is the "or" of two comparisons, we can do something if
4577 the comparisons are NE_EXPR. If this is the "and", we can do something
4578 if the comparisons are EQ_EXPR. I.e.,
4579 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4580
4581 WANTED_CODE is this operation code. For single bit fields, we can
4582 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4583 comparison for one-bit fields. */
4584
4585 enum tree_code wanted_code;
4586 enum tree_code lcode, rcode;
4587 tree ll_arg, lr_arg, rl_arg, rr_arg;
4588 tree ll_inner, lr_inner, rl_inner, rr_inner;
4589 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4590 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4591 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4592 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4593 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4594 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4595 enum machine_mode lnmode, rnmode;
4596 tree ll_mask, lr_mask, rl_mask, rr_mask;
4597 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4598 tree l_const, r_const;
4599 tree lntype, rntype, result;
4600 int first_bit, end_bit;
4601 int volatilep;
4602
4603 /* Start by getting the comparison codes. Fail if anything is volatile.
4604 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4605 it were surrounded with a NE_EXPR. */
4606
4607 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4608 return 0;
4609
4610 lcode = TREE_CODE (lhs);
4611 rcode = TREE_CODE (rhs);
4612
4613 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4614 {
4615 lhs = build2 (NE_EXPR, truth_type, lhs,
4616 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4617 lcode = NE_EXPR;
4618 }
4619
4620 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4621 {
4622 rhs = build2 (NE_EXPR, truth_type, rhs,
4623 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4624 rcode = NE_EXPR;
4625 }
4626
4627 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4628 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4629 return 0;
4630
4631 ll_arg = TREE_OPERAND (lhs, 0);
4632 lr_arg = TREE_OPERAND (lhs, 1);
4633 rl_arg = TREE_OPERAND (rhs, 0);
4634 rr_arg = TREE_OPERAND (rhs, 1);
4635
4636 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4637 if (simple_operand_p (ll_arg)
4638 && simple_operand_p (lr_arg))
4639 {
4640 tree result;
4641 if (operand_equal_p (ll_arg, rl_arg, 0)
4642 && operand_equal_p (lr_arg, rr_arg, 0))
4643 {
4644 result = combine_comparisons (code, lcode, rcode,
4645 truth_type, ll_arg, lr_arg);
4646 if (result)
4647 return result;
4648 }
4649 else if (operand_equal_p (ll_arg, rr_arg, 0)
4650 && operand_equal_p (lr_arg, rl_arg, 0))
4651 {
4652 result = combine_comparisons (code, lcode,
4653 swap_tree_comparison (rcode),
4654 truth_type, ll_arg, lr_arg);
4655 if (result)
4656 return result;
4657 }
4658 }
4659
4660 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4661 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4662
4663 /* If the RHS can be evaluated unconditionally and its operands are
4664 simple, it wins to evaluate the RHS unconditionally on machines
4665 with expensive branches. In this case, this isn't a comparison
4666 that can be merged. Avoid doing this if the RHS is a floating-point
4667 comparison since those can trap. */
4668
4669 if (BRANCH_COST >= 2
4670 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4671 && simple_operand_p (rl_arg)
4672 && simple_operand_p (rr_arg))
4673 {
4674 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4675 if (code == TRUTH_OR_EXPR
4676 && lcode == NE_EXPR && integer_zerop (lr_arg)
4677 && rcode == NE_EXPR && integer_zerop (rr_arg)
4678 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4679 return build2 (NE_EXPR, truth_type,
4680 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4681 ll_arg, rl_arg),
4682 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4683
4684 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4685 if (code == TRUTH_AND_EXPR
4686 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4687 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4688 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4689 return build2 (EQ_EXPR, truth_type,
4690 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4691 ll_arg, rl_arg),
4692 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4693
4694 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4695 return build2 (code, truth_type, lhs, rhs);
4696 }
4697
4698 /* See if the comparisons can be merged. Then get all the parameters for
4699 each side. */
4700
4701 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4702 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4703 return 0;
4704
4705 volatilep = 0;
4706 ll_inner = decode_field_reference (ll_arg,
4707 &ll_bitsize, &ll_bitpos, &ll_mode,
4708 &ll_unsignedp, &volatilep, &ll_mask,
4709 &ll_and_mask);
4710 lr_inner = decode_field_reference (lr_arg,
4711 &lr_bitsize, &lr_bitpos, &lr_mode,
4712 &lr_unsignedp, &volatilep, &lr_mask,
4713 &lr_and_mask);
4714 rl_inner = decode_field_reference (rl_arg,
4715 &rl_bitsize, &rl_bitpos, &rl_mode,
4716 &rl_unsignedp, &volatilep, &rl_mask,
4717 &rl_and_mask);
4718 rr_inner = decode_field_reference (rr_arg,
4719 &rr_bitsize, &rr_bitpos, &rr_mode,
4720 &rr_unsignedp, &volatilep, &rr_mask,
4721 &rr_and_mask);
4722
4723 /* The inner operation on the lhs of each comparison must be the same
4724 if we are to be able to do anything. Then see if we have
4725 constants. If not, the same must be true for
4726 the rhs's. */
4727 if (volatilep || ll_inner == 0 || rl_inner == 0
4728 || ! operand_equal_p (ll_inner, rl_inner, 0))
4729 return 0;
4730
4731 if (TREE_CODE (lr_arg) == INTEGER_CST
4732 && TREE_CODE (rr_arg) == INTEGER_CST)
4733 l_const = lr_arg, r_const = rr_arg;
4734 else if (lr_inner == 0 || rr_inner == 0
4735 || ! operand_equal_p (lr_inner, rr_inner, 0))
4736 return 0;
4737 else
4738 l_const = r_const = 0;
4739
4740 /* If either comparison code is not correct for our logical operation,
4741 fail. However, we can convert a one-bit comparison against zero into
4742 the opposite comparison against that bit being set in the field. */
4743
4744 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4745 if (lcode != wanted_code)
4746 {
4747 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4748 {
4749 /* Make the left operand unsigned, since we are only interested
4750 in the value of one bit. Otherwise we are doing the wrong
4751 thing below. */
4752 ll_unsignedp = 1;
4753 l_const = ll_mask;
4754 }
4755 else
4756 return 0;
4757 }
4758
4759 /* This is analogous to the code for l_const above. */
4760 if (rcode != wanted_code)
4761 {
4762 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4763 {
4764 rl_unsignedp = 1;
4765 r_const = rl_mask;
4766 }
4767 else
4768 return 0;
4769 }
4770
4771 /* After this point all optimizations will generate bit-field
4772 references, which we might not want. */
4773 if (! lang_hooks.can_use_bit_fields_p ())
4774 return 0;
4775
4776 /* See if we can find a mode that contains both fields being compared on
4777 the left. If we can't, fail. Otherwise, update all constants and masks
4778 to be relative to a field of that size. */
4779 first_bit = MIN (ll_bitpos, rl_bitpos);
4780 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4781 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4782 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4783 volatilep);
4784 if (lnmode == VOIDmode)
4785 return 0;
4786
4787 lnbitsize = GET_MODE_BITSIZE (lnmode);
4788 lnbitpos = first_bit & ~ (lnbitsize - 1);
4789 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4790 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4791
4792 if (BYTES_BIG_ENDIAN)
4793 {
4794 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4795 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4796 }
4797
4798 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4799 size_int (xll_bitpos), 0);
4800 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4801 size_int (xrl_bitpos), 0);
4802
4803 if (l_const)
4804 {
4805 l_const = fold_convert (lntype, l_const);
4806 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4807 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4808 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4809 fold_build1 (BIT_NOT_EXPR,
4810 lntype, ll_mask),
4811 0)))
4812 {
4813 warning ("comparison is always %d", wanted_code == NE_EXPR);
4814
4815 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4816 }
4817 }
4818 if (r_const)
4819 {
4820 r_const = fold_convert (lntype, r_const);
4821 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4822 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4823 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4824 fold_build1 (BIT_NOT_EXPR,
4825 lntype, rl_mask),
4826 0)))
4827 {
4828 warning ("comparison is always %d", wanted_code == NE_EXPR);
4829
4830 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4831 }
4832 }
4833
4834 /* If the right sides are not constant, do the same for them. Also,
4835 disallow this optimization if a size or signedness mismatch occurs
4836 between the left and right sides. */
4837 if (l_const == 0)
4838 {
4839 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4840 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4841 /* Make sure the two fields on the right
4842 correspond to the left without being swapped. */
4843 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4844 return 0;
4845
4846 first_bit = MIN (lr_bitpos, rr_bitpos);
4847 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4848 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4849 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4850 volatilep);
4851 if (rnmode == VOIDmode)
4852 return 0;
4853
4854 rnbitsize = GET_MODE_BITSIZE (rnmode);
4855 rnbitpos = first_bit & ~ (rnbitsize - 1);
4856 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4857 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4858
4859 if (BYTES_BIG_ENDIAN)
4860 {
4861 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4862 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4863 }
4864
4865 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4866 size_int (xlr_bitpos), 0);
4867 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4868 size_int (xrr_bitpos), 0);
4869
4870 /* Make a mask that corresponds to both fields being compared.
4871 Do this for both items being compared. If the operands are the
4872 same size and the bits being compared are in the same position
4873 then we can do this by masking both and comparing the masked
4874 results. */
4875 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4876 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4877 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4878 {
4879 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4880 ll_unsignedp || rl_unsignedp);
4881 if (! all_ones_mask_p (ll_mask, lnbitsize))
4882 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4883
4884 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4885 lr_unsignedp || rr_unsignedp);
4886 if (! all_ones_mask_p (lr_mask, rnbitsize))
4887 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4888
4889 return build2 (wanted_code, truth_type, lhs, rhs);
4890 }
4891
4892 /* There is still another way we can do something: If both pairs of
4893 fields being compared are adjacent, we may be able to make a wider
4894 field containing them both.
4895
4896 Note that we still must mask the lhs/rhs expressions. Furthermore,
4897 the mask must be shifted to account for the shift done by
4898 make_bit_field_ref. */
4899 if ((ll_bitsize + ll_bitpos == rl_bitpos
4900 && lr_bitsize + lr_bitpos == rr_bitpos)
4901 || (ll_bitpos == rl_bitpos + rl_bitsize
4902 && lr_bitpos == rr_bitpos + rr_bitsize))
4903 {
4904 tree type;
4905
4906 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4907 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4908 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4909 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4910
4911 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4912 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4913 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4914 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4915
4916 /* Convert to the smaller type before masking out unwanted bits. */
4917 type = lntype;
4918 if (lntype != rntype)
4919 {
4920 if (lnbitsize > rnbitsize)
4921 {
4922 lhs = fold_convert (rntype, lhs);
4923 ll_mask = fold_convert (rntype, ll_mask);
4924 type = rntype;
4925 }
4926 else if (lnbitsize < rnbitsize)
4927 {
4928 rhs = fold_convert (lntype, rhs);
4929 lr_mask = fold_convert (lntype, lr_mask);
4930 type = lntype;
4931 }
4932 }
4933
4934 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4935 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4936
4937 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4938 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4939
4940 return build2 (wanted_code, truth_type, lhs, rhs);
4941 }
4942
4943 return 0;
4944 }
4945
4946 /* Handle the case of comparisons with constants. If there is something in
4947 common between the masks, those bits of the constants must be the same.
4948 If not, the condition is always false. Test for this to avoid generating
4949 incorrect code below. */
4950 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4951 if (! integer_zerop (result)
4952 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4953 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4954 {
4955 if (wanted_code == NE_EXPR)
4956 {
4957 warning ("%<or%> of unmatched not-equal tests is always 1");
4958 return constant_boolean_node (true, truth_type);
4959 }
4960 else
4961 {
4962 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4963 return constant_boolean_node (false, truth_type);
4964 }
4965 }
4966
4967 /* Construct the expression we will return. First get the component
4968 reference we will make. Unless the mask is all ones the width of
4969 that field, perform the mask operation. Then compare with the
4970 merged constant. */
4971 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4972 ll_unsignedp || rl_unsignedp);
4973
4974 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4975 if (! all_ones_mask_p (ll_mask, lnbitsize))
4976 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4977
4978 return build2 (wanted_code, truth_type, result,
4979 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4980 }
4981 \f
4982 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4983 constant. */
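/* A worked illustration (hypothetical operands): for "MAX (X, 4) >= 5"
   the GE_EXPR arm below rewrites the test as
   "MAX (X, 4) == 5 || MAX (X, 4) > 5", and the recursive calls reduce
   those to "X == 5 || X > 5".  */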
4984
4985 static tree
4986 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
4987 {
4988 tree arg0 = op0;
4989 enum tree_code op_code;
4990 tree comp_const = op1;
4991 tree minmax_const;
4992 int consts_equal, consts_lt;
4993 tree inner;
4994
4995 STRIP_SIGN_NOPS (arg0);
4996
4997 op_code = TREE_CODE (arg0);
4998 minmax_const = TREE_OPERAND (arg0, 1);
4999 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5000 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5001 inner = TREE_OPERAND (arg0, 0);
5002
5003 /* If something does not permit us to optimize, return the original tree. */
5004 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5005 || TREE_CODE (comp_const) != INTEGER_CST
5006 || TREE_CONSTANT_OVERFLOW (comp_const)
5007 || TREE_CODE (minmax_const) != INTEGER_CST
5008 || TREE_CONSTANT_OVERFLOW (minmax_const))
5009 return NULL_TREE;
5010
5011 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5012 and GT_EXPR, doing the rest with recursive calls using logical
5013 simplifications. */
5014 switch (code)
5015 {
5016 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5017 {
5018 /* FIXME: We should be able to invert code without building a
5019 scratch tree node, but doing so would require us to
5020 duplicate a part of invert_truthvalue here. */
5021 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5022 tem = optimize_minmax_comparison (TREE_CODE (tem),
5023 TREE_TYPE (tem),
5024 TREE_OPERAND (tem, 0),
5025 TREE_OPERAND (tem, 1));
5026 return invert_truthvalue (tem);
5027 }
5028
5029 case GE_EXPR:
5030 return
5031 fold_build2 (TRUTH_ORIF_EXPR, type,
5032 optimize_minmax_comparison
5033 (EQ_EXPR, type, arg0, comp_const),
5034 optimize_minmax_comparison
5035 (GT_EXPR, type, arg0, comp_const));
5036
5037 case EQ_EXPR:
5038 if (op_code == MAX_EXPR && consts_equal)
5039 /* MAX (X, 0) == 0 -> X <= 0 */
5040 return fold_build2 (LE_EXPR, type, inner, comp_const);
5041
5042 else if (op_code == MAX_EXPR && consts_lt)
5043 /* MAX (X, 0) == 5 -> X == 5 */
5044 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5045
5046 else if (op_code == MAX_EXPR)
5047 /* MAX (X, 0) == -1 -> false */
5048 return omit_one_operand (type, integer_zero_node, inner);
5049
5050 else if (consts_equal)
5051 /* MIN (X, 0) == 0 -> X >= 0 */
5052 return fold_build2 (GE_EXPR, type, inner, comp_const);
5053
5054 else if (consts_lt)
5055 /* MIN (X, 0) == 5 -> false */
5056 return omit_one_operand (type, integer_zero_node, inner);
5057
5058 else
5059 /* MIN (X, 0) == -1 -> X == -1 */
5060 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5061
5062 case GT_EXPR:
5063 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5064 /* MAX (X, 0) > 0 -> X > 0
5065 MAX (X, 0) > 5 -> X > 5 */
5066 return fold_build2 (GT_EXPR, type, inner, comp_const);
5067
5068 else if (op_code == MAX_EXPR)
5069 /* MAX (X, 0) > -1 -> true */
5070 return omit_one_operand (type, integer_one_node, inner);
5071
5072 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5073 /* MIN (X, 0) > 0 -> false
5074 MIN (X, 0) > 5 -> false */
5075 return omit_one_operand (type, integer_zero_node, inner);
5076
5077 else
5078 /* MIN (X, 0) > -1 -> X > -1 */
5079 return fold_build2 (GT_EXPR, type, inner, comp_const);
5080
5081 default:
5082 return NULL_TREE;
5083 }
5084 }
5085 \f
5086 /* T is an integer expression that is being multiplied or divided by, or
5087 taken modulo, a constant C (CODE says which operation and what kind of
5088 divide or modulus).
5089 other operations already in T. WIDE_TYPE, if non-null, is a type that
5090 should be used for the computation if wider than our type.
5091
5092 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5093 (X * 2) + (Y * 4). We must, however, be assured that either the original
5094 expression would not overflow or that overflow is undefined for the type
5095 in the language in question.
5096
5097 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5098 the machine has a multiply-accumulate insn or that this is part of an
5099 addressing calculation.
5100
5101 If we return a non-null expression, it is an equivalent form of the
5102 original computation, but need not be in the original type. */
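/* A hedged trace of the kind of rewrite performed (hypothetical
   operands): for T = X * 8 + 4, C = 4 and CODE = TRUNC_DIV_EXPR, the
   PLUS_EXPR case below eliminates the division from both addends,
   since each is a multiple of 4, and returns X * 2 + 1.  */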
5103
5104 static tree
5105 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5106 {
5107 /* To avoid exponential search depth, refuse to allow recursion past
5108 three levels. Beyond that (1) it's highly unlikely that we'll find
5109 something interesting and (2) we've probably processed it before
5110 when we built the inner expression. */
5111
5112 static int depth;
5113 tree ret;
5114
5115 if (depth > 3)
5116 return NULL;
5117
5118 depth++;
5119 ret = extract_muldiv_1 (t, c, code, wide_type);
5120 depth--;
5121
5122 return ret;
5123 }
5124
5125 static tree
5126 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5127 {
5128 tree type = TREE_TYPE (t);
5129 enum tree_code tcode = TREE_CODE (t);
5130 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5131 > GET_MODE_SIZE (TYPE_MODE (type)))
5132 ? wide_type : type);
5133 tree t1, t2;
5134 int same_p = tcode == code;
5135 tree op0 = NULL_TREE, op1 = NULL_TREE;
5136
5137 /* Don't deal with constants of zero here; they confuse the code below. */
5138 if (integer_zerop (c))
5139 return NULL_TREE;
5140
5141 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5142 op0 = TREE_OPERAND (t, 0);
5143
5144 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5145 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5146
5147 /* Note that we need not handle conditional operations here since fold
5148 already handles those cases. So just do arithmetic here. */
5149 switch (tcode)
5150 {
5151 case INTEGER_CST:
5152 /* For a constant, we can always simplify if we are a multiply
5153 or (for divide and modulus) if it is a multiple of our constant. */
5154 if (code == MULT_EXPR
5155 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5156 return const_binop (code, fold_convert (ctype, t),
5157 fold_convert (ctype, c), 0);
5158 break;
5159
5160 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5161 /* If op0 is an expression ... */
5162 if ((COMPARISON_CLASS_P (op0)
5163 || UNARY_CLASS_P (op0)
5164 || BINARY_CLASS_P (op0)
5165 || EXPRESSION_CLASS_P (op0))
5166 /* ... and is unsigned, and its type is smaller than ctype,
5167 then we cannot pass through as widening. */
5168 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5169 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5170 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5171 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5172 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5173 /* ... or this is a truncation (t is narrower than op0),
5174 then we cannot pass through this narrowing. */
5175 || (GET_MODE_SIZE (TYPE_MODE (type))
5176 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5177 /* ... or signedness changes for division or modulus,
5178 then we cannot pass through this conversion. */
5179 || (code != MULT_EXPR
5180 && (TYPE_UNSIGNED (ctype)
5181 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5182 break;
5183
5184 /* Pass the constant down and see if we can make a simplification. If
5185 we can, replace this expression with the inner simplification for
5186 possible later conversion to our or some other type. */
5187 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5188 && TREE_CODE (t2) == INTEGER_CST
5189 && ! TREE_CONSTANT_OVERFLOW (t2)
5190 && (0 != (t1 = extract_muldiv (op0, t2, code,
5191 code == MULT_EXPR
5192 ? ctype : NULL_TREE))))
5193 return t1;
5194 break;
5195
5196 case ABS_EXPR:
5197 /* If widening the type changes it from signed to unsigned, then we
5198 must avoid building ABS_EXPR itself as unsigned. */
5199 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5200 {
5201 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5202 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5203 {
5204 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5205 return fold_convert (ctype, t1);
5206 }
5207 break;
5208 }
5209 /* FALLTHROUGH */
5210 case NEGATE_EXPR:
5211 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5212 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5213 break;
5214
5215 case MIN_EXPR: case MAX_EXPR:
5216 /* If widening the type changes the signedness, then we can't perform
5217 this optimization as that changes the result. */
5218 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5219 break;
5220
5221 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5222 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5223 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5224 {
5225 if (tree_int_cst_sgn (c) < 0)
5226 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5227
5228 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5229 fold_convert (ctype, t2));
5230 }
5231 break;
5232
5233 case LSHIFT_EXPR: case RSHIFT_EXPR:
5234 /* If the second operand is constant, this is a multiplication
5235 or floor division, by a power of two, so we can treat it that
5236 way unless the multiplier or divisor overflows. Signed
5237 left-shift overflow is implementation-defined rather than
5238 undefined in C90, so do not convert signed left shift into
5239 multiplication. */
5240 if (TREE_CODE (op1) == INTEGER_CST
5241 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5242 /* const_binop may not detect overflow correctly,
5243 so check for it explicitly here. */
5244 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5245 && TREE_INT_CST_HIGH (op1) == 0
5246 && 0 != (t1 = fold_convert (ctype,
5247 const_binop (LSHIFT_EXPR,
5248 size_one_node,
5249 op1, 0)))
5250 && ! TREE_OVERFLOW (t1))
5251 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5252 ? MULT_EXPR : FLOOR_DIV_EXPR,
5253 ctype, fold_convert (ctype, op0), t1),
5254 c, code, wide_type);
5255 break;
5256
5257 case PLUS_EXPR: case MINUS_EXPR:
5258 /* See if we can eliminate the operation on both sides. If we can, we
5259 can return a new PLUS or MINUS. If we can't, the only remaining
5260 case where we can do anything is when the second operand is a
5261 constant. */
5262 t1 = extract_muldiv (op0, c, code, wide_type);
5263 t2 = extract_muldiv (op1, c, code, wide_type);
5264 if (t1 != 0 && t2 != 0
5265 && (code == MULT_EXPR
5266 /* If not multiplication, we can only do this if both operands
5267 are divisible by c. */
5268 || (multiple_of_p (ctype, op0, c)
5269 && multiple_of_p (ctype, op1, c))))
5270 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5271 fold_convert (ctype, t2));
5272
5273 /* If this was a subtraction, negate OP1 and set it to be an addition.
5274 This simplifies the logic below. */
5275 if (tcode == MINUS_EXPR)
5276 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5277
5278 if (TREE_CODE (op1) != INTEGER_CST)
5279 break;
5280
5281 /* If either OP1 or C is negative, this optimization is not safe for
5282 some of the division and remainder types, while for others we need
5283 to change the code. */
5284 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5285 {
5286 if (code == CEIL_DIV_EXPR)
5287 code = FLOOR_DIV_EXPR;
5288 else if (code == FLOOR_DIV_EXPR)
5289 code = CEIL_DIV_EXPR;
5290 else if (code != MULT_EXPR
5291 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5292 break;
5293 }
5294
5295 /* If it's a multiply or a division/modulus operation of a multiple
5296 of our constant, do the operation and verify it doesn't overflow. */
5297 if (code == MULT_EXPR
5298 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5299 {
5300 op1 = const_binop (code, fold_convert (ctype, op1),
5301 fold_convert (ctype, c), 0);
5302 /* We allow the constant to overflow with wrapping semantics. */
5303 if (op1 == 0
5304 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5305 break;
5306 }
5307 else
5308 break;
5309
5310 /* If we have an unsigned type that is not a sizetype, we cannot widen
5311 the operation since it will change the result if the original
5312 computation overflowed. */
5313 if (TYPE_UNSIGNED (ctype)
5314 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5315 && ctype != type)
5316 break;
5317
5318 /* If we were able to eliminate our operation from the first side,
5319 apply our operation to the second side and reform the PLUS. */
5320 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5321 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5322
5323 /* The last case is when this is a multiply. In that case, we can
5324 apply the distributive law to commute the multiply and addition
5325 if the multiplication of the constants doesn't overflow. */
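/* For illustration: distributing a multiplication by 4 over X + 3
rewrites (X + 3) * 4 as X * 4 + 12, provided the constant
product 3 * 4 does not overflow.  */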
5326 if (code == MULT_EXPR)
5327 return fold_build2 (tcode, ctype,
5328 fold_build2 (code, ctype,
5329 fold_convert (ctype, op0),
5330 fold_convert (ctype, c)),
5331 op1);
5332
5333 break;
5334
5335 case MULT_EXPR:
5336 /* We have a special case here if we are doing something like
5337 (C * 8) % 4 since we know that's zero. */
5338 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5339 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5340 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5341 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5342 return omit_one_operand (type, integer_zero_node, op0);
5343
5344 /* ... fall through ... */
5345
5346 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5347 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5348 /* If we can extract our operation from the LHS, do so and return a
5349 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5350 do something only if the second operand is a constant. */
5351 if (same_p
5352 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5353 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5354 fold_convert (ctype, op1));
5355 else if (tcode == MULT_EXPR && code == MULT_EXPR
5356 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5357 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5358 fold_convert (ctype, t1));
5359 else if (TREE_CODE (op1) != INTEGER_CST)
5360 return 0;
5361
5362 /* If these are the same operation types, we can associate them
5363 assuming no overflow. */
5364 if (tcode == code
5365 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5366 fold_convert (ctype, c), 0))
5367 && ! TREE_OVERFLOW (t1))
5368 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5369
5370 /* If these operations "cancel" each other, we have the main
5371 optimizations of this pass, which occur when either constant is a
5372 multiple of the other, in which case we replace this with an
5373 operation of either CODE or TCODE.
5374
5375 If we have an unsigned type that is not a sizetype, we cannot do
5376 this since it will change the result if the original computation
5377 overflowed. */
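/* Illustrative examples of this cancellation: extracting a
division by 4 from X * 12 yields X * 3, and extracting a
multiplication by 12 from the exact division X / 4 also
yields X * 3.  */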
5378 if ((! TYPE_UNSIGNED (ctype)
5379 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5380 && ! flag_wrapv
5381 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5382 || (tcode == MULT_EXPR
5383 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5384 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5385 {
5386 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5387 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5388 fold_convert (ctype,
5389 const_binop (TRUNC_DIV_EXPR,
5390 op1, c, 0)));
5391 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5392 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5393 fold_convert (ctype,
5394 const_binop (TRUNC_DIV_EXPR,
5395 c, op1, 0)));
5396 }
5397 break;
5398
5399 default:
5400 break;
5401 }
5402
5403 return 0;
5404 }
5405 \f
5406 /* Return a node which has the indicated constant VALUE (either 0 or
5407 1), and is of the indicated TYPE. */
5408
5409 tree
5410 constant_boolean_node (int value, tree type)
5411 {
5412 if (type == integer_type_node)
5413 return value ? integer_one_node : integer_zero_node;
5414 else if (type == boolean_type_node)
5415 return value ? boolean_true_node : boolean_false_node;
5416 else
5417 return build_int_cst (type, value);
5418 }
5419
5420
5421 /* Return true if EXPR looks like an ARRAY_REF and set BASE and
5422 OFFSET to the appropriate trees. If there is no offset,
5423 OFFSET is set to NULL_TREE. */
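/* For illustration: given an address of the form &a + off, this
returns &a as the base and off as the offset; given &a[i], it
returns the address of the array as the base and the index i
as the offset.  */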
5424
5425 static bool
5426 extract_array_ref (tree expr, tree *base, tree *offset)
5427 {
5428 /* We have to be careful when stripping nops, as with a different
5429 base type the meaning of the offset can change. */
5430 tree inner_expr = expr;
5431 STRIP_NOPS (inner_expr);
5432 /* One canonical form is a PLUS_EXPR with the first
5433 argument being an ADDR_EXPR with a possible NOP_EXPR
5434 attached. */
5435 if (TREE_CODE (expr) == PLUS_EXPR)
5436 {
5437 tree op0 = TREE_OPERAND (expr, 0);
5438 STRIP_NOPS (op0);
5439 if (TREE_CODE (op0) == ADDR_EXPR)
5440 {
5441 *base = TREE_OPERAND (expr, 0);
5442 *offset = TREE_OPERAND (expr, 1);
5443 return true;
5444 }
5445 }
5446 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5447 which we transform into an ADDR_EXPR with appropriate
5448 offset. For other arguments to the ADDR_EXPR we assume
5449 zero offset and as such do not care about the ADDR_EXPR
5450 type and strip possible nops from it. */
5451 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5452 {
5453 tree op0 = TREE_OPERAND (inner_expr, 0);
5454 if (TREE_CODE (op0) == ARRAY_REF)
5455 {
5456 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5457 *offset = TREE_OPERAND (op0, 1);
5458 }
5459 else
5460 {
5461 *base = inner_expr;
5462 *offset = NULL_TREE;
5463 }
5464 return true;
5465 }
5466
5467 return false;
5468 }
5469
5470
5471 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5472 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5473 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5474 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5475 COND is the first argument to CODE; otherwise (as in the example
5476 given here), it is the second argument. TYPE is the type of the
5477 original expression. Return NULL_TREE if no simplification is
5478 possible. */
5479
5480 static tree
5481 fold_binary_op_with_conditional_arg (enum tree_code code,
5482 tree type, tree op0, tree op1,
5483 tree cond, tree arg, int cond_first_p)
5484 {
5485 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5486 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5487 tree test, true_value, false_value;
5488 tree lhs = NULL_TREE;
5489 tree rhs = NULL_TREE;
5490
5491 /* This transformation is only worthwhile if we don't have to wrap
5492 arg in a SAVE_EXPR, and the operation can be simplified on at least
5493 one of the branches once it's pushed inside the COND_EXPR. */
5494 if (!TREE_CONSTANT (arg))
5495 return NULL_TREE;
5496
5497 if (TREE_CODE (cond) == COND_EXPR)
5498 {
5499 test = TREE_OPERAND (cond, 0);
5500 true_value = TREE_OPERAND (cond, 1);
5501 false_value = TREE_OPERAND (cond, 2);
5502 /* If this operand throws an exception, then it does not make
5503 sense to try to perform a logical or arithmetic operation
5504 involving it. */
5505 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5506 lhs = true_value;
5507 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5508 rhs = false_value;
5509 }
5510 else
5511 {
5512 tree testtype = TREE_TYPE (cond);
5513 test = cond;
5514 true_value = constant_boolean_node (true, testtype);
5515 false_value = constant_boolean_node (false, testtype);
5516 }
5517
5518 arg = fold_convert (arg_type, arg);
5519 if (lhs == 0)
5520 {
5521 true_value = fold_convert (cond_type, true_value);
5522 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5523 : build2 (code, type, arg, true_value));
5524 }
5525 if (rhs == 0)
5526 {
5527 false_value = fold_convert (cond_type, false_value);
5528 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5529 : build2 (code, type, arg, false_value));
5530 }
5531
5532 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5533 return fold_convert (type, test);
5534 }
5535
5536 \f
5537 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5538
5539 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5540 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5541 ADDEND is the same as X.
5542
5543 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5544 and finite. The problematic cases are when X is zero, and its mode
5545 has signed zeros. In the case of rounding towards -infinity,
5546 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5547 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5548
5549 static bool
5550 fold_real_zero_addition_p (tree type, tree addend, int negate)
5551 {
5552 if (!real_zerop (addend))
5553 return false;
5554
5555 /* Don't allow the fold with -fsignaling-nans. */
5556 if (HONOR_SNANS (TYPE_MODE (type)))
5557 return false;
5558
5559 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5560 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5561 return true;
5562
5563 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5564 if (TREE_CODE (addend) == REAL_CST
5565 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5566 negate = !negate;
5567
5568 /* The mode has signed zeros, and we have to honor their sign.
5569 In this situation, there is only one case we can return true for.
5570 X - 0 is the same as X unless rounding towards -infinity is
5571 supported. */
5572 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5573 }
5574
5575 /* Subroutine of fold() that checks comparisons of built-in math
5576 functions against real constants.
5577
5578 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5579 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5580 is the type of the result and ARG0 and ARG1 are the operands of the
5581 comparison. ARG1 must be a TREE_REAL_CST.
5582
5583 The function returns the constant folded tree if a simplification
5584 can be made, and NULL_TREE otherwise. */
5585
5586 static tree
5587 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5588 tree type, tree arg0, tree arg1)
5589 {
5590 REAL_VALUE_TYPE c;
5591
5592 if (BUILTIN_SQRT_P (fcode))
5593 {
5594 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5595 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5596
5597 c = TREE_REAL_CST (arg1);
5598 if (REAL_VALUE_NEGATIVE (c))
5599 {
5600 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false, if y is negative. */
5601 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5602 return omit_one_operand (type, integer_zero_node, arg);
5603
5604 /* sqrt(x) > y is always true, if y is negative and we
5605 don't care about NaNs, i.e. negative values of x. */
5606 if (code == NE_EXPR || !HONOR_NANS (mode))
5607 return omit_one_operand (type, integer_one_node, arg);
5608
5609 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5610 return fold_build2 (GE_EXPR, type, arg,
5611 build_real (TREE_TYPE (arg), dconst0));
5612 }
5613 else if (code == GT_EXPR || code == GE_EXPR)
5614 {
5615 REAL_VALUE_TYPE c2;
5616
5617 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5618 real_convert (&c2, mode, &c2);
5619
5620 if (REAL_VALUE_ISINF (c2))
5621 {
5622 /* sqrt(x) > y is x == +Inf, when y is very large. */
5623 if (HONOR_INFINITIES (mode))
5624 return fold_build2 (EQ_EXPR, type, arg,
5625 build_real (TREE_TYPE (arg), c2));
5626
5627 /* sqrt(x) > y is always false, when y is very large
5628 and we don't care about infinities. */
5629 return omit_one_operand (type, integer_zero_node, arg);
5630 }
5631
5632 /* sqrt(x) > c is the same as x > c*c. */
5633 return fold_build2 (code, type, arg,
5634 build_real (TREE_TYPE (arg), c2));
5635 }
5636 else if (code == LT_EXPR || code == LE_EXPR)
5637 {
5638 REAL_VALUE_TYPE c2;
5639
5640 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5641 real_convert (&c2, mode, &c2);
5642
5643 if (REAL_VALUE_ISINF (c2))
5644 {
5645 /* sqrt(x) < y is always true, when y is a very large
5646 value and we don't care about NaNs or Infinities. */
5647 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5648 return omit_one_operand (type, integer_one_node, arg);
5649
5650 /* sqrt(x) < y is x != +Inf when y is very large and we
5651 don't care about NaNs. */
5652 if (! HONOR_NANS (mode))
5653 return fold_build2 (NE_EXPR, type, arg,
5654 build_real (TREE_TYPE (arg), c2));
5655
5656 /* sqrt(x) < y is x >= 0 when y is very large and we
5657 don't care about Infinities. */
5658 if (! HONOR_INFINITIES (mode))
5659 return fold_build2 (GE_EXPR, type, arg,
5660 build_real (TREE_TYPE (arg), dconst0));
5661
5662 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5663 if (lang_hooks.decls.global_bindings_p () != 0
5664 || CONTAINS_PLACEHOLDER_P (arg))
5665 return NULL_TREE;
5666
5667 arg = save_expr (arg);
5668 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5669 fold_build2 (GE_EXPR, type, arg,
5670 build_real (TREE_TYPE (arg),
5671 dconst0)),
5672 fold_build2 (NE_EXPR, type, arg,
5673 build_real (TREE_TYPE (arg),
5674 c2)));
5675 }
5676
5677 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5678 if (! HONOR_NANS (mode))
5679 return fold_build2 (code, type, arg,
5680 build_real (TREE_TYPE (arg), c2));
5681
5682 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5683 if (lang_hooks.decls.global_bindings_p () == 0
5684 && ! CONTAINS_PLACEHOLDER_P (arg))
5685 {
5686 arg = save_expr (arg);
5687 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5688 fold_build2 (GE_EXPR, type, arg,
5689 build_real (TREE_TYPE (arg),
5690 dconst0)),
5691 fold_build2 (code, type, arg,
5692 build_real (TREE_TYPE (arg),
5693 c2)));
5694 }
5695 }
5696 }
5697
5698 return NULL_TREE;
5699 }
5700
5701 /* Subroutine of fold() that optimizes comparisons against Infinities,
5702 either +Inf or -Inf.
5703
5704 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5705 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5706 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5707
5708 The function returns the constant folded tree if a simplification
5709 can be made, and NULL_TREE otherwise. */
5710
5711 static tree
5712 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5713 {
5714 enum machine_mode mode;
5715 REAL_VALUE_TYPE max;
5716 tree temp;
5717 bool neg;
5718
5719 mode = TYPE_MODE (TREE_TYPE (arg0));
5720
5721 /* For negative infinity swap the sense of the comparison. */
5722 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5723 if (neg)
5724 code = swap_tree_comparison (code);
5725
5726 switch (code)
5727 {
5728 case GT_EXPR:
5729 /* x > +Inf is always false, if we ignore sNaNs. */
5730 if (HONOR_SNANS (mode))
5731 return NULL_TREE;
5732 return omit_one_operand (type, integer_zero_node, arg0);
5733
5734 case LE_EXPR:
5735 /* x <= +Inf is always true, if we don't care about NaNs. */
5736 if (! HONOR_NANS (mode))
5737 return omit_one_operand (type, integer_one_node, arg0);
5738
5739 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5740 if (lang_hooks.decls.global_bindings_p () == 0
5741 && ! CONTAINS_PLACEHOLDER_P (arg0))
5742 {
5743 arg0 = save_expr (arg0);
5744 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5745 }
5746 break;
5747
5748 case EQ_EXPR:
5749 case GE_EXPR:
5750 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5751 real_maxval (&max, neg, mode);
5752 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5753 arg0, build_real (TREE_TYPE (arg0), max));
5754
5755 case LT_EXPR:
5756 /* x < +Inf is always equal to x <= DBL_MAX. */
5757 real_maxval (&max, neg, mode);
5758 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5759 arg0, build_real (TREE_TYPE (arg0), max));
5760
5761 case NE_EXPR:
5762 /* x != +Inf is always equal to !(x > DBL_MAX). */
5763 real_maxval (&max, neg, mode);
5764 if (! HONOR_NANS (mode))
5765 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5766 arg0, build_real (TREE_TYPE (arg0), max));
5767
5768 /* The transformation below creates non-gimple code and thus is
5769 not appropriate if we are in gimple form. */
5770 if (in_gimple_form)
5771 return NULL_TREE;
5772
5773 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5774 arg0, build_real (TREE_TYPE (arg0), max));
5775 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5776
5777 default:
5778 break;
5779 }
5780
5781 return NULL_TREE;
5782 }
5783
5784 /* Subroutine of fold() that optimizes comparisons of a division by
5785 a nonzero integer constant against an integer constant, i.e.
5786 X/C1 op C2.
5787
5788 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5789 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5790 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5791
5792 The function returns the constant folded tree if a simplification
5793 can be made, and NULL_TREE otherwise. */
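/* A worked example (signed, truncating division): for X/3 == 2 we
compute prod = 3*2 = 6 and tmp = 3-1 = 2, giving lo = 6 and
hi = 8, so the comparison folds to the range check
6 <= X && X <= 8.  */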
5794
5795 static tree
5796 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5797 {
5798 tree prod, tmp, hi, lo;
5799 tree arg00 = TREE_OPERAND (arg0, 0);
5800 tree arg01 = TREE_OPERAND (arg0, 1);
5801 unsigned HOST_WIDE_INT lpart;
5802 HOST_WIDE_INT hpart;
5803 int overflow;
5804
5805 /* We have to do this the hard way to detect unsigned overflow.
5806 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5807 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5808 TREE_INT_CST_HIGH (arg01),
5809 TREE_INT_CST_LOW (arg1),
5810 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5811 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5812 prod = force_fit_type (prod, -1, overflow, false);
5813
5814 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5815 {
5816 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5817 lo = prod;
5818
5819 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5820 overflow = add_double (TREE_INT_CST_LOW (prod),
5821 TREE_INT_CST_HIGH (prod),
5822 TREE_INT_CST_LOW (tmp),
5823 TREE_INT_CST_HIGH (tmp),
5824 &lpart, &hpart);
5825 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5826 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5827 TREE_CONSTANT_OVERFLOW (prod));
5828 }
5829 else if (tree_int_cst_sgn (arg01) >= 0)
5830 {
5831 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5832 switch (tree_int_cst_sgn (arg1))
5833 {
5834 case -1:
5835 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5836 hi = prod;
5837 break;
5838
5839 case 0:
5840 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5841 hi = tmp;
5842 break;
5843
5844 case 1:
5845 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5846 lo = prod;
5847 break;
5848
5849 default:
5850 gcc_unreachable ();
5851 }
5852 }
5853 else
5854 {
5855 /* A negative divisor reverses the relational operators. */
5856 code = swap_tree_comparison (code);
5857
5858 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5859 switch (tree_int_cst_sgn (arg1))
5860 {
5861 case -1:
5862 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5863 lo = prod;
5864 break;
5865
5866 case 0:
5867 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5868 lo = tmp;
5869 break;
5870
5871 case 1:
5872 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5873 hi = prod;
5874 break;
5875
5876 default:
5877 gcc_unreachable ();
5878 }
5879 }
5880
5881 switch (code)
5882 {
5883 case EQ_EXPR:
5884 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5885 return omit_one_operand (type, integer_zero_node, arg00);
5886 if (TREE_OVERFLOW (hi))
5887 return fold_build2 (GE_EXPR, type, arg00, lo);
5888 if (TREE_OVERFLOW (lo))
5889 return fold_build2 (LE_EXPR, type, arg00, hi);
5890 return build_range_check (type, arg00, 1, lo, hi);
5891
5892 case NE_EXPR:
5893 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5894 return omit_one_operand (type, integer_one_node, arg00);
5895 if (TREE_OVERFLOW (hi))
5896 return fold_build2 (LT_EXPR, type, arg00, lo);
5897 if (TREE_OVERFLOW (lo))
5898 return fold_build2 (GT_EXPR, type, arg00, hi);
5899 return build_range_check (type, arg00, 0, lo, hi);
5900
5901 case LT_EXPR:
5902 if (TREE_OVERFLOW (lo))
5903 return omit_one_operand (type, integer_zero_node, arg00);
5904 return fold_build2 (LT_EXPR, type, arg00, lo);
5905
5906 case LE_EXPR:
5907 if (TREE_OVERFLOW (hi))
5908 return omit_one_operand (type, integer_one_node, arg00);
5909 return fold_build2 (LE_EXPR, type, arg00, hi);
5910
5911 case GT_EXPR:
5912 if (TREE_OVERFLOW (hi))
5913 return omit_one_operand (type, integer_zero_node, arg00);
5914 return fold_build2 (GT_EXPR, type, arg00, hi);
5915
5916 case GE_EXPR:
5917 if (TREE_OVERFLOW (lo))
5918 return omit_one_operand (type, integer_one_node, arg00);
5919 return fold_build2 (GE_EXPR, type, arg00, lo);
5920
5921 default:
5922 break;
5923 }
5924
5925 return NULL_TREE;
5926 }
5927
5928
5929 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5930 equality/inequality test, then return a simplified form of
5931 the test using shifts and logical operations. Otherwise return
5932 NULL. RESULT_TYPE is the desired result type. */
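/* For illustration: (A & 8) != 0 becomes ((A >> 3) & 1), and for
a 32-bit signed A the sign-bit test (A & 0x80000000) != 0
becomes simply A < 0.  */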
5933
5934 tree
5935 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5936 tree result_type)
5937 {
5938 /* If this is testing a single bit, we can optimize the test. */
5939 if ((code == NE_EXPR || code == EQ_EXPR)
5940 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5941 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5942 {
5943 tree inner = TREE_OPERAND (arg0, 0);
5944 tree type = TREE_TYPE (arg0);
5945 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5946 enum machine_mode operand_mode = TYPE_MODE (type);
5947 int ops_unsigned;
5948 tree signed_type, unsigned_type, intermediate_type;
5949 tree arg00;
5950
5951 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5952 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5953 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5954 if (arg00 != NULL_TREE
5955 /* This is only a win if casting to a signed type is cheap,
5956 i.e. when arg00's type is not a partial mode. */
5957 && TYPE_PRECISION (TREE_TYPE (arg00))
5958 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5959 {
5960 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5961 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5962 result_type, fold_convert (stype, arg00),
5963 fold_convert (stype, integer_zero_node));
5964 }
5965
5966 /* Otherwise we have (A & C) != 0 where C is a single bit,
5967 convert that into ((A >> C2) & 1), where C2 = log2(C).
5968 Similarly for (A & C) == 0. */
5969
5970 /* If INNER is a right shift of a constant and it plus BITNUM does
5971 not overflow, adjust BITNUM and INNER. */
5972 if (TREE_CODE (inner) == RSHIFT_EXPR
5973 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5974 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5975 && bitnum < TYPE_PRECISION (type)
5976 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5977 bitnum - TYPE_PRECISION (type)))
5978 {
5979 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5980 inner = TREE_OPERAND (inner, 0);
5981 }
5982
5983 /* If we are going to be able to omit the AND below, we must do our
5984 operations as unsigned. If we must use the AND, we have a choice.
5985 Normally unsigned is faster, but for some machines signed is. */
5986 #ifdef LOAD_EXTEND_OP
5987 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5988 && !flag_syntax_only) ? 0 : 1;
5989 #else
5990 ops_unsigned = 1;
5991 #endif
5992
5993 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5994 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5995 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5996 inner = fold_convert (intermediate_type, inner);
5997
5998 if (bitnum != 0)
5999 inner = build2 (RSHIFT_EXPR, intermediate_type,
6000 inner, size_int (bitnum));
6001
6002 if (code == EQ_EXPR)
6003 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6004 inner, integer_one_node);
6005
6006 /* Put the AND last so it can combine with more things. */
6007 inner = build2 (BIT_AND_EXPR, intermediate_type,
6008 inner, integer_one_node);
6009
6010 /* Make sure to return the proper type. */
6011 inner = fold_convert (result_type, inner);
6012
6013 return inner;
6014 }
6015 return NULL_TREE;
6016 }
6017
6018 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6019 such that the evaluation of ARG1 occurs before ARG0. */
6020
6021 static bool
6022 reorder_operands_p (tree arg0, tree arg1)
6023 {
6024 if (! flag_evaluation_order)
6025 return true;
6026 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6027 return true;
6028 return ! TREE_SIDE_EFFECTS (arg0)
6029 && ! TREE_SIDE_EFFECTS (arg1);
6030 }
6031
6032 /* Test whether it is preferable to swap two operands, ARG0 and
6033 ARG1, for example because ARG0 is an integer constant and ARG1
6034 isn't. If REORDER is true, only recommend swapping if we can
6035 evaluate the operands in reverse order. */
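/* For example, this is how fold canonicalizes 1 + x into x + 1,
so that constants end up as the second operand.  */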
6036
6037 bool
6038 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6039 {
6040 STRIP_SIGN_NOPS (arg0);
6041 STRIP_SIGN_NOPS (arg1);
6042
6043 if (TREE_CODE (arg1) == INTEGER_CST)
6044 return 0;
6045 if (TREE_CODE (arg0) == INTEGER_CST)
6046 return 1;
6047
6048 if (TREE_CODE (arg1) == REAL_CST)
6049 return 0;
6050 if (TREE_CODE (arg0) == REAL_CST)
6051 return 1;
6052
6053 if (TREE_CODE (arg1) == COMPLEX_CST)
6054 return 0;
6055 if (TREE_CODE (arg0) == COMPLEX_CST)
6056 return 1;
6057
6058 if (TREE_CONSTANT (arg1))
6059 return 0;
6060 if (TREE_CONSTANT (arg0))
6061 return 1;
6062
6063 if (optimize_size)
6064 return 0;
6065
6066 if (reorder && flag_evaluation_order
6067 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6068 return 0;
6069
6070 if (DECL_P (arg1))
6071 return 0;
6072 if (DECL_P (arg0))
6073 return 1;
6074
6075 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6076 for commutative and comparison operators. Ensuring a canonical
6077 form allows the optimizers to find additional redundancies without
6078 having to explicitly check for both orderings. */
6079 if (TREE_CODE (arg0) == SSA_NAME
6080 && TREE_CODE (arg1) == SSA_NAME
6081 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6082 return 1;
6083
6084 return 0;
6085 }
6086
6087 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6088 ARG0 is extended to a wider type. */
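/* For illustration: if C has type unsigned char, the widened
comparison (int) C == 300 folds to constant false, because 300
lies outside the range [0, 255] of the narrower type.  */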
6089
6090 static tree
6091 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6092 {
6093 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6094 tree arg1_unw;
6095 tree shorter_type, outer_type;
6096 tree min, max;
6097 bool above, below;
6098
6099 if (arg0_unw == arg0)
6100 return NULL_TREE;
6101 shorter_type = TREE_TYPE (arg0_unw);
6102
6103 #ifdef HAVE_canonicalize_funcptr_for_compare
6104 /* Disable this optimization if we're casting a function pointer
6105 type on targets that require function pointer canonicalization. */
6106 if (HAVE_canonicalize_funcptr_for_compare
6107 && TREE_CODE (shorter_type) == POINTER_TYPE
6108 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6109 return NULL_TREE;
6110 #endif
6111
6112 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6113 return NULL_TREE;
6114
6115 arg1_unw = get_unwidened (arg1, shorter_type);
6116 if (!arg1_unw)
6117 return NULL_TREE;
6118
6119 /* If possible, express the comparison in the shorter mode. */
6120 if ((code == EQ_EXPR || code == NE_EXPR
6121 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6122 && (TREE_TYPE (arg1_unw) == shorter_type
6123 || (TREE_CODE (arg1_unw) == INTEGER_CST
6124 && TREE_CODE (shorter_type) == INTEGER_TYPE
6125 && int_fits_type_p (arg1_unw, shorter_type))))
6126 return fold_build2 (code, type, arg0_unw,
6127 fold_convert (shorter_type, arg1_unw));
6128
6129 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6130 return NULL_TREE;
6131
6132 /* If we are comparing with an integer that does not fit into the range
6133 of the shorter type, the result is known. */
6134 outer_type = TREE_TYPE (arg1_unw);
6135 min = lower_bound_in_type (outer_type, shorter_type);
6136 max = upper_bound_in_type (outer_type, shorter_type);
6137
6138 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6139 max, arg1_unw));
6140 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6141 arg1_unw, min));
6142
6143 switch (code)
6144 {
6145 case EQ_EXPR:
6146 if (above || below)
6147 return omit_one_operand (type, integer_zero_node, arg0);
6148 break;
6149
6150 case NE_EXPR:
6151 if (above || below)
6152 return omit_one_operand (type, integer_one_node, arg0);
6153 break;
6154
6155 case LT_EXPR:
6156 case LE_EXPR:
6157 if (above)
6158 return omit_one_operand (type, integer_one_node, arg0);
6159 else if (below)
6160 return omit_one_operand (type, integer_zero_node, arg0);
break;
6161
6162 case GT_EXPR:
6163 case GE_EXPR:
6164 if (above)
6165 return omit_one_operand (type, integer_zero_node, arg0);
6166 else if (below)
6167 return omit_one_operand (type, integer_one_node, arg0);
break;
6168
6169 default:
6170 break;
6171 }
6172
6173 return NULL_TREE;
6174 }
6175
6176 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6177 ARG0 just the signedness is changed. */
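/* For example, for a signed int X, (unsigned int) X == 5U folds
to X == 5; only the signedness changed, and equality does not
depend on it.  */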
6178
6179 static tree
6180 fold_sign_changed_comparison (enum tree_code code, tree type,
6181 tree arg0, tree arg1)
6182 {
6183 tree arg0_inner, tmp;
6184 tree inner_type, outer_type;
6185
6186 if (TREE_CODE (arg0) != NOP_EXPR
6187 && TREE_CODE (arg0) != CONVERT_EXPR)
6188 return NULL_TREE;
6189
6190 outer_type = TREE_TYPE (arg0);
6191 arg0_inner = TREE_OPERAND (arg0, 0);
6192 inner_type = TREE_TYPE (arg0_inner);
6193
6194 #ifdef HAVE_canonicalize_funcptr_for_compare
6195 /* Disable this optimization if we're casting a function pointer
6196 type on targets that require function pointer canonicalization. */
6197 if (HAVE_canonicalize_funcptr_for_compare
6198 && TREE_CODE (inner_type) == POINTER_TYPE
6199 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6200 return NULL_TREE;
6201 #endif
6202
6203 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6204 return NULL_TREE;
6205
6206 if (TREE_CODE (arg1) != INTEGER_CST
6207 && !((TREE_CODE (arg1) == NOP_EXPR
6208 || TREE_CODE (arg1) == CONVERT_EXPR)
6209 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6210 return NULL_TREE;
6211
6212 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6213 && code != NE_EXPR
6214 && code != EQ_EXPR)
6215 return NULL_TREE;
6216
6217 if (TREE_CODE (arg1) == INTEGER_CST)
6218 {
6219 tmp = build_int_cst_wide (inner_type,
6220 TREE_INT_CST_LOW (arg1),
6221 TREE_INT_CST_HIGH (arg1));
6222 arg1 = force_fit_type (tmp, 0,
6223 TREE_OVERFLOW (arg1),
6224 TREE_CONSTANT_OVERFLOW (arg1));
6225 }
6226 else
6227 arg1 = fold_convert (inner_type, arg1);
6228
6229 return fold_build2 (code, type, arg0_inner, arg1);
6230 }
6231
6232 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6233 the step of the array. ADDR is the address, MULT the multiplicative expression.
6234 If the function succeeds, the new address expression is returned. Otherwise
6235 NULL_TREE is returned. */
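/* For illustration: if a is an array of 4-byte elements, then in
&a[i] + d * 4 the constant s = 4 matches the array step, and
the address is rewritten as &a[i + d].  */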
6236
6237 static tree
6238 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6239 {
6240 tree s, delta, step;
6241 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6242 tree ref = TREE_OPERAND (addr, 0), pref;
6243 tree ret, pos;
6244 tree itype;
6245
6246 STRIP_NOPS (arg0);
6247 STRIP_NOPS (arg1);
6248
6249 if (TREE_CODE (arg0) == INTEGER_CST)
6250 {
6251 s = arg0;
6252 delta = arg1;
6253 }
6254 else if (TREE_CODE (arg1) == INTEGER_CST)
6255 {
6256 s = arg1;
6257 delta = arg0;
6258 }
6259 else
6260 return NULL_TREE;
6261
6262 for (;; ref = TREE_OPERAND (ref, 0))
6263 {
6264 if (TREE_CODE (ref) == ARRAY_REF)
6265 {
6266 step = array_ref_element_size (ref);
6267
6268 if (TREE_CODE (step) != INTEGER_CST)
6269 continue;
6270
6271 itype = TREE_TYPE (step);
6272
6273 /* If the type sizes do not match, we might run into problems
6274 when one of them would overflow. */
6275 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6276 continue;
6277
6278 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6279 continue;
6280
6281 delta = fold_convert (itype, delta);
6282 break;
6283 }
6284
6285 if (!handled_component_p (ref))
6286 return NULL_TREE;
6287 }
6288
6289 /* We found a suitable array reference. So copy everything up to it,
6290 and replace the index. */
6291
6292 pref = TREE_OPERAND (addr, 0);
6293 ret = copy_node (pref);
6294 pos = ret;
6295
6296 while (pref != ref)
6297 {
6298 pref = TREE_OPERAND (pref, 0);
6299 TREE_OPERAND (pos, 0) = copy_node (pref);
6300 pos = TREE_OPERAND (pos, 0);
6301 }
6302
6303 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6304 TREE_OPERAND (pos, 1),
6305 delta);
6306
6307 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6308 }
6309
6310
6311 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6312 means A >= Y && A != MAX, but in this case we know that
6313 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6314
6315 static tree
6316 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6317 {
6318 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6319
6320 if (TREE_CODE (bound) == LT_EXPR)
6321 a = TREE_OPERAND (bound, 0);
6322 else if (TREE_CODE (bound) == GT_EXPR)
6323 a = TREE_OPERAND (bound, 1);
6324 else
6325 return NULL_TREE;
6326
6327 typea = TREE_TYPE (a);
6328 if (!INTEGRAL_TYPE_P (typea)
6329 && !POINTER_TYPE_P (typea))
6330 return NULL_TREE;
6331
6332 if (TREE_CODE (ineq) == LT_EXPR)
6333 {
6334 a1 = TREE_OPERAND (ineq, 1);
6335 y = TREE_OPERAND (ineq, 0);
6336 }
6337 else if (TREE_CODE (ineq) == GT_EXPR)
6338 {
6339 a1 = TREE_OPERAND (ineq, 0);
6340 y = TREE_OPERAND (ineq, 1);
6341 }
6342 else
6343 return NULL_TREE;
6344
6345 if (TREE_TYPE (a1) != typea)
6346 return NULL_TREE;
6347
6348 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6349 if (!integer_onep (diff))
6350 return NULL_TREE;
6351
6352 return fold_build2 (GE_EXPR, type, a, y);
6353 }
6354
6355 /* Fold complex addition when both components are accessible by parts.
6356 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6357 or MINUS_EXPR for subtraction. */
6358
6359 static tree
6360 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6361 {
6362 tree ar, ai, br, bi, rr, ri, inner_type;
6363
6364 if (TREE_CODE (ac) == COMPLEX_EXPR)
6365 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6366 else if (TREE_CODE (ac) == COMPLEX_CST)
6367 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6368 else
6369 return NULL;
6370
6371 if (TREE_CODE (bc) == COMPLEX_EXPR)
6372 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6373 else if (TREE_CODE (bc) == COMPLEX_CST)
6374 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6375 else
6376 return NULL;
6377
6378 inner_type = TREE_TYPE (type);
6379
6380 rr = fold_build2 (code, inner_type, ar, br);
6381 ri = fold_build2 (code, inner_type, ai, bi);
6382
6383 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6384 }
6385
6386 /* Perform some simplifications of complex multiplication when one or more
6387 of the components are constants or zeros. Return non-null if successful. */
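/* As a reminder, the full product is
(ar*br - ai*bi) + (ar*bi + ai*br)*i; e.g. with ai == 0 and
bi == 0 it reduces to the purely real (ar*br, 0), and with
ai == 0 and br == 0 to the purely imaginary (0, ar*bi).  */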
6388
6389 tree
6390 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6391 {
6392 tree rr, ri, inner_type, zero;
6393 bool ar0, ai0, br0, bi0, bi1;
6394
6395 inner_type = TREE_TYPE (type);
6396 zero = NULL;
6397
6398 if (SCALAR_FLOAT_TYPE_P (inner_type))
6399 {
6400 ar0 = ai0 = br0 = bi0 = bi1 = false;
6401
6402 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6403
6404 if (TREE_CODE (ar) == REAL_CST
6405 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6406 ar0 = true, zero = ar;
6407
6408 if (TREE_CODE (ai) == REAL_CST
6409 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6410 ai0 = true, zero = ai;
6411
6412 if (TREE_CODE (br) == REAL_CST
6413 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6414 br0 = true, zero = br;
6415
6416 if (TREE_CODE (bi) == REAL_CST)
6417 {
6418 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6419 bi0 = true, zero = bi;
6420 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6421 bi1 = true;
6422 }
6423 }
6424 else
6425 {
6426 ar0 = integer_zerop (ar);
6427 if (ar0)
6428 zero = ar;
6429 ai0 = integer_zerop (ai);
6430 if (ai0)
6431 zero = ai;
6432 br0 = integer_zerop (br);
6433 if (br0)
6434 zero = br;
6435 bi0 = integer_zerop (bi);
6436 if (bi0)
6437 {
6438 zero = bi;
6439 bi1 = false;
6440 }
6441 else
6442 bi1 = integer_onep (bi);
6443 }
6444
6445 /* We won't optimize anything below unless something is zero. */
6446 if (zero == NULL)
6447 return NULL;
6448
6449 if (ai0 && br0 && bi1)
6450 {
6451 rr = zero;
6452 ri = ar;
6453 }
6454 else if (ai0 && bi0)
6455 {
6456 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6457 ri = zero;
6458 }
6459 else if (ai0 && br0)
6460 {
6461 rr = zero;
6462 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6463 }
6464 else if (ar0 && bi0)
6465 {
6466 rr = zero;
6467 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6468 }
6469 else if (ar0 && br0)
6470 {
6471 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6472 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6473 ri = zero;
6474 }
6475 else if (bi0)
6476 {
6477 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6478 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6479 }
6480 else if (ai0)
6481 {
6482 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6483 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6484 }
6485 else if (br0)
6486 {
6487 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6488 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6489 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6490 }
6491 else if (ar0)
6492 {
6493 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6494 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6495 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6496 }
6497 else
6498 return NULL;
6499
6500 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6501 }
6502
6503 static tree
6504 fold_complex_mult (tree type, tree ac, tree bc)
6505 {
6506 tree ar, ai, br, bi;
6507
6508 if (TREE_CODE (ac) == COMPLEX_EXPR)
6509 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6510 else if (TREE_CODE (ac) == COMPLEX_CST)
6511 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6512 else
6513 return NULL;
6514
6515 if (TREE_CODE (bc) == COMPLEX_EXPR)
6516 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6517 else if (TREE_CODE (bc) == COMPLEX_CST)
6518 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6519 else
6520 return NULL;
6521
6522 return fold_complex_mult_parts (type, ar, ai, br, bi);
6523 }
6524
6525 /* Perform some simplifications of complex division when one or more of
6526 the components are constants or zeros. Return non-null if successful. */
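/* For illustration: with a real divisor (bi == 0) the quotient is
computed componentwise as (ar/br, ai/br); with a purely
imaginary divisor (br == 0) it is (ai/bi, -(ar/bi)).  */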
6527
6528 tree
6529 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6530 enum tree_code code)
6531 {
6532 tree rr, ri, inner_type, zero;
6533 bool ar0, ai0, br0, bi0, bi1;
6534
6535 inner_type = TREE_TYPE (type);
6536 zero = NULL;
6537
6538 if (SCALAR_FLOAT_TYPE_P (inner_type))
6539 {
6540 ar0 = ai0 = br0 = bi0 = bi1 = false;
6541
6542 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6543
6544 if (TREE_CODE (ar) == REAL_CST
6545 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6546 ar0 = true, zero = ar;
6547
6548 if (TREE_CODE (ai) == REAL_CST
6549 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6550 ai0 = true, zero = ai;
6551
6552 if (TREE_CODE (br) == REAL_CST
6553 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6554 br0 = true, zero = br;
6555
6556 if (TREE_CODE (bi) == REAL_CST)
6557 {
6558 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6559 bi0 = true, zero = bi;
6560 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6561 bi1 = true;
6562 }
6563 }
6564 else
6565 {
6566 ar0 = integer_zerop (ar);
6567 if (ar0)
6568 zero = ar;
6569 ai0 = integer_zerop (ai);
6570 if (ai0)
6571 zero = ai;
6572 br0 = integer_zerop (br);
6573 if (br0)
6574 zero = br;
6575 bi0 = integer_zerop (bi);
6576 if (bi0)
6577 {
6578 zero = bi;
6579 bi1 = false;
6580 }
6581 else
6582 bi1 = integer_onep (bi);
6583 }
6584
6585 /* We won't optimize anything below unless something is zero. */
6586 if (zero == NULL)
6587 return NULL;
6588
6589 if (ai0 && bi0)
6590 {
6591 rr = fold_build2 (code, inner_type, ar, br);
6592 ri = zero;
6593 }
6594 else if (ai0 && br0)
6595 {
6596 rr = zero;
6597 ri = fold_build2 (code, inner_type, ar, bi);
6598 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6599 }
6600 else if (ar0 && bi0)
6601 {
6602 rr = zero;
6603 ri = fold_build2 (code, inner_type, ai, br);
6604 }
6605 else if (ar0 && br0)
6606 {
6607 rr = fold_build2 (code, inner_type, ai, bi);
6608 ri = zero;
6609 }
6610 else if (bi0)
6611 {
6612 rr = fold_build2 (code, inner_type, ar, br);
6613 ri = fold_build2 (code, inner_type, ai, br);
6614 }
6615 else if (br0)
6616 {
6617 rr = fold_build2 (code, inner_type, ai, bi);
6618 ri = fold_build2 (code, inner_type, ar, bi);
6619 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6620 }
6621 else
6622 return NULL;
6623
6624 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6625 }
6626
6627 static tree
6628 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6629 {
6630 tree ar, ai, br, bi;
6631
6632 if (TREE_CODE (ac) == COMPLEX_EXPR)
6633 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6634 else if (TREE_CODE (ac) == COMPLEX_CST)
6635 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6636 else
6637 return NULL;
6638
6639 if (TREE_CODE (bc) == COMPLEX_EXPR)
6640 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6641 else if (TREE_CODE (bc) == COMPLEX_CST)
6642 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6643 else
6644 return NULL;
6645
6646 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6647 }
6648
6649 /* Fold a unary expression of code CODE and type TYPE with operand
6650 OP0. Return the folded expression if folding is successful.
6651 Otherwise, return NULL_TREE. */
6652
6653 static tree
6654 fold_unary (enum tree_code code, tree type, tree op0)
6655 {
6656 tree tem;
6657 tree arg0;
6658 enum tree_code_class kind = TREE_CODE_CLASS (code);
6659
6660 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6661 && TREE_CODE_LENGTH (code) == 1);
6662
6663 arg0 = op0;
6664 if (arg0)
6665 {
6666 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6667 {
6668 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6669 STRIP_SIGN_NOPS (arg0);
6670 }
6671 else
6672 {
6673 /* Strip any conversions that don't change the mode. This
6674 is safe for every expression, except for a comparison
6675 expression because its signedness is derived from its
6676 operands.
6677
6678 Note that this is done as an internal manipulation within
6679 the constant folder, in order to find the simplest
6680 representation of the arguments so that their form can be
6681 studied. In any case, the appropriate type conversions
6682 should be put back in the tree that will get out of the
6683 constant folder. */
6684 STRIP_NOPS (arg0);
6685 }
6686 }
6687
6688 if (TREE_CODE_CLASS (code) == tcc_unary)
6689 {
6690 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6691 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6692 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6693 else if (TREE_CODE (arg0) == COND_EXPR)
6694 {
6695 tree arg01 = TREE_OPERAND (arg0, 1);
6696 tree arg02 = TREE_OPERAND (arg0, 2);
6697 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6698 arg01 = fold_build1 (code, type, arg01);
6699 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6700 arg02 = fold_build1 (code, type, arg02);
6701 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6702 arg01, arg02);
6703
6704 /* If this was a conversion, and all we did was to move it
6705 inside the COND_EXPR, bring it back out. But leave it if
6706 it is a conversion from integer to integer and the
6707 result precision is no wider than a word since such a
6708 conversion is cheap and may be optimized away by combine,
6709 while it couldn't if it were outside the COND_EXPR. Then return
6710 so we don't get into an infinite recursion loop taking the
6711 conversion out and then back in. */
6712
6713 if ((code == NOP_EXPR || code == CONVERT_EXPR
6714 || code == NON_LVALUE_EXPR)
6715 && TREE_CODE (tem) == COND_EXPR
6716 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6717 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6718 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6719 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6720 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6721 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6722 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6723 && (INTEGRAL_TYPE_P
6724 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6725 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6726 || flag_syntax_only))
6727 tem = build1 (code, type,
6728 build3 (COND_EXPR,
6729 TREE_TYPE (TREE_OPERAND
6730 (TREE_OPERAND (tem, 1), 0)),
6731 TREE_OPERAND (tem, 0),
6732 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6733 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6734 return tem;
6735 }
6736 else if (COMPARISON_CLASS_P (arg0))
6737 {
6738 if (TREE_CODE (type) == BOOLEAN_TYPE)
6739 {
6740 arg0 = copy_node (arg0);
6741 TREE_TYPE (arg0) = type;
6742 return arg0;
6743 }
6744 else if (TREE_CODE (type) != INTEGER_TYPE)
6745 return fold_build3 (COND_EXPR, type, arg0,
6746 fold_build1 (code, type,
6747 integer_one_node),
6748 fold_build1 (code, type,
6749 integer_zero_node));
6750 }
6751 }
6752
6753 switch (code)
6754 {
6755 case NOP_EXPR:
6756 case FLOAT_EXPR:
6757 case CONVERT_EXPR:
6758 case FIX_TRUNC_EXPR:
6759 case FIX_CEIL_EXPR:
6760 case FIX_FLOOR_EXPR:
6761 case FIX_ROUND_EXPR:
6762 if (TREE_TYPE (op0) == type)
6763 return op0;
6764
6765 /* Handle cases of two conversions in a row. */
6766 if (TREE_CODE (op0) == NOP_EXPR
6767 || TREE_CODE (op0) == CONVERT_EXPR)
6768 {
6769 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6770 tree inter_type = TREE_TYPE (op0);
6771 int inside_int = INTEGRAL_TYPE_P (inside_type);
6772 int inside_ptr = POINTER_TYPE_P (inside_type);
6773 int inside_float = FLOAT_TYPE_P (inside_type);
6774 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6775 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6776 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6777 int inter_int = INTEGRAL_TYPE_P (inter_type);
6778 int inter_ptr = POINTER_TYPE_P (inter_type);
6779 int inter_float = FLOAT_TYPE_P (inter_type);
6780 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6781 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6782 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6783 int final_int = INTEGRAL_TYPE_P (type);
6784 int final_ptr = POINTER_TYPE_P (type);
6785 int final_float = FLOAT_TYPE_P (type);
6786 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6787 unsigned int final_prec = TYPE_PRECISION (type);
6788 int final_unsignedp = TYPE_UNSIGNED (type);
6789
6790 /* In addition to the cases of two conversions in a row
6791 handled below, if we are converting something to its own
6792 type via an object of identical or wider precision, neither
6793 conversion is needed. */
6794 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6795 && ((inter_int && final_int) || (inter_float && final_float))
6796 && inter_prec >= final_prec)
6797 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6798
6799 /* Likewise, if the intermediate and final types are either both
6800 float or both integer, we don't need the middle conversion if
6801 it is wider than the final type and doesn't change the signedness
6802 (for integers). Avoid this if the final type is a pointer
6803 since then we sometimes need the inner conversion. Likewise if
6804 the outer has a precision not equal to the size of its mode. */
6805 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6806 || (inter_float && inside_float)
6807 || (inter_vec && inside_vec))
6808 && inter_prec >= inside_prec
6809 && (inter_float || inter_vec
6810 || inter_unsignedp == inside_unsignedp)
6811 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6812 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6813 && ! final_ptr
6814 && (! final_vec || inter_prec == inside_prec))
6815 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6816
6817 /* If we have a sign-extension of a zero-extended value, we can
6818 replace that by a single zero-extension. */
6819 if (inside_int && inter_int && final_int
6820 && inside_prec < inter_prec && inter_prec < final_prec
6821 && inside_unsignedp && !inter_unsignedp)
6822 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6823
6824 /* Two conversions in a row are not needed unless:
6825 - some conversion is floating-point (overstrict for now), or
6826 - some conversion is a vector (overstrict for now), or
6827 - the intermediate type is narrower than both initial and
6828 final, or
6829 - the intermediate type and innermost type differ in signedness,
6830 and the outermost type is wider than the intermediate, or
6831 - the initial type is a pointer type and the precisions of the
6832 intermediate and final types differ, or
6833 - the final type is a pointer type and the precisions of the
6834 initial and intermediate types differ. */
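/* For example, for a 32-bit int I the double conversion
(int) (long long) I satisfies the conditions above, so it is
reduced to the single (identity) conversion (int) I.  */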
6835 if (! inside_float && ! inter_float && ! final_float
6836 && ! inside_vec && ! inter_vec && ! final_vec
6837 && (inter_prec > inside_prec || inter_prec > final_prec)
6838 && ! (inside_int && inter_int
6839 && inter_unsignedp != inside_unsignedp
6840 && inter_prec < final_prec)
6841 && ((inter_unsignedp && inter_prec > inside_prec)
6842 == (final_unsignedp && final_prec > inter_prec))
6843 && ! (inside_ptr && inter_prec != final_prec)
6844 && ! (final_ptr && inside_prec != inter_prec)
6845 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6846 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6847 && ! final_ptr)
6848 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6849 }
6850
6851 if (TREE_CODE (op0) == MODIFY_EXPR
6852 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6853 /* Detect assigning a bitfield. */
6854 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6855 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6856 {
6857 /* Don't leave an assignment inside a conversion
6858 unless assigning a bitfield. */
6859 tem = build1 (code, type, TREE_OPERAND (op0, 1));
6860 /* First do the assignment, then return converted constant. */
6861 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
6862 TREE_NO_WARNING (tem) = 1;
6863 TREE_USED (tem) = 1;
6864 return tem;
6865 }
6866
6867 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6868 constant (if x has signed type, the sign bit cannot be set
6869 in c). This folds extension into the BIT_AND_EXPR. */
6870 if (INTEGRAL_TYPE_P (type)
6871 && TREE_CODE (type) != BOOLEAN_TYPE
6872 && TREE_CODE (op0) == BIT_AND_EXPR
6873 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6874 {
6875 tree and = op0;
6876 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6877 int change = 0;
6878
6879 if (TYPE_UNSIGNED (TREE_TYPE (and))
6880 || (TYPE_PRECISION (type)
6881 <= TYPE_PRECISION (TREE_TYPE (and))))
6882 change = 1;
6883 else if (TYPE_PRECISION (TREE_TYPE (and1))
6884 <= HOST_BITS_PER_WIDE_INT
6885 && host_integerp (and1, 1))
6886 {
6887 unsigned HOST_WIDE_INT cst;
6888
6889 cst = tree_low_cst (and1, 1);
6890 cst &= (HOST_WIDE_INT) -1
6891 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6892 change = (cst == 0);
6893 #ifdef LOAD_EXTEND_OP
6894 if (change
6895 && !flag_syntax_only
6896 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6897 == ZERO_EXTEND))
6898 {
6899 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6900 and0 = fold_convert (uns, and0);
6901 and1 = fold_convert (uns, and1);
6902 }
6903 #endif
6904 }
6905 if (change)
6906 {
6907 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6908 TREE_INT_CST_HIGH (and1));
6909 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6910 TREE_CONSTANT_OVERFLOW (and1));
6911 return fold_build2 (BIT_AND_EXPR, type,
6912 fold_convert (type, and0), tem);
6913 }
6914 }
6915
6916 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6917 T2 being pointers to types of the same size. */
6918 if (POINTER_TYPE_P (type)
6919 && BINARY_CLASS_P (arg0)
6920 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6921 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6922 {
6923 tree arg00 = TREE_OPERAND (arg0, 0);
6924 tree t0 = type;
6925 tree t1 = TREE_TYPE (arg00);
6926 tree tt0 = TREE_TYPE (t0);
6927 tree tt1 = TREE_TYPE (t1);
6928 tree s0 = TYPE_SIZE (tt0);
6929 tree s1 = TYPE_SIZE (tt1);
6930
6931 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6932 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6933 TREE_OPERAND (arg0, 1));
6934 }
6935
6936 tem = fold_convert_const (code, type, arg0);
6937 return tem ? tem : NULL_TREE;
6938
6939 case VIEW_CONVERT_EXPR:
6940 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6941 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6942 return NULL_TREE;
6943
6944 case NEGATE_EXPR:
6945 if (negate_expr_p (arg0))
6946 return fold_convert (type, negate_expr (arg0));
6947 /* Convert - (~A) to A + 1. */
6948 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6949 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6950 build_int_cst (type, 1));
6951 return NULL_TREE;
6952
6953 case ABS_EXPR:
6954 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6955 return fold_abs_const (arg0, type);
6956 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6957 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6958 /* Convert fabs((double)float) into (double)fabsf(float). */
6959 else if (TREE_CODE (arg0) == NOP_EXPR
6960 && TREE_CODE (type) == REAL_TYPE)
6961 {
6962 tree targ0 = strip_float_extensions (arg0);
6963 if (targ0 != arg0)
6964 return fold_convert (type, fold_build1 (ABS_EXPR,
6965 TREE_TYPE (targ0),
6966 targ0));
6967 }
6968 else if (tree_expr_nonnegative_p (arg0))
6969 return arg0;
6970
6971 /* Strip sign ops from argument. */
6972 if (TREE_CODE (type) == REAL_TYPE)
6973 {
6974 tem = fold_strip_sign_ops (arg0);
6975 if (tem)
6976 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6977 }
6978 return NULL_TREE;
6979
6980 case CONJ_EXPR:
6981 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6982 return fold_convert (type, arg0);
6983 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6984 return build2 (COMPLEX_EXPR, type,
6985 TREE_OPERAND (arg0, 0),
6986 negate_expr (TREE_OPERAND (arg0, 1)));
6987 else if (TREE_CODE (arg0) == COMPLEX_CST)
6988 return build_complex (type, TREE_REALPART (arg0),
6989 negate_expr (TREE_IMAGPART (arg0)));
6990 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6991 return fold_build2 (TREE_CODE (arg0), type,
6992 fold_build1 (CONJ_EXPR, type,
6993 TREE_OPERAND (arg0, 0)),
6994 fold_build1 (CONJ_EXPR, type,
6995 TREE_OPERAND (arg0, 1)));
6996 else if (TREE_CODE (arg0) == CONJ_EXPR)
6997 return TREE_OPERAND (arg0, 0);
6998 return NULL_TREE;
6999
7000 case BIT_NOT_EXPR:
7001 if (TREE_CODE (arg0) == INTEGER_CST)
7002 return fold_not_const (arg0, type);
7003 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7004 return TREE_OPERAND (arg0, 0);
7005 /* Convert ~ (-A) to A - 1. */
7006 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7007 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7008 build_int_cst (type, 1));
7009 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7010 else if (INTEGRAL_TYPE_P (type)
7011 && ((TREE_CODE (arg0) == MINUS_EXPR
7012 && integer_onep (TREE_OPERAND (arg0, 1)))
7013 || (TREE_CODE (arg0) == PLUS_EXPR
7014 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7015 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
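/* Both BIT_NOT folds above (and the - (~A) fold in the NEGATE_EXPR
   case) are instances of the two's complement identity ~X == -X - 1:
   ~(-A) == A - 1 and ~(A - 1) == -A.  For example, with A == 5,
   ~(-5) == 4 and ~(5 - 1) == -5.  */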
7016 return NULL_TREE;
7017
7018 case TRUTH_NOT_EXPR:
7019 /* The argument to invert_truthvalue must have Boolean type. */
7020 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7021 arg0 = fold_convert (boolean_type_node, arg0);
7022
7023 /* Note that the operand of this must be an int
7024 and its value must be 0 or 1.
7025 ("true" is a fixed value perhaps depending on the language,
7026 but we don't handle values other than 1 correctly yet.) */
7027 tem = invert_truthvalue (arg0);
7028 /* Avoid infinite recursion. */
7029 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7030 return NULL_TREE;
7031 return fold_convert (type, tem);
7032
7033 case REALPART_EXPR:
7034 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7035 return NULL_TREE;
7036 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7037 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7038 TREE_OPERAND (arg0, 1));
7039 else if (TREE_CODE (arg0) == COMPLEX_CST)
7040 return TREE_REALPART (arg0);
7041 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7042 return fold_build2 (TREE_CODE (arg0), type,
7043 fold_build1 (REALPART_EXPR, type,
7044 TREE_OPERAND (arg0, 0)),
7045 fold_build1 (REALPART_EXPR, type,
7046 TREE_OPERAND (arg0, 1)));
7047 return NULL_TREE;
7048
7049 case IMAGPART_EXPR:
7050 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7051 return fold_convert (type, integer_zero_node);
7052 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7053 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7054 TREE_OPERAND (arg0, 0));
7055 else if (TREE_CODE (arg0) == COMPLEX_CST)
7056 return TREE_IMAGPART (arg0);
7057 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7058 return fold_build2 (TREE_CODE (arg0), type,
7059 fold_build1 (IMAGPART_EXPR, type,
7060 TREE_OPERAND (arg0, 0)),
7061 fold_build1 (IMAGPART_EXPR, type,
7062 TREE_OPERAND (arg0, 1)));
7063 return NULL_TREE;
7064
7065 default:
7066 return NULL_TREE;
7067 } /* switch (code) */
7068 }
7069
7070 /* Fold a binary expression of code CODE and type TYPE with operands
7071 OP0 and OP1. Return the folded expression if folding is
7072 successful. Otherwise, return NULL_TREE. */
7073
7074 static tree
7075 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7076 {
7077 tree t1 = NULL_TREE;
7078 tree tem;
7079 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7080 enum tree_code_class kind = TREE_CODE_CLASS (code);
7081
7082 /* WINS will be nonzero when the switch is done
7083 if all operands are constant. */
7084 int wins = 1;
7085
7086 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7087 && TREE_CODE_LENGTH (code) == 2);
7088
7089 arg0 = op0;
7090 arg1 = op1;
7091
7092 if (arg0)
7093 {
7094 tree subop;
7095
7096 /* Strip any conversions that don't change the mode. This is
7097 safe for every expression, except for a comparison expression
7098 because its signedness is derived from its operands. So, in
7099 the latter case, only strip conversions that don't change the
7100 signedness.
7101
7102 Note that this is done as an internal manipulation within the
7103 constant folder, in order to find the simplest representation
7104 of the arguments so that their form can be studied. In any
7105 case, the appropriate type conversions should be put back in
7106 the tree that will get out of the constant folder. */
7107 if (kind == tcc_comparison)
7108 STRIP_SIGN_NOPS (arg0);
7109 else
7110 STRIP_NOPS (arg0);
7111
7112 if (TREE_CODE (arg0) == COMPLEX_CST)
7113 subop = TREE_REALPART (arg0);
7114 else
7115 subop = arg0;
7116
7117 if (TREE_CODE (subop) != INTEGER_CST
7118 && TREE_CODE (subop) != REAL_CST)
7119 /* Note that TREE_CONSTANT isn't enough:
7120 static var addresses are constant but we can't
7121 do arithmetic on them. */
7122 wins = 0;
7123 }
7124
7125 if (arg1)
7126 {
7127 tree subop;
7128
7129 /* Strip any conversions that don't change the mode. This is
7130 safe for every expression, except for a comparison expression
7131 because its signedness is derived from its operands. So, in
7132 the latter case, only strip conversions that don't change the
7133 signedness.
7134
7135 Note that this is done as an internal manipulation within the
7136 constant folder, in order to find the simplest representation
7137 of the arguments so that their form can be studied. In any
7138 case, the appropriate type conversions should be put back in
7139 the tree that will get out of the constant folder. */
7140 if (kind == tcc_comparison)
7141 STRIP_SIGN_NOPS (arg1);
7142 else
7143 STRIP_NOPS (arg1);
7144
7145 if (TREE_CODE (arg1) == COMPLEX_CST)
7146 subop = TREE_REALPART (arg1);
7147 else
7148 subop = arg1;
7149
7150 if (TREE_CODE (subop) != INTEGER_CST
7151 && TREE_CODE (subop) != REAL_CST)
7152 /* Note that TREE_CONSTANT isn't enough:
7153 static var addresses are constant but we can't
7154 do arithmetic on them. */
7155 wins = 0;
7156 }
7157
7158 /* If this is a commutative operation, and ARG0 is a constant, move it
7159 to ARG1 to reduce the number of tests below. */
7160 if (commutative_tree_code (code)
7161 && tree_swap_operands_p (arg0, arg1, true))
7162 return fold_build2 (code, type, op1, op0);
7163
7164 /* Now WINS is set as described above,
7165 ARG0 is the first operand of EXPR,
7166 and ARG1 is the second operand.
7167
7168 First check for cases where an arithmetic operation is applied to a
7169 compound, conditional, or comparison operation. Push the arithmetic
7170 operation inside the compound or conditional to see if any folding
7171 can then be done. Convert comparison to conditional for this purpose.
7172 This also optimizes non-constant cases that used to be done in
7173 expand_expr.
7174
7175 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
7176 where one of the operands is a truth value and the other is a truth
7177 value or a BIT_AND_EXPR with the constant 1. In that case, the
7178 code below would make the expression more complex. Change it to a
7179 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7180 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7181
7182 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7183 || code == EQ_EXPR || code == NE_EXPR)
7184 && ((truth_value_p (TREE_CODE (arg0))
7185 && (truth_value_p (TREE_CODE (arg1))
7186 || (TREE_CODE (arg1) == BIT_AND_EXPR
7187 && integer_onep (TREE_OPERAND (arg1, 1)))))
7188 || (truth_value_p (TREE_CODE (arg1))
7189 && (truth_value_p (TREE_CODE (arg0))
7190 || (TREE_CODE (arg0) == BIT_AND_EXPR
7191 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7192 {
7193 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7194 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7195 : TRUTH_XOR_EXPR,
7196 boolean_type_node,
7197 fold_convert (boolean_type_node, arg0),
7198 fold_convert (boolean_type_node, arg1));
7199
7200 if (code == EQ_EXPR)
7201 tem = invert_truthvalue (tem);
7202
7203 return fold_convert (type, tem);
7204 }
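/* A small example of the conversion above, for int a, b, c: the
   bitwise form (a < b) & (c != 0) has two truth-valued operands and
   is rewritten as the TRUTH_AND_EXPR (a < b) && (c != 0); likewise
   (a < b) == (c != 0) becomes the inverted TRUTH_XOR_EXPR
   !((a < b) ^ (c != 0)).  */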
7205
7206 if (TREE_CODE_CLASS (code) == tcc_comparison
7207 && TREE_CODE (arg0) == COMPOUND_EXPR)
7208 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7209 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7210 else if (TREE_CODE_CLASS (code) == tcc_comparison
7211 && TREE_CODE (arg1) == COMPOUND_EXPR)
7212 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7213 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7214 else if (TREE_CODE_CLASS (code) == tcc_binary
7215 || TREE_CODE_CLASS (code) == tcc_comparison)
7216 {
7217 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7218 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7219 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7220 arg1));
7221 if (TREE_CODE (arg1) == COMPOUND_EXPR
7222 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7223 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7224 fold_build2 (code, type,
7225 arg0, TREE_OPERAND (arg1, 1)));
7226
7227 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7228 {
7229 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7230 arg0, arg1,
7231 /*cond_first_p=*/1);
7232 if (tem != NULL_TREE)
7233 return tem;
7234 }
7235
7236 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7237 {
7238 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7239 arg1, arg0,
7240 /*cond_first_p=*/0);
7241 if (tem != NULL_TREE)
7242 return tem;
7243 }
7244 }
7245
7246 switch (code)
7247 {
7248 case PLUS_EXPR:
7249 /* A + (-B) -> A - B */
7250 if (TREE_CODE (arg1) == NEGATE_EXPR)
7251 return fold_build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7252 /* (-A) + B -> B - A */
7253 if (TREE_CODE (arg0) == NEGATE_EXPR
7254 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7255 return fold_build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0));
7256 /* Convert ~A + 1 to -A. */
7257 if (INTEGRAL_TYPE_P (type)
7258 && TREE_CODE (arg0) == BIT_NOT_EXPR
7259 && integer_onep (arg1))
7260 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7261
7262 if (TREE_CODE (type) == COMPLEX_TYPE)
7263 {
7264 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7265 if (tem)
7266 return tem;
7267 }
7268
7269 if (! FLOAT_TYPE_P (type))
7270 {
7271 if (integer_zerop (arg1))
7272 return non_lvalue (fold_convert (type, arg0));
7273
7274 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7275 with a constant, and the two constants have no bits in common,
7276 we should treat this as a BIT_IOR_EXPR since this may produce more
7277 simplifications. */
7278 if (TREE_CODE (arg0) == BIT_AND_EXPR
7279 && TREE_CODE (arg1) == BIT_AND_EXPR
7280 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7281 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7282 && integer_zerop (const_binop (BIT_AND_EXPR,
7283 TREE_OPERAND (arg0, 1),
7284 TREE_OPERAND (arg1, 1), 0)))
7285 {
7286 code = BIT_IOR_EXPR;
7287 goto bit_ior;
7288 }
7289
7290 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7291 (plus (plus (mult) (mult)) (foo)) so that we can
7292 take advantage of the factoring cases below. */
7293 if (((TREE_CODE (arg0) == PLUS_EXPR
7294 || TREE_CODE (arg0) == MINUS_EXPR)
7295 && TREE_CODE (arg1) == MULT_EXPR)
7296 || ((TREE_CODE (arg1) == PLUS_EXPR
7297 || TREE_CODE (arg1) == MINUS_EXPR)
7298 && TREE_CODE (arg0) == MULT_EXPR))
7299 {
7300 tree parg0, parg1, parg, marg;
7301 enum tree_code pcode;
7302
7303 if (TREE_CODE (arg1) == MULT_EXPR)
7304 parg = arg0, marg = arg1;
7305 else
7306 parg = arg1, marg = arg0;
7307 pcode = TREE_CODE (parg);
7308 parg0 = TREE_OPERAND (parg, 0);
7309 parg1 = TREE_OPERAND (parg, 1);
7310 STRIP_NOPS (parg0);
7311 STRIP_NOPS (parg1);
7312
7313 if (TREE_CODE (parg0) == MULT_EXPR
7314 && TREE_CODE (parg1) != MULT_EXPR)
7315 return fold_build2 (pcode, type,
7316 fold_build2 (PLUS_EXPR, type,
7317 fold_convert (type, parg0),
7318 fold_convert (type, marg)),
7319 fold_convert (type, parg1));
7320 if (TREE_CODE (parg0) != MULT_EXPR
7321 && TREE_CODE (parg1) == MULT_EXPR)
7322 return fold_build2 (PLUS_EXPR, type,
7323 fold_convert (type, parg0),
7324 fold_build2 (pcode, type,
7325 fold_convert (type, marg),
7326 fold_convert (type,
7327 parg1)));
7328 }
7329
7330 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7331 {
7332 tree arg00, arg01, arg10, arg11;
7333 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7334
7335 /* (A * C) + (B * C) -> (A+B) * C.
7336 We are most concerned about the case where C is a constant,
7337 but other combinations show up during loop reduction. Since
7338 it is not difficult, try all four possibilities. */
7339
7340 arg00 = TREE_OPERAND (arg0, 0);
7341 arg01 = TREE_OPERAND (arg0, 1);
7342 arg10 = TREE_OPERAND (arg1, 0);
7343 arg11 = TREE_OPERAND (arg1, 1);
7344 same = NULL_TREE;
7345
7346 if (operand_equal_p (arg01, arg11, 0))
7347 same = arg01, alt0 = arg00, alt1 = arg10;
7348 else if (operand_equal_p (arg00, arg10, 0))
7349 same = arg00, alt0 = arg01, alt1 = arg11;
7350 else if (operand_equal_p (arg00, arg11, 0))
7351 same = arg00, alt0 = arg01, alt1 = arg10;
7352 else if (operand_equal_p (arg01, arg10, 0))
7353 same = arg01, alt0 = arg00, alt1 = arg11;
7354
7355 /* No identical multiplicands; see if we can find a common
7356 power-of-two factor in non-power-of-two multiplies. This
7357 can help in multi-dimensional array access. */
7358 else if (TREE_CODE (arg01) == INTEGER_CST
7359 && TREE_CODE (arg11) == INTEGER_CST
7360 && TREE_INT_CST_HIGH (arg01) == 0
7361 && TREE_INT_CST_HIGH (arg11) == 0)
7362 {
7363 HOST_WIDE_INT int01, int11, tmp;
7364 int01 = TREE_INT_CST_LOW (arg01);
7365 int11 = TREE_INT_CST_LOW (arg11);
7366
7367 /* Move min of absolute values to int11. */
7368 if ((int01 >= 0 ? int01 : -int01)
7369 < (int11 >= 0 ? int11 : -int11))
7370 {
7371 tmp = int01, int01 = int11, int11 = tmp;
7372 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7373 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7374 }
7375
7376 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7377 {
7378 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7379 build_int_cst (NULL_TREE,
7380 int01 / int11));
7381 alt1 = arg10;
7382 same = arg11;
7383 }
7384 }
7385
7386 if (same)
7387 return fold_build2 (MULT_EXPR, type,
7388 fold_build2 (PLUS_EXPR, type,
7389 fold_convert (type, alt0),
7390 fold_convert (type, alt1)),
7391 same);
7392 }
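/* Example of the factoring above: i*4 + j*4 has an identical
   multiplicand and becomes (i + j) * 4 directly, while i*12 + j*4
   has none, but 4 is a power of two dividing 12, so it becomes
   (i*3 + j) * 4.  The latter shape shows up in multi-dimensional
   array indexing, e.g. rows of 3 elements of 4 bytes each.  */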
7393
7394 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7395 of the array. The loop optimizer sometimes produces this type of
7396 expression. */
7397 if (TREE_CODE (arg0) == ADDR_EXPR
7398 && TREE_CODE (arg1) == MULT_EXPR)
7399 {
7400 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7401 if (tem)
7402 return fold_convert (type, fold (tem));
7403 }
7404 else if (TREE_CODE (arg1) == ADDR_EXPR
7405 && TREE_CODE (arg0) == MULT_EXPR)
7406 {
7407 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7408 if (tem)
7409 return fold_convert (type, fold (tem));
7410 }
7411 }
7412 else
7413 {
7414 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7415 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7416 return non_lvalue (fold_convert (type, arg0));
7417
7418 /* Likewise if the operands are reversed. */
7419 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7420 return non_lvalue (fold_convert (type, arg1));
7421
7422 /* Convert X + -C into X - C. */
7423 if (TREE_CODE (arg1) == REAL_CST
7424 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7425 {
7426 tem = fold_negate_const (arg1, type);
7427 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7428 return fold_build2 (MINUS_EXPR, type,
7429 fold_convert (type, arg0),
7430 fold_convert (type, tem));
7431 }
7432
7433 /* Convert x+x into x*2.0. */
7434 if (operand_equal_p (arg0, arg1, 0)
7435 && SCALAR_FLOAT_TYPE_P (type))
7436 return fold_build2 (MULT_EXPR, type, arg0,
7437 build_real (type, dconst2));
7438
7439 /* Convert x*c+x into x*(c+1). */
7440 if (flag_unsafe_math_optimizations
7441 && TREE_CODE (arg0) == MULT_EXPR
7442 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7443 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7444 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7445 {
7446 REAL_VALUE_TYPE c;
7447
7448 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7449 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7450 return fold_build2 (MULT_EXPR, type, arg1,
7451 build_real (type, c));
7452 }
7453
7454 /* Convert x+x*c into x*(c+1). */
7455 if (flag_unsafe_math_optimizations
7456 && TREE_CODE (arg1) == MULT_EXPR
7457 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7458 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7459 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7460 {
7461 REAL_VALUE_TYPE c;
7462
7463 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7464 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7465 return fold_build2 (MULT_EXPR, type, arg0,
7466 build_real (type, c));
7467 }
7468
7469 /* Convert x*c1+x*c2 into x*(c1+c2). */
7470 if (flag_unsafe_math_optimizations
7471 && TREE_CODE (arg0) == MULT_EXPR
7472 && TREE_CODE (arg1) == MULT_EXPR
7473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7474 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7475 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7476 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7477 && operand_equal_p (TREE_OPERAND (arg0, 0),
7478 TREE_OPERAND (arg1, 0), 0))
7479 {
7480 REAL_VALUE_TYPE c1, c2;
7481
7482 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7483 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7484 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7485 return fold_build2 (MULT_EXPR, type,
7486 TREE_OPERAND (arg0, 0),
7487 build_real (type, c1));
7488 }
7489 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7490 if (flag_unsafe_math_optimizations
7491 && TREE_CODE (arg1) == PLUS_EXPR
7492 && TREE_CODE (arg0) != MULT_EXPR)
7493 {
7494 tree tree10 = TREE_OPERAND (arg1, 0);
7495 tree tree11 = TREE_OPERAND (arg1, 1);
7496 if (TREE_CODE (tree11) == MULT_EXPR
7497 && TREE_CODE (tree10) == MULT_EXPR)
7498 {
7499 tree tree0;
7500 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7501 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7502 }
7503 }
7504 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7505 if (flag_unsafe_math_optimizations
7506 && TREE_CODE (arg0) == PLUS_EXPR
7507 && TREE_CODE (arg1) != MULT_EXPR)
7508 {
7509 tree tree00 = TREE_OPERAND (arg0, 0);
7510 tree tree01 = TREE_OPERAND (arg0, 1);
7511 if (TREE_CODE (tree01) == MULT_EXPR
7512 && TREE_CODE (tree00) == MULT_EXPR)
7513 {
7514 tree tree0;
7515 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7516 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7517 }
7518 }
7519 }
7520
7521 bit_rotate:
7522 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
7523 unsigned and C1+C2 is the size of A. */
7524 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
7525 unsigned and Z is the size of A. */
7526 {
7527 enum tree_code code0, code1;
7528 code0 = TREE_CODE (arg0);
7529 code1 = TREE_CODE (arg1);
7530 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7531 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7532 && operand_equal_p (TREE_OPERAND (arg0, 0),
7533 TREE_OPERAND (arg1, 0), 0)
7534 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7535 {
7536 tree tree01, tree11;
7537 enum tree_code code01, code11;
7538
7539 tree01 = TREE_OPERAND (arg0, 1);
7540 tree11 = TREE_OPERAND (arg1, 1);
7541 STRIP_NOPS (tree01);
7542 STRIP_NOPS (tree11);
7543 code01 = TREE_CODE (tree01);
7544 code11 = TREE_CODE (tree11);
7545 if (code01 == INTEGER_CST
7546 && code11 == INTEGER_CST
7547 && TREE_INT_CST_HIGH (tree01) == 0
7548 && TREE_INT_CST_HIGH (tree11) == 0
7549 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7550 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7551 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7552 code0 == LSHIFT_EXPR ? tree01 : tree11);
7553 else if (code11 == MINUS_EXPR)
7554 {
7555 tree tree110, tree111;
7556 tree110 = TREE_OPERAND (tree11, 0);
7557 tree111 = TREE_OPERAND (tree11, 1);
7558 STRIP_NOPS (tree110);
7559 STRIP_NOPS (tree111);
7560 if (TREE_CODE (tree110) == INTEGER_CST
7561 && 0 == compare_tree_int (tree110,
7562 TYPE_PRECISION
7563 (TREE_TYPE (TREE_OPERAND
7564 (arg0, 0))))
7565 && operand_equal_p (tree01, tree111, 0))
7566 return build2 ((code0 == LSHIFT_EXPR
7567 ? LROTATE_EXPR
7568 : RROTATE_EXPR),
7569 type, TREE_OPERAND (arg0, 0), tree01);
7570 }
7571 else if (code01 == MINUS_EXPR)
7572 {
7573 tree tree010, tree011;
7574 tree010 = TREE_OPERAND (tree01, 0);
7575 tree011 = TREE_OPERAND (tree01, 1);
7576 STRIP_NOPS (tree010);
7577 STRIP_NOPS (tree011);
7578 if (TREE_CODE (tree010) == INTEGER_CST
7579 && 0 == compare_tree_int (tree010,
7580 TYPE_PRECISION
7581 (TREE_TYPE (TREE_OPERAND
7582 (arg0, 0))))
7583 && operand_equal_p (tree11, tree011, 0))
7584 return build2 ((code0 != LSHIFT_EXPR
7585 ? LROTATE_EXPR
7586 : RROTATE_EXPR),
7587 type, TREE_OPERAND (arg0, 0), tree11);
7588 }
7589 }
7590 }
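/* Example of the rotate detection above, assuming 32-bit unsigned
   int x: (x << 3) + (x >> 29) matches the constant form (3 + 29 is
   the precision) and becomes x rotated left by 3, and
   (x << n) + (x >> (32 - n)) matches the MINUS_EXPR form.  The
   BIT_IOR_EXPR and BIT_XOR_EXPR cases jump here as well, so the
   same patterns written with | or ^ are also recognized.  */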
7591
7592 associate:
7593 /* In most languages, we can't associate operations on floats through
7594 parentheses. Rather than remember where the parentheses were, we
7595 don't associate floats at all, unless the user has specified
7596 -funsafe-math-optimizations. */
7597
7598 if (! wins
7599 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7600 {
7601 tree var0, con0, lit0, minus_lit0;
7602 tree var1, con1, lit1, minus_lit1;
7603
7604 /* Split both trees into variables, constants, and literals. Then
7605 associate each group together, the constants with literals,
7606 then the result with variables. This increases the chances of
7607 literals being recombined later and of generating relocatable
7608 expressions for the sum of a constant and literal. */
7609 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7610 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7611 code == MINUS_EXPR);
7612
7613 /* Only do something if we found more than two objects. Otherwise,
7614 nothing has changed and we risk infinite recursion. */
7615 if (2 < ((var0 != 0) + (var1 != 0)
7616 + (con0 != 0) + (con1 != 0)
7617 + (lit0 != 0) + (lit1 != 0)
7618 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7619 {
7620 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7621 if (code == MINUS_EXPR)
7622 code = PLUS_EXPR;
7623
7624 var0 = associate_trees (var0, var1, code, type);
7625 con0 = associate_trees (con0, con1, code, type);
7626 lit0 = associate_trees (lit0, lit1, code, type);
7627 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7628
7629 /* Preserve the MINUS_EXPR if the negative part of the literal is
7630 greater than the positive part. Otherwise, the multiplicative
7631 folding code (i.e. extract_muldiv) may be fooled when
7632 unsigned constants are subtracted, as in the following
7633 example: ((X*2 + 4) - 8U)/2. */
7634 if (minus_lit0 && lit0)
7635 {
7636 if (TREE_CODE (lit0) == INTEGER_CST
7637 && TREE_CODE (minus_lit0) == INTEGER_CST
7638 && tree_int_cst_lt (lit0, minus_lit0))
7639 {
7640 minus_lit0 = associate_trees (minus_lit0, lit0,
7641 MINUS_EXPR, type);
7642 lit0 = 0;
7643 }
7644 else
7645 {
7646 lit0 = associate_trees (lit0, minus_lit0,
7647 MINUS_EXPR, type);
7648 minus_lit0 = 0;
7649 }
7650 }
7651 if (minus_lit0)
7652 {
7653 if (con0 == 0)
7654 return fold_convert (type,
7655 associate_trees (var0, minus_lit0,
7656 MINUS_EXPR, type));
7657 else
7658 {
7659 con0 = associate_trees (con0, minus_lit0,
7660 MINUS_EXPR, type);
7661 return fold_convert (type,
7662 associate_trees (var0, con0,
7663 PLUS_EXPR, type));
7664 }
7665 }
7666
7667 con0 = associate_trees (con0, lit0, code, type);
7668 return fold_convert (type, associate_trees (var0, con0,
7669 code, type));
7670 }
7671 }
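/* A sketch of the splitting above: for (x + 1) + (y + 2),
   split_tree yields variables x and y and literals 1 and 2, which
   reassociate to (x + y) + 3, leaving a single literal for later
   folding.  */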
7672
7673 binary:
7674 if (wins)
7675 t1 = const_binop (code, arg0, arg1, 0);
7676 if (t1 != NULL_TREE)
7677 {
7678 /* The return value should always have
7679 the same type as the original expression. */
7680 if (TREE_TYPE (t1) != type)
7681 t1 = fold_convert (type, t1);
7682
7683 return t1;
7684 }
7685 return NULL_TREE;
7686
7687 case MINUS_EXPR:
7688 /* A - (-B) -> A + B */
7689 if (TREE_CODE (arg1) == NEGATE_EXPR)
7690 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7691 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7692 if (TREE_CODE (arg0) == NEGATE_EXPR
7693 && (FLOAT_TYPE_P (type)
7694 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7695 && negate_expr_p (arg1)
7696 && reorder_operands_p (arg0, arg1))
7697 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7698 TREE_OPERAND (arg0, 0));
7699 /* Convert -A - 1 to ~A. */
7700 if (INTEGRAL_TYPE_P (type)
7701 && TREE_CODE (arg0) == NEGATE_EXPR
7702 && integer_onep (arg1))
7703 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7704
7705 /* Convert -1 - A to ~A. */
7706 if (INTEGRAL_TYPE_P (type)
7707 && integer_all_onesp (arg0))
7708 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7709
7710 if (TREE_CODE (type) == COMPLEX_TYPE)
7711 {
7712 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7713 if (tem)
7714 return tem;
7715 }
7716
7717 if (! FLOAT_TYPE_P (type))
7718 {
7719 if (! wins && integer_zerop (arg0))
7720 return negate_expr (fold_convert (type, arg1));
7721 if (integer_zerop (arg1))
7722 return non_lvalue (fold_convert (type, arg0));
7723
7724 /* Fold A - (A & B) into ~B & A. */
7725 if (!TREE_SIDE_EFFECTS (arg0)
7726 && TREE_CODE (arg1) == BIT_AND_EXPR)
7727 {
7728 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7729 return fold_build2 (BIT_AND_EXPR, type,
7730 fold_build1 (BIT_NOT_EXPR, type,
7731 TREE_OPERAND (arg1, 0)),
7732 arg0);
7733 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7734 return fold_build2 (BIT_AND_EXPR, type,
7735 fold_build1 (BIT_NOT_EXPR, type,
7736 TREE_OPERAND (arg1, 1)),
7737 arg0);
7738 }
7739
7740 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7741 any power of 2 minus 1. */
7742 if (TREE_CODE (arg0) == BIT_AND_EXPR
7743 && TREE_CODE (arg1) == BIT_AND_EXPR
7744 && operand_equal_p (TREE_OPERAND (arg0, 0),
7745 TREE_OPERAND (arg1, 0), 0))
7746 {
7747 tree mask0 = TREE_OPERAND (arg0, 1);
7748 tree mask1 = TREE_OPERAND (arg1, 1);
7749 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7750
7751 if (operand_equal_p (tem, mask1, 0))
7752 {
7753 tem = fold_build2 (BIT_XOR_EXPR, type,
7754 TREE_OPERAND (arg0, 0), mask1);
7755 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7756 }
7757 }
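/* Examples of the two folds above, for unsigned a: a - (a & b)
   keeps exactly the bits of a that b does not, so it equals
   ~b & a (a == 0xff, b == 0x0f gives 0xf0 either way); and with
   the mask B == 0x0f, (a & ~0x0f) - (a & 0x0f) equals
   (a ^ 0x0f) - 0x0f.  */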
7758 }
7759
7760 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7761 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7762 return non_lvalue (fold_convert (type, arg0));
7763
7764 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7765 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7766 (-ARG1 + ARG0) reduces to -ARG1. */
7767 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7768 return negate_expr (fold_convert (type, arg1));
7769
7770 /* Fold &x - &x. This can happen from &x.foo - &x.
7771 This is unsafe for certain floats even in non-IEEE formats.
7772 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7773 Also note that operand_equal_p is always false if an operand
7774 is volatile. */
7775
7776 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7777 && operand_equal_p (arg0, arg1, 0))
7778 return fold_convert (type, integer_zero_node);
7779
7780 /* A - B -> A + (-B) if B is easily negatable. */
7781 if (!wins && negate_expr_p (arg1)
7782 && ((FLOAT_TYPE_P (type)
7783 /* Avoid this transformation if B is a positive REAL_CST. */
7784 && (TREE_CODE (arg1) != REAL_CST
7785 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7786 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7787 return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));
7788
7789 /* Try folding difference of addresses. */
7790 {
7791 HOST_WIDE_INT diff;
7792
7793 if ((TREE_CODE (arg0) == ADDR_EXPR
7794 || TREE_CODE (arg1) == ADDR_EXPR)
7795 && ptr_difference_const (arg0, arg1, &diff))
7796 return build_int_cst_type (type, diff);
7797 }
7798
7799 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7800 of the array. The loop optimizer sometimes produces this type of
7801 expression. */
7802 if (TREE_CODE (arg0) == ADDR_EXPR
7803 && TREE_CODE (arg1) == MULT_EXPR)
7804 {
7805 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7806 if (tem)
7807 return fold_convert (type, fold (tem));
7808 }
7809
7810 if (TREE_CODE (arg0) == MULT_EXPR
7811 && TREE_CODE (arg1) == MULT_EXPR
7812 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7813 {
7814 /* (A * C) - (B * C) -> (A-B) * C. */
7815 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7816 TREE_OPERAND (arg1, 1), 0))
7817 return fold_build2 (MULT_EXPR, type,
7818 fold_build2 (MINUS_EXPR, type,
7819 TREE_OPERAND (arg0, 0),
7820 TREE_OPERAND (arg1, 0)),
7821 TREE_OPERAND (arg0, 1));
7822 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7823 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7824 TREE_OPERAND (arg1, 0), 0))
7825 return fold_build2 (MULT_EXPR, type,
7826 TREE_OPERAND (arg0, 0),
7827 fold_build2 (MINUS_EXPR, type,
7828 TREE_OPERAND (arg0, 1),
7829 TREE_OPERAND (arg1, 1)));
7830 }
7831
7832 goto associate;
7833
7834 case MULT_EXPR:
7835 /* (-A) * (-B) -> A * B */
7836 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7837 return fold_build2 (MULT_EXPR, type,
7838 TREE_OPERAND (arg0, 0),
7839 negate_expr (arg1));
7840 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7841 return fold_build2 (MULT_EXPR, type,
7842 negate_expr (arg0),
7843 TREE_OPERAND (arg1, 0));
7844
7845 if (TREE_CODE (type) == COMPLEX_TYPE)
7846 {
7847 tem = fold_complex_mult (type, arg0, arg1);
7848 if (tem)
7849 return tem;
7850 }
7851
7852 if (! FLOAT_TYPE_P (type))
7853 {
7854 if (integer_zerop (arg1))
7855 return omit_one_operand (type, arg1, arg0);
7856 if (integer_onep (arg1))
7857 return non_lvalue (fold_convert (type, arg0));
7858 /* Transform x * -1 into -x. */
7859 if (integer_all_onesp (arg1))
7860 return fold_convert (type, negate_expr (arg0));
7861
7862 /* (a * (1 << b)) is (a << b) */
7863 if (TREE_CODE (arg1) == LSHIFT_EXPR
7864 && integer_onep (TREE_OPERAND (arg1, 0)))
7865 return fold_build2 (LSHIFT_EXPR, type, arg0,
7866 TREE_OPERAND (arg1, 1));
7867 if (TREE_CODE (arg0) == LSHIFT_EXPR
7868 && integer_onep (TREE_OPERAND (arg0, 0)))
7869 return fold_build2 (LSHIFT_EXPR, type, arg1,
7870 TREE_OPERAND (arg0, 1));
7871
7872 if (TREE_CODE (arg1) == INTEGER_CST
7873 && 0 != (tem = extract_muldiv (op0,
7874 fold_convert (type, arg1),
7875 code, NULL_TREE)))
7876 return fold_convert (type, tem);
7877
7878 }
7879 else
7880 {
7881 /* Maybe fold x * 0 to 0. The expressions aren't the same
7882 when x is NaN, since x * 0 is also NaN. Nor are they the
7883 same in modes with signed zeros, since multiplying a
7884 negative value by 0 gives -0, not +0. */
7885 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7886 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7887 && real_zerop (arg1))
7888 return omit_one_operand (type, arg1, arg0);
7889 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7890 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7891 && real_onep (arg1))
7892 return non_lvalue (fold_convert (type, arg0));
7893
7894 /* Transform x * -1.0 into -x. */
7895 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7896 && real_minus_onep (arg1))
7897 return fold_convert (type, negate_expr (arg0));
7898
7899 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7900 if (flag_unsafe_math_optimizations
7901 && TREE_CODE (arg0) == RDIV_EXPR
7902 && TREE_CODE (arg1) == REAL_CST
7903 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7904 {
7905 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7906 arg1, 0);
7907 if (tem)
7908 return fold_build2 (RDIV_EXPR, type, tem,
7909 TREE_OPERAND (arg0, 1));
7910 }
7911
7912 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7913 if (operand_equal_p (arg0, arg1, 0))
7914 {
7915 tree tem = fold_strip_sign_ops (arg0);
7916 if (tem != NULL_TREE)
7917 {
7918 tem = fold_convert (type, tem);
7919 return fold_build2 (MULT_EXPR, type, tem, tem);
7920 }
7921 }
7922
7923 if (flag_unsafe_math_optimizations)
7924 {
7925 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7926 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7927
7928 /* Optimizations of root(...)*root(...). */
7929 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7930 {
7931 tree rootfn, arg, arglist;
7932 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7933 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7934
7935 /* Optimize sqrt(x)*sqrt(x) as x. */
7936 if (BUILTIN_SQRT_P (fcode0)
7937 && operand_equal_p (arg00, arg10, 0)
7938 && ! HONOR_SNANS (TYPE_MODE (type)))
7939 return arg00;
7940
7941 /* Optimize root(x)*root(y) as root(x*y). */
7942 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7943 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7944 arglist = build_tree_list (NULL_TREE, arg);
7945 return build_function_call_expr (rootfn, arglist);
7946 }
7947
7948 /* Optimize expN(x)*expN(y) as expN(x+y). */
7949 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7950 {
7951 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7952 tree arg = build2 (PLUS_EXPR, type,
7953 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7954 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7955 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7956 return build_function_call_expr (expfn, arglist);
7957 }
7958
7959 /* Optimizations of pow(...)*pow(...). */
7960 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7961 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7962 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7963 {
7964 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7965 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7966 1)));
7967 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7968 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7969 1)));
7970
7971 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7972 if (operand_equal_p (arg01, arg11, 0))
7973 {
7974 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7975 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7976 tree arglist = tree_cons (NULL_TREE, fold (arg),
7977 build_tree_list (NULL_TREE,
7978 arg01));
7979 return build_function_call_expr (powfn, arglist);
7980 }
7981
7982 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7983 if (operand_equal_p (arg00, arg10, 0))
7984 {
7985 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7986 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7987 tree arglist = tree_cons (NULL_TREE, arg00,
7988 build_tree_list (NULL_TREE,
7989 arg));
7990 return build_function_call_expr (powfn, arglist);
7991 }
7992 }
7993
7994 /* Optimize tan(x)*cos(x) as sin(x). */
7995 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7996 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7997 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7998 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7999 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8000 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8001 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8002 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8003 {
8004 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8005
8006 if (sinfn != NULL_TREE)
8007 return build_function_call_expr (sinfn,
8008 TREE_OPERAND (arg0, 1));
8009 }
8010
8011 /* Optimize x*pow(x,c) as pow(x,c+1). */
8012 if (fcode1 == BUILT_IN_POW
8013 || fcode1 == BUILT_IN_POWF
8014 || fcode1 == BUILT_IN_POWL)
8015 {
8016 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8017 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8018 1)));
8019 if (TREE_CODE (arg11) == REAL_CST
8020 && ! TREE_CONSTANT_OVERFLOW (arg11)
8021 && operand_equal_p (arg0, arg10, 0))
8022 {
8023 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8024 REAL_VALUE_TYPE c;
8025 tree arg, arglist;
8026
8027 c = TREE_REAL_CST (arg11);
8028 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8029 arg = build_real (type, c);
8030 arglist = build_tree_list (NULL_TREE, arg);
8031 arglist = tree_cons (NULL_TREE, arg0, arglist);
8032 return build_function_call_expr (powfn, arglist);
8033 }
8034 }
8035
8036 /* Optimize pow(x,c)*x as pow(x,c+1). */
8037 if (fcode0 == BUILT_IN_POW
8038 || fcode0 == BUILT_IN_POWF
8039 || fcode0 == BUILT_IN_POWL)
8040 {
8041 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8042 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8043 1)));
8044 if (TREE_CODE (arg01) == REAL_CST
8045 && ! TREE_CONSTANT_OVERFLOW (arg01)
8046 && operand_equal_p (arg1, arg00, 0))
8047 {
8048 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8049 REAL_VALUE_TYPE c;
8050 tree arg, arglist;
8051
8052 c = TREE_REAL_CST (arg01);
8053 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8054 arg = build_real (type, c);
8055 arglist = build_tree_list (NULL_TREE, arg);
8056 arglist = tree_cons (NULL_TREE, arg1, arglist);
8057 return build_function_call_expr (powfn, arglist);
8058 }
8059 }
8060
8061 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8062 if (! optimize_size
8063 && operand_equal_p (arg0, arg1, 0))
8064 {
8065 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8066
8067 if (powfn)
8068 {
8069 tree arg = build_real (type, dconst2);
8070 tree arglist = build_tree_list (NULL_TREE, arg);
8071 arglist = tree_cons (NULL_TREE, arg0, arglist);
8072 return build_function_call_expr (powfn, arglist);
8073 }
8074 }
8075 }
8076 }
8077 goto associate;
8078
8079 case BIT_IOR_EXPR:
8080 bit_ior:
8081 if (integer_all_onesp (arg1))
8082 return omit_one_operand (type, arg1, arg0);
8083 if (integer_zerop (arg1))
8084 return non_lvalue (fold_convert (type, arg0));
8085 if (operand_equal_p (arg0, arg1, 0))
8086 return non_lvalue (fold_convert (type, arg0));
8087
8088 /* ~X | X is -1. */
8089 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8090 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8091 {
8092 t1 = build_int_cst (type, -1);
8093 t1 = force_fit_type (t1, 0, false, false);
8094 return omit_one_operand (type, t1, arg1);
8095 }
8096
8097 /* X | ~X is -1. */
8098 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8099 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8100 {
8101 t1 = build_int_cst (type, -1);
8102 t1 = force_fit_type (t1, 0, false, false);
8103 return omit_one_operand (type, t1, arg0);
8104 }
8105
8106 t1 = distribute_bit_expr (code, type, arg0, arg1);
8107 if (t1 != NULL_TREE)
8108 return t1;
8109
8110 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8111
8112 This results in more efficient code for machines without a NAND
8113 instruction. Combine will canonicalize to the first form
8114 which will allow use of NAND instructions provided by the
8115 backend if they exist. */
8116 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8117 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8118 {
8119 return fold_build1 (BIT_NOT_EXPR, type,
8120 build2 (BIT_AND_EXPR, type,
8121 TREE_OPERAND (arg0, 0),
8122 TREE_OPERAND (arg1, 0)));
8123 }
8124
8125 /* See if this can be simplified into a rotate first. If that
8126 is unsuccessful continue in the association code. */
8127 goto bit_rotate;
8128
8129 case BIT_XOR_EXPR:
8130 if (integer_zerop (arg1))
8131 return non_lvalue (fold_convert (type, arg0));
8132 if (integer_all_onesp (arg1))
8133 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8134 if (operand_equal_p (arg0, arg1, 0))
8135 return omit_one_operand (type, integer_zero_node, arg0);
8136
8137 /* ~X ^ X is -1. */
8138 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8139 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8140 {
8141 t1 = build_int_cst (type, -1);
8142 t1 = force_fit_type (t1, 0, false, false);
8143 return omit_one_operand (type, t1, arg1);
8144 }
8145
8146 /* X ^ ~X is -1. */
8147 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8148 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8149 {
8150 t1 = build_int_cst (type, -1);
8151 t1 = force_fit_type (t1, 0, false, false);
8152 return omit_one_operand (type, t1, arg0);
8153 }
8154
8155 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8156 with a constant, and the two constants have no bits in common,
8157 we should treat this as a BIT_IOR_EXPR since this may produce more
8158 simplifications. */
8159 if (TREE_CODE (arg0) == BIT_AND_EXPR
8160 && TREE_CODE (arg1) == BIT_AND_EXPR
8161 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8162 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8163 && integer_zerop (const_binop (BIT_AND_EXPR,
8164 TREE_OPERAND (arg0, 1),
8165 TREE_OPERAND (arg1, 1), 0)))
8166 {
8167 code = BIT_IOR_EXPR;
8168 goto bit_ior;
8169 }
8170
8171 /* See if this can be simplified into a rotate first. If that
8172 is unsuccessful continue in the association code. */
8173 goto bit_rotate;
8174
8175 case BIT_AND_EXPR:
8176 if (integer_all_onesp (arg1))
8177 return non_lvalue (fold_convert (type, arg0));
8178 if (integer_zerop (arg1))
8179 return omit_one_operand (type, arg1, arg0);
8180 if (operand_equal_p (arg0, arg1, 0))
8181 return non_lvalue (fold_convert (type, arg0));
8182
8183 /* ~X & X is always zero. */
8184 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8185 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8186 return omit_one_operand (type, integer_zero_node, arg1);
8187
8188 /* X & ~X is always zero. */
8189 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8190 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8191 return omit_one_operand (type, integer_zero_node, arg0);
8192
8193 t1 = distribute_bit_expr (code, type, arg0, arg1);
8194 if (t1 != NULL_TREE)
8195 return t1;
8196 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8197 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8198 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8199 {
8200 unsigned int prec
8201 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8202
8203 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8204 && (~TREE_INT_CST_LOW (arg1)
8205 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8206 return fold_convert (type, TREE_OPERAND (arg0, 0));
8207 }
8208
8209 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8210
8211 This results in more efficient code for machines without a NOR
8212 instruction. Combine will canonicalize to the first form
8213 which will allow use of NOR instructions provided by the
8214 backend if they exist. */
8215 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8216 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8217 {
8218 return fold_build1 (BIT_NOT_EXPR, type,
8219 build2 (BIT_IOR_EXPR, type,
8220 TREE_OPERAND (arg0, 0),
8221 TREE_OPERAND (arg1, 0)));
8222 }
8223
8224 goto associate;
8225
8226 case RDIV_EXPR:
8227 /* Don't touch a floating-point divide by zero unless the mode
8228 of the constant can represent infinity. */
8229 if (TREE_CODE (arg1) == REAL_CST
8230 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8231 && real_zerop (arg1))
8232 return NULL_TREE;
8233
8234 /* (-A) / (-B) -> A / B */
8235 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8236 return fold_build2 (RDIV_EXPR, type,
8237 TREE_OPERAND (arg0, 0),
8238 negate_expr (arg1));
8239 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8240 return fold_build2 (RDIV_EXPR, type,
8241 negate_expr (arg0),
8242 TREE_OPERAND (arg1, 0));
8243
8244 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8245 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8246 && real_onep (arg1))
8247 return non_lvalue (fold_convert (type, arg0));
8248
8249 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8250 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8251 && real_minus_onep (arg1))
8252 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8253
8254 /* If ARG1 is a constant, we can convert this to a multiply by the
8255 reciprocal. This does not have the same rounding properties,
8256 so only do this if -funsafe-math-optimizations. We can actually
8257 always safely do it if ARG1 is a power of two, but it's hard to
8258 tell if it is or not in a portable manner. */
8259 if (TREE_CODE (arg1) == REAL_CST)
8260 {
8261 if (flag_unsafe_math_optimizations
8262 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8263 arg1, 0)))
8264 return fold_build2 (MULT_EXPR, type, arg0, tem);
8265 /* Find the reciprocal if optimizing and the result is exact. */
8266 if (optimize)
8267 {
8268 REAL_VALUE_TYPE r;
8269 r = TREE_REAL_CST (arg1);
8270 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8271 {
8272 tem = build_real (type, r);
8273 return fold_build2 (MULT_EXPR, type, arg0, tem);
8274 }
8275 }
8276 }
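/* Examples of the conversions above: with
   -funsafe-math-optimizations, x / 3.0 becomes x * (1.0 / 3.0)
   even though that can round differently; x / 4.0 becomes x * 0.25
   under plain optimization, because 4.0 has an exact reciprocal and
   the product rounds identically to the quotient.  */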
8277 /* Convert A/B/C to A/(B*C). */
8278 if (flag_unsafe_math_optimizations
8279 && TREE_CODE (arg0) == RDIV_EXPR)
8280 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8281 fold_build2 (MULT_EXPR, type,
8282 TREE_OPERAND (arg0, 1), arg1));
8283
8284 /* Convert A/(B/C) to (A/B)*C. */
8285 if (flag_unsafe_math_optimizations
8286 && TREE_CODE (arg1) == RDIV_EXPR)
8287 return fold_build2 (MULT_EXPR, type,
8288 fold_build2 (RDIV_EXPR, type, arg0,
8289 TREE_OPERAND (arg1, 0)),
8290 TREE_OPERAND (arg1, 1));
8291
8292 /* Convert C1/(X*C2) into (C1/C2)/X. */
8293 if (flag_unsafe_math_optimizations
8294 && TREE_CODE (arg1) == MULT_EXPR
8295 && TREE_CODE (arg0) == REAL_CST
8296 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8297 {
8298 tree tem = const_binop (RDIV_EXPR, arg0,
8299 TREE_OPERAND (arg1, 1), 0);
8300 if (tem)
8301 return fold_build2 (RDIV_EXPR, type, tem,
8302 TREE_OPERAND (arg1, 0));
8303 }
8304
8305 if (TREE_CODE (type) == COMPLEX_TYPE)
8306 {
8307 tem = fold_complex_div (type, arg0, arg1, code);
8308 if (tem)
8309 return tem;
8310 }
8311
8312 if (flag_unsafe_math_optimizations)
8313 {
8314 enum built_in_function fcode = builtin_mathfn_code (arg1);
8315 /* Optimize x/expN(y) into x*expN(-y). */
8316 if (BUILTIN_EXPONENT_P (fcode))
8317 {
8318 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8319 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8320 tree arglist = build_tree_list (NULL_TREE,
8321 fold_convert (type, arg));
8322 arg1 = build_function_call_expr (expfn, arglist);
8323 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8324 }
8325
8326 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8327 if (fcode == BUILT_IN_POW
8328 || fcode == BUILT_IN_POWF
8329 || fcode == BUILT_IN_POWL)
8330 {
8331 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8332 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8333 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8334 tree neg11 = fold_convert (type, negate_expr (arg11));
8335 tree arglist = tree_cons(NULL_TREE, arg10,
8336 build_tree_list (NULL_TREE, neg11));
8337 arg1 = build_function_call_expr (powfn, arglist);
8338 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8339 }
8340 }
8341
8342 if (flag_unsafe_math_optimizations)
8343 {
8344 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8345 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8346
8347 /* Optimize sin(x)/cos(x) as tan(x). */
8348 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8349 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8350 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8351 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8352 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8353 {
8354 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8355
8356 if (tanfn != NULL_TREE)
8357 return build_function_call_expr (tanfn,
8358 TREE_OPERAND (arg0, 1));
8359 }
8360
8361 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8362 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8363 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8364 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8365 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8366 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8367 {
8368 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8369
8370 if (tanfn != NULL_TREE)
8371 {
8372 tree tmp = TREE_OPERAND (arg0, 1);
8373 tmp = build_function_call_expr (tanfn, tmp);
8374 return fold_build2 (RDIV_EXPR, type,
8375 build_real (type, dconst1), tmp);
8376 }
8377 }
8378
8379 /* Optimize pow(x,c)/x as pow(x,c-1). */
8380 if (fcode0 == BUILT_IN_POW
8381 || fcode0 == BUILT_IN_POWF
8382 || fcode0 == BUILT_IN_POWL)
8383 {
8384 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8385 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8386 if (TREE_CODE (arg01) == REAL_CST
8387 && ! TREE_CONSTANT_OVERFLOW (arg01)
8388 && operand_equal_p (arg1, arg00, 0))
8389 {
8390 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8391 REAL_VALUE_TYPE c;
8392 tree arg, arglist;
8393
8394 c = TREE_REAL_CST (arg01);
8395 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8396 arg = build_real (type, c);
8397 arglist = build_tree_list (NULL_TREE, arg);
8398 arglist = tree_cons (NULL_TREE, arg1, arglist);
8399 return build_function_call_expr (powfn, arglist);
8400 }
8401 }
8402 }
8403 goto binary;
8404
8405 case TRUNC_DIV_EXPR:
8406 case ROUND_DIV_EXPR:
8407 case FLOOR_DIV_EXPR:
8408 case CEIL_DIV_EXPR:
8409 case EXACT_DIV_EXPR:
8410 if (integer_onep (arg1))
8411 return non_lvalue (fold_convert (type, arg0));
8412 if (integer_zerop (arg1))
8413 return NULL_TREE;
8414 /* X / -1 is -X. */
8415 if (!TYPE_UNSIGNED (type)
8416 && TREE_CODE (arg1) == INTEGER_CST
8417 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8418 && TREE_INT_CST_HIGH (arg1) == -1)
8419 return fold_convert (type, negate_expr (arg0));
8420
8421 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8422 operation, EXACT_DIV_EXPR.
8423
8424 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8425 At one time the others generated faster code; it's not clear if they
8426 still do after the last round of changes to the DIV code in expmed.c. */
8427 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8428 && multiple_of_p (type, arg0, arg1))
8429 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
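/* Example of the rewrite above: in (n * 8) / 8 with FLOOR_DIV_EXPR
   or CEIL_DIV_EXPR, multiple_of_p proves the dividend is a multiple
   of 8, so the division becomes an EXACT_DIV_EXPR, which expmed.c
   can expand more cheaply since an exact division has no remainder
   to round.  */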
8430
8431 if (TREE_CODE (arg1) == INTEGER_CST
8432 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8433 return fold_convert (type, tem);
8434
8435 if (TREE_CODE (type) == COMPLEX_TYPE)
8436 {
8437 tem = fold_complex_div (type, arg0, arg1, code);
8438 if (tem)
8439 return tem;
8440 }
8441 goto binary;
8442
8443 case CEIL_MOD_EXPR:
8444 case FLOOR_MOD_EXPR:
8445 case ROUND_MOD_EXPR:
8446 case TRUNC_MOD_EXPR:
8447 /* X % 1 is always zero, but be sure to preserve any side
8448 effects in X. */
8449 if (integer_onep (arg1))
8450 return omit_one_operand (type, integer_zero_node, arg0);
8451
8452 /* For X % 0, return X % 0 unchanged so that we can get the
8453 proper warnings and errors. */
8454 if (integer_zerop (arg1))
8455 return NULL_TREE;
8456
8457 /* 0 % X is always zero, but be sure to preserve any side
8458 effects in X. Place this after checking for X == 0. */
8459 if (integer_zerop (arg0))
8460 return omit_one_operand (type, integer_zero_node, arg1);
8461
8462 /* X % -1 is zero. */
8463 if (!TYPE_UNSIGNED (type)
8464 && TREE_CODE (arg1) == INTEGER_CST
8465 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8466 && TREE_INT_CST_HIGH (arg1) == -1)
8467 return omit_one_operand (type, integer_zero_node, arg0);
8468
8469 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8470 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8471 if (code == TRUNC_MOD_EXPR
8472 && TYPE_UNSIGNED (type)
8473 && integer_pow2p (arg1))
8474 {
8475 unsigned HOST_WIDE_INT high, low;
8476 tree mask;
8477 int l;
8478
8479 l = tree_log2 (arg1);
8480 if (l >= HOST_BITS_PER_WIDE_INT)
8481 {
8482 high = ((unsigned HOST_WIDE_INT) 1
8483 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8484 low = -1;
8485 }
8486 else
8487 {
8488 high = 0;
8489 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8490 }
8491
8492 mask = build_int_cst_wide (type, low, high);
8493 return fold_build2 (BIT_AND_EXPR, type,
8494 fold_convert (type, arg0), mask);
8495 }
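/* Example of the mask conversion above, assuming 32-bit unsigned
   int x: for x % 8, l == 3, so low == 7, high == 0, and the result
   is x & 7.  The high word of the mask only comes into play for
   precisions wider than HOST_BITS_PER_WIDE_INT.  */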
8496
8497 /* X % -C is the same as X % C. */
8498 if (code == TRUNC_MOD_EXPR
8499 && !TYPE_UNSIGNED (type)
8500 && TREE_CODE (arg1) == INTEGER_CST
8501 && TREE_INT_CST_HIGH (arg1) < 0
8502 && !flag_trapv
8503 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8504 && !sign_bit_p (arg1, arg1))
8505 return fold_build2 (code, type, fold_convert (type, arg0),
8506 fold_convert (type, negate_expr (arg1)));
8507
8508 /* X % -Y is the same as X % Y. */
8509 if (code == TRUNC_MOD_EXPR
8510 && !TYPE_UNSIGNED (type)
8511 && TREE_CODE (arg1) == NEGATE_EXPR
8512 && !flag_trapv)
8513 return fold_build2 (code, type, fold_convert (type, arg0),
8514 fold_convert (type, TREE_OPERAND (arg1, 0)));
8515
8516 if (TREE_CODE (arg1) == INTEGER_CST
8517 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8518 return fold_convert (type, tem);
8519
8520 goto binary;
8521
8522 case LROTATE_EXPR:
8523 case RROTATE_EXPR:
8524 if (integer_all_onesp (arg0))
8525 return omit_one_operand (type, arg0, arg1);
8526 goto shift;
8527
8528 case RSHIFT_EXPR:
8529 /* Optimize -1 >> x for arithmetic right shifts. */
8530 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8531 return omit_one_operand (type, arg0, arg1);
8532 /* ... fall through ... */
8533
8534 case LSHIFT_EXPR:
8535 shift:
8536 if (integer_zerop (arg1))
8537 return non_lvalue (fold_convert (type, arg0));
8538 if (integer_zerop (arg0))
8539 return omit_one_operand (type, arg0, arg1);
8540
8541 /* Since a negative shift count is not well-defined,
8542 don't try to compute it in the compiler. */
8543 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8544 return NULL_TREE;
8545 /* Rewrite an LROTATE_EXPR by a constant into an
8546 RROTATE_EXPR by a new constant. */
8547 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8548 {
8549 tree tem = build_int_cst (NULL_TREE,
8550 GET_MODE_BITSIZE (TYPE_MODE (type)));
8551 tem = fold_convert (TREE_TYPE (arg1), tem);
8552 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8553 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8554 }
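/* Example of the rewrite above, assuming a 32-bit mode: a left
   rotate by 5 becomes a right rotate by 32 - 5 == 27, so constant
   rotates are canonicalized on RROTATE_EXPR.  */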
8555
8556 /* If we have a rotate of a bit operation with the rotate count and
8557 the second operand of the bit operation both constant,
8558 permute the two operations. */
8559 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8560 && (TREE_CODE (arg0) == BIT_AND_EXPR
8561 || TREE_CODE (arg0) == BIT_IOR_EXPR
8562 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8563 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8564 return fold_build2 (TREE_CODE (arg0), type,
8565 fold_build2 (code, type,
8566 TREE_OPERAND (arg0, 0), arg1),
8567 fold_build2 (code, type,
8568 TREE_OPERAND (arg0, 1), arg1));
8569
8570 /* Two consecutive rotates adding up to the width of the mode can
8571 be ignored. */
8572 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8573 && TREE_CODE (arg0) == RROTATE_EXPR
8574 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8575 && TREE_INT_CST_HIGH (arg1) == 0
8576 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8577 && ((TREE_INT_CST_LOW (arg1)
8578 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8579 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8580 return TREE_OPERAND (arg0, 0);
8581
8582 goto binary;
8583
8584 case MIN_EXPR:
8585 if (operand_equal_p (arg0, arg1, 0))
8586 return omit_one_operand (type, arg0, arg1);
8587 if (INTEGRAL_TYPE_P (type)
8588 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8589 return omit_one_operand (type, arg1, arg0);
8590 goto associate;
8591
8592 case MAX_EXPR:
8593 if (operand_equal_p (arg0, arg1, 0))
8594 return omit_one_operand (type, arg0, arg1);
8595 if (INTEGRAL_TYPE_P (type)
8596 && TYPE_MAX_VALUE (type)
8597 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8598 return omit_one_operand (type, arg1, arg0);
8599 goto associate;
8600
8601 case TRUTH_ANDIF_EXPR:
8602 /* Note that the operands of this must be ints
8603 and their values must be 0 or 1.
8604 ("true" is a fixed value perhaps depending on the language.) */
8605 /* If first arg is constant zero, return it. */
8606 if (integer_zerop (arg0))
8607 return fold_convert (type, arg0);
8608 case TRUTH_AND_EXPR:
8609 /* If either arg is constant true, drop it. */
8610 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8611 return non_lvalue (fold_convert (type, arg1));
8612 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8613 /* Preserve sequence points. */
8614 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8615 return non_lvalue (fold_convert (type, arg0));
8616 /* If second arg is constant zero, result is zero, but first arg
8617 must be evaluated. */
8618 if (integer_zerop (arg1))
8619 return omit_one_operand (type, arg1, arg0);
8620 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8621 case will be handled here. */
8622 if (integer_zerop (arg0))
8623 return omit_one_operand (type, arg0, arg1);
8624
8625 /* !X && X is always false. */
8626 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8627 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8628 return omit_one_operand (type, integer_zero_node, arg1);
8629 /* X && !X is always false. */
8630 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8631 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8632 return omit_one_operand (type, integer_zero_node, arg0);
8633
8634 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8635 means A >= Y && A != MAX, but in this case we know that
8636 A < X <= MAX. */
8637
8638 if (!TREE_SIDE_EFFECTS (arg0)
8639 && !TREE_SIDE_EFFECTS (arg1))
8640 {
8641 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8642 if (tem)
8643 return fold_build2 (code, type, tem, arg1);
8644
8645 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8646 if (tem)
8647 return fold_build2 (code, type, arg0, tem);
8648 }
8649
8650 truth_andor:
8651 /* We only do these simplifications if we are optimizing. */
8652 if (!optimize)
8653 return NULL_TREE;
8654
8655 /* Check for things like (A || B) && (A || C). We can convert this
8656 to A || (B && C). Note that either operator can be any of the four
8657 truth and/or operations and the transformation will still be
8658 valid. Also note that we only care about order for the
8659 ANDIF and ORIF operators. If B contains side effects, this
8660 might change the truth-value of A. */
8661 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8662 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8663 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8664 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8665 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8666 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8667 {
8668 tree a00 = TREE_OPERAND (arg0, 0);
8669 tree a01 = TREE_OPERAND (arg0, 1);
8670 tree a10 = TREE_OPERAND (arg1, 0);
8671 tree a11 = TREE_OPERAND (arg1, 1);
8672 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8673 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8674 && (code == TRUTH_AND_EXPR
8675 || code == TRUTH_OR_EXPR));
8676
8677 if (operand_equal_p (a00, a10, 0))
8678 return fold_build2 (TREE_CODE (arg0), type, a00,
8679 fold_build2 (code, type, a01, a11));
8680 else if (commutative && operand_equal_p (a00, a11, 0))
8681 return fold_build2 (TREE_CODE (arg0), type, a00,
8682 fold_build2 (code, type, a01, a10));
8683 else if (commutative && operand_equal_p (a01, a10, 0))
8684 return fold_build2 (TREE_CODE (arg0), type, a01,
8685 fold_build2 (code, type, a00, a11));
8686
8687 /* This case is tricky because we must either have commutative
8688 operators or else A10 must not have side-effects. */
8689
8690 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8691 && operand_equal_p (a01, a11, 0))
8692 return fold_build2 (TREE_CODE (arg0), type,
8693 fold_build2 (code, type, a00, a10),
8694 a01);
8695 }
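/* For example, (a || b) && (a || c) folds to a || (b && c); with the
   non-short-circuit TRUTH_AND/TRUTH_OR codes the shared operand may
   also be matched in the other position.  */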
8696
8697 /* See if we can build a range comparison. */
8698 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8699 return tem;
8700
8701 /* Check for the possibility of merging component references. If our
8702 lhs is another similar operation, try to merge its rhs with our
8703 rhs. Then try to merge our lhs and rhs. */
8704 if (TREE_CODE (arg0) == code
8705 && 0 != (tem = fold_truthop (code, type,
8706 TREE_OPERAND (arg0, 1), arg1)))
8707 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8708
8709 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8710 return tem;
8711
8712 return NULL_TREE;
8713
8714 case TRUTH_ORIF_EXPR:
8715 /* Note that the operands of this must be ints
8716 and their values must be 0 or 1.
8717 ("true" is a fixed value, which may depend on the language.) */
8718 /* If first arg is constant true, return it. */
8719 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8720 return fold_convert (type, arg0);
8721 case TRUTH_OR_EXPR:
8722 /* If either arg is constant zero, drop it. */
8723 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8724 return non_lvalue (fold_convert (type, arg1));
8725 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8726 /* Preserve sequence points. */
8727 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8728 return non_lvalue (fold_convert (type, arg0));
8729 /* If second arg is constant true, result is true, but we must
8730 evaluate first arg. */
8731 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8732 return omit_one_operand (type, arg1, arg0);
8733 /* Likewise for first arg, but note this only occurs here for
8734 TRUTH_OR_EXPR. */
8735 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8736 return omit_one_operand (type, arg0, arg1);
8737
8738 /* !X || X is always true. */
8739 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8740 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8741 return omit_one_operand (type, integer_one_node, arg1);
8742 /* X || !X is always true. */
8743 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8744 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8745 return omit_one_operand (type, integer_one_node, arg0);
8746
8747 goto truth_andor;
8748
8749 case TRUTH_XOR_EXPR:
8750 /* If the second arg is constant zero, drop it. */
8751 if (integer_zerop (arg1))
8752 return non_lvalue (fold_convert (type, arg0));
8753 /* If the second arg is constant true, this is a logical inversion. */
8754 if (integer_onep (arg1))
8755 {
8756 /* Only call invert_truthvalue if operand is a truth value. */
8757 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8758 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8759 else
8760 tem = invert_truthvalue (arg0);
8761 return non_lvalue (fold_convert (type, tem));
8762 }
8763 /* Identical arguments cancel to zero. */
8764 if (operand_equal_p (arg0, arg1, 0))
8765 return omit_one_operand (type, integer_zero_node, arg0);
8766
8767 /* !X ^ X is always true. */
8768 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8769 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8770 return omit_one_operand (type, integer_one_node, arg1);
8771
8772 /* X ^ !X is always true. */
8773 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8774 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8775 return omit_one_operand (type, integer_one_node, arg0);
8776
8777 return NULL_TREE;
8778
8779 case EQ_EXPR:
8780 case NE_EXPR:
8781 case LT_EXPR:
8782 case GT_EXPR:
8783 case LE_EXPR:
8784 case GE_EXPR:
8785 /* If one arg is a real or integer constant, put it last. */
8786 if (tree_swap_operands_p (arg0, arg1, true))
8787 return fold_build2 (swap_tree_comparison (code), type, arg1, arg0);
8788
8789 /* If this is an equality comparison of the address of a non-weak
8790 object against zero, then we know the result. */
8791 if ((code == EQ_EXPR || code == NE_EXPR)
8792 && TREE_CODE (arg0) == ADDR_EXPR
8793 && DECL_P (TREE_OPERAND (arg0, 0))
8794 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8795 && integer_zerop (arg1))
8796 return constant_boolean_node (code != EQ_EXPR, type);
8797
8798 /* If this is an equality comparison of the address of two non-weak,
8799 unaliased symbols neither of which are extern (since we do not
8800 have access to attributes for externs), then we know the result. */
8801 if ((code == EQ_EXPR || code == NE_EXPR)
8802 && TREE_CODE (arg0) == ADDR_EXPR
8803 && DECL_P (TREE_OPERAND (arg0, 0))
8804 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8805 && ! lookup_attribute ("alias",
8806 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8807 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8808 && TREE_CODE (arg1) == ADDR_EXPR
8809 && DECL_P (TREE_OPERAND (arg1, 0))
8810 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8811 && ! lookup_attribute ("alias",
8812 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8813 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8814 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8815 ? code == EQ_EXPR : code != EQ_EXPR,
8816 type);
8817
8818 /* If this is a comparison of two exprs that look like an
8819 ARRAY_REF of the same object, then we can fold this to a
8820 comparison of the two offsets. */
8821 if (TREE_CODE_CLASS (code) == tcc_comparison)
8822 {
8823 tree base0, offset0, base1, offset1;
8824
8825 if (extract_array_ref (arg0, &base0, &offset0)
8826 && extract_array_ref (arg1, &base1, &offset1)
8827 && operand_equal_p (base0, base1, 0))
8828 {
8829 if (offset0 == NULL_TREE
8830 && offset1 == NULL_TREE)
8831 {
8832 offset0 = integer_zero_node;
8833 offset1 = integer_zero_node;
8834 }
8835 else if (offset0 == NULL_TREE)
8836 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8837 else if (offset1 == NULL_TREE)
8838 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8839
8840 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8841 return fold_build2 (code, type, offset0, offset1);
8842 }
8843 }
8844
8845 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8846 {
8847 tree targ0 = strip_float_extensions (arg0);
8848 tree targ1 = strip_float_extensions (arg1);
8849 tree newtype = TREE_TYPE (targ0);
8850
8851 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8852 newtype = TREE_TYPE (targ1);
8853
8854 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8855 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8856 return fold_build2 (code, type, fold_convert (newtype, targ0),
8857 fold_convert (newtype, targ1));
8858
8859 /* (-a) CMP (-b) -> b CMP a */
8860 if (TREE_CODE (arg0) == NEGATE_EXPR
8861 && TREE_CODE (arg1) == NEGATE_EXPR)
8862 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8863 TREE_OPERAND (arg0, 0));
8864
8865 if (TREE_CODE (arg1) == REAL_CST)
8866 {
8867 REAL_VALUE_TYPE cst;
8868 cst = TREE_REAL_CST (arg1);
8869
8870 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8871 if (TREE_CODE (arg0) == NEGATE_EXPR)
8872 return
8873 fold_build2 (swap_tree_comparison (code), type,
8874 TREE_OPERAND (arg0, 0),
8875 build_real (TREE_TYPE (arg1),
8876 REAL_VALUE_NEGATE (cst)));
8877
8878 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8879 /* a CMP (-0) -> a CMP 0 */
8880 if (REAL_VALUE_MINUS_ZERO (cst))
8881 return fold_build2 (code, type, arg0,
8882 build_real (TREE_TYPE (arg1), dconst0));
8883
8884 /* x != NaN is always true, other ops are always false. */
8885 if (REAL_VALUE_ISNAN (cst)
8886 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8887 {
8888 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8889 return omit_one_operand (type, tem, arg0);
8890 }
8891
8892 /* Fold comparisons against infinity. */
8893 if (REAL_VALUE_ISINF (cst))
8894 {
8895 tem = fold_inf_compare (code, type, arg0, arg1);
8896 if (tem != NULL_TREE)
8897 return tem;
8898 }
8899 }
8900
8901 /* If this is a comparison of a real constant with a PLUS_EXPR
8902 or a MINUS_EXPR of a real constant, we can convert it into a
8903 comparison with a revised real constant as long as no overflow
8904 occurs when unsafe_math_optimizations are enabled. */
8905 if (flag_unsafe_math_optimizations
8906 && TREE_CODE (arg1) == REAL_CST
8907 && (TREE_CODE (arg0) == PLUS_EXPR
8908 || TREE_CODE (arg0) == MINUS_EXPR)
8909 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8910 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8911 ? MINUS_EXPR : PLUS_EXPR,
8912 arg1, TREE_OPERAND (arg0, 1), 0))
8913 && ! TREE_CONSTANT_OVERFLOW (tem))
8914 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8915
8916 /* Likewise, we can simplify a comparison of a real constant with
8917 a MINUS_EXPR whose first operand is also a real constant, i.e.
8918 (c1 - x) < c2 becomes x > c1-c2. */
8919 if (flag_unsafe_math_optimizations
8920 && TREE_CODE (arg1) == REAL_CST
8921 && TREE_CODE (arg0) == MINUS_EXPR
8922 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8923 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8924 arg1, 0))
8925 && ! TREE_CONSTANT_OVERFLOW (tem))
8926 return fold_build2 (swap_tree_comparison (code), type,
8927 TREE_OPERAND (arg0, 1), tem);
8928
8929 /* Fold comparisons against built-in math functions. */
8930 if (TREE_CODE (arg1) == REAL_CST
8931 && flag_unsafe_math_optimizations
8932 && ! flag_errno_math)
8933 {
8934 enum built_in_function fcode = builtin_mathfn_code (arg0);
8935
8936 if (fcode != END_BUILTINS)
8937 {
8938 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8939 if (tem != NULL_TREE)
8940 return tem;
8941 }
8942 }
8943 }
8944
8945 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8946 if (TREE_CONSTANT (arg1)
8947 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8948 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8949 /* This optimization is invalid for ordered comparisons
8950 if CONST+INCR overflows or if foo+incr might overflow.
8951 This optimization is invalid for floating point due to rounding.
8952 For pointer types we assume overflow doesn't happen. */
8953 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8954 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8955 && (code == EQ_EXPR || code == NE_EXPR))))
8956 {
8957 tree varop, newconst;
8958
8959 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8960 {
8961 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8962 arg1, TREE_OPERAND (arg0, 1));
8963 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8964 TREE_OPERAND (arg0, 0),
8965 TREE_OPERAND (arg0, 1));
8966 }
8967 else
8968 {
8969 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8970 arg1, TREE_OPERAND (arg0, 1));
8971 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8972 TREE_OPERAND (arg0, 0),
8973 TREE_OPERAND (arg0, 1));
8974 }
8975
8976
8977 /* If VAROP is a reference to a bitfield, we must mask
8978 the constant by the width of the field. */
8979 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8980 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8981 && host_integerp (DECL_SIZE (TREE_OPERAND
8982 (TREE_OPERAND (varop, 0), 1)), 1))
8983 {
8984 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8985 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8986 tree folded_compare, shift;
8987
8988 /* First check whether the comparison would come out
8989 always the same. If we don't do that we would
8990 change the meaning with the masking. */
8991 folded_compare = fold_build2 (code, type,
8992 TREE_OPERAND (varop, 0), arg1);
8993 if (integer_zerop (folded_compare)
8994 || integer_onep (folded_compare))
8995 return omit_one_operand (type, folded_compare, varop);
8996
8997 shift = build_int_cst (NULL_TREE,
8998 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8999 shift = fold_convert (TREE_TYPE (varop), shift);
9000 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9001 newconst, shift);
9002 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9003 newconst, shift);
9004 }
9005
9006 return fold_build2 (code, type, varop, newconst);
9007 }
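/* For example, i++ == 5 is rewritten as ++i == 6: moving the increment
   before the comparison requires adjusting the constant by the same
   amount.  */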
9008
9009 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9010 This transformation affects the cases which are handled in later
9011 optimizations involving comparisons with non-negative constants. */
9012 if (TREE_CODE (arg1) == INTEGER_CST
9013 && TREE_CODE (arg0) != INTEGER_CST
9014 && tree_int_cst_sgn (arg1) > 0)
9015 {
9016 switch (code)
9017 {
9018 case GE_EXPR:
9019 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9020 return fold_build2 (GT_EXPR, type, arg0, arg1);
9021
9022 case LT_EXPR:
9023 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9024 return fold_build2 (LE_EXPR, type, arg0, arg1);
9025
9026 default:
9027 break;
9028 }
9029 }
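/* For example, x >= 5 becomes x > 4 and x < 5 becomes x <= 4, which is
   safe because C > 0 guarantees that C - 1 does not wrap around.  */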
9030
9031 /* Comparisons with the highest or lowest possible integer of
9032 the specified size will have known values.
9033
9034 This is quite similar to fold_relational_hi_lo; however,
9035 attempts to share the code have been nothing but trouble. */
9036 {
9037 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9038
9039 if (TREE_CODE (arg1) == INTEGER_CST
9040 && ! TREE_CONSTANT_OVERFLOW (arg1)
9041 && width <= 2 * HOST_BITS_PER_WIDE_INT
9042 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9043 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9044 {
9045 HOST_WIDE_INT signed_max_hi;
9046 unsigned HOST_WIDE_INT signed_max_lo;
9047 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9048
9049 if (width <= HOST_BITS_PER_WIDE_INT)
9050 {
9051 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9052 - 1;
9053 signed_max_hi = 0;
9054 max_hi = 0;
9055
9056 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9057 {
9058 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9059 min_lo = 0;
9060 min_hi = 0;
9061 }
9062 else
9063 {
9064 max_lo = signed_max_lo;
9065 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9066 min_hi = -1;
9067 }
9068 }
9069 else
9070 {
9071 width -= HOST_BITS_PER_WIDE_INT;
9072 signed_max_lo = -1;
9073 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9074 - 1;
9075 max_lo = -1;
9076 min_lo = 0;
9077
9078 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9079 {
9080 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9081 min_hi = 0;
9082 }
9083 else
9084 {
9085 max_hi = signed_max_hi;
9086 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9087 }
9088 }
9089
9090 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9091 && TREE_INT_CST_LOW (arg1) == max_lo)
9092 switch (code)
9093 {
9094 case GT_EXPR:
9095 return omit_one_operand (type, integer_zero_node, arg0);
9096
9097 case GE_EXPR:
9098 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9099
9100 case LE_EXPR:
9101 return omit_one_operand (type, integer_one_node, arg0);
9102
9103 case LT_EXPR:
9104 return fold_build2 (NE_EXPR, type, arg0, arg1);
9105
9106 /* The GE_EXPR and LT_EXPR cases above are not normally
9107 reached because of previous transformations. */
9108
9109 default:
9110 break;
9111 }
9112 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9113 == max_hi
9114 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9115 switch (code)
9116 {
9117 case GT_EXPR:
9118 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9119 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9120 case LE_EXPR:
9121 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9122 return fold_build2 (NE_EXPR, type, arg0, arg1);
9123 default:
9124 break;
9125 }
9126 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9127 == min_hi
9128 && TREE_INT_CST_LOW (arg1) == min_lo)
9129 switch (code)
9130 {
9131 case LT_EXPR:
9132 return omit_one_operand (type, integer_zero_node, arg0);
9133
9134 case LE_EXPR:
9135 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9136
9137 case GE_EXPR:
9138 return omit_one_operand (type, integer_one_node, arg0);
9139
9140 case GT_EXPR:
9141 return fold_build2 (NE_EXPR, type, arg0, arg1);
9142
9143 default:
9144 break;
9145 }
9146 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9147 == min_hi
9148 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9149 switch (code)
9150 {
9151 case GE_EXPR:
9152 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9153 return fold_build2 (NE_EXPR, type, arg0, arg1);
9154 case LT_EXPR:
9155 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9156 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9157 default:
9158 break;
9159 }
9160
9161 else if (!in_gimple_form
9162 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9163 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9164 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9165 /* signed_type does not work on pointer types. */
9166 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9167 {
9168 /* The following case also applies to X < signed_max+1
9169 and X >= signed_max+1 because of previous transformations. */
9170 if (code == LE_EXPR || code == GT_EXPR)
9171 {
9172 tree st0, st1;
9173 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9174 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9175 return fold
9176 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9177 type, fold_convert (st0, arg0),
9178 fold_convert (st1, integer_zero_node)));
9179 }
9180 }
9181 }
9182 }
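/* For example, for an 8-bit unsigned operand, x > 255 folds to 0,
   x <= 255 folds to 1, and x >= 255 folds to x == 255.  */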
9183
9184 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9185 a MINUS_EXPR of a constant, we can convert it into a comparison with
9186 a revised constant as long as no overflow occurs. */
9187 if ((code == EQ_EXPR || code == NE_EXPR)
9188 && TREE_CODE (arg1) == INTEGER_CST
9189 && (TREE_CODE (arg0) == PLUS_EXPR
9190 || TREE_CODE (arg0) == MINUS_EXPR)
9191 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9192 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9193 ? MINUS_EXPR : PLUS_EXPR,
9194 arg1, TREE_OPERAND (arg0, 1), 0))
9195 && ! TREE_CONSTANT_OVERFLOW (tem))
9196 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9197
9198 /* Similarly for a NEGATE_EXPR. */
9199 else if ((code == EQ_EXPR || code == NE_EXPR)
9200 && TREE_CODE (arg0) == NEGATE_EXPR
9201 && TREE_CODE (arg1) == INTEGER_CST
9202 && 0 != (tem = negate_expr (arg1))
9203 && TREE_CODE (tem) == INTEGER_CST
9204 && ! TREE_CONSTANT_OVERFLOW (tem))
9205 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9206
9207 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9208 for !=. Don't do this for ordered comparisons due to overflow. */
9209 else if ((code == NE_EXPR || code == EQ_EXPR)
9210 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9211 return fold_build2 (code, type,
9212 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9213
9214 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9215 && (TREE_CODE (arg0) == NOP_EXPR
9216 || TREE_CODE (arg0) == CONVERT_EXPR))
9217 {
9218 /* If we are widening one operand of an integer comparison,
9219 see if the other operand is similarly being widened. Perhaps we
9220 can do the comparison in the narrower type. */
9221 tem = fold_widened_comparison (code, type, arg0, arg1);
9222 if (tem)
9223 return tem;
9224
9225 /* Or if we are changing signedness. */
9226 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9227 if (tem)
9228 return tem;
9229 }
9230
9231 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9232 constant, we can simplify it. */
9233 else if (TREE_CODE (arg1) == INTEGER_CST
9234 && (TREE_CODE (arg0) == MIN_EXPR
9235 || TREE_CODE (arg0) == MAX_EXPR)
9236 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9237 {
9238 tem = optimize_minmax_comparison (code, type, op0, op1);
9239 if (tem)
9240 return tem;
9241
9242 return NULL_TREE;
9243 }
9244
9245 /* If we are comparing an ABS_EXPR with a constant, we can
9246 convert all the cases into explicit comparisons, but they may
9247 well not be faster than doing the ABS and one comparison.
9248 But ABS (X) <= C is a range comparison, which becomes a subtraction
9249 and a comparison, and is probably faster. */
9250 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9251 && TREE_CODE (arg0) == ABS_EXPR
9252 && ! TREE_SIDE_EFFECTS (arg0)
9253 && (0 != (tem = negate_expr (arg1)))
9254 && TREE_CODE (tem) == INTEGER_CST
9255 && ! TREE_CONSTANT_OVERFLOW (tem))
9256 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9257 build2 (GE_EXPR, type,
9258 TREE_OPERAND (arg0, 0), tem),
9259 build2 (LE_EXPR, type,
9260 TREE_OPERAND (arg0, 0), arg1));
9261
9262 /* Convert ABS_EXPR<x> >= 0 to true. */
9263 else if (code == GE_EXPR
9264 && tree_expr_nonnegative_p (arg0)
9265 && (integer_zerop (arg1)
9266 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9267 && real_zerop (arg1))))
9268 return omit_one_operand (type, integer_one_node, arg0);
9269
9270 /* Convert ABS_EXPR<x> < 0 to false. */
9271 else if (code == LT_EXPR
9272 && tree_expr_nonnegative_p (arg0)
9273 && (integer_zerop (arg1) || real_zerop (arg1)))
9274 return omit_one_operand (type, integer_zero_node, arg0);
9275
9276 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9277 else if ((code == EQ_EXPR || code == NE_EXPR)
9278 && TREE_CODE (arg0) == ABS_EXPR
9279 && (integer_zerop (arg1) || real_zerop (arg1)))
9280 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9281
9282 /* If this is an EQ or NE comparison with zero and ARG0 is
9283 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9284 two operations, but the latter can be done in one less insn
9285 on machines that have only two-operand insns or on which a
9286 constant cannot be the first operand. */
9287 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9288 && TREE_CODE (arg0) == BIT_AND_EXPR)
9289 {
9290 tree arg00 = TREE_OPERAND (arg0, 0);
9291 tree arg01 = TREE_OPERAND (arg0, 1);
9292 if (TREE_CODE (arg00) == LSHIFT_EXPR
9293 && integer_onep (TREE_OPERAND (arg00, 0)))
9294 return
9295 fold_build2 (code, type,
9296 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9297 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9298 arg01, TREE_OPERAND (arg00, 1)),
9299 fold_convert (TREE_TYPE (arg0),
9300 integer_one_node)),
9301 arg1);
9302 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9303 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9304 return
9305 fold_build2 (code, type,
9306 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9307 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9308 arg00, TREE_OPERAND (arg01, 1)),
9309 fold_convert (TREE_TYPE (arg0),
9310 integer_one_node)),
9311 arg1);
9312 }
9313
9314 /* If this is an NE or EQ comparison of zero against the result of a
9315 signed MOD operation whose second operand is a power of 2, make
9316 the MOD operation unsigned since it is simpler and equivalent. */
9317 if ((code == NE_EXPR || code == EQ_EXPR)
9318 && integer_zerop (arg1)
9319 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9320 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9321 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9322 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9323 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9324 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9325 {
9326 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9327 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9328 fold_convert (newtype,
9329 TREE_OPERAND (arg0, 0)),
9330 fold_convert (newtype,
9331 TREE_OPERAND (arg0, 1)));
9332
9333 return fold_build2 (code, type, newmod,
9334 fold_convert (newtype, arg1));
9335 }
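/* For example, with signed X, x % 8 == 0 is rewritten as
   (unsigned) x % 8 == 0, since the unsigned remainder by a power of
   two reduces to a simple mask test.  */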
9336
9337 /* If this is an NE comparison of zero with an AND of one, remove the
9338 comparison since the AND will give the correct value. */
9339 if (code == NE_EXPR && integer_zerop (arg1)
9340 && TREE_CODE (arg0) == BIT_AND_EXPR
9341 && integer_onep (TREE_OPERAND (arg0, 1)))
9342 return fold_convert (type, arg0);
9343
9344 /* If we have (A & C) == C where C is a power of 2, convert this into
9345 (A & C) != 0. Similarly for NE_EXPR. */
9346 if ((code == EQ_EXPR || code == NE_EXPR)
9347 && TREE_CODE (arg0) == BIT_AND_EXPR
9348 && integer_pow2p (TREE_OPERAND (arg0, 1))
9349 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9350 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9351 arg0, fold_convert (TREE_TYPE (arg0),
9352 integer_zero_node));
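/* For example, (a & 4) == 4 becomes (a & 4) != 0: since C is a power
   of two, the masked value is either 0 or C, so comparing against C
   is the same as comparing against zero with the sense inverted.  */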
9353
9354 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
9355 2, then fold the expression into shifts and logical operations. */
9356 tem = fold_single_bit_test (code, arg0, arg1, type);
9357 if (tem)
9358 return tem;
9359
9360 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9361 Similarly for NE_EXPR. */
9362 if ((code == EQ_EXPR || code == NE_EXPR)
9363 && TREE_CODE (arg0) == BIT_AND_EXPR
9364 && TREE_CODE (arg1) == INTEGER_CST
9365 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9366 {
9367 tree notc = fold_build1 (BIT_NOT_EXPR,
9368 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9369 TREE_OPERAND (arg0, 1));
9370 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9371 arg1, notc);
9372 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9373 if (integer_nonzerop (dandnotc))
9374 return omit_one_operand (type, rslt, arg0);
9375 }
9376
9377 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9378 Similarly for NE_EXPR. */
9379 if ((code == EQ_EXPR || code == NE_EXPR)
9380 && TREE_CODE (arg0) == BIT_IOR_EXPR
9381 && TREE_CODE (arg1) == INTEGER_CST
9382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9383 {
9384 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9385 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9386 TREE_OPERAND (arg0, 1), notd);
9387 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9388 if (integer_nonzerop (candnotd))
9389 return omit_one_operand (type, rslt, arg0);
9390 }
9391
9392 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9393 and similarly for >= into !=. */
9394 if ((code == LT_EXPR || code == GE_EXPR)
9395 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9396 && TREE_CODE (arg1) == LSHIFT_EXPR
9397 && integer_onep (TREE_OPERAND (arg1, 0)))
9398 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9399 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9400 TREE_OPERAND (arg1, 1)),
9401 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9402
9403 else if ((code == LT_EXPR || code == GE_EXPR)
9404 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9405 && (TREE_CODE (arg1) == NOP_EXPR
9406 || TREE_CODE (arg1) == CONVERT_EXPR)
9407 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9408 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9409 return
9410 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9411 fold_convert (TREE_TYPE (arg0),
9412 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9413 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9414 1))),
9415 fold_convert (TREE_TYPE (arg0), integer_zero_node));
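/* For example, for unsigned X, x < (1 << y) folds to (x >> y) == 0
   and x >= (1 << y) folds to (x >> y) != 0.  */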
9416
9417 /* Simplify comparison of something with itself. (For IEEE
9418 floating-point, we can only do some of these simplifications.) */
9419 if (operand_equal_p (arg0, arg1, 0))
9420 {
9421 switch (code)
9422 {
9423 case EQ_EXPR:
9424 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9425 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9426 return constant_boolean_node (1, type);
9427 break;
9428
9429 case GE_EXPR:
9430 case LE_EXPR:
9431 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9432 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9433 return constant_boolean_node (1, type);
9434 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9435
9436 case NE_EXPR:
9437 /* For NE, we can only do this simplification if the operands are
9438 integral or we don't honor IEEE floating-point NaNs. */
9439 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9440 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9441 break;
9442 /* ... fall through ... */
9443 case GT_EXPR:
9444 case LT_EXPR:
9445 return constant_boolean_node (0, type);
9446 default:
9447 gcc_unreachable ();
9448 }
9449 }
9450
9451 /* If we are comparing an expression that just has comparisons
9452 of two integer values, arithmetic expressions of those comparisons,
9453 and constants, we can simplify it. There are only three cases
9454 to check: the two values can either be equal, the first can be
9455 greater, or the second can be greater. Fold the expression for
9456 those three values. Since each value must be 0 or 1, we have
9457 eight possibilities, each of which corresponds to the constant 0
9458 or 1 or one of the six possible comparisons.
9459
9460 This handles common cases like (a > b) == 0 but also handles
9461 expressions like ((x > y) - (y > x)) > 0, which supposedly
9462 occur in macroized code. */
9463
9464 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9465 {
9466 tree cval1 = 0, cval2 = 0;
9467 int save_p = 0;
9468
9469 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9470 /* Don't handle degenerate cases here; they should already
9471 have been handled anyway. */
9472 && cval1 != 0 && cval2 != 0
9473 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9474 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9475 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9476 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9477 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9478 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9479 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9480 {
9481 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9482 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9483
9484 /* We can't just pass T to eval_subst in case cval1 or cval2
9485 was the same as ARG1. */
9486
9487 tree high_result
9488 = fold_build2 (code, type,
9489 eval_subst (arg0, cval1, maxval,
9490 cval2, minval),
9491 arg1);
9492 tree equal_result
9493 = fold_build2 (code, type,
9494 eval_subst (arg0, cval1, maxval,
9495 cval2, maxval),
9496 arg1);
9497 tree low_result
9498 = fold_build2 (code, type,
9499 eval_subst (arg0, cval1, minval,
9500 cval2, maxval),
9501 arg1);
9502
9503 /* All three of these results should be 0 or 1. Confirm they
9504 are. Then use those values to select the proper code
9505 to use. */
9506
9507 if ((integer_zerop (high_result)
9508 || integer_onep (high_result))
9509 && (integer_zerop (equal_result)
9510 || integer_onep (equal_result))
9511 && (integer_zerop (low_result)
9512 || integer_onep (low_result)))
9513 {
9514 /* Make a 3-bit mask with the high-order bit being the
9515 value for `>', the next for `=', and the low for `<'. */
9516 switch ((integer_onep (high_result) * 4)
9517 + (integer_onep (equal_result) * 2)
9518 + integer_onep (low_result))
9519 {
9520 case 0:
9521 /* Always false. */
9522 return omit_one_operand (type, integer_zero_node, arg0);
9523 case 1:
9524 code = LT_EXPR;
9525 break;
9526 case 2:
9527 code = EQ_EXPR;
9528 break;
9529 case 3:
9530 code = LE_EXPR;
9531 break;
9532 case 4:
9533 code = GT_EXPR;
9534 break;
9535 case 5:
9536 code = NE_EXPR;
9537 break;
9538 case 6:
9539 code = GE_EXPR;
9540 break;
9541 case 7:
9542 /* Always true. */
9543 return omit_one_operand (type, integer_one_node, arg0);
9544 }
9545
9546 tem = build2 (code, type, cval1, cval2);
9547 if (save_p)
9548 return save_expr (tem);
9549 else
9550 return fold (tem);
9551 }
9552 }
9553 }
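/* For example, for (a > b) == 0 the three orderings of the two values
   yield (high, equal, low) = (0, 1, 1), i.e. mask 3, which selects
   LE_EXPR, so the whole expression folds to a <= b.  */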
9554
9555 /* If this is a comparison of a field, we may be able to simplify it. */
9556 if (((TREE_CODE (arg0) == COMPONENT_REF
9557 && lang_hooks.can_use_bit_fields_p ())
9558 || TREE_CODE (arg0) == BIT_FIELD_REF)
9559 && (code == EQ_EXPR || code == NE_EXPR)
9560 /* Handle the constant case even without -O
9561 to make sure the warnings are given. */
9562 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9563 {
9564 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9565 if (t1)
9566 return t1;
9567 }
9568
9569 /* If this is a comparison of complex values and either or both sides
9570 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9571 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9572 This may prevent needless evaluations. */
9573 if ((code == EQ_EXPR || code == NE_EXPR)
9574 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9575 && (TREE_CODE (arg0) == COMPLEX_EXPR
9576 || TREE_CODE (arg1) == COMPLEX_EXPR
9577 || TREE_CODE (arg0) == COMPLEX_CST
9578 || TREE_CODE (arg1) == COMPLEX_CST))
9579 {
9580 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9581 tree real0, imag0, real1, imag1;
9582
9583 arg0 = save_expr (arg0);
9584 arg1 = save_expr (arg1);
9585 real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
9586 imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
9587 real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
9588 imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
9589
9590 return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9591 : TRUTH_ORIF_EXPR),
9592 type,
9593 fold_build2 (code, type, real0, real1),
9594 fold_build2 (code, type, imag0, imag1));
9595 }
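/* For example, (a + b*i) == (c + d*i) is split into a == c && b == d,
   and the corresponding != test is split into a != c || b != d.  */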
9596
9597 /* Optimize comparisons of strlen vs zero to a compare of the
9598 first character of the string vs zero. To wit,
9599 strlen(ptr) == 0 => *ptr == 0
9600 strlen(ptr) != 0 => *ptr != 0
9601 Other cases should reduce to one of these two (or a constant)
9602 due to the return value of strlen being unsigned. */
9603 if ((code == EQ_EXPR || code == NE_EXPR)
9604 && integer_zerop (arg1)
9605 && TREE_CODE (arg0) == CALL_EXPR)
9606 {
9607 tree fndecl = get_callee_fndecl (arg0);
9608 tree arglist;
9609
9610 if (fndecl
9611 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9612 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9613 && (arglist = TREE_OPERAND (arg0, 1))
9614 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9615 && ! TREE_CHAIN (arglist))
9616 return fold_build2 (code, type,
9617 build1 (INDIRECT_REF, char_type_node,
9618 TREE_VALUE (arglist)),
9619 fold_convert (char_type_node,
9620 integer_zero_node));
9621 }
9622
9623 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9624 into a single range test. */
9625 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9626 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9627 && TREE_CODE (arg1) == INTEGER_CST
9628 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9629 && !integer_zerop (TREE_OPERAND (arg0, 1))
9630 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9631 && !TREE_OVERFLOW (arg1))
9632 {
9633 t1 = fold_div_compare (code, type, arg0, arg1);
9634 if (t1 != NULL_TREE)
9635 return t1;
9636 }
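/* For example, for unsigned X, x / 4 == 3 holds exactly when
   12 <= x && x <= 15, so the division folds into a single range
   test.  */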
9637
9638 if ((code == EQ_EXPR || code == NE_EXPR)
9639 && !TREE_SIDE_EFFECTS (arg0)
9640 && integer_zerop (arg1)
9641 && tree_expr_nonzero_p (arg0))
9642 return constant_boolean_node (code == NE_EXPR, type);
9643
9644 t1 = fold_relational_const (code, type, arg0, arg1);
9645 return t1 == NULL_TREE ? NULL_TREE : t1;
9646
9647 case UNORDERED_EXPR:
9648 case ORDERED_EXPR:
9649 case UNLT_EXPR:
9650 case UNLE_EXPR:
9651 case UNGT_EXPR:
9652 case UNGE_EXPR:
9653 case UNEQ_EXPR:
9654 case LTGT_EXPR:
9655 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9656 {
9657 t1 = fold_relational_const (code, type, arg0, arg1);
9658 if (t1 != NULL_TREE)
9659 return t1;
9660 }
9661
9662 /* If the first operand is NaN, the result is constant. */
9663 if (TREE_CODE (arg0) == REAL_CST
9664 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9665 && (code != LTGT_EXPR || ! flag_trapping_math))
9666 {
9667 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9668 ? integer_zero_node
9669 : integer_one_node;
9670 return omit_one_operand (type, t1, arg1);
9671 }
9672
9673 /* If the second operand is NaN, the result is constant. */
9674 if (TREE_CODE (arg1) == REAL_CST
9675 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9676 && (code != LTGT_EXPR || ! flag_trapping_math))
9677 {
9678 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9679 ? integer_zero_node
9680 : integer_one_node;
9681 return omit_one_operand (type, t1, arg0);
9682 }
9683
9684 /* Simplify unordered comparison of something with itself. */
9685 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9686 && operand_equal_p (arg0, arg1, 0))
9687 return constant_boolean_node (1, type);
9688
9689 if (code == LTGT_EXPR
9690 && !flag_trapping_math
9691 && operand_equal_p (arg0, arg1, 0))
9692 return constant_boolean_node (0, type);
9693
9694 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9695 {
9696 tree targ0 = strip_float_extensions (arg0);
9697 tree targ1 = strip_float_extensions (arg1);
9698 tree newtype = TREE_TYPE (targ0);
9699
9700 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9701 newtype = TREE_TYPE (targ1);
9702
9703 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9704 return fold_build2 (code, type, fold_convert (newtype, targ0),
9705 fold_convert (newtype, targ1));
9706 }
9707
9708 return NULL_TREE;
9709
9710 case COMPOUND_EXPR:
9711 /* When pedantic, a compound expression can be neither an lvalue
9712 nor an integer constant expression. */
9713 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9714 return NULL_TREE;
9715 /* Don't let (0, 0) be a null pointer constant. */
9716 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9717 : fold_convert (type, arg1);
9718 return pedantic_non_lvalue (tem);
9719
9720 case COMPLEX_EXPR:
9721 if (wins)
9722 return build_complex (type, arg0, arg1);
9723 return NULL_TREE;
9724
9725 default:
9726 return NULL_TREE;
9727 } /* switch (code) */
9728 }
9729
9730 /* Fold a ternary expression of code CODE and type TYPE with operands
9731 OP0, OP1, and OP2. Return the folded expression if folding is
9732 successful. Otherwise, return NULL_TREE. */
9733
9734 static tree
9735 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9736 {
9737 tree tem;
9738 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9739 enum tree_code_class kind = TREE_CODE_CLASS (code);
9740
9741 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9742 && TREE_CODE_LENGTH (code) == 3);
9743
9744 /* Strip any conversions that don't change the mode. This is safe
9745 for every expression, except for a comparison expression because
9746 its signedness is derived from its operands. So, in the latter
9747 case, only strip conversions that don't change the signedness.
9748
9749 Note that this is done as an internal manipulation within the
9750 constant folder, in order to find the simplest representation of
9751 the arguments so that their form can be studied. In any case,
9752 the appropriate type conversions should be put back in the tree
9753 that will get out of the constant folder. */
9754 if (op0)
9755 {
9756 arg0 = op0;
9757 STRIP_NOPS (arg0);
9758 }
9759
9760 if (op1)
9761 {
9762 arg1 = op1;
9763 STRIP_NOPS (arg1);
9764 }
9765
9766 switch (code)
9767 {
9768 case COMPONENT_REF:
9769 if (TREE_CODE (arg0) == CONSTRUCTOR
9770 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9771 {
9772 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
9773 if (m)
9774 return TREE_VALUE (m);
9775 }
9776 return NULL_TREE;
9777
9778 case COND_EXPR:
9779 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9780 so all simple results must be passed through pedantic_non_lvalue. */
9781 if (TREE_CODE (arg0) == INTEGER_CST)
9782 {
9783 tem = integer_zerop (arg0) ? op2 : op1;
9784 /* Only optimize constant conditions when the selected branch
9785 has the same type as the COND_EXPR. This avoids optimizing
9786 away "c ? x : throw", where the throw has a void type. */
9787 if (! VOID_TYPE_P (TREE_TYPE (tem))
9788 || VOID_TYPE_P (type))
9789 return pedantic_non_lvalue (tem);
9790 return NULL_TREE;
9791 }
9792 if (operand_equal_p (arg1, op2, 0))
9793 return pedantic_omit_one_operand (type, arg1, arg0);
9794
9795 /* If we have A op B ? A : C, we may be able to convert this to a
9796 simpler expression, depending on the operation and the values
9797 of B and C. Signed zeros prevent all of these transformations,
9798 for reasons given above each one.
9799
9800 Also try swapping the arguments and inverting the conditional. */
9801 if (COMPARISON_CLASS_P (arg0)
9802 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9803 arg1, TREE_OPERAND (arg0, 1))
9804 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9805 {
9806 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9807 if (tem)
9808 return tem;
9809 }
9810
9811 if (COMPARISON_CLASS_P (arg0)
9812 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9813 op2,
9814 TREE_OPERAND (arg0, 1))
9815 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9816 {
9817 tem = invert_truthvalue (arg0);
9818 if (COMPARISON_CLASS_P (tem))
9819 {
9820 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9821 if (tem)
9822 return tem;
9823 }
9824 }
9825
9826 /* If the second operand is simpler than the third, swap them
9827 since that produces better jump optimization results. */
9828 if (tree_swap_operands_p (op1, op2, false))
9829 {
9830 /* See if this can be inverted. If it can't, possibly because
9831 it was a floating-point inequality comparison, don't do
9832 anything. */
9833 tem = invert_truthvalue (arg0);
9834
9835 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9836 return fold_build3 (code, type, tem, op2, op1);
9837 }
9838
9839 /* Convert A ? 1 : 0 to simply A. */
9840 if (integer_onep (op1)
9841 && integer_zerop (op2)
9842 /* If we try to convert OP0 to our type, the
9843 call to fold will try to move the conversion inside
9844 a COND, which will recurse. In that case, the COND_EXPR
9845 is probably the best choice, so leave it alone. */
9846 && type == TREE_TYPE (arg0))
9847 return pedantic_non_lvalue (arg0);
9848
9849 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9850 over COND_EXPR in cases such as floating point comparisons. */
9851 if (integer_zerop (op1)
9852 && integer_onep (op2)
9853 && truth_value_p (TREE_CODE (arg0)))
9854 return pedantic_non_lvalue (fold_convert (type,
9855 invert_truthvalue (arg0)));
9856
9857 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9858 if (TREE_CODE (arg0) == LT_EXPR
9859 && integer_zerop (TREE_OPERAND (arg0, 1))
9860 && integer_zerop (op2)
9861 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9862 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
9863 TREE_TYPE (tem), tem, arg1));
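/* For example, assuming a 32-bit A, a < 0 ? 0x80000000 : 0 folds to
   a & 0x80000000.  */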
9864
9865 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9866 already handled above. */
9867 if (TREE_CODE (arg0) == BIT_AND_EXPR
9868 && integer_onep (TREE_OPERAND (arg0, 1))
9869 && integer_zerop (op2)
9870 && integer_pow2p (arg1))
9871 {
9872 tree tem = TREE_OPERAND (arg0, 0);
9873 STRIP_NOPS (tem);
9874 if (TREE_CODE (tem) == RSHIFT_EXPR
9875 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9876 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9877 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9878 return fold_build2 (BIT_AND_EXPR, type,
9879 TREE_OPERAND (tem, 0), arg1);
9880 }
9881
9882 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9883 is probably obsolete because the first operand should be a
9884 truth value (that's why we have the two cases above), but let's
9885 leave it in until we can confirm this for all front-ends. */
9886 if (integer_zerop (op2)
9887 && TREE_CODE (arg0) == NE_EXPR
9888 && integer_zerop (TREE_OPERAND (arg0, 1))
9889 && integer_pow2p (arg1)
9890 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9891 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9892 arg1, OEP_ONLY_CONST))
9893 return pedantic_non_lvalue (fold_convert (type,
9894 TREE_OPERAND (arg0, 0)));
9895
9896 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9897 if (integer_zerop (op2)
9898 && truth_value_p (TREE_CODE (arg0))
9899 && truth_value_p (TREE_CODE (arg1)))
9900 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
9901
9902 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9903 if (integer_onep (op2)
9904 && truth_value_p (TREE_CODE (arg0))
9905 && truth_value_p (TREE_CODE (arg1)))
9906 {
9907 /* Only perform transformation if ARG0 is easily inverted. */
9908 tem = invert_truthvalue (arg0);
9909 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9910 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
9911 }
9912
9913 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9914 if (integer_zerop (arg1)
9915 && truth_value_p (TREE_CODE (arg0))
9916 && truth_value_p (TREE_CODE (op2)))
9917 {
9918 /* Only perform transformation if ARG0 is easily inverted. */
9919 tem = invert_truthvalue (arg0);
9920 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9921 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
9922 }
9923
9924 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9925 if (integer_onep (arg1)
9926 && truth_value_p (TREE_CODE (arg0))
9927 && truth_value_p (TREE_CODE (op2)))
9928 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
9929
9930 return NULL_TREE;
9931
9932 case CALL_EXPR:
9933 /* Check for a built-in function. */
9934 if (TREE_CODE (op0) == ADDR_EXPR
9935 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
9936 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
9937 {
9938 tree fndecl = TREE_OPERAND (op0, 0);
9939 tree arglist = op1;
9940 tree tmp = fold_builtin (fndecl, arglist, false);
9941 if (tmp)
9942 return tmp;
9943 }
9944 return NULL_TREE;
9945
9946 default:
9947 return NULL_TREE;
9948 } /* switch (code) */
9949 }
9950
9951 /* Perform constant folding and related simplification of EXPR.
9952 The related simplifications include x*1 => x, x*0 => 0, etc.,
9953 and application of the associative law.
9954 NOP_EXPR conversions may be removed freely (as long as we
9955 are careful not to change the type of the overall expression).
9956 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
9957 but we can constant-fold them if they have constant operands. */
9958
9959 #ifdef ENABLE_FOLD_CHECKING
9960 # define fold(x) fold_1 (x)
9961 static tree fold_1 (tree);
9962 static
9963 #endif
9964 tree
9965 fold (tree expr)
9966 {
9967 const tree t = expr;
9968 enum tree_code code = TREE_CODE (t);
9969 enum tree_code_class kind = TREE_CODE_CLASS (code);
9970 tree tem;
9971
9972 /* Return right away if a constant. */
9973 if (kind == tcc_constant)
9974 return t;
9975
9976 if (IS_EXPR_CODE_CLASS (kind))
9977 {
9978 tree type = TREE_TYPE (t);
9979 tree op0, op1, op2;
9980
9981 switch (TREE_CODE_LENGTH (code))
9982 {
9983 case 1:
9984 op0 = TREE_OPERAND (t, 0);
9985 tem = fold_unary (code, type, op0);
9986 return tem ? tem : expr;
9987 case 2:
9988 op0 = TREE_OPERAND (t, 0);
9989 op1 = TREE_OPERAND (t, 1);
9990 tem = fold_binary (code, type, op0, op1);
9991 return tem ? tem : expr;
9992 case 3:
9993 op0 = TREE_OPERAND (t, 0);
9994 op1 = TREE_OPERAND (t, 1);
9995 op2 = TREE_OPERAND (t, 2);
9996 tem = fold_ternary (code, type, op0, op1, op2);
9997 return tem ? tem : expr;
9998 default:
9999 break;
10000 }
10001 }
10002
10003 switch (code)
10004 {
10005 case CONST_DECL:
10006 return fold (DECL_INITIAL (t));
10007
10008 case ASSERT_EXPR:
10009 {
10010 /* Given ASSERT_EXPR <Y, COND>, return Y if COND can be folded
10011 to boolean_true_node. If COND folds to boolean_false_node,
10012 return ASSERT_EXPR <Y, 0>. Otherwise, return the original
10013 expression. */
10014 tree c = fold (ASSERT_EXPR_COND (t));
10015 if (c == boolean_true_node)
10016 return ASSERT_EXPR_VAR (t);
10017 else if (c == boolean_false_node)
10018 return build2 (ASSERT_EXPR, TREE_TYPE (t), ASSERT_EXPR_VAR (t), c);
10019 else
10020 return t;
10021 }
10022
10023 default:
10024 return t;
10025 } /* switch (code) */
10026 }
10027
10028 #ifdef ENABLE_FOLD_CHECKING
10029 #undef fold
10030
10031 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10032 static void fold_check_failed (tree, tree);
10033 void print_fold_checksum (tree);
10034
10035 /* When --enable-checking=fold is used, compute a digest of EXPR before
10036 and after the actual fold call to verify that fold did not
10037 accidentally change the original expr. */
10038
10039 tree
10040 fold (tree expr)
10041 {
10042 tree ret;
10043 struct md5_ctx ctx;
10044 unsigned char checksum_before[16], checksum_after[16];
10045 htab_t ht;
10046
10047 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10048 md5_init_ctx (&ctx);
10049 fold_checksum_tree (expr, &ctx, ht);
10050 md5_finish_ctx (&ctx, checksum_before);
10051 htab_empty (ht);
10052
10053 ret = fold_1 (expr);
10054
10055 md5_init_ctx (&ctx);
10056 fold_checksum_tree (expr, &ctx, ht);
10057 md5_finish_ctx (&ctx, checksum_after);
10058 htab_delete (ht);
10059
10060 if (memcmp (checksum_before, checksum_after, 16))
10061 fold_check_failed (expr, ret);
10062
10063 return ret;
10064 }
10065
10066 void
10067 print_fold_checksum (tree expr)
10068 {
10069 struct md5_ctx ctx;
10070 unsigned char checksum[16], cnt;
10071 htab_t ht;
10072
10073 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10074 md5_init_ctx (&ctx);
10075 fold_checksum_tree (expr, &ctx, ht);
10076 md5_finish_ctx (&ctx, checksum);
10077 htab_delete (ht);
10078 for (cnt = 0; cnt < 16; ++cnt)
10079 fprintf (stderr, "%02x", checksum[cnt]);
10080 putc ('\n', stderr);
10081 }
10082
10083 static void
10084 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10085 {
10086 internal_error ("fold check: original tree changed by fold");
10087 }
10088
10089 static void
10090 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10091 {
10092 void **slot;
10093 enum tree_code code;
10094 char buf[sizeof (struct tree_decl)];
10095 int i, len;
10096
10097 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10098 <= sizeof (struct tree_decl))
10099 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10100 if (expr == NULL)
10101 return;
10102 slot = htab_find_slot (ht, expr, INSERT);
10103 if (*slot != NULL)
10104 return;
10105 *slot = expr;
10106 code = TREE_CODE (expr);
10107 if (TREE_CODE_CLASS (code) == tcc_declaration
10108 && DECL_ASSEMBLER_NAME_SET_P (expr))
10109 {
10110 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10111 memcpy (buf, expr, tree_size (expr));
10112 expr = (tree) buf;
10113 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10114 }
10115 else if (TREE_CODE_CLASS (code) == tcc_type
10116 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10117 || TYPE_CACHED_VALUES_P (expr)))
10118 {
10119 /* Allow these fields to be modified. */
10120 memcpy (buf, expr, tree_size (expr));
10121 expr = (tree) buf;
10122 TYPE_POINTER_TO (expr) = NULL;
10123 TYPE_REFERENCE_TO (expr) = NULL;
10124 if (TYPE_CACHED_VALUES_P (expr))
10125 {
10126 TYPE_CACHED_VALUES_P (expr) = 0;
10127 TYPE_CACHED_VALUES (expr) = NULL;
10128 }
10129 }
10130 md5_process_bytes (expr, tree_size (expr), ctx);
10131 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10132 if (TREE_CODE_CLASS (code) != tcc_type
10133 && TREE_CODE_CLASS (code) != tcc_declaration)
10134 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10135 switch (TREE_CODE_CLASS (code))
10136 {
10137 case tcc_constant:
10138 switch (code)
10139 {
10140 case STRING_CST:
10141 md5_process_bytes (TREE_STRING_POINTER (expr),
10142 TREE_STRING_LENGTH (expr), ctx);
10143 break;
10144 case COMPLEX_CST:
10145 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10146 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10147 break;
10148 case VECTOR_CST:
10149 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10150 break;
10151 default:
10152 break;
10153 }
10154 break;
10155 case tcc_exceptional:
10156 switch (code)
10157 {
10158 case TREE_LIST:
10159 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10160 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10161 break;
10162 case TREE_VEC:
10163 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10164 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10165 break;
10166 default:
10167 break;
10168 }
10169 break;
10170 case tcc_expression:
10171 case tcc_reference:
10172 case tcc_comparison:
10173 case tcc_unary:
10174 case tcc_binary:
10175 case tcc_statement:
10176 len = TREE_CODE_LENGTH (code);
10177 for (i = 0; i < len; ++i)
10178 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10179 break;
10180 case tcc_declaration:
10181 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10182 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10183 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10184 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10185 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10186 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10187 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10188 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10189 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10190 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10191 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10192 break;
10193 case tcc_type:
10194 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10195 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10196 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10197 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10198 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10199 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10200 if (INTEGRAL_TYPE_P (expr)
10201 || SCALAR_FLOAT_TYPE_P (expr))
10202 {
10203 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10204 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10205 }
10206 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10207 if (TREE_CODE (expr) == RECORD_TYPE
10208 || TREE_CODE (expr) == UNION_TYPE
10209 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10210 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10211 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10212 break;
10213 default:
10214 break;
10215 }
10216 }
10217
10218 #endif
10219
10220 /* Fold a unary tree expression with code CODE of type TYPE with an
10221 operand OP0. Return a folded expression if successful. Otherwise,
10222 return a tree expression with code CODE of type TYPE with an
10223 operand OP0. */
10224
10225 tree
10226 fold_build1 (enum tree_code code, tree type, tree op0)
10227 {
10228 tree tem = fold_unary (code, type, op0);
10229 if (tem)
10230 return tem;
10231
10232 return build1 (code, type, op0);
10233 }
10234
10235 /* Fold a binary tree expression with code CODE of type TYPE with
10236 operands OP0 and OP1. Return a folded expression if successful.
10237 Otherwise, return a tree expression with code CODE of type TYPE
10238 with operands OP0 and OP1. */
10239
10240 tree
10241 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10242 {
10243 tree tem = fold_binary (code, type, op0, op1);
10244 if (tem)
10245 return tem;
10246
10247 return build2 (code, type, op0, op1);
10248 }
10249
10250 /* Fold a ternary tree expression with code CODE of type TYPE with
10251 operands OP0, OP1, and OP2. Return a folded expression if
10252 successful. Otherwise, return a tree expression with code CODE of
10253 type TYPE with operands OP0, OP1, and OP2. */
10254
10255 tree
10256 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10257 {
10258 tree tem = fold_ternary (code, type, op0, op1, op2);
10259 if (tem)
10260 return tem;
10261
10262 return build3 (code, type, op0, op1, op2);
10263 }
10264
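/* A sketch of the fold_buildN contract (illustrative only; this helper
   is hypothetical, guarded out of the build, and not part of the
   original file): when the operands fold, the simplified tree comes
   back, otherwise a fresh node is built.  */
#if 0
static tree
fold_build2_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Both operands are INTEGER_CSTs, so fold_binary succeeds and this
     returns the INTEGER_CST 5 rather than a new PLUS_EXPR node.  */
  return fold_build2 (PLUS_EXPR, integer_type_node, two, three);
}
#endif
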
10265 /* Perform constant folding and related simplification of initializer
10266 expression EXPR. This behaves identically to "fold" but ignores
10267 potential run-time traps and exceptions that fold must preserve. */
10268
10269 tree
10270 fold_initializer (tree expr)
10271 {
10272 int saved_signaling_nans = flag_signaling_nans;
10273 int saved_trapping_math = flag_trapping_math;
10274 int saved_rounding_math = flag_rounding_math;
10275 int saved_trapv = flag_trapv;
10276 tree result;
10277
10278 flag_signaling_nans = 0;
10279 flag_trapping_math = 0;
10280 flag_rounding_math = 0;
10281 flag_trapv = 0;
10282
10283 result = fold (expr);
10284
10285 flag_signaling_nans = saved_signaling_nans;
10286 flag_trapping_math = saved_trapping_math;
10287 flag_rounding_math = saved_rounding_math;
10288 flag_trapv = saved_trapv;
10289
10290 return result;
10291 }
10292
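/* For example (an illustration, assuming -frounding-math is in
   effect): const_binop normally declines to fold an inexact
   expression such as 1.0/3.0 because the run-time rounding mode could
   differ, but fold_initializer clears flag_rounding_math and friends
   first, so a static initializer like "double d = 1.0/3.0;" is still
   folded to a constant, as if evaluated at translation time.  */
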
10293 /* Determine if first argument is a multiple of second argument. Return 0 if
10294 it is not, or if we cannot easily determine it to be.
10295
10296 An example of the sort of thing we care about (at this point; this routine
10297 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10298 fold cases do now) is discovering that
10299
10300 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10301
10302 is a multiple of
10303
10304 SAVE_EXPR (J * 8)
10305
10306 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10307
10308 This code also handles discovering that
10309
10310 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10311
10312 is a multiple of 8 so we don't have to worry about dealing with a
10313 possible remainder.
10314
10315 Note that we *look* inside a SAVE_EXPR only to determine how it was
10316 calculated; it is not safe for fold to do much of anything else with the
10317 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10318 at run time. For example, the latter example above *cannot* be implemented
10319 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10320 evaluation time of the original SAVE_EXPR is not necessarily the same at
10321 the time the new expression is evaluated. The only optimization of this
10322 sort that would be valid is changing
10323
10324 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10325
10326 divided by 8 to
10327
10328 SAVE_EXPR (I) * SAVE_EXPR (J)
10329
10330 (where the same SAVE_EXPR (J) is used in the original and the
10331 transformed version). */
10332
10333 static int
10334 multiple_of_p (tree type, tree top, tree bottom)
10335 {
10336 if (operand_equal_p (top, bottom, 0))
10337 return 1;
10338
10339 if (TREE_CODE (type) != INTEGER_TYPE)
10340 return 0;
10341
10342 switch (TREE_CODE (top))
10343 {
10344 case BIT_AND_EXPR:
10345 /* Bitwise AND can only provide a power-of-two multiple. If either
10346 operand (such as the mask) is a multiple of BOTTOM, then so is TOP. */
10347 if (!integer_pow2p (bottom))
10348 return 0;
10349 /* FALLTHRU */
10350
10351 case MULT_EXPR:
10352 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10353 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10354
10355 case PLUS_EXPR:
10356 case MINUS_EXPR:
10357 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10358 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10359
10360 case LSHIFT_EXPR:
10361 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10362 {
10363 tree op1, t1;
10364
10365 op1 = TREE_OPERAND (top, 1);
10366 /* const_binop may not detect overflow correctly,
10367 so check for it explicitly here. */
10368 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10369 > TREE_INT_CST_LOW (op1)
10370 && TREE_INT_CST_HIGH (op1) == 0
10371 && 0 != (t1 = fold_convert (type,
10372 const_binop (LSHIFT_EXPR,
10373 size_one_node,
10374 op1, 0)))
10375 && ! TREE_OVERFLOW (t1))
10376 return multiple_of_p (type, t1, bottom);
10377 }
10378 return 0;
10379
10380 case NOP_EXPR:
10381 /* Can't handle conversions from non-integral or wider integral type. */
10382 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10383 || (TYPE_PRECISION (type)
10384 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10385 return 0;
10386
10387 /* ... fall through ... */
10388
10389 case SAVE_EXPR:
10390 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10391
10392 case INTEGER_CST:
10393 if (TREE_CODE (bottom) != INTEGER_CST
10394 || (TYPE_UNSIGNED (type)
10395 && (tree_int_cst_sgn (top) < 0
10396 || tree_int_cst_sgn (bottom) < 0)))
10397 return 0;
10398 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10399 top, bottom, 0));
10400
10401 default:
10402 return 0;
10403 }
10404 }
10405
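/* A few concrete cases (illustrative only): with TOP = J * 8 and
   BOTTOM = 8, the MULT_EXPR case succeeds because operand 1 equals
   BOTTOM.  With TOP = I + J * 8, the PLUS_EXPR case requires *both*
   operands to be multiples of 8, so the result is 0 unless I is also
   known to be a multiple.  With TOP = X << 3, the LSHIFT_EXPR case
   reduces the question to whether 1 << 3 == 8 is a multiple of
   BOTTOM.  */
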
10406 /* Return true if `t' is known to be non-negative. */
10407
10408 int
10409 tree_expr_nonnegative_p (tree t)
10410 {
10411 switch (TREE_CODE (t))
10412 {
10413 case ABS_EXPR:
10414 return 1;
10415
10416 case INTEGER_CST:
10417 return tree_int_cst_sgn (t) >= 0;
10418
10419 case REAL_CST:
10420 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10421
10422 case PLUS_EXPR:
10423 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10424 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10425 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10426
10427 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10428 both unsigned and at least 2 bits shorter than the result. */
10429 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10430 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10431 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10432 {
10433 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10434 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10435 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10436 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10437 {
10438 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10439 TYPE_PRECISION (inner2)) + 1;
10440 return prec < TYPE_PRECISION (TREE_TYPE (t));
10441 }
10442 }
10443 break;
10444
10445 case MULT_EXPR:
10446 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10447 {
10448 /* x * x for floating point x is always non-negative. */
10449 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10450 return 1;
10451 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10452 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10453 }
10454
10455 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10456 both unsigned and the sum of their precisions is less than the result's. */
10457 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10458 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10459 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10460 {
10461 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10462 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10463 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10464 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10465 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10466 < TYPE_PRECISION (TREE_TYPE (t));
10467 }
10468 return 0;
10469
10470 case TRUNC_DIV_EXPR:
10471 case CEIL_DIV_EXPR:
10472 case FLOOR_DIV_EXPR:
10473 case ROUND_DIV_EXPR:
10474 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10475 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10476
10477 case TRUNC_MOD_EXPR:
10478 case CEIL_MOD_EXPR:
10479 case FLOOR_MOD_EXPR:
10480 case ROUND_MOD_EXPR:
10481 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10482
10483 case RDIV_EXPR:
10484 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10485 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10486
10487 case BIT_AND_EXPR:
10488 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10489 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10490 case BIT_IOR_EXPR:
10491 case BIT_XOR_EXPR:
10492 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10493 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10494
10495 case NOP_EXPR:
10496 {
10497 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10498 tree outer_type = TREE_TYPE (t);
10499
10500 if (TREE_CODE (outer_type) == REAL_TYPE)
10501 {
10502 if (TREE_CODE (inner_type) == REAL_TYPE)
10503 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10504 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10505 {
10506 if (TYPE_UNSIGNED (inner_type))
10507 return 1;
10508 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10509 }
10510 }
10511 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10512 {
10513 if (TREE_CODE (inner_type) == REAL_TYPE)
10514 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10515 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10516 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10517 && TYPE_UNSIGNED (inner_type);
10518 }
10519 }
10520 break;
10521
10522 case COND_EXPR:
10523 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10524 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10525 case COMPOUND_EXPR:
10526 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10527 case MIN_EXPR:
10528 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10529 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10530 case MAX_EXPR:
10531 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10532 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10533 case MODIFY_EXPR:
10534 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10535 case BIND_EXPR:
10536 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10537 case SAVE_EXPR:
10538 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10539 case NON_LVALUE_EXPR:
10540 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10541 case FLOAT_EXPR:
10542 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10543
10544 case TARGET_EXPR:
10545 {
10546 tree temp = TARGET_EXPR_SLOT (t);
10547 t = TARGET_EXPR_INITIAL (t);
10548
10549 /* If the initializer is non-void, then it's a normal expression
10550 that will be assigned to the slot. */
10551 if (!VOID_TYPE_P (t))
10552 return tree_expr_nonnegative_p (t);
10553
10554 /* Otherwise, the initializer sets the slot in some way. One common
10555 way is an assignment statement at the end of the initializer. */
10556 while (1)
10557 {
10558 if (TREE_CODE (t) == BIND_EXPR)
10559 t = expr_last (BIND_EXPR_BODY (t));
10560 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10561 || TREE_CODE (t) == TRY_CATCH_EXPR)
10562 t = expr_last (TREE_OPERAND (t, 0));
10563 else if (TREE_CODE (t) == STATEMENT_LIST)
10564 t = expr_last (t);
10565 else
10566 break;
10567 }
10568 if (TREE_CODE (t) == MODIFY_EXPR
10569 && TREE_OPERAND (t, 0) == temp)
10570 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10571
10572 return 0;
10573 }
10574
10575 case CALL_EXPR:
10576 {
10577 tree fndecl = get_callee_fndecl (t);
10578 tree arglist = TREE_OPERAND (t, 1);
10579 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10580 switch (DECL_FUNCTION_CODE (fndecl))
10581 {
10582 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10583 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10584 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10585 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10586
10587 CASE_BUILTIN_F (BUILT_IN_ACOS)
10588 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10589 CASE_BUILTIN_F (BUILT_IN_CABS)
10590 CASE_BUILTIN_F (BUILT_IN_COSH)
10591 CASE_BUILTIN_F (BUILT_IN_ERFC)
10592 CASE_BUILTIN_F (BUILT_IN_EXP)
10593 CASE_BUILTIN_F (BUILT_IN_EXP10)
10594 CASE_BUILTIN_F (BUILT_IN_EXP2)
10595 CASE_BUILTIN_F (BUILT_IN_FABS)
10596 CASE_BUILTIN_F (BUILT_IN_FDIM)
10597 CASE_BUILTIN_F (BUILT_IN_FREXP)
10598 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10599 CASE_BUILTIN_F (BUILT_IN_POW10)
10600 CASE_BUILTIN_I (BUILT_IN_FFS)
10601 CASE_BUILTIN_I (BUILT_IN_PARITY)
10602 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10603 /* Always true. */
10604 return 1;
10605
10606 CASE_BUILTIN_F (BUILT_IN_SQRT)
10607 /* sqrt(-0.0) is -0.0. */
10608 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10609 return 1;
10610 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10611
10612 CASE_BUILTIN_F (BUILT_IN_ASINH)
10613 CASE_BUILTIN_F (BUILT_IN_ATAN)
10614 CASE_BUILTIN_F (BUILT_IN_ATANH)
10615 CASE_BUILTIN_F (BUILT_IN_CBRT)
10616 CASE_BUILTIN_F (BUILT_IN_CEIL)
10617 CASE_BUILTIN_F (BUILT_IN_ERF)
10618 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10619 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10620 CASE_BUILTIN_F (BUILT_IN_FMOD)
10621 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10622 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10623 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10624 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10625 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10626 CASE_BUILTIN_F (BUILT_IN_LRINT)
10627 CASE_BUILTIN_F (BUILT_IN_LROUND)
10628 CASE_BUILTIN_F (BUILT_IN_MODF)
10629 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10630 CASE_BUILTIN_F (BUILT_IN_POW)
10631 CASE_BUILTIN_F (BUILT_IN_RINT)
10632 CASE_BUILTIN_F (BUILT_IN_ROUND)
10633 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10634 CASE_BUILTIN_F (BUILT_IN_SINH)
10635 CASE_BUILTIN_F (BUILT_IN_TANH)
10636 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10637 /* True if the 1st argument is nonnegative. */
10638 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10639
10640 CASE_BUILTIN_F (BUILT_IN_FMAX)
10641 /* True if the 1st OR 2nd arguments are nonnegative. */
10642 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10643 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10644
10645 CASE_BUILTIN_F (BUILT_IN_FMIN)
10646 /* True if the 1st AND 2nd arguments are nonnegative. */
10647 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10648 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10649
10650 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10651 /* True if the 2nd argument is nonnegative. */
10652 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10653
10654 default:
10655 break;
10656 #undef CASE_BUILTIN_F
10657 #undef CASE_BUILTIN_I
10658 }
10659 }
10660
10661 /* ... fall through ... */
10662
10663 default:
10664 if (truth_value_p (TREE_CODE (t)))
10665 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10666 return 1;
10667 }
10668
10669 /* We don't know sign of `t', so be conservative and return false. */
10670 return 0;
10671 }
10672
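/* Illustration of the zero-extension rules above: for unsigned char
   A and B, the sum (int) A + (int) B needs at most 9 bits, so the
   PLUS_EXPR case reports it non-negative in a 32-bit int; likewise
   (int) A * (int) B needs at most 16 bits, so the MULT_EXPR case
   accepts it as well.  */
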
10673 /* Return true when T is an address and is known to be nonzero.
10674 For floating point we further ensure that T is not denormal.
10675 Similar logic is present in nonzero_address in rtlanal.h. */
10676
10677 static bool
10678 tree_expr_nonzero_p (tree t)
10679 {
10680 tree type = TREE_TYPE (t);
10681
10682 /* Doing something useful for floating point would need more work. */
10683 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10684 return false;
10685
10686 switch (TREE_CODE (t))
10687 {
10688 case ABS_EXPR:
10689 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10690 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Do not fall through to the INTEGER_CST case below, which would read
   the constant fields of a non-constant node. */
break;

10692 case INTEGER_CST:
10693 /* We used to test for !integer_zerop here. This does not work correctly
10694 if TREE_CONSTANT_OVERFLOW (t). */
10695 return (TREE_INT_CST_LOW (t) != 0
10696 || TREE_INT_CST_HIGH (t) != 0);
10697
10698 case PLUS_EXPR:
10699 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10700 {
10701 /* With the presence of negative values it is hard
10702 to say something. */
10703 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10704 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10705 return false;
10706 /* One of the operands must be positive and the other non-negative. */
10707 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10708 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10709 }
10710 break;
10711
10712 case MULT_EXPR:
10713 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10714 {
10715 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10716 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10717 }
10718 break;
10719
10720 case NOP_EXPR:
10721 {
10722 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10723 tree outer_type = TREE_TYPE (t);
10724
10725 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10726 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10727 }
10728 break;
10729
10730 case ADDR_EXPR:
10731 {
10732 tree base = get_base_address (TREE_OPERAND (t, 0));
10733
10734 if (!base)
10735 return false;
10736
10737 /* Weak declarations may link to NULL. */
10738 if (DECL_P (base))
10739 return !DECL_WEAK (base);
10740
10741 /* Constants are never weak. */
10742 if (CONSTANT_CLASS_P (base))
10743 return true;
10744
10745 return false;
10746 }
10747
10748 case COND_EXPR:
10749 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10750 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10751
10752 case MIN_EXPR:
10753 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10754 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10755
10756 case MAX_EXPR:
10757 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10758 {
10759 /* When both operands are nonzero, then MAX must be too. */
10760 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10761 return true;
10762
10763 /* MAX where operand 0 is positive is positive. */
10764 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10765 }
10766 /* MAX where operand 1 is positive is positive. */
10767 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10768 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10769 return true;
10770 break;
10771
10772 case COMPOUND_EXPR:
10773 case MODIFY_EXPR:
10774 case BIND_EXPR:
10775 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10776
10777 case SAVE_EXPR:
10778 case NON_LVALUE_EXPR:
10779 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10780
10781 case BIT_IOR_EXPR:
10782 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10783 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10784
10785 default:
10786 break;
10787 }
10788 return false;
10789 }
10790
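/* For instance (illustrative): the ADDR_EXPR case lets &some_local or
   the address of a string literal fold as nonzero, since automatic
   variables and constants cannot have a null address, whereas the
   address of a weak symbol must be assumed possibly null because the
   definition may be absent at link time.  */
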
10791 /* See if we are applying CODE, a relational operator, to the highest
10792 or lowest possible integer of TYPE. If so, then the result is a
10793 compile-time constant. */
10794
10795 static tree
10796 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10797 tree *op1_p)
10798 {
10799 tree op0 = *op0_p;
10800 tree op1 = *op1_p;
10801 enum tree_code code = *code_p;
10802 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10803
10804 if (TREE_CODE (op1) == INTEGER_CST
10805 && ! TREE_CONSTANT_OVERFLOW (op1)
10806 && width <= HOST_BITS_PER_WIDE_INT
10807 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10808 || POINTER_TYPE_P (TREE_TYPE (op1))))
10809 {
10810 unsigned HOST_WIDE_INT signed_max;
10811 unsigned HOST_WIDE_INT max, min;
10812
10813 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10814
10815 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10816 {
10817 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10818 min = 0;
10819 }
10820 else
10821 {
10822 max = signed_max;
10823 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10824 }
10825
10826 if (TREE_INT_CST_HIGH (op1) == 0
10827 && TREE_INT_CST_LOW (op1) == max)
10828 switch (code)
10829 {
10830 case GT_EXPR:
10831 return omit_one_operand (type, integer_zero_node, op0);
10832
10833 case GE_EXPR:
10834 *code_p = EQ_EXPR;
10835 break;
10836 case LE_EXPR:
10837 return omit_one_operand (type, integer_one_node, op0);
10838
10839 case LT_EXPR:
10840 *code_p = NE_EXPR;
10841 break;
10842
10843 /* The GE_EXPR and LT_EXPR cases above are not normally
10844 reached because of previous transformations. */
10845
10846 default:
10847 break;
10848 }
10849 else if (TREE_INT_CST_HIGH (op1) == 0
10850 && TREE_INT_CST_LOW (op1) == max - 1)
10851 switch (code)
10852 {
10853 case GT_EXPR:
10854 *code_p = EQ_EXPR;
10855 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10856 break;
10857 case LE_EXPR:
10858 *code_p = NE_EXPR;
10859 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10860 break;
10861 default:
10862 break;
10863 }
10864 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10865 && TREE_INT_CST_LOW (op1) == min)
10866 switch (code)
10867 {
10868 case LT_EXPR:
10869 return omit_one_operand (type, integer_zero_node, op0);
10870
10871 case LE_EXPR:
10872 *code_p = EQ_EXPR;
10873 break;
10874
10875 case GE_EXPR:
10876 return omit_one_operand (type, integer_one_node, op0);
10877
10878 case GT_EXPR:
10879 *code_p = NE_EXPR;
10880 break;
10881
10882 default:
10883 break;
10884 }
10885 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10886 && TREE_INT_CST_LOW (op1) == min + 1)
10887 switch (code)
10888 {
10889 case GE_EXPR:
10890 *code_p = NE_EXPR;
10891 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10892 break;
10893 case LT_EXPR:
10894 *code_p = EQ_EXPR;
10895 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10896 break;
10897 default:
10898 break;
10899 }
10900
10901 else if (TREE_INT_CST_HIGH (op1) == 0
10902 && TREE_INT_CST_LOW (op1) == signed_max
10903 && TYPE_UNSIGNED (TREE_TYPE (op1))
10904 /* signed_type does not work on pointer types. */
10905 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10906 {
10907 /* The following case also applies to X < signed_max+1
10908 and X >= signed_max+1 because of previous transformations. */
10909 if (code == LE_EXPR || code == GT_EXPR)
10910 {
10911 tree st0, st1, exp, retval;
10912 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10913 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10914
10915 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10916 type,
10917 fold_convert (st0, op0),
10918 fold_convert (st1, integer_zero_node));
10919
10920 retval = fold_binary_to_constant (TREE_CODE (exp),
10921 TREE_TYPE (exp),
10922 TREE_OPERAND (exp, 0),
10923 TREE_OPERAND (exp, 1));
10924
10925 /* If we are in gimple form, then returning EXP would create
10926 non-gimple expressions. Clearing it is safe and ensures
10927 we do not allow a non-gimple expression to escape. */
10928 if (in_gimple_form)
10929 exp = NULL;
10930
10931 return (retval ? retval : exp);
10932 }
10933 }
10934 }
10935
10936 return NULL_TREE;
10937 }
10938
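/* Concrete instances of the transformations above (illustrative):
   for an unsigned char operand X compared within its own type,
   X > 255 folds to 0 outright, X >= 255 becomes X == 255, and
   X < 255 becomes X != 255; at the other end, X < 0 folds to 0
   and X >= 0 folds to 1.  */
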
10939
10940 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10941 attempt to fold the expression to a constant without modifying TYPE,
10942 OP0 or OP1.
10943
10944 If the expression could be simplified to a constant, then return
10945 the constant. If the expression would not be simplified to a
10946 constant, then return NULL_TREE.
10947
10948 Note this is primarily designed to be called after gimplification
10949 of the tree structures and when at least one operand is a constant.
10950 As a result of those simplifying assumptions this routine is far
10951 simpler than the generic fold routine. */
10952
10953 tree
10954 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10955 {
10956 int wins = 1;
10957 tree subop0;
10958 tree subop1;
10959 tree tem;
10960
10961 /* If this is a commutative operation, and ARG0 is a constant, move it
10962 to ARG1 to reduce the number of tests below. */
10963 if (commutative_tree_code (code)
10964 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10965 {
10966 tem = op0;
10967 op0 = op1;
10968 op1 = tem;
10969 }
10970
10971 /* If either operand is a complex type, extract its real component. */
10972 if (TREE_CODE (op0) == COMPLEX_CST)
10973 subop0 = TREE_REALPART (op0);
10974 else
10975 subop0 = op0;
10976
10977 if (TREE_CODE (op1) == COMPLEX_CST)
10978 subop1 = TREE_REALPART (op1);
10979 else
10980 subop1 = op1;
10981
10982 /* Note if either argument is not a real or integer constant.
10983 With a few exceptions, simplification is limited to cases
10984 where both arguments are constants. */
10985 if ((TREE_CODE (subop0) != INTEGER_CST
10986 && TREE_CODE (subop0) != REAL_CST)
10987 || (TREE_CODE (subop1) != INTEGER_CST
10988 && TREE_CODE (subop1) != REAL_CST))
10989 wins = 0;
10990
10991 switch (code)
10992 {
10993 case PLUS_EXPR:
10994 /* (plus (address) (const_int)) is a constant. */
10995 if (TREE_CODE (op0) == PLUS_EXPR
10996 && TREE_CODE (op1) == INTEGER_CST
10997 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10998 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10999 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
11000 == ADDR_EXPR)))
11001 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
11002 {
11003 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
11004 const_binop (PLUS_EXPR, op1,
11005 TREE_OPERAND (op0, 1), 0));
11006 }
11007 case BIT_XOR_EXPR:
11008
11009 binary:
11010 if (!wins)
11011 return NULL_TREE;
11012
11013 /* Both arguments are constants. Simplify. */
11014 tem = const_binop (code, op0, op1, 0);
11015 if (tem != NULL_TREE)
11016 {
11017 /* The return value should always have the same type as
11018 the original expression. */
11019 if (TREE_TYPE (tem) != type)
11020 tem = fold_convert (type, tem);
11021
11022 return tem;
11023 }
11024 return NULL_TREE;
11025
11026 case MINUS_EXPR:
11027 /* Fold &x - &x. This can happen from &x.foo - &x.
11028 This is unsafe for certain floats even in non-IEEE formats.
11029 In IEEE, it is unsafe because it gives the wrong result for NaNs.
11030 Also note that operand_equal_p is always false if an
11031 operand is volatile. */
11032 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
11033 return fold_convert (type, integer_zero_node);
11034
11035 goto binary;
11036
11037 case MULT_EXPR:
11038 case BIT_AND_EXPR:
11039 /* Special case multiplication or bitwise AND where one argument
11040 is zero. */
11041 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
11042 return omit_one_operand (type, op1, op0);
11043 else
11044 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
11045 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
11046 && real_zerop (op1))
11047 return omit_one_operand (type, op1, op0);
11048
11049 goto binary;
11050
11051 case BIT_IOR_EXPR:
11052 /* Special case when we know the result will be all ones. */
11053 if (integer_all_onesp (op1))
11054 return omit_one_operand (type, op1, op0);
11055
11056 goto binary;
11057
11058 case TRUNC_DIV_EXPR:
11059 case ROUND_DIV_EXPR:
11060 case FLOOR_DIV_EXPR:
11061 case CEIL_DIV_EXPR:
11062 case EXACT_DIV_EXPR:
11063 case TRUNC_MOD_EXPR:
11064 case ROUND_MOD_EXPR:
11065 case FLOOR_MOD_EXPR:
11066 case CEIL_MOD_EXPR:
11067 case RDIV_EXPR:
11068 /* Division by zero is undefined. */
11069 if (integer_zerop (op1))
11070 return NULL_TREE;
11071
11072 if (TREE_CODE (op1) == REAL_CST
11073 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
11074 && real_zerop (op1))
11075 return NULL_TREE;
11076
11077 goto binary;
11078
11079 case MIN_EXPR:
11080 if (INTEGRAL_TYPE_P (type)
11081 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11082 return omit_one_operand (type, op1, op0);
11083
11084 goto binary;
11085
11086 case MAX_EXPR:
11087 if (INTEGRAL_TYPE_P (type)
11088 && TYPE_MAX_VALUE (type)
11089 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11090 return omit_one_operand (type, op1, op0);
11091
11092 goto binary;
11093
11094 case RSHIFT_EXPR:
11095 /* Optimize -1 >> x for arithmetic right shifts. */
11096 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
11097 return omit_one_operand (type, op0, op1);
11098 /* ... fall through ... */
11099
11100 case LSHIFT_EXPR:
11101 if (integer_zerop (op0))
11102 return omit_one_operand (type, op0, op1);
11103
11104 /* Since a negative shift count is not well-defined, don't
11105 try to evaluate it at compile time. */
11106 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
11107 return NULL_TREE;
11108
11109 goto binary;
11110
11111 case LROTATE_EXPR:
11112 case RROTATE_EXPR:
11113 /* -1 rotated either direction by any amount is still -1. */
11114 if (integer_all_onesp (op0))
11115 return omit_one_operand (type, op0, op1);
11116
11117 /* 0 rotated either direction by any amount is still zero. */
11118 if (integer_zerop (op0))
11119 return omit_one_operand (type, op0, op1);
11120
11121 goto binary;
11122
11123 case COMPLEX_EXPR:
11124 if (wins)
11125 return build_complex (type, op0, op1);
11126 return NULL_TREE;
11127
11128 case LT_EXPR:
11129 case LE_EXPR:
11130 case GT_EXPR:
11131 case GE_EXPR:
11132 case EQ_EXPR:
11133 case NE_EXPR:
11134 /* If one arg is a real or integer constant, put it last. */
11135 if ((TREE_CODE (op0) == INTEGER_CST
11136 && TREE_CODE (op1) != INTEGER_CST)
11137 || (TREE_CODE (op0) == REAL_CST
11138 && TREE_CODE (op1) != REAL_CST))
11139 {
11140 tree temp;
11141
11142 temp = op0;
11143 op0 = op1;
11144 op1 = temp;
11145 code = swap_tree_comparison (code);
11146 }
11147
11148 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11149 This transformation affects the cases which are handled in later
11150 optimizations involving comparisons with non-negative constants. */
11151 if (TREE_CODE (op1) == INTEGER_CST
11152 && TREE_CODE (op0) != INTEGER_CST
11153 && tree_int_cst_sgn (op1) > 0)
11154 {
11155 switch (code)
11156 {
11157 case GE_EXPR:
11158 code = GT_EXPR;
11159 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11160 break;
11161
11162 case LT_EXPR:
11163 code = LE_EXPR;
11164 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11165 break;
11166
11167 default:
11168 break;
11169 }
11170 }
11171
11172 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
11173 if (tem)
11174 return tem;
11175
11176 /* Fall through. */
11177
11178 case ORDERED_EXPR:
11179 case UNORDERED_EXPR:
11180 case UNLT_EXPR:
11181 case UNLE_EXPR:
11182 case UNGT_EXPR:
11183 case UNGE_EXPR:
11184 case UNEQ_EXPR:
11185 case LTGT_EXPR:
11186 if (!wins)
11187 return NULL_TREE;
11188
11189 return fold_relational_const (code, type, op0, op1);
11190
11191 case RANGE_EXPR:
11192 /* This could probably be handled. */
11193 return NULL_TREE;
11194
11195 case TRUTH_AND_EXPR:
11196 /* If second arg is constant zero, result is zero, but first arg
11197 must be evaluated. */
11198 if (integer_zerop (op1))
11199 return omit_one_operand (type, op1, op0);
11200 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11201 case will be handled here. */
11202 if (integer_zerop (op0))
11203 return omit_one_operand (type, op0, op1);
11204 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11205 return constant_boolean_node (true, type);
11206 return NULL_TREE;
11207
11208 case TRUTH_OR_EXPR:
11209 /* If second arg is constant true, result is true, but we must
11210 evaluate first arg. */
11211 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
11212 return omit_one_operand (type, op1, op0);
11213 /* Likewise for first arg, but note this only occurs here for
11214 TRUTH_OR_EXPR. */
11215 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
11216 return omit_one_operand (type, op0, op1);
11217 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11218 return constant_boolean_node (false, type);
11219 return NULL_TREE;
11220
11221 case TRUTH_XOR_EXPR:
11222 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11223 {
11224 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
11225 return constant_boolean_node (x, type);
11226 }
11227 return NULL_TREE;
11228
11229 default:
11230 return NULL_TREE;
11231 }
11232 }
11233
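/* A sketch of typical uses (illustrative only; the helper below is
   hypothetical, guarded out of the build, and not part of this file).
   Note the exception to the constant-operands rule: MINUS_EXPR of two
   structurally equal, side-effect-free operands folds to zero even
   though neither operand is itself a constant.  */
#if 0
static tree
fold_binary_to_constant_examples (tree x)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Folds to the INTEGER_CST 5.  */
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                      two, three);
  /* Assuming X is a side-effect-free integer expression, this folds
     to integer zero via the operand_equal_p shortcut.  */
  tree diff = fold_binary_to_constant (MINUS_EXPR, integer_type_node,
                                       x, x);
  return sum ? sum : diff;
}
#endif
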
11234 /* Given the components of a unary expression CODE, TYPE and OP0,
11235 attempt to fold the expression to a constant without modifying
11236 TYPE or OP0.
11237
11238 If the expression could be simplified to a constant, then return
11239 the constant. If the expression would not be simplified to a
11240 constant, then return NULL_TREE.
11241
11242 Note this is primarily designed to be called after gimplification
11243 of the tree structures and when op0 is a constant. As a result
11244 of those simplifying assumptions this routine is far simpler than
11245 the generic fold routine. */
11246
11247 tree
11248 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11249 {
11250 /* Make sure we have a suitable constant argument. */
11251 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
11252 {
11253 tree subop;
11254
11255 if (TREE_CODE (op0) == COMPLEX_CST)
11256 subop = TREE_REALPART (op0);
11257 else
11258 subop = op0;
11259
11260 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
11261 return NULL_TREE;
11262 }
11263
11264 switch (code)
11265 {
11266 case NOP_EXPR:
11267 case FLOAT_EXPR:
11268 case CONVERT_EXPR:
11269 case FIX_TRUNC_EXPR:
11270 case FIX_FLOOR_EXPR:
11271 case FIX_CEIL_EXPR:
11272 case FIX_ROUND_EXPR:
11273 return fold_convert_const (code, type, op0);
11274
11275 case NEGATE_EXPR:
11276 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11277 return fold_negate_const (op0, type);
11278 else
11279 return NULL_TREE;
11280
11281 case ABS_EXPR:
11282 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11283 return fold_abs_const (op0, type);
11284 else
11285 return NULL_TREE;
11286
11287 case BIT_NOT_EXPR:
11288 if (TREE_CODE (op0) == INTEGER_CST)
11289 return fold_not_const (op0, type);
11290 else
11291 return NULL_TREE;
11292
11293 case REALPART_EXPR:
11294 if (TREE_CODE (op0) == COMPLEX_CST)
11295 return TREE_REALPART (op0);
11296 else
11297 return NULL_TREE;
11298
11299 case IMAGPART_EXPR:
11300 if (TREE_CODE (op0) == COMPLEX_CST)
11301 return TREE_IMAGPART (op0);
11302 else
11303 return NULL_TREE;
11304
11305 case CONJ_EXPR:
11306 if (TREE_CODE (op0) == COMPLEX_CST
11307 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
11308 return build_complex (type, TREE_REALPART (op0),
11309 negate_expr (TREE_IMAGPART (op0)));
11310 return NULL_TREE;
11311
11312 default:
11313 return NULL_TREE;
11314 }
11315 }
11316
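/* For example (illustrative): applying NEGATE_EXPR to the INTEGER_CST
   5 yields -5 via fold_negate_const, BIT_NOT_EXPR applied to 0 yields
   -1 in a signed type via fold_not_const, and any of these codes
   applied to a non-constant operand simply returns NULL_TREE.  */
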
11317 /* If EXP represents referencing an element in a constant string
11318 (either via pointer arithmetic or array indexing), return the
11319 tree representing the value accessed, otherwise return NULL. */
11320
11321 tree
11322 fold_read_from_constant_string (tree exp)
11323 {
11324 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11325 {
11326 tree exp1 = TREE_OPERAND (exp, 0);
11327 tree index;
11328 tree string;
11329
11330 if (TREE_CODE (exp) == INDIRECT_REF)
11331 string = string_constant (exp1, &index);
11332 else
11333 {
11334 tree low_bound = array_ref_low_bound (exp);
11335 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11336
11337 /* Optimize the special-case of a zero lower bound.
11338
11339 We convert the low_bound to sizetype to avoid some problems
11340 with constant folding. (E.g. suppose the lower bound is 1,
11341 and its mode is QI. Without the conversion, (ARRAY
11342 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11343 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11344 if (! integer_zerop (low_bound))
11345 index = size_diffop (index, fold_convert (sizetype, low_bound));
11346
11347 string = exp1;
11348 }
11349
11350 if (string
11351 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11352 && TREE_CODE (string) == STRING_CST
11353 && TREE_CODE (index) == INTEGER_CST
11354 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11355 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11356 == MODE_INT)
11357 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11358 return fold_convert (TREE_TYPE (exp),
11359 build_int_cst (NULL_TREE,
11360 (TREE_STRING_POINTER (string)
11361 [TREE_INT_CST_LOW (index)])));
11362 }
11363 return NULL;
11364 }
11365
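/* For instance (illustrative): given the ARRAY_REF "abc"[1], the
   checks above all pass -- the string is a STRING_CST, the index 1 is
   a constant below TREE_STRING_LENGTH, and the element type has a
   single-byte integer mode -- so the reference folds to the character
   constant 'b'.  */
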
11366 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11367 an integer constant or real constant.
11368
11369 TYPE is the type of the result. */
11370
11371 static tree
11372 fold_negate_const (tree arg0, tree type)
11373 {
11374 tree t = NULL_TREE;
11375
11376 switch (TREE_CODE (arg0))
11377 {
11378 case INTEGER_CST:
11379 {
11380 unsigned HOST_WIDE_INT low;
11381 HOST_WIDE_INT high;
11382 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11383 TREE_INT_CST_HIGH (arg0),
11384 &low, &high);
11385 t = build_int_cst_wide (type, low, high);
11386 t = force_fit_type (t, 1,
11387 (overflow | TREE_OVERFLOW (arg0))
11388 && !TYPE_UNSIGNED (type),
11389 TREE_CONSTANT_OVERFLOW (arg0));
11390 break;
11391 }
11392
11393 case REAL_CST:
11394 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11395 break;
11396
11397 default:
11398 gcc_unreachable ();
11399 }
11400
11401 return t;
11402 }
11403
11404 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11405 an integer constant or real constant.
11406
11407 TYPE is the type of the result. */
11408
11409 tree
11410 fold_abs_const (tree arg0, tree type)
11411 {
11412 tree t = NULL_TREE;
11413
11414 switch (TREE_CODE (arg0))
11415 {
11416 case INTEGER_CST:
11417 /* If the value is unsigned, then the absolute value is
11418 the same as the ordinary value. */
11419 if (TYPE_UNSIGNED (type))
11420 t = arg0;
11421 /* Similarly, if the value is non-negative. */
11422 else if (INT_CST_LT (integer_minus_one_node, arg0))
11423 t = arg0;
11424 /* If the value is negative, then the absolute value is
11425 its negation. */
11426 else
11427 {
11428 unsigned HOST_WIDE_INT low;
11429 HOST_WIDE_INT high;
11430 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11431 TREE_INT_CST_HIGH (arg0),
11432 &low, &high);
11433 t = build_int_cst_wide (type, low, high);
11434 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11435 TREE_CONSTANT_OVERFLOW (arg0));
11436 }
11437 break;
11438
11439 case REAL_CST:
11440 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11441 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11442 else
11443 t = arg0;
11444 break;
11445
11446 default:
11447 gcc_unreachable ();
11448 }
11449
11450 return t;
11451 }
11452
11453 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11454 constant. TYPE is the type of the result. */
11455
11456 static tree
11457 fold_not_const (tree arg0, tree type)
11458 {
11459 tree t = NULL_TREE;
11460
11461 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11462
11463 t = build_int_cst_wide (type,
11464 ~ TREE_INT_CST_LOW (arg0),
11465 ~ TREE_INT_CST_HIGH (arg0));
11466 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11467 TREE_CONSTANT_OVERFLOW (arg0));
11468
11469 return t;
11470 }
11471
11472 /* Given CODE, a relational operator, the target type, TYPE and two
11473 constant operands OP0 and OP1, return the result of the
11474 relational operation. If the result is not a compile time
11475 constant, then return NULL_TREE. */
11476
11477 static tree
11478 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11479 {
11480 int result, invert;
11481
11482 /* From here on, the only cases we handle are when the result is
11483 known to be a constant. */
11484
11485 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11486 {
11487 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11488 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11489
11490 /* Handle the cases where either operand is a NaN. */
11491 if (real_isnan (c0) || real_isnan (c1))
11492 {
11493 switch (code)
11494 {
11495 case EQ_EXPR:
11496 case ORDERED_EXPR:
11497 result = 0;
11498 break;
11499
11500 case NE_EXPR:
11501 case UNORDERED_EXPR:
11502 case UNLT_EXPR:
11503 case UNLE_EXPR:
11504 case UNGT_EXPR:
11505 case UNGE_EXPR:
11506 case UNEQ_EXPR:
11507 result = 1;
11508 break;
11509
11510 case LT_EXPR:
11511 case LE_EXPR:
11512 case GT_EXPR:
11513 case GE_EXPR:
11514 case LTGT_EXPR:
11515 if (flag_trapping_math)
11516 return NULL_TREE;
11517 result = 0;
11518 break;
11519
11520 default:
11521 gcc_unreachable ();
11522 }
11523
11524 return constant_boolean_node (result, type);
11525 }
11526
11527 return constant_boolean_node (real_compare (code, c0, c1), type);
11528 }
11529
11530 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11531
11532 To compute GT, swap the arguments and do LT.
11533 To compute GE, do LT and invert the result.
11534 To compute LE, swap the arguments, do LT and invert the result.
11535 To compute NE, do EQ and invert the result.
11536
11537 Therefore, the code below must handle only EQ and LT. */
11538
11539 if (code == LE_EXPR || code == GT_EXPR)
11540 {
11541 tree tem = op0;
11542 op0 = op1;
11543 op1 = tem;
11544 code = swap_tree_comparison (code);
11545 }
11546
11547 /* Note that it is safe to invert for real values here because we
11548 have already handled the one case where it matters. */
11549
11550 invert = 0;
11551 if (code == NE_EXPR || code == GE_EXPR)
11552 {
11553 invert = 1;
11554 code = invert_tree_comparison (code, false);
11555 }
11556
11557 /* Compute a result for LT or EQ if args permit;
11558 otherwise return NULL_TREE. */
11559 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11560 {
11561 if (code == EQ_EXPR)
11562 result = tree_int_cst_equal (op0, op1);
11563 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11564 result = INT_CST_LT_UNSIGNED (op0, op1);
11565 else
11566 result = INT_CST_LT (op0, op1);
11567 }
11568 else
11569 return NULL_TREE;
11570
11571 if (invert)
11572 result ^= 1;
11573 return constant_boolean_node (result, type);
11574 }
11575
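/* The NaN handling above follows IEEE semantics (illustrative
   summary): with a NaN operand, EQ_EXPR and ORDERED_EXPR fold to
   false while NE_EXPR and the UN* codes fold to true; the signaling
   comparisons LT, LE, GT, GE and LTGT fold to false only when
   -fno-trapping-math allows the invalid-operand trap to be
   discarded, and otherwise are left alone.  */
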
11576 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11577 Don't build a CLEANUP_POINT_EXPR for an EXPR which doesn't have side
11578 effects. */
11579
11580 tree
11581 fold_build_cleanup_point_expr (tree type, tree expr)
11582 {
11583 /* If the expression does not have side effects then we don't have to wrap
11584 it with a cleanup point expression. */
11585 if (!TREE_SIDE_EFFECTS (expr))
11586 return expr;
11587
11588 /* If the expression is a return, check whether the expression inside the
11589 return, or the right-hand side of the MODIFY_EXPR it contains, has side
11590 effects. If either of them doesn't, we don't need to wrap the expression
11591 in a cleanup point expression. Note we don't check the left-hand side of
11592 the MODIFY_EXPR because it should always be the return decl. */
11593 if (TREE_CODE (expr) == RETURN_EXPR)
11594 {
11595 tree op = TREE_OPERAND (expr, 0);
11596 if (!op || !TREE_SIDE_EFFECTS (op))
11597 return expr;
11598 op = TREE_OPERAND (op, 1);
11599 if (!TREE_SIDE_EFFECTS (op))
11600 return expr;
11601 }
11602
11603 return build1 (CLEANUP_POINT_EXPR, type, expr);
11604 }
11605
11606 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11607 avoid confusing the gimplify process. */
11608
11609 tree
11610 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11611 {
11612 /* The size of the object is not relevant when talking about its address. */
11613 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11614 t = TREE_OPERAND (t, 0);
11615
11616 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11617 if (TREE_CODE (t) == INDIRECT_REF
11618 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11619 {
11620 t = TREE_OPERAND (t, 0);
11621 if (TREE_TYPE (t) != ptrtype)
11622 t = build1 (NOP_EXPR, ptrtype, t);
11623 }
11624 else
11625 {
11626 tree base = t;
11627
11628 while (handled_component_p (base))
11629 base = TREE_OPERAND (base, 0);
11630 if (DECL_P (base))
11631 TREE_ADDRESSABLE (base) = 1;
11632
11633 t = build1 (ADDR_EXPR, ptrtype, t);
11634 }
11635
11636 return t;
11637 }
11638
11639 tree
11640 build_fold_addr_expr (tree t)
11641 {
11642 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11643 }
11644
11645 /* Given a pointer value T, return a simplified version of an indirection
11646 through T, or NULL_TREE if no simplification is possible. */
11647
11648 static tree
11649 fold_indirect_ref_1 (tree t)
11650 {
11651 tree type = TREE_TYPE (TREE_TYPE (t));
11652 tree sub = t;
11653 tree subtype;
11654
11655 STRIP_NOPS (sub);
11656 subtype = TREE_TYPE (sub);
11657 if (!POINTER_TYPE_P (subtype))
11658 return NULL_TREE;
11659
11660 if (TREE_CODE (sub) == ADDR_EXPR)
11661 {
11662 tree op = TREE_OPERAND (sub, 0);
11663 tree optype = TREE_TYPE (op);
11664 /* *&p => p */
11665 if (lang_hooks.types_compatible_p (type, optype))
11666 return op;
11667 /* *(foo *)&fooarray => fooarray[0] */
11668 else if (TREE_CODE (optype) == ARRAY_TYPE
11669 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11670 {
11671 tree type_domain = TYPE_DOMAIN (optype);
11672 tree min_val = size_zero_node;
11673 if (type_domain && TYPE_MIN_VALUE (type_domain))
11674 min_val = TYPE_MIN_VALUE (type_domain);
11675 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11676 }
11677 }
11678
11679 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11680 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11681 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11682 {
11683 tree type_domain;
11684 tree min_val = size_zero_node;
11685 sub = build_fold_indirect_ref (sub);
11686 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11687 if (type_domain && TYPE_MIN_VALUE (type_domain))
11688 min_val = TYPE_MIN_VALUE (type_domain);
11689 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11690 }
11691
11692 return NULL_TREE;
11693 }
11694
11695 /* Builds an expression for an indirection through T, simplifying some
11696 cases. */
11697
11698 tree
11699 build_fold_indirect_ref (tree t)
11700 {
11701 tree sub = fold_indirect_ref_1 (t);
11702
11703 if (sub)
11704 return sub;
11705 else
11706 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
11707 }
11708
11709 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11710
11711 tree
11712 fold_indirect_ref (tree t)
11713 {
11714 tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
11715
11716 if (sub)
11717 return sub;
11718 else
11719 return t;
11720 }
11721
11722 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11723 whose result is ignored. The type of the returned tree need not be
11724 the same as the original expression. */
11725
11726 tree
11727 fold_ignored_result (tree t)
11728 {
11729 if (!TREE_SIDE_EFFECTS (t))
11730 return integer_zero_node;
11731
11732 for (;;)
11733 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11734 {
11735 case tcc_unary:
11736 t = TREE_OPERAND (t, 0);
11737 break;
11738
11739 case tcc_binary:
11740 case tcc_comparison:
11741 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11742 t = TREE_OPERAND (t, 0);
11743 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11744 t = TREE_OPERAND (t, 1);
11745 else
11746 return t;
11747 break;
11748
11749 case tcc_expression:
11750 switch (TREE_CODE (t))
11751 {
11752 case COMPOUND_EXPR:
11753 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11754 return t;
11755 t = TREE_OPERAND (t, 0);
11756 break;
11757
11758 case COND_EXPR:
11759 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11760 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11761 return t;
11762 t = TREE_OPERAND (t, 0);
11763 break;
11764
11765 default:
11766 return t;
11767 }
11768 break;
11769
11770 default:
11771 return t;
11772 }
11773 }
11774
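/* For example (illustrative): if T is the COMPOUND_EXPR (i = x, i),
   the second operand has no side effects, so the loop strips it and
   returns the MODIFY_EXPR i = x; if T has no side effects at all,
   integer_zero_node is returned immediately.  */
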
11775 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11776 This can only be applied to objects of a sizetype. */
11777
11778 tree
11779 round_up (tree value, int divisor)
11780 {
11781 tree div = NULL_TREE;
11782
11783 gcc_assert (divisor > 0);
11784 if (divisor == 1)
11785 return value;
11786
11787 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11788 have to do anything. Only do this when we are not given a const,
11789 because in that case, this check is more expensive than just
11790 doing it. */
11791 if (TREE_CODE (value) != INTEGER_CST)
11792 {
11793 div = build_int_cst (TREE_TYPE (value), divisor);
11794
11795 if (multiple_of_p (TREE_TYPE (value), value, div))
11796 return value;
11797 }
11798
11799 /* If divisor is a power of two, simplify this to bit manipulation. */
11800 if (divisor == (divisor & -divisor))
11801 {
11802 tree t;
11803
11804 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11805 value = size_binop (PLUS_EXPR, value, t);
11806 t = build_int_cst (TREE_TYPE (value), -divisor);
11807 value = size_binop (BIT_AND_EXPR, value, t);
11808 }
11809 else
11810 {
11811 if (!div)
11812 div = build_int_cst (TREE_TYPE (value), divisor);
11813 value = size_binop (CEIL_DIV_EXPR, value, div);
11814 value = size_binop (MULT_EXPR, value, div);
11815 }
11816
11817 return value;
11818 }
11819
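/* Worked example of the power-of-two path (illustrative): for
   DIVISOR == 8 the result is computed as (VALUE + 7) & -8, so
   round_up applied to 13 yields 16 (and round_down below yields 8);
   a non-power-of-two divisor instead goes through CEIL_DIV_EXPR
   followed by MULT_EXPR.  */
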
11820 /* Likewise, but round down. */
11821
11822 tree
11823 round_down (tree value, int divisor)
11824 {
11825 tree div = NULL_TREE;
11826
11827 gcc_assert (divisor > 0);
11828 if (divisor == 1)
11829 return value;
11830
11831 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11832 have to do anything. Only do this when we are not given a const,
11833 because in that case, this check is more expensive than just
11834 doing it. */
11835 if (TREE_CODE (value) != INTEGER_CST)
11836 {
11837 div = build_int_cst (TREE_TYPE (value), divisor);
11838
11839 if (multiple_of_p (TREE_TYPE (value), value, div))
11840 return value;
11841 }
11842
11843 /* If divisor is a power of two, simplify this to bit manipulation. */
11844 if (divisor == (divisor & -divisor))
11845 {
11846 tree t;
11847
11848 t = build_int_cst (TREE_TYPE (value), -divisor);
11849 value = size_binop (BIT_AND_EXPR, value, t);
11850 }
11851 else
11852 {
11853 if (!div)
11854 div = build_int_cst (TREE_TYPE (value), divisor);
11855 value = size_binop (FLOOR_DIV_EXPR, value, div);
11856 value = size_binop (MULT_EXPR, value, div);
11857 }
11858
11859 return value;
11860 }
11861
11862 /* Returns the pointer to the base of the object addressed by EXP and
11863 extracts the information about the offset of the access, storing it
11864 in PBITPOS and POFFSET. */
11865
11866 static tree
11867 split_address_to_core_and_offset (tree exp,
11868 HOST_WIDE_INT *pbitpos, tree *poffset)
11869 {
11870 tree core;
11871 enum machine_mode mode;
11872 int unsignedp, volatilep;
11873 HOST_WIDE_INT bitsize;
11874
11875 if (TREE_CODE (exp) == ADDR_EXPR)
11876 {
11877 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11878 poffset, &mode, &unsignedp, &volatilep,
11879 false);
11880
11881 if (TREE_CODE (core) == INDIRECT_REF)
11882 core = TREE_OPERAND (core, 0);
11883 }
11884 else
11885 {
11886 core = exp;
11887 *pbitpos = 0;
11888 *poffset = NULL_TREE;
11889 }
11890
11891 return core;
11892 }
11893
11894 /* Returns true if addresses of E1 and E2 differ by a constant, false
11895 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11896
11897 bool
11898 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11899 {
11900 tree core1, core2;
11901 HOST_WIDE_INT bitpos1, bitpos2;
11902 tree toffset1, toffset2, tdiff, type;
11903
11904 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11905 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11906
11907 if (bitpos1 % BITS_PER_UNIT != 0
11908 || bitpos2 % BITS_PER_UNIT != 0
11909 || !operand_equal_p (core1, core2, 0))
11910 return false;
11911
11912 if (toffset1 && toffset2)
11913 {
11914 type = TREE_TYPE (toffset1);
11915 if (type != TREE_TYPE (toffset2))
11916 toffset2 = fold_convert (type, toffset2);
11917
11918 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11919 if (!host_integerp (tdiff, 0))
11920 return false;
11921
11922 *diff = tree_low_cst (tdiff, 0);
11923 }
11924 else if (toffset1 || toffset2)
11925 {
11926 /* If only one of the offsets is non-constant, the difference cannot
11927 be a constant. */
11928 return false;
11929 }
11930 else
11931 *diff = 0;
11932
11933 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11934 return true;
11935 }
11936
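/* For instance (illustrative): with int A[10], &A[3] and &A[1] share
   the core A, with constant bit positions 96 and 32 and no variable
   offsets, so *DIFF is set to (96 - 32) / 8 == 8 and the function
   returns true.  If exactly one address had a variable offset, the
   difference could not be constant and false would be returned.  */
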
11937 /* Simplify the floating point expression EXP when the sign of the
11938 result is not significant. Return NULL_TREE if no simplification
11939 is possible. */
11940
11941 tree
11942 fold_strip_sign_ops (tree exp)
11943 {
11944 tree arg0, arg1;
11945
11946 switch (TREE_CODE (exp))
11947 {
11948 case ABS_EXPR:
11949 case NEGATE_EXPR:
11950 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11951 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11952
11953 case MULT_EXPR:
11954 case RDIV_EXPR:
11955 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11956 return NULL_TREE;
11957 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11958 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11959 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11960 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11961 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11962 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11963 break;
11964
11965 default:
11966 break;
11967 }
11968 return NULL_TREE;
11969 }
11970
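/* For example (illustrative): stripping (-X) * Y yields X * Y, since
   negating one factor only changes the sign of the product.  Nothing
   is stripped when sign-dependent rounding is honored (e.g. under
   -frounding-math), because then the operand signs can affect the
   rounded magnitude as well.  */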