re PR middle-end/37103 (possible integer codegen bug)
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26    @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
45
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
68 #include "gimple.h"
69
70 /* Nonzero if we are folding constants inside an initializer; zero
71 otherwise. */
72 int folding_initializer = 0;
73
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
78 COMPCODE_FALSE = 0,
79 COMPCODE_LT = 1,
80 COMPCODE_EQ = 2,
81 COMPCODE_LE = 3,
82 COMPCODE_GT = 4,
83 COMPCODE_LTGT = 5,
84 COMPCODE_GE = 6,
85 COMPCODE_ORD = 7,
86 COMPCODE_UNORD = 8,
87 COMPCODE_UNLT = 9,
88 COMPCODE_UNEQ = 10,
89 COMPCODE_UNLE = 11,
90 COMPCODE_UNGT = 12,
91 COMPCODE_NE = 13,
92 COMPCODE_UNGE = 14,
93 COMPCODE_TRUE = 15
94 };
95
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static tree combine_comparisons (enum tree_code, enum tree_code,
107 enum tree_code, tree, tree, tree);
108 static int truth_value_p (enum tree_code);
109 static int operand_equal_for_comparison_p (tree, tree, tree);
110 static int twoval_comparison_p (tree, tree *, tree *, int *);
111 static tree eval_subst (tree, tree, tree, tree, tree);
112 static tree pedantic_omit_one_operand (tree, tree, tree);
113 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
116 tree *, tree *);
117 static tree sign_bit_p (tree, const_tree);
118 static int simple_operand_p (const_tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *, bool *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 tree);
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 tree, tree, tree);
138 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
139 static tree fold_div_compare (enum tree_code, tree, tree, tree);
140 static bool reorder_operands_p (const_tree, const_tree);
141 static tree fold_negate_const (tree, tree);
142 static tree fold_not_const (tree, tree);
143 static tree fold_relational_const (enum tree_code, tree, tree, tree);
144
145
146 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
147 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
148 and SUM1. Then this yields nonzero if overflow occurred during the
149 addition.
150
151 Overflow occurs if A and B have the same sign, but A and SUM differ in
152 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
153 sign. */
154 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
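/* Editor's note: a minimal usage sketch for the macro above, not part
   of the original file; it assumes a 32-bit int with 2's complement
   wrap-around on overflow.  */
#if 0
{
  int a = 0x7fffffff, b = 1;
  int sum = a + b;		/* wraps to INT_MIN */
  /* Same-sign operands, opposite-sign sum: overflow is flagged.  */
  gcc_assert (OVERFLOW_SUM_SIGN (a, b, sum));
  /* Operands of different sign can never overflow the sum.  */
  gcc_assert (!OVERFLOW_SUM_SIGN (1, -2, -1));
}
#endif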
155 \f
156 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
157 We do that by representing the two-word integer in 4 words, with only
158 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
159    number.  A full word x is recovered from its halves as LOWPART (x) + HIGHPART (x) * BASE.  */
160
161 #define LOWPART(x) \
162 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
163 #define HIGHPART(x) \
164 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
165 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
166
167 /* Unpack a two-word integer into 4 words.
168 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
169 WORDS points to the array of HOST_WIDE_INTs. */
170
171 static void
172 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 {
174 words[0] = LOWPART (low);
175 words[1] = HIGHPART (low);
176 words[2] = LOWPART (hi);
177 words[3] = HIGHPART (hi);
178 }
179
180 /* Pack an array of 4 words into a two-word integer.
181 WORDS points to the array of words.
182 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
183
184 static void
185 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
186 HOST_WIDE_INT *hi)
187 {
188 *low = words[0] + words[1] * BASE;
189 *hi = words[2] + words[3] * BASE;
190 }
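/* Editor's sketch, not part of the original file: encode and decode
   are inverses, so splitting a two-word value into four quarter-word
   digits and packing it back is lossless.  */
#if 0
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo = 0x12345678;
  HOST_WIDE_INT hi = -1;
  encode (words, lo, hi);	/* words[i] is digit i, radix BASE */
  decode (words, &lo, &hi);	/* recovers the original LO and HI */
}
#endif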
191 \f
192 /* Force the double-word integer L1, H1 to be within the range of the
193 integer type TYPE. Stores the properly truncated and sign-extended
194 double-word integer in *LV, *HV. Returns true if the operation
195 overflows, that is, argument and result are different. */
196
197 int
198 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
199 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
200 {
201 unsigned HOST_WIDE_INT low0 = l1;
202 HOST_WIDE_INT high0 = h1;
203 unsigned int prec;
204 int sign_extended_type;
205
206 if (POINTER_TYPE_P (type)
207 || TREE_CODE (type) == OFFSET_TYPE)
208 prec = POINTER_SIZE;
209 else
210 prec = TYPE_PRECISION (type);
211
212 /* Size types *are* sign extended. */
213 sign_extended_type = (!TYPE_UNSIGNED (type)
214 || (TREE_CODE (type) == INTEGER_TYPE
215 && TYPE_IS_SIZETYPE (type)));
216
217 /* First clear all bits that are beyond the type's precision. */
218 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
219 ;
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
222 else
223 {
224 h1 = 0;
225 if (prec < HOST_BITS_PER_WIDE_INT)
226 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
227 }
228
229 /* Then do sign extension if necessary. */
230 if (!sign_extended_type)
231 /* No sign extension */;
232 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
233 /* Correct width already. */;
234 else if (prec > HOST_BITS_PER_WIDE_INT)
235 {
236 /* Sign extend top half? */
237 if (h1 & ((unsigned HOST_WIDE_INT)1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
240 }
241 else if (prec == HOST_BITS_PER_WIDE_INT)
242 {
243 if ((HOST_WIDE_INT)l1 < 0)
244 h1 = -1;
245 }
246 else
247 {
248 /* Sign extend bottom half? */
249 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
250 {
251 h1 = -1;
252 l1 |= (HOST_WIDE_INT)(-1) << prec;
253 }
254 }
255
256 *lv = l1;
257 *hv = h1;
258
259 /* If the value didn't fit, signal overflow. */
260 return l1 != low0 || h1 != high0;
261 }
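/* Editor's sketch, not part of the original file: forcing 0x1ff into
   the signed 8-bit type keeps only the low byte, sign extends it to
   -1, and reports the overflow.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = fit_double_type (0x1ff, 0, &lv, &hv, signed_char_type_node);
  /* Now lv == (unsigned HOST_WIDE_INT) -1, hv == -1, ovf != 0.  */
}
#endif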
262
263 /* We force the double-word integer HIGH:LOW to the range of the
264    type TYPE by sign or zero extending it.
265    OVERFLOWABLE indicates whether we are interested
266    in overflow of the value: when > 0 we are only interested in signed
267    overflow, when < 0 we are interested in any overflow.  OVERFLOWED
268    indicates whether overflow has already occurred.  We force the
269    value to be within range of TYPE (by setting to 0 or 1 all
270    the bits outside the type's precision).  We set TREE_OVERFLOW if
271    OVERFLOWED is nonzero,
272    or OVERFLOWABLE is > 0 and signed overflow occurs,
273    or OVERFLOWABLE is < 0 and any overflow occurs.
274    We return a new tree node for the extended double-word integer.
275    The node is shared if no overflow flags are set, otherwise a new
276    unshared node with TREE_OVERFLOW set is returned.  */
277
278 tree
279 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
280 HOST_WIDE_INT high, int overflowable,
281 bool overflowed)
282 {
283 int sign_extended_type;
284 bool overflow;
285
286 /* Size types *are* sign extended. */
287 sign_extended_type = (!TYPE_UNSIGNED (type)
288 || (TREE_CODE (type) == INTEGER_TYPE
289 && TYPE_IS_SIZETYPE (type)));
290
291 overflow = fit_double_type (low, high, &low, &high, type);
292
293 /* If we need to set overflow flags, return a new unshared node. */
294 if (overflowed || overflow)
295 {
296 if (overflowed
297 || overflowable < 0
298 || (overflowable > 0 && sign_extended_type))
299 {
300 tree t = make_node (INTEGER_CST);
301 TREE_INT_CST_LOW (t) = low;
302 TREE_INT_CST_HIGH (t) = high;
303 TREE_TYPE (t) = type;
304 TREE_OVERFLOW (t) = 1;
305 return t;
306 }
307 }
308
309 /* Else build a shared node. */
310 return build_int_cst_wide (type, low, high);
311 }
312 \f
313 /* Add two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows according to UNSIGNED_P.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
318
319 int
320 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
323 bool unsigned_p)
324 {
325 unsigned HOST_WIDE_INT l;
326 HOST_WIDE_INT h;
327
328 l = l1 + l2;
329 h = h1 + h2 + (l < l1);
330
331 *lv = l;
332 *hv = h;
333
334 if (unsigned_p)
335 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
336 else
337 return OVERFLOW_SUM_SIGN (h1, h2, h);
338 }
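/* Editor's sketch, not part of the original file: the carry out of
   the low word is (l < l1), and overflow is judged per UNSIGNED_P.
   Here -1 + 1 is 0 with a carry: no signed overflow, but the
   unsigned doubleword addition wraps and reports overflow.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double_with_sign (-1, -1, 1, 0, &lv, &hv, true);
  /* lv == 0, hv == 0, ovf != 0.  */
}
#endif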
339
340 /* Negate a doubleword integer with doubleword result.
341 Return nonzero if the operation overflows, assuming it's signed.
342 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
343 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
344
345 int
346 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
347 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
348 {
349 if (l1 == 0)
350 {
351 *lv = 0;
352 *hv = - h1;
353 return (*hv & h1) < 0;
354 }
355 else
356 {
357 *lv = -l1;
358 *hv = ~h1;
359 return 0;
360 }
361 }
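/* Editor's sketch, not part of the original file: the only doubleword
   whose negation overflows is the most negative one, for which *HV
   comes back equal to H1.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT min_hi = (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
  int ovf = neg_double (0, min_hi, &lv, &hv);	/* ovf != 0 */
}
#endif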
362 \f
363 /* Multiply two doubleword integers with doubleword result.
364 Return nonzero if the operation overflows according to UNSIGNED_P.
365 Each argument is given as two `HOST_WIDE_INT' pieces.
366 One argument is L1 and H1; the other, L2 and H2.
367 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
368
369 int
370 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
371 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
372 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
373 bool unsigned_p)
374 {
375 HOST_WIDE_INT arg1[4];
376 HOST_WIDE_INT arg2[4];
377 HOST_WIDE_INT prod[4 * 2];
378 unsigned HOST_WIDE_INT carry;
379 int i, j, k;
380 unsigned HOST_WIDE_INT toplow, neglow;
381 HOST_WIDE_INT tophigh, neghigh;
382
383 encode (arg1, l1, h1);
384 encode (arg2, l2, h2);
385
386 memset (prod, 0, sizeof prod);
387
388 for (i = 0; i < 4; i++)
389 {
390 carry = 0;
391 for (j = 0; j < 4; j++)
392 {
393 k = i + j;
394 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
395 carry += arg1[i] * arg2[j];
396 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
397 carry += prod[k];
398 prod[k] = LOWPART (carry);
399 carry = HIGHPART (carry);
400 }
401 prod[i + 4] = carry;
402 }
403
404 decode (prod, lv, hv);
405 decode (prod + 4, &toplow, &tophigh);
406
407 /* Unsigned overflow is immediate. */
408 if (unsigned_p)
409 return (toplow | tophigh) != 0;
410
411 /* Check for signed overflow by calculating the signed representation of the
412 top half of the result; it should agree with the low half's sign bit. */
413 if (h1 < 0)
414 {
415 neg_double (l2, h2, &neglow, &neghigh);
416 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
417 }
418 if (h2 < 0)
419 {
420 neg_double (l1, h1, &neglow, &neghigh);
421 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 }
423 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
424 }
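/* Editor's sketch, not part of the original file: the 4x4 digit
   product fills the 8-digit buffer; for a signed multiply the top
   doubleword must be the sign extension of the low one, or overflow
   is reported.  (-1) * (-1) == 1 exactly, so no overflow here.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = mul_double_with_sign (-1, -1, -1, -1, &lv, &hv, false);
  /* lv == 1, hv == 0, ovf == 0.  */
}
#endif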
425 \f
426 /* Shift the doubleword integer in L1, H1 left by COUNT places
427 keeping only PREC bits of result.
428 Shift right if COUNT is negative.
429 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
430 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
431
432 void
433 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
434 HOST_WIDE_INT count, unsigned int prec,
435 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
436 {
437 unsigned HOST_WIDE_INT signmask;
438
439 if (count < 0)
440 {
441 rshift_double (l1, h1, -count, prec, lv, hv, arith);
442 return;
443 }
444
445 if (SHIFT_COUNT_TRUNCATED)
446 count %= prec;
447
448 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
449 {
450 /* Shifting by the host word size is undefined according to the
451 ANSI standard, so we must handle this as a special case. */
452 *hv = 0;
453 *lv = 0;
454 }
455 else if (count >= HOST_BITS_PER_WIDE_INT)
456 {
457 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
458 *lv = 0;
459 }
460 else
461 {
462 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
463 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
464 *lv = l1 << count;
465 }
466
467 /* Sign extend all bits that are beyond the precision. */
468
469 signmask = -((prec > HOST_BITS_PER_WIDE_INT
470 ? ((unsigned HOST_WIDE_INT) *hv
471 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
472 : (*lv >> (prec - 1))) & 1);
473
474 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
475 ;
476 else if (prec >= HOST_BITS_PER_WIDE_INT)
477 {
478 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
479 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
480 }
481 else
482 {
483 *hv = signmask;
484 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
485 *lv |= signmask << prec;
486 }
487 }
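/* Editor's sketch, not part of the original file: shifting across the
   word boundary moves low-word bits into the high word.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  lshift_double (1, 0, HOST_BITS_PER_WIDE_INT, 2 * HOST_BITS_PER_WIDE_INT,
		 &lv, &hv, 0);
  /* Bit 0 of the low word lands in bit 0 of the high word:
     lv == 0, hv == 1.  */
}
#endif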
488
489 /* Shift the doubleword integer in L1, H1 right by COUNT places
490 keeping only PREC bits of result. COUNT must be positive.
491 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
492 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
493
494 void
495 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
496 HOST_WIDE_INT count, unsigned int prec,
497 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
498 int arith)
499 {
500 unsigned HOST_WIDE_INT signmask;
501
502 signmask = (arith
503 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
504 : 0);
505
506 if (SHIFT_COUNT_TRUNCATED)
507 count %= prec;
508
509 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
510 {
511 /* Shifting by the host word size is undefined according to the
512 ANSI standard, so we must handle this as a special case. */
513 *hv = 0;
514 *lv = 0;
515 }
516 else if (count >= HOST_BITS_PER_WIDE_INT)
517 {
518 *hv = 0;
519 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
520 }
521 else
522 {
523 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
524 *lv = ((l1 >> count)
525 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
526 }
527
528 /* Zero / sign extend all bits that are beyond the precision. */
529
530 if (count >= (HOST_WIDE_INT)prec)
531 {
532 *hv = signmask;
533 *lv = signmask;
534 }
535 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
536 ;
537 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
538 {
539 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
540 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
541 }
542 else
543 {
544 *hv = signmask;
545 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
546 *lv |= signmask << (prec - count);
547 }
548 }
549 \f
550 /* Rotate the doubleword integer in L1, H1 left by COUNT places
551 keeping only PREC bits of result.
552 Rotate right if COUNT is negative.
553 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
554
555 void
556 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
557 HOST_WIDE_INT count, unsigned int prec,
558 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
559 {
560 unsigned HOST_WIDE_INT s1l, s2l;
561 HOST_WIDE_INT s1h, s2h;
562
563 count %= prec;
564 if (count < 0)
565 count += prec;
566
567 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
568 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
569 *lv = s1l | s2l;
570 *hv = s1h | s2h;
571 }
572
573 /* Rotate the doubleword integer in L1, H1 right by COUNT places
574    keeping only PREC bits of result.  Rotate left if COUNT is negative.
575 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
576
577 void
578 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
579 HOST_WIDE_INT count, unsigned int prec,
580 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
581 {
582 unsigned HOST_WIDE_INT s1l, s2l;
583 HOST_WIDE_INT s1h, s2h;
584
585 count %= prec;
586 if (count < 0)
587 count += prec;
588
589 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
590 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
591 *lv = s1l | s2l;
592 *hv = s1h | s2h;
593 }
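/* Editor's sketch, not part of the original file: both rotates reduce
   COUNT modulo PREC and are each other's inverse, so rotating left by
   N and then right by N restores the original doubleword.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  lrotate_double (0x5, 0, 3, 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv);
  rrotate_double (lv, hv, 3, 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv);
  /* lv == 0x5 and hv == 0 again.  */
}
#endif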
594 \f
595 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
596 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
597 CODE is a tree code for a kind of division, one of
598 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
599    or EXACT_DIV_EXPR.
600 It controls how the quotient is rounded to an integer.
601 Return nonzero if the operation overflows.
602 UNS nonzero says do unsigned division. */
603
604 int
605 div_and_round_double (enum tree_code code, int uns,
606 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
607 HOST_WIDE_INT hnum_orig,
608 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
609 HOST_WIDE_INT hden_orig,
610 unsigned HOST_WIDE_INT *lquo,
611 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
612 HOST_WIDE_INT *hrem)
613 {
614 int quo_neg = 0;
615 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
616 HOST_WIDE_INT den[4], quo[4];
617 int i, j;
618 unsigned HOST_WIDE_INT work;
619 unsigned HOST_WIDE_INT carry = 0;
620 unsigned HOST_WIDE_INT lnum = lnum_orig;
621 HOST_WIDE_INT hnum = hnum_orig;
622 unsigned HOST_WIDE_INT lden = lden_orig;
623 HOST_WIDE_INT hden = hden_orig;
624 int overflow = 0;
625
626 if (hden == 0 && lden == 0)
627 overflow = 1, lden = 1;
628
629 /* Calculate quotient sign and convert operands to unsigned. */
630 if (!uns)
631 {
632 if (hnum < 0)
633 {
634 quo_neg = ~ quo_neg;
635 /* (minimum integer) / (-1) is the only overflow case. */
636 if (neg_double (lnum, hnum, &lnum, &hnum)
637 && ((HOST_WIDE_INT) lden & hden) == -1)
638 overflow = 1;
639 }
640 if (hden < 0)
641 {
642 quo_neg = ~ quo_neg;
643 neg_double (lden, hden, &lden, &hden);
644 }
645 }
646
647 if (hnum == 0 && hden == 0)
648 { /* single precision */
649 *hquo = *hrem = 0;
650 /* This unsigned division rounds toward zero. */
651 *lquo = lnum / lden;
652 goto finish_up;
653 }
654
655 if (hnum == 0)
656 { /* trivial case: dividend < divisor */
657 /* hden != 0 already checked. */
658 *hquo = *lquo = 0;
659 *hrem = hnum;
660 *lrem = lnum;
661 goto finish_up;
662 }
663
664 memset (quo, 0, sizeof quo);
665
666   memset (num, 0, sizeof num);	/* to zero the extra (5th) element */
667 memset (den, 0, sizeof den);
668
669 encode (num, lnum, hnum);
670 encode (den, lden, hden);
671
672 /* Special code for when the divisor < BASE. */
673 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
674 {
675 /* hnum != 0 already checked. */
676 for (i = 4 - 1; i >= 0; i--)
677 {
678 work = num[i] + carry * BASE;
679 quo[i] = work / lden;
680 carry = work % lden;
681 }
682 }
683 else
684 {
685 /* Full double precision division,
686 with thanks to Don Knuth's "Seminumerical Algorithms". */
687 int num_hi_sig, den_hi_sig;
688 unsigned HOST_WIDE_INT quo_est, scale;
689
690 /* Find the highest nonzero divisor digit. */
691 for (i = 4 - 1;; i--)
692 if (den[i] != 0)
693 {
694 den_hi_sig = i;
695 break;
696 }
697
698       /* Ensure that the first digit of the divisor is at least BASE/2.
699 This is required by the quotient digit estimation algorithm. */
700
701 scale = BASE / (den[den_hi_sig] + 1);
702 if (scale > 1)
703 { /* scale divisor and dividend */
704 carry = 0;
705 for (i = 0; i <= 4 - 1; i++)
706 {
707 work = (num[i] * scale) + carry;
708 num[i] = LOWPART (work);
709 carry = HIGHPART (work);
710 }
711
712 num[4] = carry;
713 carry = 0;
714 for (i = 0; i <= 4 - 1; i++)
715 {
716 work = (den[i] * scale) + carry;
717 den[i] = LOWPART (work);
718 carry = HIGHPART (work);
719 if (den[i] != 0) den_hi_sig = i;
720 }
721 }
722
723 num_hi_sig = 4;
724
725 /* Main loop */
726 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
727 {
728 /* Guess the next quotient digit, quo_est, by dividing the first
729 two remaining dividend digits by the high order quotient digit.
730 quo_est is never low and is at most 2 high. */
731 unsigned HOST_WIDE_INT tmp;
732
733 num_hi_sig = i + den_hi_sig + 1;
734 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
735 if (num[num_hi_sig] != den[den_hi_sig])
736 quo_est = work / den[den_hi_sig];
737 else
738 quo_est = BASE - 1;
739
740 /* Refine quo_est so it's usually correct, and at most one high. */
741 tmp = work - quo_est * den[den_hi_sig];
742 if (tmp < BASE
743 && (den[den_hi_sig - 1] * quo_est
744 > (tmp * BASE + num[num_hi_sig - 2])))
745 quo_est--;
746
747 /* Try QUO_EST as the quotient digit, by multiplying the
748 divisor by QUO_EST and subtracting from the remaining dividend.
749 Keep in mind that QUO_EST is the I - 1st digit. */
750
751 carry = 0;
752 for (j = 0; j <= den_hi_sig; j++)
753 {
754 work = quo_est * den[j] + carry;
755 carry = HIGHPART (work);
756 work = num[i + j] - LOWPART (work);
757 num[i + j] = LOWPART (work);
758 carry += HIGHPART (work) != 0;
759 }
760
761 /* If quo_est was high by one, then num[i] went negative and
762 we need to correct things. */
763 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
764 {
765 quo_est--;
766 carry = 0; /* add divisor back in */
767 for (j = 0; j <= den_hi_sig; j++)
768 {
769 work = num[i + j] + den[j] + carry;
770 carry = HIGHPART (work);
771 num[i + j] = LOWPART (work);
772 }
773
774 num [num_hi_sig] += carry;
775 }
776
777 /* Store the quotient digit. */
778 quo[i] = quo_est;
779 }
780 }
781
782 decode (quo, lquo, hquo);
783
784 finish_up:
785 /* If result is negative, make it so. */
786 if (quo_neg)
787 neg_double (*lquo, *hquo, lquo, hquo);
788
789 /* Compute trial remainder: rem = num - (quo * den) */
790 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
791 neg_double (*lrem, *hrem, lrem, hrem);
792 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
793
794 switch (code)
795 {
796 case TRUNC_DIV_EXPR:
797 case TRUNC_MOD_EXPR: /* round toward zero */
798 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
799 return overflow;
800
801 case FLOOR_DIV_EXPR:
802 case FLOOR_MOD_EXPR: /* round toward negative infinity */
803 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
804 {
805 /* quo = quo - 1; */
806 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
807 lquo, hquo);
808 }
809 else
810 return overflow;
811 break;
812
813 case CEIL_DIV_EXPR:
814 case CEIL_MOD_EXPR: /* round toward positive infinity */
815 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
816 {
817 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
818 lquo, hquo);
819 }
820 else
821 return overflow;
822 break;
823
824 case ROUND_DIV_EXPR:
825 case ROUND_MOD_EXPR: /* round to closest integer */
826 {
827 unsigned HOST_WIDE_INT labs_rem = *lrem;
828 HOST_WIDE_INT habs_rem = *hrem;
829 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
830 HOST_WIDE_INT habs_den = hden, htwice;
831
832 /* Get absolute values. */
833 if (*hrem < 0)
834 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
835 if (hden < 0)
836 neg_double (lden, hden, &labs_den, &habs_den);
837
838 /* If (2 * abs (lrem) >= abs (lden)) */
839 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
840 labs_rem, habs_rem, &ltwice, &htwice);
841
842 if (((unsigned HOST_WIDE_INT) habs_den
843 < (unsigned HOST_WIDE_INT) htwice)
844 || (((unsigned HOST_WIDE_INT) habs_den
845 == (unsigned HOST_WIDE_INT) htwice)
846 && (labs_den < ltwice)))
847 {
848 if (*hquo < 0)
849 /* quo = quo - 1; */
850 add_double (*lquo, *hquo,
851 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
852 else
853 /* quo = quo + 1; */
854 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
855 lquo, hquo);
856 }
857 else
858 return overflow;
859 }
860 break;
861
862 default:
863 gcc_unreachable ();
864 }
865
866 /* Compute true remainder: rem = num - (quo * den) */
867 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
868 neg_double (*lrem, *hrem, lrem, hrem);
869 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
870 return overflow;
871 }
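/* Editor's note, not part of the original file: the rounding codes
   only differ when the remainder is nonzero.  Worked example, -7 / 2:
     TRUNC_DIV_EXPR  -> quotient -3, remainder -1  (toward zero)
     FLOOR_DIV_EXPR  -> quotient -4, remainder  1  (toward -infinity)
     CEIL_DIV_EXPR   -> quotient -3, remainder -1  (toward +infinity)
     ROUND_DIV_EXPR  -> quotient -4, remainder  1  (-3.5 is equidistant;
                        the code above rounds it away from zero).  */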
872
873 /* If ARG2 divides ARG1 with zero remainder, carries out the division
874 of type CODE and returns the quotient.
875 Otherwise returns NULL_TREE. */
876
877 static tree
878 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
879 {
880 unsigned HOST_WIDE_INT int1l, int2l;
881 HOST_WIDE_INT int1h, int2h;
882 unsigned HOST_WIDE_INT quol, reml;
883 HOST_WIDE_INT quoh, remh;
884 tree type = TREE_TYPE (arg1);
885 int uns = TYPE_UNSIGNED (type);
886
887 int1l = TREE_INT_CST_LOW (arg1);
888 int1h = TREE_INT_CST_HIGH (arg1);
889 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
890 &obj[some_exotic_number]. */
891 if (POINTER_TYPE_P (type))
892 {
893 uns = false;
894 type = signed_type_for (type);
895 fit_double_type (int1l, int1h, &int1l, &int1h,
896 type);
897 }
898 else
899 fit_double_type (int1l, int1h, &int1l, &int1h, type);
900 int2l = TREE_INT_CST_LOW (arg2);
901 int2h = TREE_INT_CST_HIGH (arg2);
902
903 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
904 &quol, &quoh, &reml, &remh);
905 if (remh != 0 || reml != 0)
906 return NULL_TREE;
907
908 return build_int_cst_wide (type, quol, quoh);
909 }
910 \f
911 /* This is nonzero if we should defer warnings about undefined
912 overflow. This facility exists because these warnings are a
913 special case. The code to estimate loop iterations does not want
914 to issue any warnings, since it works with expressions which do not
915 occur in user code. Various bits of cleanup code call fold(), but
916 only use the result if it has certain characteristics (e.g., is a
917 constant); that code only wants to issue a warning if the result is
918 used. */
919
920 static int fold_deferring_overflow_warnings;
921
922 /* If a warning about undefined overflow is deferred, this is the
923 warning. Note that this may cause us to turn two warnings into
924 one, but that is fine since it is sufficient to only give one
925 warning per expression. */
926
927 static const char* fold_deferred_overflow_warning;
928
929 /* If a warning about undefined overflow is deferred, this is the
930 level at which the warning should be emitted. */
931
932 static enum warn_strict_overflow_code fold_deferred_overflow_code;
933
934 /* Start deferring overflow warnings. We could use a stack here to
935 permit nested calls, but at present it is not necessary. */
936
937 void
938 fold_defer_overflow_warnings (void)
939 {
940 ++fold_deferring_overflow_warnings;
941 }
942
943 /* Stop deferring overflow warnings. If there is a pending warning,
944 and ISSUE is true, then issue the warning if appropriate. STMT is
945 the statement with which the warning should be associated (used for
946 location information); STMT may be NULL. CODE is the level of the
947 warning--a warn_strict_overflow_code value. This function will use
948 the smaller of CODE and the deferred code when deciding whether to
949 issue the warning. CODE may be zero to mean to always use the
950 deferred code. */
951
952 void
953 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
954 {
955 const char *warnmsg;
956 location_t locus;
957
958 gcc_assert (fold_deferring_overflow_warnings > 0);
959 --fold_deferring_overflow_warnings;
960 if (fold_deferring_overflow_warnings > 0)
961 {
962 if (fold_deferred_overflow_warning != NULL
963 && code != 0
964 && code < (int) fold_deferred_overflow_code)
965 fold_deferred_overflow_code = code;
966 return;
967 }
968
969 warnmsg = fold_deferred_overflow_warning;
970 fold_deferred_overflow_warning = NULL;
971
972 if (!issue || warnmsg == NULL)
973 return;
974
975 if (gimple_no_warning_p (stmt))
976 return;
977
978 /* Use the smallest code level when deciding to issue the
979 warning. */
980 if (code == 0 || code > (int) fold_deferred_overflow_code)
981 code = fold_deferred_overflow_code;
982
983 if (!issue_strict_overflow_warning (code))
984 return;
985
986 if (stmt == NULL)
987 locus = input_location;
988 else
989 locus = gimple_location (stmt);
990 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
991 }
992
993 /* Stop deferring overflow warnings, ignoring any deferred
994 warnings. */
995
996 void
997 fold_undefer_and_ignore_overflow_warnings (void)
998 {
999 fold_undefer_overflow_warnings (false, NULL, 0);
1000 }
1001
1002 /* Whether we are deferring overflow warnings. */
1003
1004 bool
1005 fold_deferring_overflow_warnings_p (void)
1006 {
1007 return fold_deferring_overflow_warnings > 0;
1008 }
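/* Editor's sketch, not part of the original file: the expected pairing
   of the deferral API around a speculative fold.  The names TYPE, OP0,
   OP1, STMT and USED_P are illustrative only.  */
#if 0
{
  tree res;
  fold_defer_overflow_warnings ();
  res = fold_binary (PLUS_EXPR, type, op0, op1);
  /* Warn only if the folded result will actually be used.  */
  fold_undefer_overflow_warnings (used_p && res != NULL_TREE, stmt, 0);
}
#endif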
1009
1010 /* This is called when we fold something based on the fact that signed
1011 overflow is undefined. */
1012
1013 static void
1014 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1015 {
1016 if (fold_deferring_overflow_warnings > 0)
1017 {
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1020 {
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1023 }
1024 }
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1027 }
1028 \f
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1031
1032 static bool
1033 negate_mathfn_p (enum built_in_function code)
1034 {
1035 switch (code)
1036 {
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1060 return true;
1061
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1067
1068 default:
1069 break;
1070 }
1071 return false;
1072 }
1073
1074 /* Check whether we may negate an integer constant T without causing
1075 overflow. */
1076
1077 bool
1078 may_negate_without_overflow_p (const_tree t)
1079 {
1080 unsigned HOST_WIDE_INT val;
1081 unsigned int prec;
1082 tree type;
1083
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1085
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1088 return false;
1089
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1092 {
1093 if (TREE_INT_CST_LOW (t) != 0)
1094 return true;
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1097 }
1098 else
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1103 }
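/* Editor's sketch, not part of the original file: the only signed
   value that cannot be negated is the type's minimum, e.g. INT_MIN
   for a 32-bit int; every other INTEGER_CST yields true.  */
#if 0
{
  tree t = TYPE_MIN_VALUE (integer_type_node);
  gcc_assert (!may_negate_without_overflow_p (t));
}
#endif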
1104
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1107
1108 static bool
1109 negate_expr_p (tree t)
1110 {
1111 tree type;
1112
1113 if (t == 0)
1114 return false;
1115
1116 type = TREE_TYPE (t);
1117
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1120 {
1121 case INTEGER_CST:
1122 if (TYPE_OVERFLOW_WRAPS (type))
1123 return true;
1124
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1127 case BIT_NOT_EXPR:
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1130
1131 case FIXED_CST:
1132 case REAL_CST:
1133 case NEGATE_EXPR:
1134 return true;
1135
1136 case COMPLEX_CST:
1137 return negate_expr_p (TREE_REALPART (t))
1138 && negate_expr_p (TREE_IMAGPART (t));
1139
1140 case COMPLEX_EXPR:
1141 return negate_expr_p (TREE_OPERAND (t, 0))
1142 && negate_expr_p (TREE_OPERAND (t, 1));
1143
1144 case CONJ_EXPR:
1145 return negate_expr_p (TREE_OPERAND (t, 0));
1146
1147 case PLUS_EXPR:
1148 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1149 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1150 return false;
1151 /* -(A + B) -> (-B) - A. */
1152 if (negate_expr_p (TREE_OPERAND (t, 1))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1)))
1155 return true;
1156 /* -(A + B) -> (-A) - B. */
1157 return negate_expr_p (TREE_OPERAND (t, 0));
1158
1159 case MINUS_EXPR:
1160 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1161 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1163 && reorder_operands_p (TREE_OPERAND (t, 0),
1164 TREE_OPERAND (t, 1));
1165
1166 case MULT_EXPR:
1167 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1168 break;
1169
1170 /* Fall through. */
1171
1172 case RDIV_EXPR:
1173 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1174 return negate_expr_p (TREE_OPERAND (t, 1))
1175 || negate_expr_p (TREE_OPERAND (t, 0));
1176 break;
1177
1178 case TRUNC_DIV_EXPR:
1179 case ROUND_DIV_EXPR:
1180 case FLOOR_DIV_EXPR:
1181 case CEIL_DIV_EXPR:
1182 case EXACT_DIV_EXPR:
1183 /* In general we can't negate A / B, because if A is INT_MIN and
1184 B is 1, we may turn this into INT_MIN / -1 which is undefined
1185 and actually traps on some architectures. But if overflow is
1186 undefined, we can negate, because - (INT_MIN / 1) is an
1187 overflow. */
1188 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1189 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1190 break;
1191 return negate_expr_p (TREE_OPERAND (t, 1))
1192 || negate_expr_p (TREE_OPERAND (t, 0));
1193
1194 case NOP_EXPR:
1195 /* Negate -((double)float) as (double)(-float). */
1196 if (TREE_CODE (type) == REAL_TYPE)
1197 {
1198 tree tem = strip_float_extensions (t);
1199 if (tem != t)
1200 return negate_expr_p (tem);
1201 }
1202 break;
1203
1204 case CALL_EXPR:
1205 /* Negate -f(x) as f(-x). */
1206 if (negate_mathfn_p (builtin_mathfn_code (t)))
1207 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1208 break;
1209
1210 case RSHIFT_EXPR:
1211 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1212 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1213 {
1214 tree op1 = TREE_OPERAND (t, 1);
1215 if (TREE_INT_CST_HIGH (op1) == 0
1216 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1217 == TREE_INT_CST_LOW (op1))
1218 return true;
1219 }
1220 break;
1221
1222 default:
1223 break;
1224 }
1225 return false;
1226 }
1227
1228 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
1229    no simplification is possible.
1230 If negate_expr_p would return true for T, NULL_TREE will never be
1231 returned. */
1232
1233 static tree
1234 fold_negate_expr (tree t)
1235 {
1236 tree type = TREE_TYPE (t);
1237 tree tem;
1238
1239 switch (TREE_CODE (t))
1240 {
1241 /* Convert - (~A) to A + 1. */
1242 case BIT_NOT_EXPR:
1243 if (INTEGRAL_TYPE_P (type))
1244 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1245 build_int_cst (type, 1));
1246 break;
1247
1248 case INTEGER_CST:
1249 tem = fold_negate_const (t, type);
1250 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1251 || !TYPE_OVERFLOW_TRAPS (type))
1252 return tem;
1253 break;
1254
1255 case REAL_CST:
1256 tem = fold_negate_const (t, type);
1257 /* Two's complement FP formats, such as c4x, may overflow. */
1258 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1259 return tem;
1260 break;
1261
1262 case FIXED_CST:
1263 tem = fold_negate_const (t, type);
1264 return tem;
1265
1266 case COMPLEX_CST:
1267 {
1268 tree rpart = negate_expr (TREE_REALPART (t));
1269 tree ipart = negate_expr (TREE_IMAGPART (t));
1270
1271 if ((TREE_CODE (rpart) == REAL_CST
1272 && TREE_CODE (ipart) == REAL_CST)
1273 || (TREE_CODE (rpart) == INTEGER_CST
1274 && TREE_CODE (ipart) == INTEGER_CST))
1275 return build_complex (type, rpart, ipart);
1276 }
1277 break;
1278
1279 case COMPLEX_EXPR:
1280 if (negate_expr_p (t))
1281 return fold_build2 (COMPLEX_EXPR, type,
1282 fold_negate_expr (TREE_OPERAND (t, 0)),
1283 fold_negate_expr (TREE_OPERAND (t, 1)));
1284 break;
1285
1286 case CONJ_EXPR:
1287 if (negate_expr_p (t))
1288 return fold_build1 (CONJ_EXPR, type,
1289 fold_negate_expr (TREE_OPERAND (t, 0)));
1290 break;
1291
1292 case NEGATE_EXPR:
1293 return TREE_OPERAND (t, 0);
1294
1295 case PLUS_EXPR:
1296 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1298 {
1299 /* -(A + B) -> (-B) - A. */
1300 if (negate_expr_p (TREE_OPERAND (t, 1))
1301 && reorder_operands_p (TREE_OPERAND (t, 0),
1302 TREE_OPERAND (t, 1)))
1303 {
1304 tem = negate_expr (TREE_OPERAND (t, 1));
1305 return fold_build2 (MINUS_EXPR, type,
1306 tem, TREE_OPERAND (t, 0));
1307 }
1308
1309 /* -(A + B) -> (-A) - B. */
1310 if (negate_expr_p (TREE_OPERAND (t, 0)))
1311 {
1312 tem = negate_expr (TREE_OPERAND (t, 0));
1313 return fold_build2 (MINUS_EXPR, type,
1314 tem, TREE_OPERAND (t, 1));
1315 }
1316 }
1317 break;
1318
1319 case MINUS_EXPR:
1320 /* - (A - B) -> B - A */
1321 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1322 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1323 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1324 return fold_build2 (MINUS_EXPR, type,
1325 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1326 break;
1327
1328 case MULT_EXPR:
1329 if (TYPE_UNSIGNED (type))
1330 break;
1331
1332 /* Fall through. */
1333
1334 case RDIV_EXPR:
1335 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1336 {
1337 tem = TREE_OPERAND (t, 1);
1338 if (negate_expr_p (tem))
1339 return fold_build2 (TREE_CODE (t), type,
1340 TREE_OPERAND (t, 0), negate_expr (tem));
1341 tem = TREE_OPERAND (t, 0);
1342 if (negate_expr_p (tem))
1343 return fold_build2 (TREE_CODE (t), type,
1344 negate_expr (tem), TREE_OPERAND (t, 1));
1345 }
1346 break;
1347
1348 case TRUNC_DIV_EXPR:
1349 case ROUND_DIV_EXPR:
1350 case FLOOR_DIV_EXPR:
1351 case CEIL_DIV_EXPR:
1352 case EXACT_DIV_EXPR:
1353 /* In general we can't negate A / B, because if A is INT_MIN and
1354 B is 1, we may turn this into INT_MIN / -1 which is undefined
1355 and actually traps on some architectures. But if overflow is
1356 undefined, we can negate, because - (INT_MIN / 1) is an
1357 overflow. */
1358 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1359 {
1360 const char * const warnmsg = G_("assuming signed overflow does not "
1361 "occur when negating a division");
1362 tem = TREE_OPERAND (t, 1);
1363 if (negate_expr_p (tem))
1364 {
1365 if (INTEGRAL_TYPE_P (type)
1366 && (TREE_CODE (tem) != INTEGER_CST
1367 || integer_onep (tem)))
1368 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1369 return fold_build2 (TREE_CODE (t), type,
1370 TREE_OPERAND (t, 0), negate_expr (tem));
1371 }
1372 tem = TREE_OPERAND (t, 0);
1373 if (negate_expr_p (tem))
1374 {
1375 if (INTEGRAL_TYPE_P (type)
1376 && (TREE_CODE (tem) != INTEGER_CST
1377 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1379 return fold_build2 (TREE_CODE (t), type,
1380 negate_expr (tem), TREE_OPERAND (t, 1));
1381 }
1382 }
1383 break;
1384
1385 case NOP_EXPR:
1386 /* Convert -((double)float) into (double)(-float). */
1387 if (TREE_CODE (type) == REAL_TYPE)
1388 {
1389 tem = strip_float_extensions (t);
1390 if (tem != t && negate_expr_p (tem))
1391 return fold_convert (type, negate_expr (tem));
1392 }
1393 break;
1394
1395 case CALL_EXPR:
1396 /* Negate -f(x) as f(-x). */
1397 if (negate_mathfn_p (builtin_mathfn_code (t))
1398 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1399 {
1400 tree fndecl, arg;
1401
1402 fndecl = get_callee_fndecl (t);
1403 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1404 return build_call_expr (fndecl, 1, arg);
1405 }
1406 break;
1407
1408 case RSHIFT_EXPR:
1409 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1410 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1411 {
1412 tree op1 = TREE_OPERAND (t, 1);
1413 if (TREE_INT_CST_HIGH (op1) == 0
1414 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1415 == TREE_INT_CST_LOW (op1))
1416 {
1417 tree ntype = TYPE_UNSIGNED (type)
1418 ? signed_type_for (type)
1419 : unsigned_type_for (type);
1420 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1421 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1422 return fold_convert (type, temp);
1423 }
1424 }
1425 break;
1426
1427 default:
1428 break;
1429 }
1430
1431 return NULL_TREE;
1432 }
1433
1434 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
1435 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1436 return NULL_TREE. */
1437
1438 static tree
1439 negate_expr (tree t)
1440 {
1441 tree type, tem;
1442
1443 if (t == NULL_TREE)
1444 return NULL_TREE;
1445
1446 type = TREE_TYPE (t);
1447 STRIP_SIGN_NOPS (t);
1448
1449 tem = fold_negate_expr (t);
1450 if (!tem)
1451 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1452 return fold_convert (type, tem);
1453 }
1454 \f
1455 /* Split a tree IN into constant, literal and variable parts that could be
1456 combined with CODE to make IN. "constant" means an expression with
1457 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1458 commutative arithmetic operation. Store the constant part into *CONP,
1459 the literal in *LITP and return the variable part. If a part isn't
1460 present, set it to null. If the tree does not decompose in this way,
1461 return the entire tree as the variable part and the other parts as null.
1462
1463 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1464    case, we negate an operand that was subtracted, except when it is a
1465    literal, for which we use *MINUS_LITP instead.
1466
1467 If NEGATE_P is true, we are negating all of IN, again except a literal
1468 for which we use *MINUS_LITP instead.
1469
1470 If IN is itself a literal or constant, return it as appropriate.
1471
1472 Note that we do not guarantee that any of the three values will be the
1473 same type as IN, but they will have the same signedness and mode. */
1474
1475 static tree
1476 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1477 tree *minus_litp, int negate_p)
1478 {
1479 tree var = 0;
1480
1481 *conp = 0;
1482 *litp = 0;
1483 *minus_litp = 0;
1484
1485 /* Strip any conversions that don't change the machine mode or signedness. */
1486 STRIP_SIGN_NOPS (in);
1487
1488 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1489 || TREE_CODE (in) == FIXED_CST)
1490 *litp = in;
1491 else if (TREE_CODE (in) == code
1492 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1493 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1494 /* We can associate addition and subtraction together (even
1495 though the C standard doesn't say so) for integers because
1496 the value is not affected. For reals, the value might be
1497 affected, so we can't. */
1498 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1499 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1500 {
1501 tree op0 = TREE_OPERAND (in, 0);
1502 tree op1 = TREE_OPERAND (in, 1);
1503 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1504 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1505
1506 /* First see if either of the operands is a literal, then a constant. */
1507 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1508 || TREE_CODE (op0) == FIXED_CST)
1509 *litp = op0, op0 = 0;
1510 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1511 || TREE_CODE (op1) == FIXED_CST)
1512 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1513
1514 if (op0 != 0 && TREE_CONSTANT (op0))
1515 *conp = op0, op0 = 0;
1516 else if (op1 != 0 && TREE_CONSTANT (op1))
1517 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1518
1519 /* If we haven't dealt with either operand, this is not a case we can
1520 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1521 if (op0 != 0 && op1 != 0)
1522 var = in;
1523 else if (op0 != 0)
1524 var = op0;
1525 else
1526 var = op1, neg_var_p = neg1_p;
1527
1528 /* Now do any needed negations. */
1529 if (neg_litp_p)
1530 *minus_litp = *litp, *litp = 0;
1531 if (neg_conp_p)
1532 *conp = negate_expr (*conp);
1533 if (neg_var_p)
1534 var = negate_expr (var);
1535 }
1536 else if (TREE_CONSTANT (in))
1537 *conp = in;
1538 else
1539 var = in;
1540
1541 if (negate_p)
1542 {
1543 if (*litp)
1544 *minus_litp = *litp, *litp = 0;
1545 else if (*minus_litp)
1546 *litp = *minus_litp, *minus_litp = 0;
1547 *conp = negate_expr (*conp);
1548 var = negate_expr (var);
1549 }
1550
1551 return var;
1552 }
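/* Editor's note, not part of the original file: for IN = x + 3 with
   CODE == PLUS_EXPR, split_tree returns the variable part x, stores
   the literal 3 in *LITP, and leaves *CONP and *MINUS_LITP null; for
   IN = x - 3 the literal instead lands in *MINUS_LITP.  */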
1553
1554 /* Re-associate trees split by the above function. T1 and T2 are either
1555 expressions to associate or null. Return the new expression, if any. If
1556 we build an operation, do it in TYPE and with CODE. */
1557
1558 static tree
1559 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1560 {
1561 if (t1 == 0)
1562 return t2;
1563 else if (t2 == 0)
1564 return t1;
1565
1566 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1567 try to fold this since we will have infinite recursion. But do
1568 deal with any NEGATE_EXPRs. */
1569 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1570 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1571 {
1572 if (code == PLUS_EXPR)
1573 {
1574 if (TREE_CODE (t1) == NEGATE_EXPR)
1575 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1576 fold_convert (type, TREE_OPERAND (t1, 0)));
1577 else if (TREE_CODE (t2) == NEGATE_EXPR)
1578 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1579 fold_convert (type, TREE_OPERAND (t2, 0)));
1580 else if (integer_zerop (t2))
1581 return fold_convert (type, t1);
1582 }
1583 else if (code == MINUS_EXPR)
1584 {
1585 if (integer_zerop (t2))
1586 return fold_convert (type, t1);
1587 }
1588
1589 return build2 (code, type, fold_convert (type, t1),
1590 fold_convert (type, t2));
1591 }
1592
1593 return fold_build2 (code, type, fold_convert (type, t1),
1594 fold_convert (type, t2));
1595 }
1596 \f
1597 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1598 for use in int_const_binop, size_binop and size_diffop. */
1599
1600 static bool
1601 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1602 {
1603 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1604 return false;
1605 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1606 return false;
1607
1608 switch (code)
1609 {
1610 case LSHIFT_EXPR:
1611 case RSHIFT_EXPR:
1612 case LROTATE_EXPR:
1613 case RROTATE_EXPR:
1614 return true;
1615
1616 default:
1617 break;
1618 }
1619
1620 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1621 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1622 && TYPE_MODE (type1) == TYPE_MODE (type2);
1623 }
1624
1625
1626 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1627 to produce a new constant. Return NULL_TREE if we don't know how
1628 to evaluate CODE at compile-time.
1629
1630 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1631
1632 tree
1633 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1634 {
1635 unsigned HOST_WIDE_INT int1l, int2l;
1636 HOST_WIDE_INT int1h, int2h;
1637 unsigned HOST_WIDE_INT low;
1638 HOST_WIDE_INT hi;
1639 unsigned HOST_WIDE_INT garbagel;
1640 HOST_WIDE_INT garbageh;
1641 tree t;
1642 tree type = TREE_TYPE (arg1);
1643 int uns = TYPE_UNSIGNED (type);
1644 int is_sizetype
1645 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1646 int overflow = 0;
1647
1648 int1l = TREE_INT_CST_LOW (arg1);
1649 int1h = TREE_INT_CST_HIGH (arg1);
1650 int2l = TREE_INT_CST_LOW (arg2);
1651 int2h = TREE_INT_CST_HIGH (arg2);
1652
1653 switch (code)
1654 {
1655 case BIT_IOR_EXPR:
1656 low = int1l | int2l, hi = int1h | int2h;
1657 break;
1658
1659 case BIT_XOR_EXPR:
1660 low = int1l ^ int2l, hi = int1h ^ int2h;
1661 break;
1662
1663 case BIT_AND_EXPR:
1664 low = int1l & int2l, hi = int1h & int2h;
1665 break;
1666
1667 case RSHIFT_EXPR:
1668 int2l = -int2l;
1669 case LSHIFT_EXPR:
1670 /* It's unclear from the C standard whether shifts can overflow.
1671 The following code ignores overflow; perhaps a C standard
1672 interpretation ruling is needed. */
1673 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1674 &low, &hi, !uns);
1675 break;
1676
1677 case RROTATE_EXPR:
1678 int2l = - int2l;
1679 case LROTATE_EXPR:
1680 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1681 &low, &hi);
1682 break;
1683
1684 case PLUS_EXPR:
1685 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1686 break;
1687
1688 case MINUS_EXPR:
1689 neg_double (int2l, int2h, &low, &hi);
1690 add_double (int1l, int1h, low, hi, &low, &hi);
1691 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1692 break;
1693
1694 case MULT_EXPR:
1695 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1696 break;
1697
1698 case TRUNC_DIV_EXPR:
1699 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1700 case EXACT_DIV_EXPR:
1701 /* This is a shortcut for a common special case. */
1702 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1703 && !TREE_OVERFLOW (arg1)
1704 && !TREE_OVERFLOW (arg2)
1705 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1706 {
1707 if (code == CEIL_DIV_EXPR)
1708 int1l += int2l - 1;
1709
1710 low = int1l / int2l, hi = 0;
1711 break;
1712 }
1713
1714 /* ... fall through ... */
1715
1716 case ROUND_DIV_EXPR:
1717 if (int2h == 0 && int2l == 0)
1718 return NULL_TREE;
1719 if (int2h == 0 && int2l == 1)
1720 {
1721 low = int1l, hi = int1h;
1722 break;
1723 }
1724 if (int1l == int2l && int1h == int2h
1725 && ! (int1l == 0 && int1h == 0))
1726 {
1727 low = 1, hi = 0;
1728 break;
1729 }
1730 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1731 &low, &hi, &garbagel, &garbageh);
1732 break;
1733
1734 case TRUNC_MOD_EXPR:
1735 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1736 /* This is a shortcut for a common special case. */
1737 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1738 && !TREE_OVERFLOW (arg1)
1739 && !TREE_OVERFLOW (arg2)
1740 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1741 {
1742 if (code == CEIL_MOD_EXPR)
1743 int1l += int2l - 1;
1744 low = int1l % int2l, hi = 0;
1745 break;
1746 }
1747
1748 /* ... fall through ... */
1749
1750 case ROUND_MOD_EXPR:
1751 if (int2h == 0 && int2l == 0)
1752 return NULL_TREE;
1753 overflow = div_and_round_double (code, uns,
1754 int1l, int1h, int2l, int2h,
1755 &garbagel, &garbageh, &low, &hi);
1756 break;
1757
1758 case MIN_EXPR:
1759 case MAX_EXPR:
1760 if (uns)
1761 low = (((unsigned HOST_WIDE_INT) int1h
1762 < (unsigned HOST_WIDE_INT) int2h)
1763 || (((unsigned HOST_WIDE_INT) int1h
1764 == (unsigned HOST_WIDE_INT) int2h)
1765 && int1l < int2l));
1766 else
1767 low = (int1h < int2h
1768 || (int1h == int2h && int1l < int2l));
1769
1770 if (low == (code == MIN_EXPR))
1771 low = int1l, hi = int1h;
1772 else
1773 low = int2l, hi = int2h;
1774 break;
1775
1776 default:
1777 return NULL_TREE;
1778 }
1779
1780 if (notrunc)
1781 {
1782 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1783
1784 /* Propagate overflow flags ourselves. */
1785 if (((!uns || is_sizetype) && overflow)
1786 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1787 {
1788 t = copy_node (t);
1789 TREE_OVERFLOW (t) = 1;
1790 }
1791 }
1792 else
1793 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1794 ((!uns || is_sizetype) && overflow)
1795 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1796
1797 return t;
1798 }
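/* Editor's sketch, not part of the original file: folding 2 + 3 into
   the shared INTEGER_CST 5 at compile time.  */
#if 0
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree five = int_const_binop (PLUS_EXPR, two, three, 0);
}
#endif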
1799
1800 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1801 constant. We assume ARG1 and ARG2 have the same data type, or at least
1802 are the same kind of constant and the same machine mode. Return zero if
1803 combining the constants is not allowed in the current operating mode.
1804
1805 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1806
1807 static tree
1808 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1809 {
1810 /* Sanity check for the recursive cases. */
1811 if (!arg1 || !arg2)
1812 return NULL_TREE;
1813
1814 STRIP_NOPS (arg1);
1815 STRIP_NOPS (arg2);
1816
1817 if (TREE_CODE (arg1) == INTEGER_CST)
1818 return int_const_binop (code, arg1, arg2, notrunc);
1819
1820 if (TREE_CODE (arg1) == REAL_CST)
1821 {
1822 enum machine_mode mode;
1823 REAL_VALUE_TYPE d1;
1824 REAL_VALUE_TYPE d2;
1825 REAL_VALUE_TYPE value;
1826 REAL_VALUE_TYPE result;
1827 bool inexact;
1828 tree t, type;
1829
1830 /* The following codes are handled by real_arithmetic. */
1831 switch (code)
1832 {
1833 case PLUS_EXPR:
1834 case MINUS_EXPR:
1835 case MULT_EXPR:
1836 case RDIV_EXPR:
1837 case MIN_EXPR:
1838 case MAX_EXPR:
1839 break;
1840
1841 default:
1842 return NULL_TREE;
1843 }
1844
1845 d1 = TREE_REAL_CST (arg1);
1846 d2 = TREE_REAL_CST (arg2);
1847
1848 type = TREE_TYPE (arg1);
1849 mode = TYPE_MODE (type);
1850
1851 /* Don't perform the operation if we honor signaling NaNs and
1852 either operand is a NaN. */
1853 if (HONOR_SNANS (mode)
1854 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1855 return NULL_TREE;
1856
1857 /* Don't perform the operation if it would raise a division
1858 by zero exception. */
1859 if (code == RDIV_EXPR
1860 && REAL_VALUES_EQUAL (d2, dconst0)
1861 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1862 return NULL_TREE;
1863
1864 /* If either operand is a NaN, just return it. Otherwise, set up
1865 for floating-point trap; we return an overflow. */
1866 if (REAL_VALUE_ISNAN (d1))
1867 return arg1;
1868 else if (REAL_VALUE_ISNAN (d2))
1869 return arg2;
1870
1871 inexact = real_arithmetic (&value, code, &d1, &d2);
1872 real_convert (&result, mode, &value);
1873
1874 /* Don't constant fold this floating point operation if
1875 the result has overflowed and flag_trapping_math is set. */
1876 if (flag_trapping_math
1877 && MODE_HAS_INFINITIES (mode)
1878 && REAL_VALUE_ISINF (result)
1879 && !REAL_VALUE_ISINF (d1)
1880 && !REAL_VALUE_ISINF (d2))
1881 return NULL_TREE;
1882
1883 /* Don't constant fold this floating point operation if the
1884 result may depend upon the run-time rounding mode and
1885 flag_rounding_math is set, or if GCC's software emulation
1886 is unable to accurately represent the result. */
1887 if ((flag_rounding_math
1888 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1890 return NULL_TREE;
1891
1892 t = build_real (type, result);
1893
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1895 return t;
1896 }
1897
1898 if (TREE_CODE (arg1) == FIXED_CST)
1899 {
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1903 tree t, type;
1904 int sat_p;
1905 bool overflow_p;
1906
1907 /* The following codes are handled by fixed_arithmetic. */
1908 switch (code)
1909 {
1910 case PLUS_EXPR:
1911 case MINUS_EXPR:
1912 case MULT_EXPR:
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1915 break;
1916
1917 case LSHIFT_EXPR:
1918 case RSHIFT_EXPR:
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1921 f2.mode = SImode;
1922 break;
1923
1924 default:
1925 return NULL_TREE;
1926 }
1927
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1935 {
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1938 }
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1941 return t;
1942 }
1943
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1945 {
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1951 tree real, imag;
1952
1953 switch (code)
1954 {
1955 case PLUS_EXPR:
1956 case MINUS_EXPR:
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1959 break;
1960
1961 case MULT_EXPR:
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1965 notrunc);
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1969 notrunc);
1970 break;
1971
1972 case RDIV_EXPR:
1973 {
1974 tree magsquared
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1978 notrunc);
1979 tree t1
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1983 notrunc);
1984 tree t2
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1988 notrunc);
1989
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1992
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
1995 }
1996 break;
1997
1998 default:
1999 return NULL_TREE;
2000 }
2001
2002 if (real && imag)
2003 return build_complex (type, real, imag);
2004 }
2005
2006 return NULL_TREE;
2007 }
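
/* A minimal sketch of the REAL_CST arm above.  Assuming double
   constants:

     tree two = build_real (double_type_node, dconst2);
     tree four = const_binop (PLUS_EXPR, two, two, 0);

   real_arithmetic computes 4.0 exactly, so neither the trapping-math
   nor the rounding-math bail-outs fire and FOUR is the REAL_CST 4.0
   with TREE_OVERFLOW clear.  */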
2008
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2011
2012 tree
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2014 {
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2016 }
2017 \f
2018 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, ala int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
2022
2023 tree
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2025 {
2026 tree type = TREE_TYPE (arg0);
2027
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2030
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2032 TREE_TYPE (arg1)));
2033
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2036 {
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2039 {
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2041 return arg1;
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2043 return arg0;
2044 }
2045 else if (code == MINUS_EXPR)
2046 {
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2048 return arg0;
2049 }
2050 else if (code == MULT_EXPR)
2051 {
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2053 return arg1;
2054 }
2055
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2058 }
2059
2060 return fold_build2 (code, type, arg0, arg1);
2061 }
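
/* A minimal sketch of the fast paths above:

     tree off = size_binop (PLUS_EXPR, size_int (8), size_int (0));

   The second operand is integer zero without TREE_OVERFLOW, so the
   PLUS_EXPR shortcut returns the first operand, size_int (8), without
   even calling int_const_binop.  */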
2062
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in signed type corresponding to the type of the operands. */
2066
2067 tree
2068 size_diffop (tree arg0, tree arg1)
2069 {
2070 tree type = TREE_TYPE (arg0);
2071 tree ctype;
2072
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2075
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2079
2080 if (type == sizetype)
2081 ctype = ssizetype;
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2084 else
2085 ctype = signed_type_for (type);
2086
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2093
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2102 else
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2105 arg1, arg0)));
2106 }
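
/* A minimal sketch of size_diffop on constants.  With sizetype
   operands the result type is ssizetype:

     tree d = size_diffop (size_int (4), size_int (12));

   ARG1 is the larger value, so we compute 12 - 4 = 8 in sizetype,
   convert to ssizetype (which cannot overflow) and negate, giving the
   ssizetype constant -8.  */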
2107 \f
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2110
2111 static tree
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2113 {
2114 tree t;
2115
2116 /* Given an integer constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 /* or to a sizetype with same signedness
2124 and the precision is unchanged.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2141
2142 return t;
2143 }
2144
2145 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2146 to an integer type. */
2147
2148 static tree
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2150 {
2151 int overflow = 0;
2152 tree t;
2153
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2162
2163 HOST_WIDE_INT high, low;
2164 REAL_VALUE_TYPE r;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2166
2167 switch (code)
2168 {
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2171 break;
2172
2173 default:
2174 gcc_unreachable ();
2175 }
2176
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2179 {
2180 overflow = 1;
2181 high = 0;
2182 low = 0;
2183 }
2184
2185 /* See if R is less than the lower bound or greater than the
2186 upper bound. */
2187
2188 if (! overflow)
2189 {
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2193 {
2194 overflow = 1;
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2197 }
2198 }
2199
2200 if (! overflow)
2201 {
2202 tree ut = TYPE_MAX_VALUE (type);
2203 if (ut)
2204 {
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2207 {
2208 overflow = 1;
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2211 }
2212 }
2213 }
2214
2215 if (! overflow)
2216 REAL_VALUE_TO_INT (&low, &high, r);
2217
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2220 return t;
2221 }
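
/* A minimal sketch of the saturation above.  Assuming R is a REAL_CST
   of value 1e30 and TYPE a 32-bit signed integer type, then

     fold_convert_const_int_from_real (FIX_TRUNC_EXPR, type, r)

   truncates R, finds it above TYPE_MAX_VALUE, and so returns INT_MAX
   (0x7fffffff) with TREE_OVERFLOW set.  A NaN input instead yields
   zero, also with TREE_OVERFLOW set.  */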
2222
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2225
2226 static tree
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2228 {
2229 tree t;
2230 double_int temp, temp_trunc;
2231 unsigned int mode;
2232
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2237 {
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2241
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2247 }
2248 else
2249 {
2250 temp.low = 0;
2251 temp.high = 0;
2252 temp_trunc.low = 0;
2253 temp_trunc.high = 0;
2254 }
2255
2256 /* If FIXED_CST is negative, we need to round the value toward 0.
2257 We do this by adding 1 to temp when the fractional bits are nonzero. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2260 {
2261 double_int one;
2262 one.low = 1;
2263 one.high = 0;
2264 temp = double_int_add (temp, one);
2265 }
2266
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2270 (temp.high < 0
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2274
2275 return t;
2276 }
2277
2278 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2279 to another floating point type. */
2280
2281 static tree
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2283 {
2284 REAL_VALUE_TYPE value;
2285 tree t;
2286
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2289
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2291 return t;
2292 }
2293
2294 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2295 to a floating point type. */
2296
2297 static tree
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2299 {
2300 REAL_VALUE_TYPE value;
2301 tree t;
2302
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2305
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2309 return t;
2310 }
2311
2312 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2313 to another fixed-point type. */
2314
2315 static tree
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2317 {
2318 FIXED_VALUE_TYPE value;
2319 tree t;
2320 bool overflow_p;
2321
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2325
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2328 {
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2331 }
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2334 return t;
2335 }
2336
2337 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2338 to a fixed-point type. */
2339
2340 static tree
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2342 {
2343 FIXED_VALUE_TYPE value;
2344 tree t;
2345 bool overflow_p;
2346
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2352
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2355 {
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2358 }
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2361 return t;
2362 }
2363
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2366
2367 static tree
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2369 {
2370 FIXED_VALUE_TYPE value;
2371 tree t;
2372 bool overflow_p;
2373
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2378
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2381 {
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2384 }
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2387 return t;
2388 }
2389
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2392
2393 static tree
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2395 {
2396 if (TREE_TYPE (arg1) == type)
2397 return arg1;
2398
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2400 || TREE_CODE (type) == OFFSET_TYPE)
2401 {
2402 if (TREE_CODE (arg1) == INTEGER_CST)
2403 return fold_convert_const_int_from_int (type, arg1);
2404 else if (TREE_CODE (arg1) == REAL_CST)
2405 return fold_convert_const_int_from_real (code, type, arg1);
2406 else if (TREE_CODE (arg1) == FIXED_CST)
2407 return fold_convert_const_int_from_fixed (type, arg1);
2408 }
2409 else if (TREE_CODE (type) == REAL_TYPE)
2410 {
2411 if (TREE_CODE (arg1) == INTEGER_CST)
2412 return build_real_from_int_cst (type, arg1);
2413 else if (TREE_CODE (arg1) == REAL_CST)
2414 return fold_convert_const_real_from_real (type, arg1);
2415 else if (TREE_CODE (arg1) == FIXED_CST)
2416 return fold_convert_const_real_from_fixed (type, arg1);
2417 }
2418 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2419 {
2420 if (TREE_CODE (arg1) == FIXED_CST)
2421 return fold_convert_const_fixed_from_fixed (type, arg1);
2422 else if (TREE_CODE (arg1) == INTEGER_CST)
2423 return fold_convert_const_fixed_from_int (type, arg1);
2424 else if (TREE_CODE (arg1) == REAL_CST)
2425 return fold_convert_const_fixed_from_real (type, arg1);
2426 }
2427 return NULL_TREE;
2428 }
2429
2430 /* Construct a vector of zero elements of vector type TYPE. */
2431
2432 static tree
2433 build_zero_vector (tree type)
2434 {
2435 tree elem, list;
2436 int i, units;
2437
2438 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2439 units = TYPE_VECTOR_SUBPARTS (type);
2440
2441 list = NULL_TREE;
2442 for (i = 0; i < units; i++)
2443 list = tree_cons (NULL_TREE, elem, list);
2444 return build_vector (type, list);
2445 }
2446
2447 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2448
2449 bool
2450 fold_convertible_p (const_tree type, const_tree arg)
2451 {
2452 tree orig = TREE_TYPE (arg);
2453
2454 if (type == orig)
2455 return true;
2456
2457 if (TREE_CODE (arg) == ERROR_MARK
2458 || TREE_CODE (type) == ERROR_MARK
2459 || TREE_CODE (orig) == ERROR_MARK)
2460 return false;
2461
2462 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2463 return true;
2464
2465 switch (TREE_CODE (type))
2466 {
2467 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2468 case POINTER_TYPE: case REFERENCE_TYPE:
2469 case OFFSET_TYPE:
2470 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2471 || TREE_CODE (orig) == OFFSET_TYPE)
2472 return true;
2473 return (TREE_CODE (orig) == VECTOR_TYPE
2474 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2475
2476 case REAL_TYPE:
2477 case FIXED_POINT_TYPE:
2478 case COMPLEX_TYPE:
2479 case VECTOR_TYPE:
2480 case VOID_TYPE:
2481 return TREE_CODE (type) == TREE_CODE (orig);
2482
2483 default:
2484 return false;
2485 }
2486 }
2487
2488 /* Convert expression ARG to type TYPE. Used by the middle-end for
2489 simple conversions in preference to calling the front-end's convert. */
2490
2491 tree
2492 fold_convert (tree type, tree arg)
2493 {
2494 tree orig = TREE_TYPE (arg);
2495 tree tem;
2496
2497 if (type == orig)
2498 return arg;
2499
2500 if (TREE_CODE (arg) == ERROR_MARK
2501 || TREE_CODE (type) == ERROR_MARK
2502 || TREE_CODE (orig) == ERROR_MARK)
2503 return error_mark_node;
2504
2505 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2506 return fold_build1 (NOP_EXPR, type, arg);
2507
2508 switch (TREE_CODE (type))
2509 {
2510 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case OFFSET_TYPE:
2513 if (TREE_CODE (arg) == INTEGER_CST)
2514 {
2515 tem = fold_convert_const (NOP_EXPR, type, arg);
2516 if (tem != NULL_TREE)
2517 return tem;
2518 }
2519 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 || TREE_CODE (orig) == OFFSET_TYPE)
2521 return fold_build1 (NOP_EXPR, type, arg);
2522 if (TREE_CODE (orig) == COMPLEX_TYPE)
2523 {
2524 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2525 return fold_convert (type, tem);
2526 }
2527 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2528 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2529 return fold_build1 (NOP_EXPR, type, arg);
2530
2531 case REAL_TYPE:
2532 if (TREE_CODE (arg) == INTEGER_CST)
2533 {
2534 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2535 if (tem != NULL_TREE)
2536 return tem;
2537 }
2538 else if (TREE_CODE (arg) == REAL_CST)
2539 {
2540 tem = fold_convert_const (NOP_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2542 return tem;
2543 }
2544 else if (TREE_CODE (arg) == FIXED_CST)
2545 {
2546 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2548 return tem;
2549 }
2550
2551 switch (TREE_CODE (orig))
2552 {
2553 case INTEGER_TYPE:
2554 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2555 case POINTER_TYPE: case REFERENCE_TYPE:
2556 return fold_build1 (FLOAT_EXPR, type, arg);
2557
2558 case REAL_TYPE:
2559 return fold_build1 (NOP_EXPR, type, arg);
2560
2561 case FIXED_POINT_TYPE:
2562 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2563
2564 case COMPLEX_TYPE:
2565 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2566 return fold_convert (type, tem);
2567
2568 default:
2569 gcc_unreachable ();
2570 }
2571
2572 case FIXED_POINT_TYPE:
2573 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2574 || TREE_CODE (arg) == REAL_CST)
2575 {
2576 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2577 if (tem != NULL_TREE)
2578 return tem;
2579 }
2580
2581 switch (TREE_CODE (orig))
2582 {
2583 case FIXED_POINT_TYPE:
2584 case INTEGER_TYPE:
2585 case ENUMERAL_TYPE:
2586 case BOOLEAN_TYPE:
2587 case REAL_TYPE:
2588 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2589
2590 case COMPLEX_TYPE:
2591 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2592 return fold_convert (type, tem);
2593
2594 default:
2595 gcc_unreachable ();
2596 }
2597
2598 case COMPLEX_TYPE:
2599 switch (TREE_CODE (orig))
2600 {
2601 case INTEGER_TYPE:
2602 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2604 case REAL_TYPE:
2605 case FIXED_POINT_TYPE:
2606 return build2 (COMPLEX_EXPR, type,
2607 fold_convert (TREE_TYPE (type), arg),
2608 fold_convert (TREE_TYPE (type), integer_zero_node));
2609 case COMPLEX_TYPE:
2610 {
2611 tree rpart, ipart;
2612
2613 if (TREE_CODE (arg) == COMPLEX_EXPR)
2614 {
2615 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2616 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2617 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2618 }
2619
2620 arg = save_expr (arg);
2621 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2622 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2623 rpart = fold_convert (TREE_TYPE (type), rpart);
2624 ipart = fold_convert (TREE_TYPE (type), ipart);
2625 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2626 }
2627
2628 default:
2629 gcc_unreachable ();
2630 }
2631
2632 case VECTOR_TYPE:
2633 if (integer_zerop (arg))
2634 return build_zero_vector (type);
2635 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2636 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2637 || TREE_CODE (orig) == VECTOR_TYPE);
2638 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2639
2640 case VOID_TYPE:
2641 tem = fold_ignored_result (arg);
2642 if (TREE_CODE (tem) == MODIFY_EXPR)
2643 return tem;
2644 return fold_build1 (NOP_EXPR, type, tem);
2645
2646 default:
2647 gcc_unreachable ();
2648 }
2649 }
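
/* A minimal sketch of fold_convert on a constant.  Converting an
   INTEGER_CST to double takes the REAL_TYPE arm above:

     tree i = build_int_cst (integer_type_node, 5);
     tree d = fold_convert (double_type_node, i);

   fold_convert_const (FLOAT_EXPR, ...) succeeds, so D is the REAL_CST
   5.0 rather than a FLOAT_EXPR wrapped around I.  */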
2650 \f
2651 /* Return false if expr can be assumed not to be an lvalue, true
2652 otherwise. */
2653
2654 static bool
2655 maybe_lvalue_p (const_tree x)
2656 {
2657 /* We only need to wrap lvalue tree codes. */
2658 switch (TREE_CODE (x))
2659 {
2660 case VAR_DECL:
2661 case PARM_DECL:
2662 case RESULT_DECL:
2663 case LABEL_DECL:
2664 case FUNCTION_DECL:
2665 case SSA_NAME:
2666
2667 case COMPONENT_REF:
2668 case INDIRECT_REF:
2669 case ALIGN_INDIRECT_REF:
2670 case MISALIGNED_INDIRECT_REF:
2671 case ARRAY_REF:
2672 case ARRAY_RANGE_REF:
2673 case BIT_FIELD_REF:
2674 case OBJ_TYPE_REF:
2675
2676 case REALPART_EXPR:
2677 case IMAGPART_EXPR:
2678 case PREINCREMENT_EXPR:
2679 case PREDECREMENT_EXPR:
2680 case SAVE_EXPR:
2681 case TRY_CATCH_EXPR:
2682 case WITH_CLEANUP_EXPR:
2683 case COMPOUND_EXPR:
2684 case MODIFY_EXPR:
2685 case TARGET_EXPR:
2686 case COND_EXPR:
2687 case BIND_EXPR:
2688 case MIN_EXPR:
2689 case MAX_EXPR:
2690 break;
2691
2692 default:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2695 break;
2696 return false;
2697 }
2698
2699 return true;
2700 }
2701
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2703
2704 tree
2705 non_lvalue (tree x)
2706 {
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2708 us. */
2709 if (in_gimple_form)
2710 return x;
2711
2712 if (! maybe_lvalue_p (x))
2713 return x;
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2715 }
2716
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2719
2720 int pedantic_lvalues;
2721
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2724
2725 static tree
2726 pedantic_non_lvalue (tree x)
2727 {
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2730 else
2731 return x;
2732 }
2733 \f
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS
2737 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2738
2739 enum tree_code
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2741 {
2742 if (honor_nans && flag_trapping_math)
2743 return ERROR_MARK;
2744
2745 switch (code)
2746 {
2747 case EQ_EXPR:
2748 return NE_EXPR;
2749 case NE_EXPR:
2750 return EQ_EXPR;
2751 case GT_EXPR:
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2753 case GE_EXPR:
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2755 case LT_EXPR:
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2757 case LE_EXPR:
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2759 case LTGT_EXPR:
2760 return UNEQ_EXPR;
2761 case UNEQ_EXPR:
2762 return LTGT_EXPR;
2763 case UNGT_EXPR:
2764 return LE_EXPR;
2765 case UNGE_EXPR:
2766 return LT_EXPR;
2767 case UNLT_EXPR:
2768 return GE_EXPR;
2769 case UNLE_EXPR:
2770 return GT_EXPR;
2771 case ORDERED_EXPR:
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2775 default:
2776 gcc_unreachable ();
2777 }
2778 }
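
/* A minimal sketch of the mapping above:

     invert_tree_comparison (LT_EXPR, false)  => GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   => UNGE_EXPR

   and with both HONOR_NANS and flag_trapping_math set the function
   returns ERROR_MARK, since the inverted comparison could change
   which operands trap.  */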
2779
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2782
2783 enum tree_code
2784 swap_tree_comparison (enum tree_code code)
2785 {
2786 switch (code)
2787 {
2788 case EQ_EXPR:
2789 case NE_EXPR:
2790 case ORDERED_EXPR:
2791 case UNORDERED_EXPR:
2792 case LTGT_EXPR:
2793 case UNEQ_EXPR:
2794 return code;
2795 case GT_EXPR:
2796 return LT_EXPR;
2797 case GE_EXPR:
2798 return LE_EXPR;
2799 case LT_EXPR:
2800 return GT_EXPR;
2801 case LE_EXPR:
2802 return GE_EXPR;
2803 case UNGT_EXPR:
2804 return UNLT_EXPR;
2805 case UNGE_EXPR:
2806 return UNLE_EXPR;
2807 case UNLT_EXPR:
2808 return UNGT_EXPR;
2809 case UNLE_EXPR:
2810 return UNGE_EXPR;
2811 default:
2812 gcc_unreachable ();
2813 }
2814 }
2815
2816
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2820
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2823 {
2824 switch (code)
2825 {
2826 case LT_EXPR:
2827 return COMPCODE_LT;
2828 case EQ_EXPR:
2829 return COMPCODE_EQ;
2830 case LE_EXPR:
2831 return COMPCODE_LE;
2832 case GT_EXPR:
2833 return COMPCODE_GT;
2834 case NE_EXPR:
2835 return COMPCODE_NE;
2836 case GE_EXPR:
2837 return COMPCODE_GE;
2838 case ORDERED_EXPR:
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2842 case UNLT_EXPR:
2843 return COMPCODE_UNLT;
2844 case UNEQ_EXPR:
2845 return COMPCODE_UNEQ;
2846 case UNLE_EXPR:
2847 return COMPCODE_UNLE;
2848 case UNGT_EXPR:
2849 return COMPCODE_UNGT;
2850 case LTGT_EXPR:
2851 return COMPCODE_LTGT;
2852 case UNGE_EXPR:
2853 return COMPCODE_UNGE;
2854 default:
2855 gcc_unreachable ();
2856 }
2857 }
2858
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2862
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2865 {
2866 switch (code)
2867 {
2868 case COMPCODE_LT:
2869 return LT_EXPR;
2870 case COMPCODE_EQ:
2871 return EQ_EXPR;
2872 case COMPCODE_LE:
2873 return LE_EXPR;
2874 case COMPCODE_GT:
2875 return GT_EXPR;
2876 case COMPCODE_NE:
2877 return NE_EXPR;
2878 case COMPCODE_GE:
2879 return GE_EXPR;
2880 case COMPCODE_ORD:
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2884 case COMPCODE_UNLT:
2885 return UNLT_EXPR;
2886 case COMPCODE_UNEQ:
2887 return UNEQ_EXPR;
2888 case COMPCODE_UNLE:
2889 return UNLE_EXPR;
2890 case COMPCODE_UNGT:
2891 return UNGT_EXPR;
2892 case COMPCODE_LTGT:
2893 return LTGT_EXPR;
2894 case COMPCODE_UNGE:
2895 return UNGE_EXPR;
2896 default:
2897 gcc_unreachable ();
2898 }
2899 }
2900
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
2906
2907 tree
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2911 {
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2916
2917 switch (code)
2918 {
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2921 break;
2922
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2925 break;
2926
2927 default:
2928 return NULL_TREE;
2929 }
2930
2931 if (!honor_nans)
2932 {
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2940 }
2941 else if (flag_trapping_math)
2942 {
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2954
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2963 rtrap = false;
2964
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2967 if (rtrap && !ltrap
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2969 return NULL_TREE;
2970
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2973 return NULL_TREE;
2974 }
2975
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2980 else
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
2983 }
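
/* A minimal sketch of the compcode arithmetic above.  Assuming A and B
   are trees of an integral (hence NaN-free) type, then

     combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, a, b)

   computes lcompcode = COMPCODE_LT (1) and rcompcode = COMPCODE_EQ (2);
   their bitwise OR is COMPCODE_LE (3), so the whole expression folds
   to a <= b.  */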
2984
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2986
2987 static int
2988 truth_value_p (enum tree_code code)
2989 {
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2994 }
2995 \f
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
2999
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3006
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3017
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
3021
3022 int
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3024 {
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3027 return 0;
3028
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3033
3034 /* If both types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. As pointers
3037 strictly don't have a signedness, require either two pointers or
3038 two non-pointers as well. */
3039 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3040 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3041 return 0;
3042
3043 /* If both types don't have the same precision, then it is not safe
3044 to strip NOPs. */
3045 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3046 return 0;
3047
3048 STRIP_NOPS (arg0);
3049 STRIP_NOPS (arg1);
3050
3051 /* In case both args are comparisons but with different comparison
3052 code, try to swap the comparison operands of one arg to produce
3053 a match and compare that variant. */
3054 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3055 && COMPARISON_CLASS_P (arg0)
3056 && COMPARISON_CLASS_P (arg1))
3057 {
3058 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3059
3060 if (TREE_CODE (arg0) == swap_code)
3061 return operand_equal_p (TREE_OPERAND (arg0, 0),
3062 TREE_OPERAND (arg1, 1), flags)
3063 && operand_equal_p (TREE_OPERAND (arg0, 1),
3064 TREE_OPERAND (arg1, 0), flags);
3065 }
3066
3067 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3068 /* This is needed for conversions and for COMPONENT_REF.
3069 Might as well play it safe and always test this. */
3070 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3073 return 0;
3074
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below. */
3082 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3083 && (TREE_CODE (arg0) == SAVE_EXPR
3084 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3085 return 1;
3086
3087 /* Next handle constant cases, those for which we can return 1 even
3088 if ONLY_CONST is set. */
3089 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3090 switch (TREE_CODE (arg0))
3091 {
3092 case INTEGER_CST:
3093 return tree_int_cst_equal (arg0, arg1);
3094
3095 case FIXED_CST:
3096 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3097 TREE_FIXED_CST (arg1));
3098
3099 case REAL_CST:
3100 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3101 TREE_REAL_CST (arg1)))
3102 return 1;
3103
3104
3105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3106 {
3107 /* If we do not distinguish between signed and unsigned zero,
3108 consider them equal. */
3109 if (real_zerop (arg0) && real_zerop (arg1))
3110 return 1;
3111 }
3112 return 0;
3113
3114 case VECTOR_CST:
3115 {
3116 tree v1, v2;
3117
3118 v1 = TREE_VECTOR_CST_ELTS (arg0);
3119 v2 = TREE_VECTOR_CST_ELTS (arg1);
3120 while (v1 && v2)
3121 {
3122 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3123 flags))
3124 return 0;
3125 v1 = TREE_CHAIN (v1);
3126 v2 = TREE_CHAIN (v2);
3127 }
3128
3129 return v1 == v2;
3130 }
3131
3132 case COMPLEX_CST:
3133 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3134 flags)
3135 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3136 flags));
3137
3138 case STRING_CST:
3139 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3140 && ! memcmp (TREE_STRING_POINTER (arg0),
3141 TREE_STRING_POINTER (arg1),
3142 TREE_STRING_LENGTH (arg0)));
3143
3144 case ADDR_EXPR:
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3146 0);
3147 default:
3148 break;
3149 }
3150
3151 if (flags & OEP_ONLY_CONST)
3152 return 0;
3153
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, then both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3160
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3164
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3166 {
3167 case tcc_unary:
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3170 {
3171 CASE_CONVERT:
3172 case FIX_TRUNC_EXPR:
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3174 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3175 return 0;
3176 break;
3177 default:
3178 break;
3179 }
3180
3181 return OP_SAME (0);
3182
3183
3184 case tcc_comparison:
3185 case tcc_binary:
3186 if (OP_SAME (0) && OP_SAME (1))
3187 return 1;
3188
3189 /* For commutative ops, allow the other order. */
3190 return (commutative_tree_code (TREE_CODE (arg0))
3191 && operand_equal_p (TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 1), flags)
3193 && operand_equal_p (TREE_OPERAND (arg0, 1),
3194 TREE_OPERAND (arg1, 0), flags));
3195
3196 case tcc_reference:
3197 /* If either of the pointer (or reference) expressions we are
3198 dereferencing contain a side effect, these cannot be equal. */
3199 if (TREE_SIDE_EFFECTS (arg0)
3200 || TREE_SIDE_EFFECTS (arg1))
3201 return 0;
3202
3203 switch (TREE_CODE (arg0))
3204 {
3205 case INDIRECT_REF:
3206 case ALIGN_INDIRECT_REF:
3207 case MISALIGNED_INDIRECT_REF:
3208 case REALPART_EXPR:
3209 case IMAGPART_EXPR:
3210 return OP_SAME (0);
3211
3212 case ARRAY_REF:
3213 case ARRAY_RANGE_REF:
3214 /* Operands 2 and 3 may be null.
3215 Compare the array index by value first if it is constant, as we
3216 may have different types but the same value here. */
3217 return (OP_SAME (0)
3218 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3219 TREE_OPERAND (arg1, 1))
3220 || OP_SAME (1))
3221 && OP_SAME_WITH_NULL (2)
3222 && OP_SAME_WITH_NULL (3));
3223
3224 case COMPONENT_REF:
3225 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3226 may be NULL when we're called to compare MEM_EXPRs. */
3227 return OP_SAME_WITH_NULL (0)
3228 && OP_SAME (1)
3229 && OP_SAME_WITH_NULL (2);
3230
3231 case BIT_FIELD_REF:
3232 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3233
3234 default:
3235 return 0;
3236 }
3237
3238 case tcc_expression:
3239 switch (TREE_CODE (arg0))
3240 {
3241 case ADDR_EXPR:
3242 case TRUTH_NOT_EXPR:
3243 return OP_SAME (0);
3244
3245 case TRUTH_ANDIF_EXPR:
3246 case TRUTH_ORIF_EXPR:
3247 return OP_SAME (0) && OP_SAME (1);
3248
3249 case TRUTH_AND_EXPR:
3250 case TRUTH_OR_EXPR:
3251 case TRUTH_XOR_EXPR:
3252 if (OP_SAME (0) && OP_SAME (1))
3253 return 1;
3254
3255 /* Otherwise take into account this is a commutative operation. */
3256 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 1), flags)
3258 && operand_equal_p (TREE_OPERAND (arg0, 1),
3259 TREE_OPERAND (arg1, 0), flags));
3260
3261 case COND_EXPR:
3262 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3263
3264 default:
3265 return 0;
3266 }
3267
3268 case tcc_vl_exp:
3269 switch (TREE_CODE (arg0))
3270 {
3271 case CALL_EXPR:
3272 /* If the CALL_EXPRs call different functions, then they
3273 clearly cannot be equal. */
3274 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3275 flags))
3276 return 0;
3277
3278 {
3279 unsigned int cef = call_expr_flags (arg0);
3280 if (flags & OEP_PURE_SAME)
3281 cef &= ECF_CONST | ECF_PURE;
3282 else
3283 cef &= ECF_CONST;
3284 if (!cef)
3285 return 0;
3286 }
3287
3288 /* Now see if all the arguments are the same. */
3289 {
3290 const_call_expr_arg_iterator iter0, iter1;
3291 const_tree a0, a1;
3292 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3293 a1 = first_const_call_expr_arg (arg1, &iter1);
3294 a0 && a1;
3295 a0 = next_const_call_expr_arg (&iter0),
3296 a1 = next_const_call_expr_arg (&iter1))
3297 if (! operand_equal_p (a0, a1, flags))
3298 return 0;
3299
3300 /* If we get here and both argument lists are exhausted
3301 then the CALL_EXPRs are equal. */
3302 return ! (a0 || a1);
3303 }
3304 default:
3305 return 0;
3306 }
3307
3308 case tcc_declaration:
3309 /* Consider __builtin_sqrt equal to sqrt. */
3310 return (TREE_CODE (arg0) == FUNCTION_DECL
3311 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3312 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3313 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3314
3315 default:
3316 return 0;
3317 }
3318
3319 #undef OP_SAME
3320 #undef OP_SAME_WITH_NULL
3321 }
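
/* A minimal sketch of the commutativity handling above.  Assuming X is
   a side-effect-free VAR_DECL of type int:

     tree e1 = build2 (PLUS_EXPR, integer_type_node, x, integer_one_node);
     tree e2 = build2 (PLUS_EXPR, integer_type_node, integer_one_node, x);

   operand_equal_p (e1, e2, 0) returns 1: the straight OP_SAME tests
   fail, but PLUS_EXPR is commutative, so the operands of one side are
   retried in swapped order and match.  */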
3322 \f
3323 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3324 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3325
3326 When in doubt, return 0. */
3327
3328 static int
3329 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3330 {
3331 int unsignedp1, unsignedpo;
3332 tree primarg0, primarg1, primother;
3333 unsigned int correct_width;
3334
3335 if (operand_equal_p (arg0, arg1, 0))
3336 return 1;
3337
3338 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3339 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3340 return 0;
3341
3342 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3343 and see if the inner values are the same. This removes any
3344 signedness comparison, which doesn't matter here. */
3345 primarg0 = arg0, primarg1 = arg1;
3346 STRIP_NOPS (primarg0);
3347 STRIP_NOPS (primarg1);
3348 if (operand_equal_p (primarg0, primarg1, 0))
3349 return 1;
3350
3351 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3352 actual comparison operand, ARG0.
3353
3354 First throw away any conversions to wider types
3355 already present in the operands. */
3356
3357 primarg1 = get_narrower (arg1, &unsignedp1);
3358 primother = get_narrower (other, &unsignedpo);
3359
3360 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3361 if (unsignedp1 == unsignedpo
3362 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3363 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3364 {
3365 tree type = TREE_TYPE (arg0);
3366
3367 /* Make sure shorter operand is extended the right way
3368 to match the longer operand. */
3369 primarg1 = fold_convert (signed_or_unsigned_type_for
3370 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3371
3372 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3373 return 1;
3374 }
3375
3376 return 0;
3377 }
3378 \f
3379 /* See if ARG is an expression that is either a comparison or is performing
3380 arithmetic on comparisons. The comparisons must only be comparing
3381 two different values, which will be stored in *CVAL1 and *CVAL2; if
3382 they are nonzero it means that some operands have already been found.
3383 No variables may be used anywhere else in the expression except in the
3384 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3385 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3386
3387 If this is true, return 1. Otherwise, return zero. */
3388
3389 static int
3390 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3391 {
3392 enum tree_code code = TREE_CODE (arg);
3393 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3394
3395 /* We can handle some of the tcc_expression cases here. */
3396 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3397 tclass = tcc_unary;
3398 else if (tclass == tcc_expression
3399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3400 || code == COMPOUND_EXPR))
3401 tclass = tcc_binary;
3402
3403 else if (tclass == tcc_expression && code == SAVE_EXPR
3404 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3405 {
3406 /* If we've already found a CVAL1 or CVAL2, this expression is
3407 too complex to handle. */
3408 if (*cval1 || *cval2)
3409 return 0;
3410
3411 tclass = tcc_unary;
3412 *save_p = 1;
3413 }
3414
3415 switch (tclass)
3416 {
3417 case tcc_unary:
3418 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3419
3420 case tcc_binary:
3421 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3422 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3423 cval1, cval2, save_p));
3424
3425 case tcc_constant:
3426 return 1;
3427
3428 case tcc_expression:
3429 if (code == COND_EXPR)
3430 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3431 cval1, cval2, save_p)
3432 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3433 cval1, cval2, save_p)
3434 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3435 cval1, cval2, save_p));
3436 return 0;
3437
3438 case tcc_comparison:
3439 /* First see if we can handle the first operand, then the second. For
3440 the second operand, we know *CVAL1 can't be zero. It must be that
3441 one side of the comparison is each of the values; test for the
3442 case where this isn't true by failing if the two operands
3443 are the same. */
3444
3445 if (operand_equal_p (TREE_OPERAND (arg, 0),
3446 TREE_OPERAND (arg, 1), 0))
3447 return 0;
3448
3449 if (*cval1 == 0)
3450 *cval1 = TREE_OPERAND (arg, 0);
3451 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3452 ;
3453 else if (*cval2 == 0)
3454 *cval2 = TREE_OPERAND (arg, 0);
3455 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3456 ;
3457 else
3458 return 0;
3459
3460 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3461 ;
3462 else if (*cval2 == 0)
3463 *cval2 = TREE_OPERAND (arg, 1);
3464 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3465 ;
3466 else
3467 return 0;
3468
3469 return 1;
3470
3471 default:
3472 return 0;
3473 }
3474 }
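
/* A minimal sketch: for ARG = (a < b) || (a == b), with *CVAL1 and
   *CVAL2 initially null, the first comparison stores A in *CVAL1 and
   B in *CVAL2; the second comparison matches both saved values, so we
   return 1.  For (a < b) || (c < d), the third distinct operand C
   fails every test and we return 0.  */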
3475 \f
3476 /* ARG is a tree that is known to contain just arithmetic operations and
3477 comparisons. Evaluate the operations in the tree substituting NEW0 for
3478 any occurrence of OLD0 as an operand of a comparison and likewise for
3479 NEW1 and OLD1. */
3480
3481 static tree
3482 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3483 {
3484 tree type = TREE_TYPE (arg);
3485 enum tree_code code = TREE_CODE (arg);
3486 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3487
3488 /* We can handle some of the tcc_expression cases here. */
3489 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3490 tclass = tcc_unary;
3491 else if (tclass == tcc_expression
3492 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3493 tclass = tcc_binary;
3494
3495 switch (tclass)
3496 {
3497 case tcc_unary:
3498 return fold_build1 (code, type,
3499 eval_subst (TREE_OPERAND (arg, 0),
3500 old0, new0, old1, new1));
3501
3502 case tcc_binary:
3503 return fold_build2 (code, type,
3504 eval_subst (TREE_OPERAND (arg, 0),
3505 old0, new0, old1, new1),
3506 eval_subst (TREE_OPERAND (arg, 1),
3507 old0, new0, old1, new1));
3508
3509 case tcc_expression:
3510 switch (code)
3511 {
3512 case SAVE_EXPR:
3513 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3514
3515 case COMPOUND_EXPR:
3516 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3517
3518 case COND_EXPR:
3519 return fold_build3 (code, type,
3520 eval_subst (TREE_OPERAND (arg, 0),
3521 old0, new0, old1, new1),
3522 eval_subst (TREE_OPERAND (arg, 1),
3523 old0, new0, old1, new1),
3524 eval_subst (TREE_OPERAND (arg, 2),
3525 old0, new0, old1, new1));
3526 default:
3527 break;
3528 }
3529 /* Fall through - ??? */
3530
3531 case tcc_comparison:
3532 {
3533 tree arg0 = TREE_OPERAND (arg, 0);
3534 tree arg1 = TREE_OPERAND (arg, 1);
3535
3536 /* We need to check both for exact equality and tree equality. The
3537 former will be true if the operand has a side-effect. In that
3538 case, we know the operand occurred exactly once. */
3539
3540 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3541 arg0 = new0;
3542 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3543 arg0 = new1;
3544
3545 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3546 arg1 = new0;
3547 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3548 arg1 = new1;
3549
3550 return fold_build2 (code, type, arg0, arg1);
3551 }
3552
3553 default:
3554 return arg;
3555 }
3556 }
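
/* A minimal sketch of eval_subst.  Assuming CMP is the comparison
   tree x < y, then

     eval_subst (cmp, x, integer_zero_node, y, integer_one_node)

   rebuilds the comparison with both operands substituted, i.e.
   fold_build2 (LT_EXPR, type, 0, 1), which folds to true.  */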
3557 \f
3558 /* Return a tree for the case when the result of an expression is RESULT
3559 converted to TYPE and OMITTED was previously an operand of the expression
3560 but is now not needed (e.g., we folded OMITTED * 0).
3561
3562 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3563 the conversion of RESULT to TYPE. */
3564
3565 tree
3566 omit_one_operand (tree type, tree result, tree omitted)
3567 {
3568 tree t = fold_convert (type, result);
3569
3570 /* If the resulting operand is an empty statement, just return the omitted
3571 statement cast to void. */
3572 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3573 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3574
3575 if (TREE_SIDE_EFFECTS (omitted))
3576 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3577
3578 return non_lvalue (t);
3579 }
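
/* A minimal sketch: folding X * 0 when X has side effects.  Assuming
   CALL is a CALL_EXPR returning int, then

     omit_one_operand (integer_type_node, integer_zero_node, call)

   sees TREE_SIDE_EFFECTS on the omitted operand and returns the
   COMPOUND_EXPR (call, 0), which still evaluates the call but yields
   zero.  */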
3580
3581 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3582
3583 static tree
3584 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3585 {
3586 tree t = fold_convert (type, result);
3587
3588 /* If the resulting operand is an empty statement, just return the omitted
3589 statement cast to void. */
3590 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3591 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3592
3593 if (TREE_SIDE_EFFECTS (omitted))
3594 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3595
3596 return pedantic_non_lvalue (t);
3597 }
3598
3599 /* Return a tree for the case when the result of an expression is RESULT
3600 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3601 of the expression but are now not needed.
3602
3603 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3604 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3605 evaluated before OMITTED2. Otherwise, if neither has side effects,
3606 just do the conversion of RESULT to TYPE. */
3607
3608 tree
3609 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3610 {
3611 tree t = fold_convert (type, result);
3612
3613 if (TREE_SIDE_EFFECTS (omitted2))
3614 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3615 if (TREE_SIDE_EFFECTS (omitted1))
3616 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3617
3618 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3619 }
3620
3621 \f
3622 /* Return a simplified tree node for the truth-negation of ARG. This
3623 never alters ARG itself. We assume that ARG is an operation that
3624 returns a truth value (0 or 1).
3625
3626 FIXME: one would think we would fold the result, but it causes
3627 problems with the dominator optimizer. */
3628
3629 tree
3630 fold_truth_not_expr (tree arg)
3631 {
3632 tree type = TREE_TYPE (arg);
3633 enum tree_code code = TREE_CODE (arg);
3634
3635 /* If this is a comparison, we can simply invert it, except for
3636 floating-point non-equality comparisons, in which case we just
3637 enclose a TRUTH_NOT_EXPR around what we have. */
3638
3639 if (TREE_CODE_CLASS (code) == tcc_comparison)
3640 {
3641 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3642 if (FLOAT_TYPE_P (op_type)
3643 && flag_trapping_math
3644 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3645 && code != NE_EXPR && code != EQ_EXPR)
3646 return NULL_TREE;
3647 else
3648 {
3649 code = invert_tree_comparison (code,
3650 HONOR_NANS (TYPE_MODE (op_type)));
3651 if (code == ERROR_MARK)
3652 return NULL_TREE;
3653 else
3654 return build2 (code, type,
3655 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3656 }
3657 }
3658
3659 switch (code)
3660 {
3661 case INTEGER_CST:
3662 return constant_boolean_node (integer_zerop (arg), type);
3663
3664 case TRUTH_AND_EXPR:
3665 return build2 (TRUTH_OR_EXPR, type,
3666 invert_truthvalue (TREE_OPERAND (arg, 0)),
3667 invert_truthvalue (TREE_OPERAND (arg, 1)));
3668
3669 case TRUTH_OR_EXPR:
3670 return build2 (TRUTH_AND_EXPR, type,
3671 invert_truthvalue (TREE_OPERAND (arg, 0)),
3672 invert_truthvalue (TREE_OPERAND (arg, 1)));
3673
3674 case TRUTH_XOR_EXPR:
3675 /* Here we can invert either operand. We invert the first operand
3676 unless the second operand is a TRUTH_NOT_EXPR in which case our
3677 result is the XOR of the first operand with the inside of the
3678 negation of the second operand. */
3679
3680 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3681 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3682 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3683 else
3684 return build2 (TRUTH_XOR_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 TREE_OPERAND (arg, 1));
3687
3688 case TRUTH_ANDIF_EXPR:
3689 return build2 (TRUTH_ORIF_EXPR, type,
3690 invert_truthvalue (TREE_OPERAND (arg, 0)),
3691 invert_truthvalue (TREE_OPERAND (arg, 1)));
3692
3693 case TRUTH_ORIF_EXPR:
3694 return build2 (TRUTH_ANDIF_EXPR, type,
3695 invert_truthvalue (TREE_OPERAND (arg, 0)),
3696 invert_truthvalue (TREE_OPERAND (arg, 1)));
3697
3698 case TRUTH_NOT_EXPR:
3699 return TREE_OPERAND (arg, 0);
3700
3701 case COND_EXPR:
3702 {
3703 tree arg1 = TREE_OPERAND (arg, 1);
3704 tree arg2 = TREE_OPERAND (arg, 2);
3705 /* A COND_EXPR may have a throw as one operand, which
3706 then has void type. Just leave void operands
3707 as they are. */
3708 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3709 VOID_TYPE_P (TREE_TYPE (arg1))
3710 ? arg1 : invert_truthvalue (arg1),
3711 VOID_TYPE_P (TREE_TYPE (arg2))
3712 ? arg2 : invert_truthvalue (arg2));
3713 }
3714
3715 case COMPOUND_EXPR:
3716 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3717 invert_truthvalue (TREE_OPERAND (arg, 1)));
3718
3719 case NON_LVALUE_EXPR:
3720 return invert_truthvalue (TREE_OPERAND (arg, 0));
3721
3722 case NOP_EXPR:
3723 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3724 return build1 (TRUTH_NOT_EXPR, type, arg);
3725       /* FALLTHROUGH */
3726 case CONVERT_EXPR:
3727 case FLOAT_EXPR:
3728 return build1 (TREE_CODE (arg), type,
3729 invert_truthvalue (TREE_OPERAND (arg, 0)));
3730
3731 case BIT_AND_EXPR:
3732 if (!integer_onep (TREE_OPERAND (arg, 1)))
3733 break;
3734 return build2 (EQ_EXPR, type, arg,
3735 build_int_cst (type, 0));
3736
3737 case SAVE_EXPR:
3738 return build1 (TRUTH_NOT_EXPR, type, arg);
3739
3740 case CLEANUP_POINT_EXPR:
3741 return build1 (CLEANUP_POINT_EXPR, type,
3742 invert_truthvalue (TREE_OPERAND (arg, 0)));
3743
3744 default:
3745 break;
3746 }
3747
3748 return NULL_TREE;
3749 }
3750
3751 /* Return a tree for the truth-negation of ARG.  This never alters ARG
3752    itself.  We assume that ARG is an operation that returns a truth
3753    value (0 or 1).  Unlike fold_truth_not_expr, this always succeeds:
3754    if no simplification is possible, ARG is wrapped in a TRUTH_NOT_EXPR.
3755    FIXME: one would think we would fold the result, but it causes
3756    problems with the dominator optimizer.  */
3757
3758 tree
3759 invert_truthvalue (tree arg)
3760 {
3761 tree tem;
3762
3763 if (TREE_CODE (arg) == ERROR_MARK)
3764 return arg;
3765
3766 tem = fold_truth_not_expr (arg);
3767 if (!tem)
3768 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3769
3770 return tem;
3771 }
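/* For illustration (editor's sketch, not used by the compiler itself):
   invert_truthvalue maps, e.g.,

     a < b    ->  a >= b            (integer operands)
     a && b   ->  !a || !b

   while a trapping floating-point comparison such as "f < g" under
   -ftrapping-math makes fold_truth_not_expr return NULL_TREE, so the
   result is simply wrapped as !(f < g).  */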
3772
3773 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3774 operands are another bit-wise operation with a common input. If so,
3775 distribute the bit operations to save an operation and possibly two if
3776 constants are involved. For example, convert
3777 (A | B) & (A | C) into A | (B & C)
3778 Further simplification will occur if B and C are constants.
3779
3780 If this optimization cannot be done, 0 will be returned. */
3781
3782 static tree
3783 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3784 {
3785 tree common;
3786 tree left, right;
3787
3788 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3789 || TREE_CODE (arg0) == code
3790 || (TREE_CODE (arg0) != BIT_AND_EXPR
3791 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3792 return 0;
3793
3794 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3795 {
3796 common = TREE_OPERAND (arg0, 0);
3797 left = TREE_OPERAND (arg0, 1);
3798 right = TREE_OPERAND (arg1, 1);
3799 }
3800 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3801 {
3802 common = TREE_OPERAND (arg0, 0);
3803 left = TREE_OPERAND (arg0, 1);
3804 right = TREE_OPERAND (arg1, 0);
3805 }
3806 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3807 {
3808 common = TREE_OPERAND (arg0, 1);
3809 left = TREE_OPERAND (arg0, 0);
3810 right = TREE_OPERAND (arg1, 1);
3811 }
3812 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3813 {
3814 common = TREE_OPERAND (arg0, 1);
3815 left = TREE_OPERAND (arg0, 0);
3816 right = TREE_OPERAND (arg1, 0);
3817 }
3818 else
3819 return 0;
3820
3821 return fold_build2 (TREE_CODE (arg0), type, common,
3822 fold_build2 (code, type, left, right));
3823 }
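/* For illustration (editor's sketch): the saving is largest when the
   non-common operands are constants, e.g.

     (a | 0xf0) & (a | 0x0f)  ->  a | (0xf0 & 0x0f)  ->  a | 0  ->  a

   so two ORs and an AND fold away entirely once the inner constant
   expression is evaluated.  */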
3824
3825 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3826 with code CODE. This optimization is unsafe. */
3827 static tree
3828 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3829 {
3830 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3831 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3832
3833   /* (A / C) +- (B / C) -> (A +- B) / C; likewise when both are MULT_EXPRs.  */
3834 if (mul0 == mul1
3835 && operand_equal_p (TREE_OPERAND (arg0, 1),
3836 TREE_OPERAND (arg1, 1), 0))
3837 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3838 fold_build2 (code, type,
3839 TREE_OPERAND (arg0, 0),
3840 TREE_OPERAND (arg1, 0)),
3841 TREE_OPERAND (arg0, 1));
3842
3843 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3844 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3845 TREE_OPERAND (arg1, 0), 0)
3846 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3847 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3848 {
3849 REAL_VALUE_TYPE r0, r1;
3850 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3851 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3852 if (!mul0)
3853 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3854 if (!mul1)
3855 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3856 real_arithmetic (&r0, code, &r0, &r1);
3857 return fold_build2 (MULT_EXPR, type,
3858 TREE_OPERAND (arg0, 0),
3859 build_real (type, r0));
3860 }
3861
3862 return NULL_TREE;
3863 }
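/* For illustration (editor's sketch): this is only valid under
   unsafe-math semantics (e.g. -funsafe-math-optimizations), since, e.g.,

     x / 3.0 + y / 3.0  ->  (x + y) / 3.0
     x / 2.0 - x / 4.0  ->  x * (1/2.0 - 1/4.0)  ->  x * 0.25

   may round differently from the original expressions, which is why
   the comment above flags the transformation as unsafe.  */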
3864 \f
3865 /* Subroutine for fold_truthop: decode a field reference.
3866
3867 If EXP is a comparison reference, we return the innermost reference.
3868
3869 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3870 set to the starting bit number.
3871
3872 If the innermost field can be completely contained in a mode-sized
3873 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3874
3875    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3876 otherwise it is not changed.
3877
3878 *PUNSIGNEDP is set to the signedness of the field.
3879
3880 *PMASK is set to the mask used. This is either contained in a
3881 BIT_AND_EXPR or derived from the width of the field.
3882
3883 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3884
3885 Return 0 if this is not a component reference or is one that we can't
3886 do anything with. */
3887
3888 static tree
3889 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3890 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3891 int *punsignedp, int *pvolatilep,
3892 tree *pmask, tree *pand_mask)
3893 {
3894 tree outer_type = 0;
3895 tree and_mask = 0;
3896 tree mask, inner, offset;
3897 tree unsigned_type;
3898 unsigned int precision;
3899
3900 /* All the optimizations using this function assume integer fields.
3901 There are problems with FP fields since the type_for_size call
3902 below can fail for, e.g., XFmode. */
3903 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3904 return 0;
3905
3906 /* We are interested in the bare arrangement of bits, so strip everything
3907 that doesn't affect the machine mode. However, record the type of the
3908 outermost expression if it may matter below. */
3909 if (CONVERT_EXPR_P (exp)
3910 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3911 outer_type = TREE_TYPE (exp);
3912 STRIP_NOPS (exp);
3913
3914 if (TREE_CODE (exp) == BIT_AND_EXPR)
3915 {
3916 and_mask = TREE_OPERAND (exp, 1);
3917 exp = TREE_OPERAND (exp, 0);
3918 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3919 if (TREE_CODE (and_mask) != INTEGER_CST)
3920 return 0;
3921 }
3922
3923 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3924 punsignedp, pvolatilep, false);
3925 if ((inner == exp && and_mask == 0)
3926 || *pbitsize < 0 || offset != 0
3927 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3928 return 0;
3929
3930 /* If the number of bits in the reference is the same as the bitsize of
3931 the outer type, then the outer type gives the signedness. Otherwise
3932 (in case of a small bitfield) the signedness is unchanged. */
3933 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3934 *punsignedp = TYPE_UNSIGNED (outer_type);
3935
3936 /* Compute the mask to access the bitfield. */
3937 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3938 precision = TYPE_PRECISION (unsigned_type);
3939
3940 mask = build_int_cst_type (unsigned_type, -1);
3941
3942 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3943 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3944
3945 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3946 if (and_mask != 0)
3947 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3948 fold_convert (unsigned_type, and_mask), mask);
3949
3950 *pmask = mask;
3951 *pand_mask = and_mask;
3952 return inner;
3953 }
3954
3955 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3956 represents the sign bit of EXP's type. If EXP represents a sign
3957 or zero extension, also test VAL against the unextended type.
3958 The return value is the (sub)expression whose sign bit is VAL,
3959 or NULL_TREE otherwise. */
3960
3961 static tree
3962 sign_bit_p (tree exp, const_tree val)
3963 {
3964 unsigned HOST_WIDE_INT mask_lo, lo;
3965 HOST_WIDE_INT mask_hi, hi;
3966 int width;
3967 tree t;
3968
3969 /* Tree EXP must have an integral type. */
3970 t = TREE_TYPE (exp);
3971 if (! INTEGRAL_TYPE_P (t))
3972 return NULL_TREE;
3973
3974 /* Tree VAL must be an integer constant. */
3975 if (TREE_CODE (val) != INTEGER_CST
3976 || TREE_OVERFLOW (val))
3977 return NULL_TREE;
3978
3979 width = TYPE_PRECISION (t);
3980 if (width > HOST_BITS_PER_WIDE_INT)
3981 {
3982 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3983 lo = 0;
3984
3985 mask_hi = ((unsigned HOST_WIDE_INT) -1
3986 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3987 mask_lo = -1;
3988 }
3989 else
3990 {
3991 hi = 0;
3992 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3993
3994 mask_hi = 0;
3995 mask_lo = ((unsigned HOST_WIDE_INT) -1
3996 >> (HOST_BITS_PER_WIDE_INT - width));
3997 }
3998
3999 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4000 treat VAL as if it were unsigned. */
4001 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4002 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4003 return exp;
4004
4005 /* Handle extension from a narrower type. */
4006 if (TREE_CODE (exp) == NOP_EXPR
4007 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4008 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4009
4010 return NULL_TREE;
4011 }
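/* For illustration (editor's sketch): on a host where
   HOST_BITS_PER_WIDE_INT is 64, asking about a 32-bit int gives
   width == 32, lo == 1 << 31 and mask_lo == 0xffffffff, so
   VAL == 0x80000000 is accepted.  For EXP == (int) (signed char) c,
   VAL == 0x80 fails the direct test but succeeds via the NOP_EXPR
   clause, which recurses into the narrower operand whose sign bit
   0x80 is.  */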
4012
4013 /* Subroutine for fold_truthop: determine if an operand is simple enough
4014 to be evaluated unconditionally. */
4015
4016 static int
4017 simple_operand_p (const_tree exp)
4018 {
4019 /* Strip any conversions that don't change the machine mode. */
4020 STRIP_NOPS (exp);
4021
4022 return (CONSTANT_CLASS_P (exp)
4023 || TREE_CODE (exp) == SSA_NAME
4024 || (DECL_P (exp)
4025 && ! TREE_ADDRESSABLE (exp)
4026 && ! TREE_THIS_VOLATILE (exp)
4027 && ! DECL_NONLOCAL (exp)
4028 /* Don't regard global variables as simple. They may be
4029 allocated in ways unknown to the compiler (shared memory,
4030 #pragma weak, etc). */
4031 && ! TREE_PUBLIC (exp)
4032 && ! DECL_EXTERNAL (exp)
4033 /* Loading a static variable is unduly expensive, but global
4034 registers aren't expensive. */
4035 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4036 }
4037 \f
4038 /* The following functions are subroutines to fold_range_test and allow it to
4039 try to change a logical combination of comparisons into a range test.
4040
4041 For example, both
4042 X == 2 || X == 3 || X == 4 || X == 5
4043 and
4044 X >= 2 && X <= 5
4045 are converted to
4046 (unsigned) (X - 2) <= 3
4047
4048 We describe each set of comparisons as being either inside or outside
4049 a range, using a variable named like IN_P, and then describe the
4050 range with a lower and upper bound. If one of the bounds is omitted,
4051 it represents either the highest or lowest value of the type.
4052
4053 In the comments below, we represent a range by two numbers in brackets
4054 preceded by a "+" to designate being inside that range, or a "-" to
4055 designate being outside that range, so the condition can be inverted by
4056 flipping the prefix. An omitted bound is represented by a "-". For
4057 example, "- [-, 10]" means being outside the range starting at the lowest
4058 possible value and ending at 10, in other words, being greater than 10.
4059 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4060 always false.
4061
4062    We set things up so that missing bounds are handled consistently,
4063    so that neither a missing bound nor "true" and "false" needs to be
4064    handled as a special case.  */
4065
4066 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4067 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4068 and UPPER1_P are nonzero if the respective argument is an upper bound
4069 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4070 must be specified for a comparison. ARG1 will be converted to ARG0's
4071 type if both are specified. */
4072
4073 static tree
4074 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4075 tree arg1, int upper1_p)
4076 {
4077 tree tem;
4078 int result;
4079 int sgn0, sgn1;
4080
4081 /* If neither arg represents infinity, do the normal operation.
4082 Else, if not a comparison, return infinity. Else handle the special
4083 comparison rules. Note that most of the cases below won't occur, but
4084 are handled for consistency. */
4085
4086 if (arg0 != 0 && arg1 != 0)
4087 {
4088 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4089 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4090 STRIP_NOPS (tem);
4091 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4092 }
4093
4094 if (TREE_CODE_CLASS (code) != tcc_comparison)
4095 return 0;
4096
4097   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for an upper bound,
4098      and 0 for neither.  In real mathematics we could not assume that
4099      open-ended ranges compare equal.  But this is computer arithmetic,
4100      where numbers are finite, so any missing bound can be modeled by a
4101      value Z lying beyond every representable number.  This permits us
4102      to treat unbounded ranges as equal.  */
4103 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4104 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4105 switch (code)
4106 {
4107 case EQ_EXPR:
4108 result = sgn0 == sgn1;
4109 break;
4110 case NE_EXPR:
4111 result = sgn0 != sgn1;
4112 break;
4113 case LT_EXPR:
4114 result = sgn0 < sgn1;
4115 break;
4116 case LE_EXPR:
4117 result = sgn0 <= sgn1;
4118 break;
4119 case GT_EXPR:
4120 result = sgn0 > sgn1;
4121 break;
4122 case GE_EXPR:
4123 result = sgn0 >= sgn1;
4124 break;
4125 default:
4126 gcc_unreachable ();
4127 }
4128
4129 return constant_boolean_node (result, type);
4130 }
4131 \f
4132 /* Given EXP, a logical expression, set the range it is testing into
4133 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4134 actually being tested. *PLOW and *PHIGH will be made of the same
4135 type as the returned expression. If EXP is not a comparison, we
4136 will most likely not be returning a useful value and range. Set
4137 *STRICT_OVERFLOW_P to true if the return value is only valid
4138 because signed overflow is undefined; otherwise, do not change
4139 *STRICT_OVERFLOW_P. */
4140
4141 static tree
4142 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4143 bool *strict_overflow_p)
4144 {
4145 enum tree_code code;
4146 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4147 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4148 int in_p, n_in_p;
4149 tree low, high, n_low, n_high;
4150
4151 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4152 and see if we can refine the range. Some of the cases below may not
4153 happen, but it doesn't seem worth worrying about this. We "continue"
4154 the outer loop when we've changed something; otherwise we "break"
4155 the switch, which will "break" the while. */
4156
4157 in_p = 0;
4158 low = high = build_int_cst (TREE_TYPE (exp), 0);
4159
4160 while (1)
4161 {
4162 code = TREE_CODE (exp);
4163 exp_type = TREE_TYPE (exp);
4164
4165 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4166 {
4167 if (TREE_OPERAND_LENGTH (exp) > 0)
4168 arg0 = TREE_OPERAND (exp, 0);
4169 if (TREE_CODE_CLASS (code) == tcc_comparison
4170 || TREE_CODE_CLASS (code) == tcc_unary
4171 || TREE_CODE_CLASS (code) == tcc_binary)
4172 arg0_type = TREE_TYPE (arg0);
4173 if (TREE_CODE_CLASS (code) == tcc_binary
4174 || TREE_CODE_CLASS (code) == tcc_comparison
4175 || (TREE_CODE_CLASS (code) == tcc_expression
4176 && TREE_OPERAND_LENGTH (exp) > 1))
4177 arg1 = TREE_OPERAND (exp, 1);
4178 }
4179
4180 switch (code)
4181 {
4182 case TRUTH_NOT_EXPR:
4183 in_p = ! in_p, exp = arg0;
4184 continue;
4185
4186 case EQ_EXPR: case NE_EXPR:
4187 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4188 	  /* We can only do something if the range is testing for zero
4189 	     and if the second operand is an integer constant.  Note that
4190 	     saying something is "in" the range we make is done by
4191 	     complementing IN_P, since IN_P starts out cleared for the
4192 	     initial "not equal to zero" case; "out" leaves it alone.  */
4193 if (low == 0 || high == 0
4194 || ! integer_zerop (low) || ! integer_zerop (high)
4195 || TREE_CODE (arg1) != INTEGER_CST)
4196 break;
4197
4198 switch (code)
4199 {
4200 case NE_EXPR: /* - [c, c] */
4201 low = high = arg1;
4202 break;
4203 case EQ_EXPR: /* + [c, c] */
4204 in_p = ! in_p, low = high = arg1;
4205 break;
4206 case GT_EXPR: /* - [-, c] */
4207 low = 0, high = arg1;
4208 break;
4209 case GE_EXPR: /* + [c, -] */
4210 in_p = ! in_p, low = arg1, high = 0;
4211 break;
4212 case LT_EXPR: /* - [c, -] */
4213 low = arg1, high = 0;
4214 break;
4215 case LE_EXPR: /* + [-, c] */
4216 in_p = ! in_p, low = 0, high = arg1;
4217 break;
4218 default:
4219 gcc_unreachable ();
4220 }
4221
4222 	  /* If this is an unsigned comparison, we also know that EXP is
4223 	     greater than or equal to zero.  We base the range tests we make
4224 	     on that fact, so we record it here to let us parse existing
4225 	     range tests.  We test arg0_type because the return type of,
4226 	     e.g., EQ_EXPR is often boolean.  */
4227 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4228 {
4229 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4230 in_p, low, high, 1,
4231 build_int_cst (arg0_type, 0),
4232 NULL_TREE))
4233 break;
4234
4235 in_p = n_in_p, low = n_low, high = n_high;
4236
4237 /* If the high bound is missing, but we have a nonzero low
4238 bound, reverse the range so it goes from zero to the low bound
4239 minus 1. */
4240 if (high == 0 && low && ! integer_zerop (low))
4241 {
4242 in_p = ! in_p;
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4244 integer_one_node, 0);
4245 low = build_int_cst (arg0_type, 0);
4246 }
4247 }
4248
4249 exp = arg0;
4250 continue;
4251
4252 case NEGATE_EXPR:
4253 /* (-x) IN [a,b] -> x in [-b, -a] */
4254 n_low = range_binop (MINUS_EXPR, exp_type,
4255 build_int_cst (exp_type, 0),
4256 0, high, 1);
4257 n_high = range_binop (MINUS_EXPR, exp_type,
4258 build_int_cst (exp_type, 0),
4259 0, low, 0);
4260 low = n_low, high = n_high;
4261 exp = arg0;
4262 continue;
4263
4264 case BIT_NOT_EXPR:
4265 /* ~ X -> -X - 1 */
4266 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4267 build_int_cst (exp_type, 1));
4268 continue;
4269
4270 case PLUS_EXPR: case MINUS_EXPR:
4271 if (TREE_CODE (arg1) != INTEGER_CST)
4272 break;
4273
4274 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4275 move a constant to the other side. */
4276 if (!TYPE_UNSIGNED (arg0_type)
4277 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4278 break;
4279
4280 /* If EXP is signed, any overflow in the computation is undefined,
4281 so we don't worry about it so long as our computations on
4282 the bounds don't overflow. For unsigned, overflow is defined
4283 and this is exactly the right thing. */
4284 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4285 arg0_type, low, 0, arg1, 0);
4286 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4287 arg0_type, high, 1, arg1, 0);
4288 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4289 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4290 break;
4291
4292 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4293 *strict_overflow_p = true;
4294
4295 /* Check for an unsigned range which has wrapped around the maximum
4296 value thus making n_high < n_low, and normalize it. */
4297 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4298 {
4299 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4300 integer_one_node, 0);
4301 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4302 integer_one_node, 0);
4303
4304 /* If the range is of the form +/- [ x+1, x ], we won't
4305 be able to normalize it. But then, it represents the
4306 whole range or the empty set, so make it
4307 +/- [ -, - ]. */
4308 if (tree_int_cst_equal (n_low, low)
4309 && tree_int_cst_equal (n_high, high))
4310 low = high = 0;
4311 else
4312 in_p = ! in_p;
4313 }
4314 else
4315 low = n_low, high = n_high;
4316
4317 exp = arg0;
4318 continue;
4319
4320 CASE_CONVERT: case NON_LVALUE_EXPR:
4321 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4322 break;
4323
4324 if (! INTEGRAL_TYPE_P (arg0_type)
4325 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4326 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4327 break;
4328
4329 n_low = low, n_high = high;
4330
4331 if (n_low != 0)
4332 n_low = fold_convert (arg0_type, n_low);
4333
4334 if (n_high != 0)
4335 n_high = fold_convert (arg0_type, n_high);
4336
4337
4338 	  /* If we're converting arg0 from an unsigned type to exp,
4339 	     a signed type, we will be doing the comparison as unsigned.
4340 The tests above have already verified that LOW and HIGH
4341 are both positive.
4342
4343 So we have to ensure that we will handle large unsigned
4344 values the same way that the current signed bounds treat
4345 negative values. */
4346
4347 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4348 {
4349 tree high_positive;
4350 tree equiv_type;
4351 /* For fixed-point modes, we need to pass the saturating flag
4352 as the 2nd parameter. */
4353 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4354 equiv_type = lang_hooks.types.type_for_mode
4355 (TYPE_MODE (arg0_type),
4356 TYPE_SATURATING (arg0_type));
4357 else
4358 equiv_type = lang_hooks.types.type_for_mode
4359 (TYPE_MODE (arg0_type), 1);
4360
4361 /* A range without an upper bound is, naturally, unbounded.
4362 Since convert would have cropped a very large value, use
4363 the max value for the destination type. */
4364 high_positive
4365 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4366 : TYPE_MAX_VALUE (arg0_type);
4367
4368 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4369 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4370 fold_convert (arg0_type,
4371 high_positive),
4372 build_int_cst (arg0_type, 1));
4373
4374 /* If the low bound is specified, "and" the range with the
4375 range for which the original unsigned value will be
4376 positive. */
4377 if (low != 0)
4378 {
4379 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4380 1, n_low, n_high, 1,
4381 fold_convert (arg0_type,
4382 integer_zero_node),
4383 high_positive))
4384 break;
4385
4386 in_p = (n_in_p == in_p);
4387 }
4388 else
4389 {
4390 /* Otherwise, "or" the range with the range of the input
4391 that will be interpreted as negative. */
4392 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4393 0, n_low, n_high, 1,
4394 fold_convert (arg0_type,
4395 integer_zero_node),
4396 high_positive))
4397 break;
4398
4399 in_p = (in_p != n_in_p);
4400 }
4401 }
4402
4403 exp = arg0;
4404 low = n_low, high = n_high;
4405 continue;
4406
4407 default:
4408 break;
4409 }
4410
4411 break;
4412 }
4413
4414 /* If EXP is a constant, we can evaluate whether this is true or false. */
4415 if (TREE_CODE (exp) == INTEGER_CST)
4416 {
4417 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4418 exp, 0, low, 0))
4419 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4420 exp, 1, high, 1)));
4421 low = high = 0;
4422 exp = 0;
4423 }
4424
4425 *pin_p = in_p, *plow = low, *phigh = high;
4426 return exp;
4427 }
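/* For illustration (editor's sketch): for the signed test "x > 4" the
   GT_EXPR case records "- [-, 4]", so the function returns exp == x
   with *pin_p == 0, *plow == NULL and *phigh == 4: x lies outside
   [min, 4], i.e. x > 4.  A test such as "x + 1 > 5" additionally goes
   through the PLUS_EXPR case, which moves the constant across to give
   the same range and sets *strict_overflow_p for signed types.  */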
4428 \f
4429 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4430 type, TYPE, return an expression to test if EXP is in (or out of, depending
4431 on IN_P) the range. Return 0 if the test couldn't be created. */
4432
4433 static tree
4434 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4435 {
4436 tree etype = TREE_TYPE (exp);
4437 tree value;
4438
4439 #ifdef HAVE_canonicalize_funcptr_for_compare
4440 /* Disable this optimization for function pointer expressions
4441 on targets that require function pointer canonicalization. */
4442 if (HAVE_canonicalize_funcptr_for_compare
4443 && TREE_CODE (etype) == POINTER_TYPE
4444 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4445 return NULL_TREE;
4446 #endif
4447
4448 if (! in_p)
4449 {
4450 value = build_range_check (type, exp, 1, low, high);
4451 if (value != 0)
4452 return invert_truthvalue (value);
4453
4454 return 0;
4455 }
4456
4457 if (low == 0 && high == 0)
4458 return build_int_cst (type, 1);
4459
4460 if (low == 0)
4461 return fold_build2 (LE_EXPR, type, exp,
4462 fold_convert (etype, high));
4463
4464 if (high == 0)
4465 return fold_build2 (GE_EXPR, type, exp,
4466 fold_convert (etype, low));
4467
4468 if (operand_equal_p (low, high, 0))
4469 return fold_build2 (EQ_EXPR, type, exp,
4470 fold_convert (etype, low));
4471
4472 if (integer_zerop (low))
4473 {
4474 if (! TYPE_UNSIGNED (etype))
4475 {
4476 etype = unsigned_type_for (etype);
4477 high = fold_convert (etype, high);
4478 exp = fold_convert (etype, exp);
4479 }
4480 return build_range_check (type, exp, 1, 0, high);
4481 }
4482
4483 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4484 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4485 {
4486 unsigned HOST_WIDE_INT lo;
4487 HOST_WIDE_INT hi;
4488 int prec;
4489
4490 prec = TYPE_PRECISION (etype);
4491 if (prec <= HOST_BITS_PER_WIDE_INT)
4492 {
4493 hi = 0;
4494 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4495 }
4496 else
4497 {
4498 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4499 lo = (unsigned HOST_WIDE_INT) -1;
4500 }
4501
4502 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4503 {
4504 if (TYPE_UNSIGNED (etype))
4505 {
4506 etype = signed_type_for (etype);
4507 exp = fold_convert (etype, exp);
4508 }
4509 return fold_build2 (GT_EXPR, type, exp,
4510 build_int_cst (etype, 0));
4511 }
4512 }
4513
4514 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4515    This requires wrap-around arithmetic for the type of the expression.  */
4516 switch (TREE_CODE (etype))
4517 {
4518 case INTEGER_TYPE:
4519 /* There is no requirement that LOW be within the range of ETYPE
4520 if the latter is a subtype. It must, however, be within the base
4521 type of ETYPE. So be sure we do the subtraction in that type. */
4522 if (TREE_TYPE (etype))
4523 etype = TREE_TYPE (etype);
4524 break;
4525
4526 case ENUMERAL_TYPE:
4527 case BOOLEAN_TYPE:
4528 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4529 TYPE_UNSIGNED (etype));
4530 break;
4531
4532 default:
4533 break;
4534 }
4535
4536   /* If we don't have wrap-around arithmetic upfront, try to force it.  */
4537 if (TREE_CODE (etype) == INTEGER_TYPE
4538 && !TYPE_OVERFLOW_WRAPS (etype))
4539 {
4540 tree utype, minv, maxv;
4541
4542 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4543 for the type in question, as we rely on this here. */
4544 utype = unsigned_type_for (etype);
4545 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4546 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4547 integer_one_node, 1);
4548 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4549
4550 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4551 minv, 1, maxv, 1)))
4552 etype = utype;
4553 else
4554 return 0;
4555 }
4556
4557 high = fold_convert (etype, high);
4558 low = fold_convert (etype, low);
4559 exp = fold_convert (etype, exp);
4560
4561 value = const_binop (MINUS_EXPR, high, low, 0);
4562
4563
4564 if (POINTER_TYPE_P (etype))
4565 {
4566 if (value != 0 && !TREE_OVERFLOW (value))
4567 {
4568 low = fold_convert (sizetype, low);
4569 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4570 return build_range_check (type,
4571 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4572 1, build_int_cst (etype, 0), value);
4573 }
4574 return 0;
4575 }
4576
4577 if (value != 0 && !TREE_OVERFLOW (value))
4578 return build_range_check (type,
4579 fold_build2 (MINUS_EXPR, etype, exp, low),
4580 1, build_int_cst (etype, 0), value);
4581
4582 return 0;
4583 }
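/* For illustration (editor's sketch): a call along the lines of

     build_range_check (boolean_type_node, x, 1,
                        build_int_cst (integer_type_node, 2),
                        build_int_cst (integer_type_node, 5))

   for signed int x forces the unsigned type and ends in the final
   MINUS_EXPR path, producing in effect

     (unsigned int) (x - 2) <= 3

   a single unsigned comparison standing in for x >= 2 && x <= 5.  */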
4584 \f
4585 /* Return the predecessor of VAL in its type, handling the infinite case. */
4586
4587 static tree
4588 range_predecessor (tree val)
4589 {
4590 tree type = TREE_TYPE (val);
4591
4592 if (INTEGRAL_TYPE_P (type)
4593 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4594 return 0;
4595 else
4596 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4597 }
4598
4599 /* Return the successor of VAL in its type, handling the infinite case. */
4600
4601 static tree
4602 range_successor (tree val)
4603 {
4604 tree type = TREE_TYPE (val);
4605
4606 if (INTEGRAL_TYPE_P (type)
4607 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4608 return 0;
4609 else
4610 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4611 }
4612
4613 /* Given two ranges, see if we can merge them into one. Return 1 if we
4614 can, 0 if we can't. Set the output range into the specified parameters. */
4615
4616 static int
4617 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4618 tree high0, int in1_p, tree low1, tree high1)
4619 {
4620 int no_overlap;
4621 int subset;
4622 int temp;
4623 tree tem;
4624 int in_p;
4625 tree low, high;
4626 int lowequal = ((low0 == 0 && low1 == 0)
4627 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4628 low0, 0, low1, 0)));
4629 int highequal = ((high0 == 0 && high1 == 0)
4630 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4631 high0, 1, high1, 1)));
4632
4633 /* Make range 0 be the range that starts first, or ends last if they
4634      start at the same value.  Swap them if that is not the case.  */
4635 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4636 low0, 0, low1, 0))
4637 || (lowequal
4638 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4639 high1, 1, high0, 1))))
4640 {
4641 temp = in0_p, in0_p = in1_p, in1_p = temp;
4642 tem = low0, low0 = low1, low1 = tem;
4643 tem = high0, high0 = high1, high1 = tem;
4644 }
4645
4646 /* Now flag two cases, whether the ranges are disjoint or whether the
4647 second range is totally subsumed in the first. Note that the tests
4648 below are simplified by the ones above. */
4649 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4650 high0, 1, low1, 0));
4651 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4652 high1, 1, high0, 1));
4653
4654 /* We now have four cases, depending on whether we are including or
4655 excluding the two ranges. */
4656 if (in0_p && in1_p)
4657 {
4658 /* If they don't overlap, the result is false. If the second range
4659 is a subset it is the result. Otherwise, the range is from the start
4660 of the second to the end of the first. */
4661 if (no_overlap)
4662 in_p = 0, low = high = 0;
4663 else if (subset)
4664 in_p = 1, low = low1, high = high1;
4665 else
4666 in_p = 1, low = low1, high = high0;
4667 }
4668
4669 else if (in0_p && ! in1_p)
4670 {
4671 /* If they don't overlap, the result is the first range. If they are
4672 equal, the result is false. If the second range is a subset of the
4673 first, and the ranges begin at the same place, we go from just after
4674 the end of the second range to the end of the first. If the second
4675 range is not a subset of the first, or if it is a subset and both
4676 ranges end at the same place, the range starts at the start of the
4677 first range and ends just before the second range.
4678 Otherwise, we can't describe this as a single range. */
4679 if (no_overlap)
4680 in_p = 1, low = low0, high = high0;
4681 else if (lowequal && highequal)
4682 in_p = 0, low = high = 0;
4683 else if (subset && lowequal)
4684 {
4685 low = range_successor (high1);
4686 high = high0;
4687 in_p = 1;
4688 if (low == 0)
4689 {
4690 /* We are in the weird situation where high0 > high1 but
4691 high1 has no successor. Punt. */
4692 return 0;
4693 }
4694 }
4695 else if (! subset || highequal)
4696 {
4697 low = low0;
4698 high = range_predecessor (low1);
4699 in_p = 1;
4700 if (high == 0)
4701 {
4702 /* low0 < low1 but low1 has no predecessor. Punt. */
4703 return 0;
4704 }
4705 }
4706 else
4707 return 0;
4708 }
4709
4710 else if (! in0_p && in1_p)
4711 {
4712 /* If they don't overlap, the result is the second range. If the second
4713 is a subset of the first, the result is false. Otherwise,
4714 the range starts just after the first range and ends at the
4715 end of the second. */
4716 if (no_overlap)
4717 in_p = 1, low = low1, high = high1;
4718 else if (subset || highequal)
4719 in_p = 0, low = high = 0;
4720 else
4721 {
4722 low = range_successor (high0);
4723 high = high1;
4724 in_p = 1;
4725 if (low == 0)
4726 {
4727 /* high1 > high0 but high0 has no successor. Punt. */
4728 return 0;
4729 }
4730 }
4731 }
4732
4733 else
4734 {
4735 /* The case where we are excluding both ranges. Here the complex case
4736 is if they don't overlap. In that case, the only time we have a
4737 range is if they are adjacent. If the second is a subset of the
4738 first, the result is the first. Otherwise, the range to exclude
4739 starts at the beginning of the first range and ends at the end of the
4740 second. */
4741 if (no_overlap)
4742 {
4743 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4744 range_successor (high0),
4745 1, low1, 0)))
4746 in_p = 0, low = low0, high = high1;
4747 else
4748 {
4749 /* Canonicalize - [min, x] into - [-, x]. */
4750 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4751 switch (TREE_CODE (TREE_TYPE (low0)))
4752 {
4753 case ENUMERAL_TYPE:
4754 if (TYPE_PRECISION (TREE_TYPE (low0))
4755 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4756 break;
4757 /* FALLTHROUGH */
4758 case INTEGER_TYPE:
4759 if (tree_int_cst_equal (low0,
4760 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4761 low0 = 0;
4762 break;
4763 case POINTER_TYPE:
4764 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4765 && integer_zerop (low0))
4766 low0 = 0;
4767 break;
4768 default:
4769 break;
4770 }
4771
4772 /* Canonicalize - [x, max] into - [x, -]. */
4773 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4774 switch (TREE_CODE (TREE_TYPE (high1)))
4775 {
4776 case ENUMERAL_TYPE:
4777 if (TYPE_PRECISION (TREE_TYPE (high1))
4778 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4779 break;
4780 /* FALLTHROUGH */
4781 case INTEGER_TYPE:
4782 if (tree_int_cst_equal (high1,
4783 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4784 high1 = 0;
4785 break;
4786 case POINTER_TYPE:
4787 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4788 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4789 high1, 1,
4790 integer_one_node, 1)))
4791 high1 = 0;
4792 break;
4793 default:
4794 break;
4795 }
4796
4797 	      /* The ranges might also be adjacent between the maximum and
4798 minimum values of the given type. For
4799 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4800 return + [x + 1, y - 1]. */
4801 if (low0 == 0 && high1 == 0)
4802 {
4803 low = range_successor (high0);
4804 high = range_predecessor (low1);
4805 if (low == 0 || high == 0)
4806 return 0;
4807
4808 in_p = 1;
4809 }
4810 else
4811 return 0;
4812 }
4813 }
4814 else if (subset)
4815 in_p = 0, low = low0, high = high0;
4816 else
4817 in_p = 0, low = low0, high = high1;
4818 }
4819
4820 *pin_p = in_p, *plow = low, *phigh = high;
4821 return 1;
4822 }
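/* For illustration (editor's sketch): merging + [2, 11] with + [8, 20]
   (both "in" ranges): the two overlap and neither subsumes the other,
   so the in0_p && in1_p case yields the intersection + [8, 11].  With
   in0_p set and in1_p clear and the same bounds, the "no overlap"
   test fails and the result is + [2, 7], the part of the first range
   that survives excluding the second.  */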
4823 \f
4824
4825 /* Subroutine of fold, looking inside expressions of the form
4826 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4827 of the COND_EXPR. This function is being used also to optimize
4828 A op B ? C : A, by reversing the comparison first.
4829
4830 Return a folded expression whose code is not a COND_EXPR
4831 anymore, or NULL_TREE if no folding opportunity is found. */
4832
4833 static tree
4834 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4835 {
4836 enum tree_code comp_code = TREE_CODE (arg0);
4837 tree arg00 = TREE_OPERAND (arg0, 0);
4838 tree arg01 = TREE_OPERAND (arg0, 1);
4839 tree arg1_type = TREE_TYPE (arg1);
4840 tree tem;
4841
4842 STRIP_NOPS (arg1);
4843 STRIP_NOPS (arg2);
4844
4845 /* If we have A op 0 ? A : -A, consider applying the following
4846 transformations:
4847
4848 A == 0? A : -A same as -A
4849 A != 0? A : -A same as A
4850 A >= 0? A : -A same as abs (A)
4851 A > 0? A : -A same as abs (A)
4852 A <= 0? A : -A same as -abs (A)
4853 A < 0? A : -A same as -abs (A)
4854
4855 None of these transformations work for modes with signed
4856 zeros. If A is +/-0, the first two transformations will
4857 change the sign of the result (from +0 to -0, or vice
4858 versa). The last four will fix the sign of the result,
4859 even though the original expressions could be positive or
4860 negative, depending on the sign of A.
4861
4862 Note that all these transformations are correct if A is
4863 NaN, since the two alternatives (A and -A) are also NaNs. */
4864 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4865 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4866 ? real_zerop (arg01)
4867 : integer_zerop (arg01))
4868 && ((TREE_CODE (arg2) == NEGATE_EXPR
4869 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4870 /* In the case that A is of the form X-Y, '-A' (arg2) may
4871 have already been folded to Y-X, check for that. */
4872 || (TREE_CODE (arg1) == MINUS_EXPR
4873 && TREE_CODE (arg2) == MINUS_EXPR
4874 && operand_equal_p (TREE_OPERAND (arg1, 0),
4875 TREE_OPERAND (arg2, 1), 0)
4876 && operand_equal_p (TREE_OPERAND (arg1, 1),
4877 TREE_OPERAND (arg2, 0), 0))))
4878 switch (comp_code)
4879 {
4880 case EQ_EXPR:
4881 case UNEQ_EXPR:
4882 tem = fold_convert (arg1_type, arg1);
4883 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4884 case NE_EXPR:
4885 case LTGT_EXPR:
4886 return pedantic_non_lvalue (fold_convert (type, arg1));
4887 case UNGE_EXPR:
4888 case UNGT_EXPR:
4889 if (flag_trapping_math)
4890 break;
4891 /* Fall through. */
4892 case GE_EXPR:
4893 case GT_EXPR:
4894 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4895 arg1 = fold_convert (signed_type_for
4896 (TREE_TYPE (arg1)), arg1);
4897 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4898 return pedantic_non_lvalue (fold_convert (type, tem));
4899 case UNLE_EXPR:
4900 case UNLT_EXPR:
4901 if (flag_trapping_math)
4902 break;
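      /* Fall through.  */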
4903 case LE_EXPR:
4904 case LT_EXPR:
4905 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4906 arg1 = fold_convert (signed_type_for
4907 (TREE_TYPE (arg1)), arg1);
4908 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4909 return negate_expr (fold_convert (type, tem));
4910 default:
4911 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4912 break;
4913 }
4914
4915 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4916 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4917 both transformations are correct when A is NaN: A != 0
4918 is then true, and A == 0 is false. */
4919
4920 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4921 && integer_zerop (arg01) && integer_zerop (arg2))
4922 {
4923 if (comp_code == NE_EXPR)
4924 return pedantic_non_lvalue (fold_convert (type, arg1));
4925 else if (comp_code == EQ_EXPR)
4926 return build_int_cst (type, 0);
4927 }
4928
4929 /* Try some transformations of A op B ? A : B.
4930
4931 A == B? A : B same as B
4932 A != B? A : B same as A
4933 A >= B? A : B same as max (A, B)
4934 A > B? A : B same as max (B, A)
4935 A <= B? A : B same as min (A, B)
4936 A < B? A : B same as min (B, A)
4937
4938 As above, these transformations don't work in the presence
4939 of signed zeros. For example, if A and B are zeros of
4940 opposite sign, the first two transformations will change
4941 the sign of the result. In the last four, the original
4942 expressions give different results for (A=+0, B=-0) and
4943 (A=-0, B=+0), but the transformed expressions do not.
4944
4945 The first two transformations are correct if either A or B
4946 is a NaN. In the first transformation, the condition will
4947 be false, and B will indeed be chosen. In the case of the
4948 second transformation, the condition A != B will be true,
4949 and A will be chosen.
4950
4951 The conversions to max() and min() are not correct if B is
4952 a number and A is not. The conditions in the original
4953 expressions will be false, so all four give B. The min()
4954 and max() versions would give a NaN instead. */
4955 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4956 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4957 /* Avoid these transformations if the COND_EXPR may be used
4958 as an lvalue in the C++ front-end. PR c++/19199. */
4959 && (in_gimple_form
4960 || (strcmp (lang_hooks.name, "GNU C++") != 0
4961 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4962 || ! maybe_lvalue_p (arg1)
4963 || ! maybe_lvalue_p (arg2)))
4964 {
4965 tree comp_op0 = arg00;
4966 tree comp_op1 = arg01;
4967 tree comp_type = TREE_TYPE (comp_op0);
4968
4969 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4970 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4971 {
4972 comp_type = type;
4973 comp_op0 = arg1;
4974 comp_op1 = arg2;
4975 }
4976
4977 switch (comp_code)
4978 {
4979 case EQ_EXPR:
4980 return pedantic_non_lvalue (fold_convert (type, arg2));
4981 case NE_EXPR:
4982 return pedantic_non_lvalue (fold_convert (type, arg1));
4983 case LE_EXPR:
4984 case LT_EXPR:
4985 case UNLE_EXPR:
4986 case UNLT_EXPR:
4987 /* In C++ a ?: expression can be an lvalue, so put the
4988 operand which will be used if they are equal first
4989 so that we can convert this back to the
4990 corresponding COND_EXPR. */
4991 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4992 {
4993 comp_op0 = fold_convert (comp_type, comp_op0);
4994 comp_op1 = fold_convert (comp_type, comp_op1);
4995 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4996 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4997 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4998 return pedantic_non_lvalue (fold_convert (type, tem));
4999 }
5000 break;
5001 case GE_EXPR:
5002 case GT_EXPR:
5003 case UNGE_EXPR:
5004 case UNGT_EXPR:
5005 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5006 {
5007 comp_op0 = fold_convert (comp_type, comp_op0);
5008 comp_op1 = fold_convert (comp_type, comp_op1);
5009 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5010 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5011 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5012 return pedantic_non_lvalue (fold_convert (type, tem));
5013 }
5014 break;
5015 case UNEQ_EXPR:
5016 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5017 return pedantic_non_lvalue (fold_convert (type, arg2));
5018 break;
5019 case LTGT_EXPR:
5020 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5021 return pedantic_non_lvalue (fold_convert (type, arg1));
5022 break;
5023 default:
5024 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5025 break;
5026 }
5027 }
5028
5029 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5030 we might still be able to simplify this. For example,
5031 if C1 is one less or one more than C2, this might have started
5032 out as a MIN or MAX and been transformed by this function.
5033 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5034
5035 if (INTEGRAL_TYPE_P (type)
5036 && TREE_CODE (arg01) == INTEGER_CST
5037 && TREE_CODE (arg2) == INTEGER_CST)
5038 switch (comp_code)
5039 {
5040 case EQ_EXPR:
5041 /* We can replace A with C1 in this case. */
5042 arg1 = fold_convert (type, arg01);
5043 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5044
5045 case LT_EXPR:
5046 /* If C1 is C2 + 1, this is min(A, C2). */
5047 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5048 OEP_ONLY_CONST)
5049 && operand_equal_p (arg01,
5050 const_binop (PLUS_EXPR, arg2,
5051 build_int_cst (type, 1), 0),
5052 OEP_ONLY_CONST))
5053 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5054 type,
5055 fold_convert (type, arg1),
5056 arg2));
5057 break;
5058
5059 case LE_EXPR:
5060 /* If C1 is C2 - 1, this is min(A, C2). */
5061 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5062 OEP_ONLY_CONST)
5063 && operand_equal_p (arg01,
5064 const_binop (MINUS_EXPR, arg2,
5065 build_int_cst (type, 1), 0),
5066 OEP_ONLY_CONST))
5067 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5068 type,
5069 fold_convert (type, arg1),
5070 arg2));
5071 break;
5072
5073 case GT_EXPR:
5074 /* If C1 is C2 - 1, this is max(A, C2). */
5075 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5076 OEP_ONLY_CONST)
5077 && operand_equal_p (arg01,
5078 const_binop (MINUS_EXPR, arg2,
5079 build_int_cst (type, 1), 0),
5080 OEP_ONLY_CONST))
5081 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5082 type,
5083 fold_convert (type, arg1),
5084 arg2));
5085 break;
5086
5087 case GE_EXPR:
5088 /* If C1 is C2 + 1, this is max(A, C2). */
5089 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5090 OEP_ONLY_CONST)
5091 && operand_equal_p (arg01,
5092 const_binop (PLUS_EXPR, arg2,
5093 build_int_cst (type, 1), 0),
5094 OEP_ONLY_CONST))
5095 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5096 type,
5097 fold_convert (type, arg1),
5098 arg2));
5099 break;
5100 case NE_EXPR:
5101 break;
5102 default:
5103 gcc_unreachable ();
5104 }
5105
5106 return NULL_TREE;
5107 }
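/* For illustration (editor's sketch): for a signed integer x (no
   signed zeros to honor),

     x >= 0 ? x : -x      folds to  ABS_EXPR <x>
     x <= 0 ? x : -x      folds to  -ABS_EXPR <x>
     x == 5 ? x : 2       folds to  x == 5 ? 5 : 2

   the last via the INTEGER_CST block above, which substitutes the
   known value of x in the true arm.  */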
5108
5109
5110 \f
5111 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5112 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5113 #endif
5114
5115 /* EXP is some logical combination of boolean tests. See if we can
5116 merge it into some range test. Return the new tree if so. */
5117
5118 static tree
5119 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5120 {
5121 int or_op = (code == TRUTH_ORIF_EXPR
5122 || code == TRUTH_OR_EXPR);
5123 int in0_p, in1_p, in_p;
5124 tree low0, low1, low, high0, high1, high;
5125 bool strict_overflow_p = false;
5126 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5127 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5128 tree tem;
5129 const char * const warnmsg = G_("assuming signed overflow does not occur "
5130 "when simplifying range test");
5131
5132 /* If this is an OR operation, invert both sides; we will invert
5133 again at the end. */
5134 if (or_op)
5135 in0_p = ! in0_p, in1_p = ! in1_p;
5136
5137 /* If both expressions are the same, if we can merge the ranges, and we
5138 can build the range test, return it or it inverted. If one of the
5139 ranges is always true or always false, consider it to be the same
5140 expression as the other. */
5141 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5142 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5143 in1_p, low1, high1)
5144 && 0 != (tem = (build_range_check (type,
5145 lhs != 0 ? lhs
5146 : rhs != 0 ? rhs : integer_zero_node,
5147 in_p, low, high))))
5148 {
5149 if (strict_overflow_p)
5150 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5151 return or_op ? invert_truthvalue (tem) : tem;
5152 }
5153
5154 /* On machines where the branch cost is expensive, if this is a
5155 short-circuited branch and the underlying object on both sides
5156 is the same, make a non-short-circuit operation. */
5157 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5158 && lhs != 0 && rhs != 0
5159 && (code == TRUTH_ANDIF_EXPR
5160 || code == TRUTH_ORIF_EXPR)
5161 && operand_equal_p (lhs, rhs, 0))
5162 {
5163 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5164 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5165 which cases we can't do this. */
5166 if (simple_operand_p (lhs))
5167 return build2 (code == TRUTH_ANDIF_EXPR
5168 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5169 type, op0, op1);
5170
5171 else if (lang_hooks.decls.global_bindings_p () == 0
5172 && ! CONTAINS_PLACEHOLDER_P (lhs))
5173 {
5174 tree common = save_expr (lhs);
5175
5176 if (0 != (lhs = build_range_check (type, common,
5177 or_op ? ! in0_p : in0_p,
5178 low0, high0))
5179 && (0 != (rhs = build_range_check (type, common,
5180 or_op ? ! in1_p : in1_p,
5181 low1, high1))))
5182 {
5183 if (strict_overflow_p)
5184 fold_overflow_warning (warnmsg,
5185 WARN_STRICT_OVERFLOW_COMPARISON);
5186 return build2 (code == TRUTH_ANDIF_EXPR
5187 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5188 type, lhs, rhs);
5189 }
5190 }
5191 }
5192
5193 return 0;
5194 }
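/* For illustration (editor's sketch): the classic beneficiary is a
   character-class test such as

     c >= '0' && c <= '9'

   whose two ranges merge to + ['0', '9'], after which
   build_range_check turns the whole thing into a single comparison,
   in effect (unsigned) (c - '0') <= 9, modulo the exact types.  */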
5195 \f
5196 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P-bit
5197    value.  Arrange things so the extra bits will be set to zero if and
5198    only if C is sign-extended to its full width.  If MASK is nonzero,
5199 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5200
5201 static tree
5202 unextend (tree c, int p, int unsignedp, tree mask)
5203 {
5204 tree type = TREE_TYPE (c);
5205 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5206 tree temp;
5207
5208 if (p == modesize || unsignedp)
5209 return c;
5210
5211 /* We work by getting just the sign bit into the low-order bit, then
5212 into the high-order bit, then sign-extend. We then XOR that value
5213 with C. */
5214 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5215 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5216
5217 /* We must use a signed type in order to get an arithmetic right shift.
5218 However, we must also avoid introducing accidental overflows, so that
5219 a subsequent call to integer_zerop will work. Hence we must
5220 do the type conversion here. At this point, the constant is either
5221 zero or one, and the conversion to a signed type can never overflow.
5222 We could get an overflow if this conversion is done anywhere else. */
5223 if (TYPE_UNSIGNED (type))
5224 temp = fold_convert (signed_type_for (type), temp);
5225
5226 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5227 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5228 if (mask != 0)
5229 temp = const_binop (BIT_AND_EXPR, temp,
5230 fold_convert (TREE_TYPE (c), mask), 0);
5231 /* If necessary, convert the type back to match the type of C. */
5232 if (TYPE_UNSIGNED (type))
5233 temp = fold_convert (type, temp);
5234
5235 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5236 }
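/* For illustration (editor's worked example): with a 32-bit mode,
   P == 4 and C == 0xfffffffc (the 4-bit value -4 correctly
   sign-extended), TEMP comes out as 0xfffffff0 and C ^ TEMP is
   0x0000000c -- the extra bits are zero exactly because C was
   sign-extended.  Starting instead from C == 0x0000000c, the same
   steps yield 0xfffffffc, whose nonzero high bits mark the constant
   as not sign-extended.  */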
5237 \f
5238 /* Find ways of folding logical expressions of LHS and RHS:
5239 Try to merge two comparisons to the same innermost item.
5240 Look for range tests like "ch >= '0' && ch <= '9'".
5241 Look for combinations of simple terms on machines with expensive branches
5242 and evaluate the RHS unconditionally.
5243
5244 For example, if we have p->a == 2 && p->b == 4 and we can make an
5245 object large enough to span both A and B, we can do this with a comparison
5246 against the object ANDed with the a mask.
5247
5248 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5249 operations to do this with one comparison.
5250
5251    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5252    function and the one above.
5253
5254 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5255 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5256
5257 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5258 two operands.
5259
5260 We return the simplified tree or 0 if no optimization is possible. */
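/* For illustration (editor's sketch, layout-dependent): given

     struct s { unsigned char a, b; } *p;

   the test "p->a == 2 && p->b == 4" can be merged into one 16-bit
   load, mask and compare, conceptually

     (*(unsigned short *) p & 0xffff) == K

   where K is 0x0402 or 0x0204 depending on byte order; the code below
   computes the enclosing mode and the shifted masks and constants that
   make such a rewrite safe.  */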
5261
5262 static tree
5263 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5264 {
5265 /* If this is the "or" of two comparisons, we can do something if
5266 the comparisons are NE_EXPR. If this is the "and", we can do something
5267 if the comparisons are EQ_EXPR. I.e.,
5268 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5269
5270 WANTED_CODE is this operation code. For single bit fields, we can
5271 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5272 comparison for one-bit fields. */
5273
5274 enum tree_code wanted_code;
5275 enum tree_code lcode, rcode;
5276 tree ll_arg, lr_arg, rl_arg, rr_arg;
5277 tree ll_inner, lr_inner, rl_inner, rr_inner;
5278 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5279 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5280 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5281 HOST_WIDE_INT lnbitsize, lnbitpos;
5282 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5283 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5284 enum machine_mode lnmode;
5285 tree ll_mask, lr_mask, rl_mask, rr_mask;
5286 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5287 tree l_const, r_const;
5288 tree lntype, result;
5289 int first_bit, end_bit;
5290 int volatilep;
5291 tree orig_lhs = lhs, orig_rhs = rhs;
5292 enum tree_code orig_code = code;
5293
5294 /* Start by getting the comparison codes. Fail if anything is volatile.
5295 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5296 it were surrounded with a NE_EXPR. */
5297
5298 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5299 return 0;
5300
5301 lcode = TREE_CODE (lhs);
5302 rcode = TREE_CODE (rhs);
5303
5304 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5305 {
5306 lhs = build2 (NE_EXPR, truth_type, lhs,
5307 build_int_cst (TREE_TYPE (lhs), 0));
5308 lcode = NE_EXPR;
5309 }
5310
5311 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5312 {
5313 rhs = build2 (NE_EXPR, truth_type, rhs,
5314 build_int_cst (TREE_TYPE (rhs), 0));
5315 rcode = NE_EXPR;
5316 }
5317
5318 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5319 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5320 return 0;
5321
5322 ll_arg = TREE_OPERAND (lhs, 0);
5323 lr_arg = TREE_OPERAND (lhs, 1);
5324 rl_arg = TREE_OPERAND (rhs, 0);
5325 rr_arg = TREE_OPERAND (rhs, 1);
5326
5327 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5328 if (simple_operand_p (ll_arg)
5329 && simple_operand_p (lr_arg))
5330 {
5331 tree result;
5332 if (operand_equal_p (ll_arg, rl_arg, 0)
5333 && operand_equal_p (lr_arg, rr_arg, 0))
5334 {
5335 result = combine_comparisons (code, lcode, rcode,
5336 truth_type, ll_arg, lr_arg);
5337 if (result)
5338 return result;
5339 }
5340 else if (operand_equal_p (ll_arg, rr_arg, 0)
5341 && operand_equal_p (lr_arg, rl_arg, 0))
5342 {
5343 result = combine_comparisons (code, lcode,
5344 swap_tree_comparison (rcode),
5345 truth_type, ll_arg, lr_arg);
5346 if (result)
5347 return result;
5348 }
5349 }
5350
5351 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5352 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5353
5354 /* If the RHS can be evaluated unconditionally and its operands are
5355 simple, it wins to evaluate the RHS unconditionally on machines
5356 with expensive branches. In this case, this isn't a comparison
5357 that can be merged. Avoid doing this if the RHS is a floating-point
5358 comparison since those can trap. */
5359
5360 if (BRANCH_COST >= 2
5361 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5362 && simple_operand_p (rl_arg)
5363 && simple_operand_p (rr_arg))
5364 {
5365 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5366 if (code == TRUTH_OR_EXPR
5367 && lcode == NE_EXPR && integer_zerop (lr_arg)
5368 && rcode == NE_EXPR && integer_zerop (rr_arg)
5369 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5370 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5371 return build2 (NE_EXPR, truth_type,
5372 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5373 ll_arg, rl_arg),
5374 build_int_cst (TREE_TYPE (ll_arg), 0));
5375
5376 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5377 if (code == TRUTH_AND_EXPR
5378 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5379 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5380 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5381 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5382 return build2 (EQ_EXPR, truth_type,
5383 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5384 ll_arg, rl_arg),
5385 build_int_cst (TREE_TYPE (ll_arg), 0));
5386
5387 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5388 {
5389 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5390 return build2 (code, truth_type, lhs, rhs);
5391 return NULL_TREE;
5392 }
5393 }
5394
5395 /* See if the comparisons can be merged. Then get all the parameters for
5396 each side. */
5397
5398 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5399 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5400 return 0;
5401
5402 volatilep = 0;
5403 ll_inner = decode_field_reference (ll_arg,
5404 &ll_bitsize, &ll_bitpos, &ll_mode,
5405 &ll_unsignedp, &volatilep, &ll_mask,
5406 &ll_and_mask);
5407 lr_inner = decode_field_reference (lr_arg,
5408 &lr_bitsize, &lr_bitpos, &lr_mode,
5409 &lr_unsignedp, &volatilep, &lr_mask,
5410 &lr_and_mask);
5411 rl_inner = decode_field_reference (rl_arg,
5412 &rl_bitsize, &rl_bitpos, &rl_mode,
5413 &rl_unsignedp, &volatilep, &rl_mask,
5414 &rl_and_mask);
5415 rr_inner = decode_field_reference (rr_arg,
5416 &rr_bitsize, &rr_bitpos, &rr_mode,
5417 &rr_unsignedp, &volatilep, &rr_mask,
5418 &rr_and_mask);
5419
5420 	  /* The inner operation on the lhs of each comparison must be the
5421 	     same if we are to be able to do anything.
5422 Then see if we have constants. If not, the same must be true for
5423 the rhs's. */
5424 if (volatilep || ll_inner == 0 || rl_inner == 0
5425 || ! operand_equal_p (ll_inner, rl_inner, 0))
5426 return 0;
5427
5428 if (TREE_CODE (lr_arg) == INTEGER_CST
5429 && TREE_CODE (rr_arg) == INTEGER_CST)
5430 l_const = lr_arg, r_const = rr_arg;
5431 else if (lr_inner == 0 || rr_inner == 0
5432 || ! operand_equal_p (lr_inner, rr_inner, 0))
5433 return 0;
5434 else
5435 l_const = r_const = 0;
5436
5437 /* If either comparison code is not correct for our logical operation,
5438 fail. However, we can convert a one-bit comparison against zero into
5439 the opposite comparison against that bit being set in the field. */
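/* For example (illustrative): with TRUTH_AND_EXPR the wanted code is
   EQ_EXPR, so a one-bit test such as (x & 4) != 0 is recast below as
   the field compared against 4, by substituting the mask for the zero
   constant.  */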
5440
5441 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5442 if (lcode != wanted_code)
5443 {
5444 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5445 {
5446 /* Make the left operand unsigned, since we are only interested
5447 in the value of one bit. Otherwise we are doing the wrong
5448 thing below. */
5449 ll_unsignedp = 1;
5450 l_const = ll_mask;
5451 }
5452 else
5453 return 0;
5454 }
5455
5456 /* This is analogous to the code for l_const above. */
5457 if (rcode != wanted_code)
5458 {
5459 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5460 {
5461 rl_unsignedp = 1;
5462 r_const = rl_mask;
5463 }
5464 else
5465 return 0;
5466 }
5467
5468 /* See if we can find a mode that contains both fields being compared on
5469 the left. If we can't, fail. Otherwise, update all constants and masks
5470 to be relative to a field of that size. */
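/* A sketch of the intent, assuming two adjacent bitfields a and b that
   both fit in one QImode byte: "s.a == 1 && s.b == 2" can then be done
   as a single byte load, masked and compared against one combined
   constant, with the bit positions depending on BYTES_BIG_ENDIAN.  */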
5471 first_bit = MIN (ll_bitpos, rl_bitpos);
5472 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5473 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5474 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5475 volatilep);
5476 if (lnmode == VOIDmode)
5477 return 0;
5478
5479 lnbitsize = GET_MODE_BITSIZE (lnmode);
5480 lnbitpos = first_bit & ~ (lnbitsize - 1);
5481 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5482 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5483
5484 if (BYTES_BIG_ENDIAN)
5485 {
5486 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5487 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5488 }
5489
5490 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5491 size_int (xll_bitpos), 0);
5492 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5493 size_int (xrl_bitpos), 0);
5494
5495 if (l_const)
5496 {
5497 l_const = fold_convert (lntype, l_const);
5498 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5499 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5500 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5501 fold_build1 (BIT_NOT_EXPR,
5502 lntype, ll_mask),
5503 0)))
5504 {
5505 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5506
5507 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5508 }
5509 }
5510 if (r_const)
5511 {
5512 r_const = fold_convert (lntype, r_const);
5513 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5514 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5515 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5516 fold_build1 (BIT_NOT_EXPR,
5517 lntype, rl_mask),
5518 0)))
5519 {
5520 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5521
5522 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5523 }
5524 }
5525
5526 /* Handle the case of comparisons with constants. If there is something in
5527 common between the masks, those bits of the constants must be the same.
5528 If not, the condition is always false. Test for this to avoid generating
5529 incorrect code below. */
5530 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5531 if (! integer_zerop (result)
5532 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5533 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5534 {
5535 if (wanted_code == NE_EXPR)
5536 {
5537 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5538 return constant_boolean_node (true, truth_type);
5539 }
5540 else
5541 {
5542 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5543 return constant_boolean_node (false, truth_type);
5544 }
5545 }
5546
5547 return NULL_TREE;
5548 }
5549 \f
5550 /* Optimize the comparison CODE of OP0, a MIN_EXPR or MAX_EXPR, with
5551 the constant OP1.  TYPE is the type of the result.  */
5552
5553 static tree
5554 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5555 {
5556 tree arg0 = op0;
5557 enum tree_code op_code;
5558 tree comp_const;
5559 tree minmax_const;
5560 int consts_equal, consts_lt;
5561 tree inner;
5562
5563 STRIP_SIGN_NOPS (arg0);
5564
5565 op_code = TREE_CODE (arg0);
5566 minmax_const = TREE_OPERAND (arg0, 1);
5567 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5568 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5569 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5570 inner = TREE_OPERAND (arg0, 0);
5571
5572 /* If something does not permit us to optimize, return the original tree. */
5573 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5574 || TREE_CODE (comp_const) != INTEGER_CST
5575 || TREE_OVERFLOW (comp_const)
5576 || TREE_CODE (minmax_const) != INTEGER_CST
5577 || TREE_OVERFLOW (minmax_const))
5578 return NULL_TREE;
5579
5580 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5581 and GT_EXPR, doing the rest with recursive calls using logical
5582 simplifications. */
5583 switch (code)
5584 {
5585 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5586 {
5587 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5588 type, op0, op1);
5589 if (tem)
5590 return invert_truthvalue (tem);
5591 return NULL_TREE;
5592 }
5593
5594 case GE_EXPR:
5595 return
5596 fold_build2 (TRUTH_ORIF_EXPR, type,
5597 optimize_minmax_comparison
5598 (EQ_EXPR, type, arg0, comp_const),
5599 optimize_minmax_comparison
5600 (GT_EXPR, type, arg0, comp_const));
5601
5602 case EQ_EXPR:
5603 if (op_code == MAX_EXPR && consts_equal)
5604 /* MAX (X, 0) == 0 -> X <= 0 */
5605 return fold_build2 (LE_EXPR, type, inner, comp_const);
5606
5607 else if (op_code == MAX_EXPR && consts_lt)
5608 /* MAX (X, 0) == 5 -> X == 5 */
5609 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5610
5611 else if (op_code == MAX_EXPR)
5612 /* MAX (X, 0) == -1 -> false */
5613 return omit_one_operand (type, integer_zero_node, inner);
5614
5615 else if (consts_equal)
5616 /* MIN (X, 0) == 0 -> X >= 0 */
5617 return fold_build2 (GE_EXPR, type, inner, comp_const);
5618
5619 else if (consts_lt)
5620 /* MIN (X, 0) == 5 -> false */
5621 return omit_one_operand (type, integer_zero_node, inner);
5622
5623 else
5624 /* MIN (X, 0) == -1 -> X == -1 */
5625 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5626
5627 case GT_EXPR:
5628 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5629 /* MAX (X, 0) > 0 -> X > 0
5630 MAX (X, 0) > 5 -> X > 5 */
5631 return fold_build2 (GT_EXPR, type, inner, comp_const);
5632
5633 else if (op_code == MAX_EXPR)
5634 /* MAX (X, 0) > -1 -> true */
5635 return omit_one_operand (type, integer_one_node, inner);
5636
5637 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5638 /* MIN (X, 0) > 0 -> false
5639 MIN (X, 0) > 5 -> false */
5640 return omit_one_operand (type, integer_zero_node, inner);
5641
5642 else
5643 /* MIN (X, 0) > -1 -> X > -1 */
5644 return fold_build2 (GT_EXPR, type, inner, comp_const);
5645
5646 default:
5647 return NULL_TREE;
5648 }
5649 }
5650 \f
5651 /* T is an integer expression that is being multiplied, divided, or taken a
5652 modulus (CODE says which and what kind of divide or modulus) by a
5653 constant C. See if we can eliminate that operation by folding it with
5654 other operations already in T. WIDE_TYPE, if non-null, is a type that
5655 should be used for the computation if wider than our type.
5656
5657 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5658 (X * 2) + (Y * 4). We must, however, be assured that either the original
5659 expression would not overflow or that overflow is undefined for the type
5660 in the language in question.
5661
5662 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5663 the machine has a multiply-accumulate insn or that this is part of an
5664 addressing calculation.
5665
5666 If we return a non-null expression, it is an equivalent form of the
5667 original computation, but need not be in the original type.
5668
5669 We set *STRICT_OVERFLOW_P to true if the return value depends on
5670 signed overflow being undefined. Otherwise we do not change
5671 *STRICT_OVERFLOW_P. */
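/* An illustrative case: for t = (x * 4), c = 4 and code = TRUNC_DIV_EXPR
   we can return x, but only when signed overflow is undefined for the
   type; the MULT_EXPR case below then sets *STRICT_OVERFLOW_P to record
   that dependence.  */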
5672
5673 static tree
5674 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5675 bool *strict_overflow_p)
5676 {
5677 /* To avoid exponential search depth, refuse to allow recursion past
5678 three levels. Beyond that (1) it's highly unlikely that we'll find
5679 something interesting and (2) we've probably processed it before
5680 when we built the inner expression. */
5681
5682 static int depth;
5683 tree ret;
5684
5685 if (depth > 3)
5686 return NULL;
5687
5688 depth++;
5689 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5690 depth--;
5691
5692 return ret;
5693 }
5694
5695 static tree
5696 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5697 bool *strict_overflow_p)
5698 {
5699 tree type = TREE_TYPE (t);
5700 enum tree_code tcode = TREE_CODE (t);
5701 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5702 > GET_MODE_SIZE (TYPE_MODE (type)))
5703 ? wide_type : type);
5704 tree t1, t2;
5705 int same_p = tcode == code;
5706 tree op0 = NULL_TREE, op1 = NULL_TREE;
5707 bool sub_strict_overflow_p;
5708
5709 /* Don't deal with constants of zero here; they confuse the code below. */
5710 if (integer_zerop (c))
5711 return NULL_TREE;
5712
5713 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5714 op0 = TREE_OPERAND (t, 0);
5715
5716 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5717 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5718
5719 /* Note that we need not handle conditional operations here since fold
5720 already handles those cases. So just do arithmetic here. */
5721 switch (tcode)
5722 {
5723 case INTEGER_CST:
5724 /* For a constant, we can always simplify if we are a multiply
5725 or (for divide and modulus) if it is a multiple of our constant. */
5726 if (code == MULT_EXPR
5727 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5728 return const_binop (code, fold_convert (ctype, t),
5729 fold_convert (ctype, c), 0);
5730 break;
5731
5732 CASE_CONVERT: case NON_LVALUE_EXPR:
5733 /* If op0 is an expression ... */
5734 if ((COMPARISON_CLASS_P (op0)
5735 || UNARY_CLASS_P (op0)
5736 || BINARY_CLASS_P (op0)
5737 || VL_EXP_CLASS_P (op0)
5738 || EXPRESSION_CLASS_P (op0))
5739 /* ... and has wrapping overflow, and its type is smaller
5740 than ctype, then we cannot pass through as widening. */
5741 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5742 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5743 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5744 && (TYPE_PRECISION (ctype)
5745 > TYPE_PRECISION (TREE_TYPE (op0))))
5746 /* ... or this is a truncation (t is narrower than op0),
5747 then we cannot pass through this narrowing. */
5748 || (TYPE_PRECISION (type)
5749 < TYPE_PRECISION (TREE_TYPE (op0)))
5750 /* ... or signedness changes for division or modulus,
5751 then we cannot pass through this conversion. */
5752 || (code != MULT_EXPR
5753 && (TYPE_UNSIGNED (ctype)
5754 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5755 	     /* ... or the inner type has undefined overflow while the type
5756 		converted to has not, so we cannot do the operation in the inner type
5757 as that would introduce undefined overflow. */
5758 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5759 && !TYPE_OVERFLOW_UNDEFINED (type))))
5760 break;
5761
5762 /* Pass the constant down and see if we can make a simplification. If
5763 we can, replace this expression with the inner simplification for
5764 possible later conversion to our or some other type. */
5765 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5766 && TREE_CODE (t2) == INTEGER_CST
5767 && !TREE_OVERFLOW (t2)
5768 && (0 != (t1 = extract_muldiv (op0, t2, code,
5769 code == MULT_EXPR
5770 ? ctype : NULL_TREE,
5771 strict_overflow_p))))
5772 return t1;
5773 break;
5774
5775 case ABS_EXPR:
5776 /* If widening the type changes it from signed to unsigned, then we
5777 must avoid building ABS_EXPR itself as unsigned. */
5778 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5779 {
5780 tree cstype = (*signed_type_for) (ctype);
5781 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5782 != 0)
5783 {
5784 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5785 return fold_convert (ctype, t1);
5786 }
5787 break;
5788 }
5789 /* If the constant is negative, we cannot simplify this. */
5790 if (tree_int_cst_sgn (c) == -1)
5791 break;
5792 /* FALLTHROUGH */
5793 case NEGATE_EXPR:
5794 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5795 != 0)
5796 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5797 break;
5798
5799 case MIN_EXPR: case MAX_EXPR:
5800 /* If widening the type changes the signedness, then we can't perform
5801 this optimization as that changes the result. */
5802 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5803 break;
5804
5805 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5806 sub_strict_overflow_p = false;
5807 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5808 &sub_strict_overflow_p)) != 0
5809 && (t2 = extract_muldiv (op1, c, code, wide_type,
5810 &sub_strict_overflow_p)) != 0)
5811 {
5812 if (tree_int_cst_sgn (c) < 0)
5813 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5814 if (sub_strict_overflow_p)
5815 *strict_overflow_p = true;
5816 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5817 fold_convert (ctype, t2));
5818 }
5819 break;
5820
5821 case LSHIFT_EXPR: case RSHIFT_EXPR:
5822 /* If the second operand is constant, this is a multiplication
5823 or floor division, by a power of two, so we can treat it that
5824 way unless the multiplier or divisor overflows. Signed
5825 left-shift overflow is implementation-defined rather than
5826 undefined in C90, so do not convert signed left shift into
5827 multiplication. */
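      /* Illustrative sketch: for unsigned x, (x << 3) % 4 is rebuilt
	 below as (x * 8) % 4 and handed back to extract_muldiv, whose
	 MULT_EXPR case folds it to 0.  */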
5828 if (TREE_CODE (op1) == INTEGER_CST
5829 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5830 /* const_binop may not detect overflow correctly,
5831 so check for it explicitly here. */
5832 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5833 && TREE_INT_CST_HIGH (op1) == 0
5834 && 0 != (t1 = fold_convert (ctype,
5835 const_binop (LSHIFT_EXPR,
5836 size_one_node,
5837 op1, 0)))
5838 && !TREE_OVERFLOW (t1))
5839 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5840 ? MULT_EXPR : FLOOR_DIV_EXPR,
5841 ctype, fold_convert (ctype, op0), t1),
5842 c, code, wide_type, strict_overflow_p);
5843 break;
5844
5845 case PLUS_EXPR: case MINUS_EXPR:
5846 /* See if we can eliminate the operation on both sides. If we can, we
5847 can return a new PLUS or MINUS. If we can't, the only remaining
5848 cases where we can do anything are if the second operand is a
5849 constant. */
5850 sub_strict_overflow_p = false;
5851 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5852 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5853 if (t1 != 0 && t2 != 0
5854 && (code == MULT_EXPR
5855 /* If not multiplication, we can only do this if both operands
5856 are divisible by c. */
5857 || (multiple_of_p (ctype, op0, c)
5858 && multiple_of_p (ctype, op1, c))))
5859 {
5860 if (sub_strict_overflow_p)
5861 *strict_overflow_p = true;
5862 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5863 fold_convert (ctype, t2));
5864 }
5865
5866 /* If this was a subtraction, negate OP1 and set it to be an addition.
5867 This simplifies the logic below. */
5868 if (tcode == MINUS_EXPR)
5869 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5870
5871 if (TREE_CODE (op1) != INTEGER_CST)
5872 break;
5873
5874       /* If either OP1 or C is negative, this optimization is not safe for
5875 some of the division and remainder types while for others we need
5876 to change the code. */
5877 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5878 {
5879 if (code == CEIL_DIV_EXPR)
5880 code = FLOOR_DIV_EXPR;
5881 else if (code == FLOOR_DIV_EXPR)
5882 code = CEIL_DIV_EXPR;
5883 else if (code != MULT_EXPR
5884 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5885 break;
5886 }
5887
5888 /* If it's a multiply or a division/modulus operation of a multiple
5889 of our constant, do the operation and verify it doesn't overflow. */
5890 if (code == MULT_EXPR
5891 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5892 {
5893 op1 = const_binop (code, fold_convert (ctype, op1),
5894 fold_convert (ctype, c), 0);
5895 /* We allow the constant to overflow with wrapping semantics. */
5896 if (op1 == 0
5897 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5898 break;
5899 }
5900 else
5901 break;
5902
5903       /* If we have an unsigned type that is not a sizetype, we cannot widen
5904 the operation since it will change the result if the original
5905 computation overflowed. */
5906 if (TYPE_UNSIGNED (ctype)
5907 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5908 && ctype != type)
5909 break;
5910
5911 /* If we were able to eliminate our operation from the first side,
5912 apply our operation to the second side and reform the PLUS. */
5913 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5915
5916 /* The last case is if we are a multiply. In that case, we can
5917 apply the distributive law to commute the multiply and addition
5918 if the multiplication of the constants doesn't overflow. */
5919 if (code == MULT_EXPR)
5920 return fold_build2 (tcode, ctype,
5921 fold_build2 (code, ctype,
5922 fold_convert (ctype, op0),
5923 fold_convert (ctype, c)),
5924 op1);
5925
5926 break;
5927
5928 case MULT_EXPR:
5929 /* We have a special case here if we are doing something like
5930 	 (X * 8) % 4 since we know that's zero.  */
5931 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5932 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5933 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5934 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5935 return omit_one_operand (type, integer_zero_node, op0);
5936
5937 /* ... fall through ... */
5938
5939 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5940 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5941 /* If we can extract our operation from the LHS, do so and return a
5942 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5943 do something only if the second operand is a constant. */
5944 if (same_p
5945 && (t1 = extract_muldiv (op0, c, code, wide_type,
5946 strict_overflow_p)) != 0)
5947 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5948 fold_convert (ctype, op1));
5949 else if (tcode == MULT_EXPR && code == MULT_EXPR
5950 && (t1 = extract_muldiv (op1, c, code, wide_type,
5951 strict_overflow_p)) != 0)
5952 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5953 fold_convert (ctype, t1));
5954 else if (TREE_CODE (op1) != INTEGER_CST)
5955 return 0;
5956
5957 /* If these are the same operation types, we can associate them
5958 assuming no overflow. */
5959 if (tcode == code
5960 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5961 fold_convert (ctype, c), 1))
5962 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5963 TREE_INT_CST_HIGH (t1),
5964 (TYPE_UNSIGNED (ctype)
5965 && tcode != MULT_EXPR) ? -1 : 1,
5966 TREE_OVERFLOW (t1)))
5967 && !TREE_OVERFLOW (t1))
5968 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5969
5970 /* If these operations "cancel" each other, we have the main
5971 optimizations of this pass, which occur when either constant is a
5972 	 multiple of the other, in which case we replace this with an
5973 	 operation in either CODE or TCODE.
5974
5975 If we have an unsigned type that is not a sizetype, we cannot do
5976 this since it will change the result if the original computation
5977 overflowed. */
5978 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5979 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5980 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5981 || (tcode == MULT_EXPR
5982 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5983 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5984 && code != MULT_EXPR)))
5985 {
5986 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5987 {
5988 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5989 *strict_overflow_p = true;
5990 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5991 fold_convert (ctype,
5992 const_binop (TRUNC_DIV_EXPR,
5993 op1, c, 0)));
5994 }
5995 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5996 {
5997 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5998 *strict_overflow_p = true;
5999 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6000 fold_convert (ctype,
6001 const_binop (TRUNC_DIV_EXPR,
6002 c, op1, 0)));
6003 }
6004 }
6005 break;
6006
6007 default:
6008 break;
6009 }
6010
6011 return 0;
6012 }
6013 \f
6014 /* Return a node which has the indicated constant VALUE (either 0 or
6015 1), and is of the indicated TYPE. */
6016
6017 tree
6018 constant_boolean_node (int value, tree type)
6019 {
6020 if (type == integer_type_node)
6021 return value ? integer_one_node : integer_zero_node;
6022 else if (type == boolean_type_node)
6023 return value ? boolean_true_node : boolean_false_node;
6024 else
6025 return build_int_cst (type, value);
6026 }
6027
6028
6029 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6030 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6031 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6032 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6033 COND is the first argument to CODE; otherwise (as in the example
6034 given here), it is the second argument. TYPE is the type of the
6035 original expression. Return NULL_TREE if no simplification is
6036 possible. */
6037
6038 static tree
6039 fold_binary_op_with_conditional_arg (enum tree_code code,
6040 tree type, tree op0, tree op1,
6041 tree cond, tree arg, int cond_first_p)
6042 {
6043 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6044 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6045 tree test, true_value, false_value;
6046 tree lhs = NULL_TREE;
6047 tree rhs = NULL_TREE;
6048
6049 /* This transformation is only worthwhile if we don't have to wrap
6050 arg in a SAVE_EXPR, and the operation can be simplified on at least
6051 one of the branches once it's pushed inside the COND_EXPR.  */
6052 if (!TREE_CONSTANT (arg))
6053 return NULL_TREE;
6054
6055 if (TREE_CODE (cond) == COND_EXPR)
6056 {
6057 test = TREE_OPERAND (cond, 0);
6058 true_value = TREE_OPERAND (cond, 1);
6059 false_value = TREE_OPERAND (cond, 2);
6060       /* If this operand throws an exception, then it does not make
6061 sense to try to perform a logical or arithmetic operation
6062 involving it. */
6063 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6064 lhs = true_value;
6065 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6066 rhs = false_value;
6067 }
6068 else
6069 {
6070 tree testtype = TREE_TYPE (cond);
6071 test = cond;
6072 true_value = constant_boolean_node (true, testtype);
6073 false_value = constant_boolean_node (false, testtype);
6074 }
6075
6076 arg = fold_convert (arg_type, arg);
6077 if (lhs == 0)
6078 {
6079 true_value = fold_convert (cond_type, true_value);
6080 if (cond_first_p)
6081 lhs = fold_build2 (code, type, true_value, arg);
6082 else
6083 lhs = fold_build2 (code, type, arg, true_value);
6084 }
6085 if (rhs == 0)
6086 {
6087 false_value = fold_convert (cond_type, false_value);
6088 if (cond_first_p)
6089 rhs = fold_build2 (code, type, false_value, arg);
6090 else
6091 rhs = fold_build2 (code, type, arg, false_value);
6092 }
6093
6094 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6095 return fold_convert (type, test);
6096 }
6097
6098 \f
6099 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6100
6101 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6102 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6103 ADDEND is the same as X.
6104
6105 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6106 and finite. The problematic cases are when X is zero, and its mode
6107 has signed zeros. In the case of rounding towards -infinity,
6108 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6109 modes, X + 0 is not the same as X because -0 + 0 is 0. */
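/* Concretely: -0.0 + 0.0 evaluates to +0.0, so folding x + 0.0 to x is
   only valid when signed zeros need not be honored; x - 0.0 preserves
   x except under rounding towards -infinity, where +0.0 - 0.0 yields
   -0.0.  */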
6110
6111 bool
6112 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6113 {
6114 if (!real_zerop (addend))
6115 return false;
6116
6117 /* Don't allow the fold with -fsignaling-nans. */
6118 if (HONOR_SNANS (TYPE_MODE (type)))
6119 return false;
6120
6121 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6122 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6123 return true;
6124
6125 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6126 if (TREE_CODE (addend) == REAL_CST
6127 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6128 negate = !negate;
6129
6130 /* The mode has signed zeros, and we have to honor their sign.
6131 In this situation, there is only one case we can return true for.
6132 X - 0 is the same as X unless rounding towards -infinity is
6133 supported. */
6134 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6135 }
6136
6137 /* Subroutine of fold() that checks comparisons of built-in math
6138 functions against real constants.
6139
6140 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6141 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6142 is the type of the result and ARG0 and ARG1 are the operands of the
6143 comparison. ARG1 must be a TREE_REAL_CST.
6144
6145 The function returns the constant folded tree if a simplification
6146 can be made, and NULL_TREE otherwise. */
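/* A worked example of the sqrt case below, assuming double operands and
   no -fsignaling-nans: "sqrt (x) > 2.0" becomes "x > 4.0" since
   c2 = 4.0 is finite, while a constant so large that its square
   overflows to +Inf turns the test into "x == +Inf" instead.  */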
6147
6148 static tree
6149 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6150 tree type, tree arg0, tree arg1)
6151 {
6152 REAL_VALUE_TYPE c;
6153
6154 if (BUILTIN_SQRT_P (fcode))
6155 {
6156 tree arg = CALL_EXPR_ARG (arg0, 0);
6157 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6158
6159 c = TREE_REAL_CST (arg1);
6160 if (REAL_VALUE_NEGATIVE (c))
6161 {
6162 /* sqrt(x) < y is always false, if y is negative. */
6163 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6164 return omit_one_operand (type, integer_zero_node, arg);
6165
6166 /* sqrt(x) > y is always true, if y is negative and we
6167 don't care about NaNs, i.e. negative values of x. */
6168 if (code == NE_EXPR || !HONOR_NANS (mode))
6169 return omit_one_operand (type, integer_one_node, arg);
6170
6171 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6172 return fold_build2 (GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg), dconst0));
6174 }
6175 else if (code == GT_EXPR || code == GE_EXPR)
6176 {
6177 REAL_VALUE_TYPE c2;
6178
6179 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6180 real_convert (&c2, mode, &c2);
6181
6182 if (REAL_VALUE_ISINF (c2))
6183 {
6184 /* sqrt(x) > y is x == +Inf, when y is very large. */
6185 if (HONOR_INFINITIES (mode))
6186 return fold_build2 (EQ_EXPR, type, arg,
6187 build_real (TREE_TYPE (arg), c2));
6188
6189 /* sqrt(x) > y is always false, when y is very large
6190 and we don't care about infinities. */
6191 return omit_one_operand (type, integer_zero_node, arg);
6192 }
6193
6194 /* sqrt(x) > c is the same as x > c*c. */
6195 return fold_build2 (code, type, arg,
6196 build_real (TREE_TYPE (arg), c2));
6197 }
6198 else if (code == LT_EXPR || code == LE_EXPR)
6199 {
6200 REAL_VALUE_TYPE c2;
6201
6202 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6203 real_convert (&c2, mode, &c2);
6204
6205 if (REAL_VALUE_ISINF (c2))
6206 {
6207 /* sqrt(x) < y is always true, when y is a very large
6208 value and we don't care about NaNs or Infinities. */
6209 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6210 return omit_one_operand (type, integer_one_node, arg);
6211
6212 /* sqrt(x) < y is x != +Inf when y is very large and we
6213 don't care about NaNs. */
6214 if (! HONOR_NANS (mode))
6215 return fold_build2 (NE_EXPR, type, arg,
6216 build_real (TREE_TYPE (arg), c2));
6217
6218 /* sqrt(x) < y is x >= 0 when y is very large and we
6219 don't care about Infinities. */
6220 if (! HONOR_INFINITIES (mode))
6221 return fold_build2 (GE_EXPR, type, arg,
6222 build_real (TREE_TYPE (arg), dconst0));
6223
6224 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6225 if (lang_hooks.decls.global_bindings_p () != 0
6226 || CONTAINS_PLACEHOLDER_P (arg))
6227 return NULL_TREE;
6228
6229 arg = save_expr (arg);
6230 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6231 fold_build2 (GE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg),
6233 dconst0)),
6234 fold_build2 (NE_EXPR, type, arg,
6235 build_real (TREE_TYPE (arg),
6236 c2)));
6237 }
6238
6239 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return fold_build2 (code, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6243
6244 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6245 if (lang_hooks.decls.global_bindings_p () == 0
6246 && ! CONTAINS_PLACEHOLDER_P (arg))
6247 {
6248 arg = save_expr (arg);
6249 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6250 fold_build2 (GE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg),
6252 dconst0)),
6253 fold_build2 (code, type, arg,
6254 build_real (TREE_TYPE (arg),
6255 c2)));
6256 }
6257 }
6258 }
6259
6260 return NULL_TREE;
6261 }
6262
6263 /* Subroutine of fold() that optimizes comparisons against Infinities,
6264 either +Inf or -Inf.
6265
6266 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6267 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6268 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6269
6270 The function returns the constant folded tree if a simplification
6271 can be made, and NULL_TREE otherwise. */
6272
6273 static tree
6274 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6275 {
6276 enum machine_mode mode;
6277 REAL_VALUE_TYPE max;
6278 tree temp;
6279 bool neg;
6280
6281 mode = TYPE_MODE (TREE_TYPE (arg0));
6282
6283 /* For negative infinity swap the sense of the comparison. */
6284 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6285 if (neg)
6286 code = swap_tree_comparison (code);
6287
6288 switch (code)
6289 {
6290 case GT_EXPR:
6291       /* x > +Inf is always false, if we ignore sNaNs.  */
6292 if (HONOR_SNANS (mode))
6293 return NULL_TREE;
6294 return omit_one_operand (type, integer_zero_node, arg0);
6295
6296 case LE_EXPR:
6297       /* x <= +Inf is always true, if we don't care about NaNs.  */
6298 if (! HONOR_NANS (mode))
6299 return omit_one_operand (type, integer_one_node, arg0);
6300
6301       /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6302 if (lang_hooks.decls.global_bindings_p () == 0
6303 && ! CONTAINS_PLACEHOLDER_P (arg0))
6304 {
6305 arg0 = save_expr (arg0);
6306 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6307 }
6308 break;
6309
6310 case EQ_EXPR:
6311 case GE_EXPR:
6312 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6313 real_maxval (&max, neg, mode);
6314 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6315 arg0, build_real (TREE_TYPE (arg0), max));
6316
6317 case LT_EXPR:
6318 /* x < +Inf is always equal to x <= DBL_MAX. */
6319 real_maxval (&max, neg, mode);
6320 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6321 arg0, build_real (TREE_TYPE (arg0), max));
6322
6323 case NE_EXPR:
6324 /* x != +Inf is always equal to !(x > DBL_MAX). */
6325 real_maxval (&max, neg, mode);
6326 if (! HONOR_NANS (mode))
6327 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6328 arg0, build_real (TREE_TYPE (arg0), max));
6329
6330 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6332 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6333
6334 default:
6335 break;
6336 }
6337
6338 return NULL_TREE;
6339 }
6340
6341 /* Subroutine of fold() that optimizes comparisons of a division by
6342 a nonzero integer constant against an integer constant, i.e.
6343 X/C1 op C2.
6344
6345 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6346 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6347 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6348
6349 The function returns the constant folded tree if a simplification
6350 can be made, and NULL_TREE otherwise. */
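/* A worked example, assuming 32-bit signed int: for "x / 3 == 5" we
   compute prod = 15 and tmp = 2, giving lo = 15 and hi = 17, so the
   result is the range check 15 <= x && x <= 17, since exactly those
   values truncate to 5.  */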
6351
6352 static tree
6353 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6354 {
6355 tree prod, tmp, hi, lo;
6356 tree arg00 = TREE_OPERAND (arg0, 0);
6357 tree arg01 = TREE_OPERAND (arg0, 1);
6358 unsigned HOST_WIDE_INT lpart;
6359 HOST_WIDE_INT hpart;
6360 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6361 bool neg_overflow;
6362 int overflow;
6363
6364 /* We have to do this the hard way to detect unsigned overflow.
6365 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6366 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6367 TREE_INT_CST_HIGH (arg01),
6368 TREE_INT_CST_LOW (arg1),
6369 TREE_INT_CST_HIGH (arg1),
6370 &lpart, &hpart, unsigned_p);
6371 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6372 -1, overflow);
6373 neg_overflow = false;
6374
6375 if (unsigned_p)
6376 {
6377 tmp = int_const_binop (MINUS_EXPR, arg01,
6378 build_int_cst (TREE_TYPE (arg01), 1), 0);
6379 lo = prod;
6380
6381 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6382 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6383 TREE_INT_CST_HIGH (prod),
6384 TREE_INT_CST_LOW (tmp),
6385 TREE_INT_CST_HIGH (tmp),
6386 &lpart, &hpart, unsigned_p);
6387 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6388 -1, overflow | TREE_OVERFLOW (prod));
6389 }
6390 else if (tree_int_cst_sgn (arg01) >= 0)
6391 {
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1), 0);
6394 switch (tree_int_cst_sgn (arg1))
6395 {
6396 case -1:
6397 neg_overflow = true;
6398 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6399 hi = prod;
6400 break;
6401
6402 case 0:
6403 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6404 hi = tmp;
6405 break;
6406
6407 case 1:
6408 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6409 lo = prod;
6410 break;
6411
6412 default:
6413 gcc_unreachable ();
6414 }
6415 }
6416 else
6417 {
6418 /* A negative divisor reverses the relational operators. */
6419 code = swap_tree_comparison (code);
6420
6421 tmp = int_const_binop (PLUS_EXPR, arg01,
6422 build_int_cst (TREE_TYPE (arg01), 1), 0);
6423 switch (tree_int_cst_sgn (arg1))
6424 {
6425 case -1:
6426 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6427 lo = prod;
6428 break;
6429
6430 case 0:
6431 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6432 lo = tmp;
6433 break;
6434
6435 case 1:
6436 neg_overflow = true;
6437 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6438 hi = prod;
6439 break;
6440
6441 default:
6442 gcc_unreachable ();
6443 }
6444 }
6445
6446 switch (code)
6447 {
6448 case EQ_EXPR:
6449 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6450 return omit_one_operand (type, integer_zero_node, arg00);
6451 if (TREE_OVERFLOW (hi))
6452 return fold_build2 (GE_EXPR, type, arg00, lo);
6453 if (TREE_OVERFLOW (lo))
6454 return fold_build2 (LE_EXPR, type, arg00, hi);
6455 return build_range_check (type, arg00, 1, lo, hi);
6456
6457 case NE_EXPR:
6458 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6459 return omit_one_operand (type, integer_one_node, arg00);
6460 if (TREE_OVERFLOW (hi))
6461 return fold_build2 (LT_EXPR, type, arg00, lo);
6462 if (TREE_OVERFLOW (lo))
6463 return fold_build2 (GT_EXPR, type, arg00, hi);
6464 return build_range_check (type, arg00, 0, lo, hi);
6465
6466 case LT_EXPR:
6467 if (TREE_OVERFLOW (lo))
6468 {
6469 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6470 return omit_one_operand (type, tmp, arg00);
6471 }
6472 return fold_build2 (LT_EXPR, type, arg00, lo);
6473
6474 case LE_EXPR:
6475 if (TREE_OVERFLOW (hi))
6476 {
6477 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6478 return omit_one_operand (type, tmp, arg00);
6479 }
6480 return fold_build2 (LE_EXPR, type, arg00, hi);
6481
6482 case GT_EXPR:
6483 if (TREE_OVERFLOW (hi))
6484 {
6485 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6486 return omit_one_operand (type, tmp, arg00);
6487 }
6488 return fold_build2 (GT_EXPR, type, arg00, hi);
6489
6490 case GE_EXPR:
6491 if (TREE_OVERFLOW (lo))
6492 {
6493 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6494 return omit_one_operand (type, tmp, arg00);
6495 }
6496 return fold_build2 (GE_EXPR, type, arg00, lo);
6497
6498 default:
6499 break;
6500 }
6501
6502 return NULL_TREE;
6503 }
6504
6505
6506 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6507 equality/inequality test, then return a simplified form of the test
6508 using a sign test.  Otherwise return NULL.  TYPE is the desired
6509 result type. */
6510
6511 static tree
6512 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6513 tree result_type)
6514 {
6515 /* If this is testing a single bit, we can optimize the test. */
6516 if ((code == NE_EXPR || code == EQ_EXPR)
6517 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6518 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6519 {
6520 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6521 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6522 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6523
6524 if (arg00 != NULL_TREE
6525 /* This is only a win if casting to a signed type is cheap,
6526 i.e. when arg00's type is not a partial mode. */
6527 && TYPE_PRECISION (TREE_TYPE (arg00))
6528 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6529 {
6530 tree stype = signed_type_for (TREE_TYPE (arg00));
6531 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6532 result_type, fold_convert (stype, arg00),
6533 build_int_cst (stype, 0));
6534 }
6535 }
6536
6537 return NULL_TREE;
6538 }
6539
6540 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6541 equality/inequality test, then return a simplified form of
6542 the test using shifts and logical operations. Otherwise return
6543 NULL. TYPE is the desired result type. */
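/* For example, "(x & 8) != 0" becomes "(x >> 3) & 1" below, and the
   EQ_EXPR form "(x & 8) == 0" additionally XORs the extracted bit
   with 1 to invert it.  */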
6544
6545 tree
6546 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6547 tree result_type)
6548 {
6549 /* If this is testing a single bit, we can optimize the test. */
6550 if ((code == NE_EXPR || code == EQ_EXPR)
6551 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6552 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6553 {
6554 tree inner = TREE_OPERAND (arg0, 0);
6555 tree type = TREE_TYPE (arg0);
6556 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6557 enum machine_mode operand_mode = TYPE_MODE (type);
6558 int ops_unsigned;
6559 tree signed_type, unsigned_type, intermediate_type;
6560 tree tem, one;
6561
6562 /* First, see if we can fold the single bit test into a sign-bit
6563 test. */
6564 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6565 result_type);
6566 if (tem)
6567 return tem;
6568
6569 /* Otherwise we have (A & C) != 0 where C is a single bit,
6570 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6571 Similarly for (A & C) == 0. */
6572
6573       /* If INNER is a right shift by a constant and it plus BITNUM does
6574 not overflow, adjust BITNUM and INNER. */
6575 if (TREE_CODE (inner) == RSHIFT_EXPR
6576 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6577 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6578 && bitnum < TYPE_PRECISION (type)
6579 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6580 bitnum - TYPE_PRECISION (type)))
6581 {
6582 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6583 inner = TREE_OPERAND (inner, 0);
6584 }
6585
6586 /* If we are going to be able to omit the AND below, we must do our
6587 operations as unsigned. If we must use the AND, we have a choice.
6588 Normally unsigned is faster, but for some machines signed is. */
6589 #ifdef LOAD_EXTEND_OP
6590 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6591 && !flag_syntax_only) ? 0 : 1;
6592 #else
6593 ops_unsigned = 1;
6594 #endif
6595
6596 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6597 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6598 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6599 inner = fold_convert (intermediate_type, inner);
6600
6601 if (bitnum != 0)
6602 inner = build2 (RSHIFT_EXPR, intermediate_type,
6603 inner, size_int (bitnum));
6604
6605 one = build_int_cst (intermediate_type, 1);
6606
6607 if (code == EQ_EXPR)
6608 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6609
6610 /* Put the AND last so it can combine with more things. */
6611 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6612
6613 /* Make sure to return the proper type. */
6614 inner = fold_convert (result_type, inner);
6615
6616 return inner;
6617 }
6618 return NULL_TREE;
6619 }
6620
6621 /* Check whether we are allowed to reorder operands arg0 and arg1,
6622 such that the evaluation of arg1 occurs before arg0. */
6623
6624 static bool
6625 reorder_operands_p (const_tree arg0, const_tree arg1)
6626 {
6627 if (! flag_evaluation_order)
6628 return true;
6629 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6630 return true;
6631 return ! TREE_SIDE_EFFECTS (arg0)
6632 && ! TREE_SIDE_EFFECTS (arg1);
6633 }
6634
6635 /* Test whether it is preferable to swap two operands, ARG0 and
6636 ARG1, for example because ARG0 is an integer constant and ARG1
6637 isn't. If REORDER is true, only recommend swapping if we can
6638 evaluate the operands in reverse order. */
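/* E.g. for "5 + x" this returns true (ARG0 is an INTEGER_CST and ARG1
   is not), so callers canonicalize the expression as "x + 5".  */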
6639
6640 bool
6641 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6642 {
6643 STRIP_SIGN_NOPS (arg0);
6644 STRIP_SIGN_NOPS (arg1);
6645
6646 if (TREE_CODE (arg1) == INTEGER_CST)
6647 return 0;
6648 if (TREE_CODE (arg0) == INTEGER_CST)
6649 return 1;
6650
6651 if (TREE_CODE (arg1) == REAL_CST)
6652 return 0;
6653 if (TREE_CODE (arg0) == REAL_CST)
6654 return 1;
6655
6656 if (TREE_CODE (arg1) == FIXED_CST)
6657 return 0;
6658 if (TREE_CODE (arg0) == FIXED_CST)
6659 return 1;
6660
6661 if (TREE_CODE (arg1) == COMPLEX_CST)
6662 return 0;
6663 if (TREE_CODE (arg0) == COMPLEX_CST)
6664 return 1;
6665
6666 if (TREE_CONSTANT (arg1))
6667 return 0;
6668 if (TREE_CONSTANT (arg0))
6669 return 1;
6670
6671 if (optimize_size)
6672 return 0;
6673
6674 if (reorder && flag_evaluation_order
6675 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6676 return 0;
6677
6678   /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6679 for commutative and comparison operators. Ensuring a canonical
6680 form allows the optimizers to find additional redundancies without
6681 having to explicitly check for both orderings. */
6682 if (TREE_CODE (arg0) == SSA_NAME
6683 && TREE_CODE (arg1) == SSA_NAME
6684 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6685 return 1;
6686
6687 /* Put SSA_NAMEs last. */
6688 if (TREE_CODE (arg1) == SSA_NAME)
6689 return 0;
6690 if (TREE_CODE (arg0) == SSA_NAME)
6691 return 1;
6692
6693 /* Put variables last. */
6694 if (DECL_P (arg1))
6695 return 0;
6696 if (DECL_P (arg0))
6697 return 1;
6698
6699 return 0;
6700 }
6701
6702 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6703 ARG0 is extended to a wider type. */
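/* For instance, with unsigned char c, "(int) c == 300" folds below to
   constant false because 300 is above any value the widened char can
   take, while "(int) c == 100" is narrowed to a comparison done
   directly in the char type.  */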
6704
6705 static tree
6706 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6707 {
6708 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6709 tree arg1_unw;
6710 tree shorter_type, outer_type;
6711 tree min, max;
6712 bool above, below;
6713
6714 if (arg0_unw == arg0)
6715 return NULL_TREE;
6716 shorter_type = TREE_TYPE (arg0_unw);
6717
6718 #ifdef HAVE_canonicalize_funcptr_for_compare
6719 /* Disable this optimization if we're casting a function pointer
6720 type on targets that require function pointer canonicalization. */
6721 if (HAVE_canonicalize_funcptr_for_compare
6722 && TREE_CODE (shorter_type) == POINTER_TYPE
6723 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6724 return NULL_TREE;
6725 #endif
6726
6727 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6728 return NULL_TREE;
6729
6730 arg1_unw = get_unwidened (arg1, NULL_TREE);
6731
6732 /* If possible, express the comparison in the shorter mode. */
6733 if ((code == EQ_EXPR || code == NE_EXPR
6734 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6735 && (TREE_TYPE (arg1_unw) == shorter_type
6736 || ((TYPE_PRECISION (shorter_type)
6737 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6738 && (TYPE_UNSIGNED (shorter_type)
6739 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6740 || (TREE_CODE (arg1_unw) == INTEGER_CST
6741 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6742 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6743 && int_fits_type_p (arg1_unw, shorter_type))))
6744 return fold_build2 (code, type, arg0_unw,
6745 fold_convert (shorter_type, arg1_unw));
6746
6747 if (TREE_CODE (arg1_unw) != INTEGER_CST
6748 || TREE_CODE (shorter_type) != INTEGER_TYPE
6749 || !int_fits_type_p (arg1_unw, shorter_type))
6750 return NULL_TREE;
6751
6752   /* If we are comparing with an integer that does not fit into the range
6753 of the shorter type, the result is known. */
6754 outer_type = TREE_TYPE (arg1_unw);
6755 min = lower_bound_in_type (outer_type, shorter_type);
6756 max = upper_bound_in_type (outer_type, shorter_type);
6757
6758 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6759 max, arg1_unw));
6760 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6761 arg1_unw, min));
6762
6763 switch (code)
6764 {
6765 case EQ_EXPR:
6766 if (above || below)
6767 return omit_one_operand (type, integer_zero_node, arg0);
6768 break;
6769
6770 case NE_EXPR:
6771 if (above || below)
6772 return omit_one_operand (type, integer_one_node, arg0);
6773 break;
6774
6775 case LT_EXPR:
6776 case LE_EXPR:
6777 if (above)
6778 return omit_one_operand (type, integer_one_node, arg0);
6779 else if (below)
6780 	return omit_one_operand (type, integer_zero_node, arg0);
      break;
6781
6782 case GT_EXPR:
6783 case GE_EXPR:
6784 if (above)
6785 return omit_one_operand (type, integer_zero_node, arg0);
6786 else if (below)
6787 return omit_one_operand (type, integer_one_node, arg0);
6788
6789 default:
6790 break;
6791 }
6792
6793 return NULL_TREE;
6794 }
6795
6796 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6797 ARG0 just the signedness is changed. */
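/* E.g. with unsigned int u, "(int) u == 5" has the same truth value as
   the unsigned comparison of u against 5, so the conversion is
   stripped and the constant refitted into the inner type.  */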
6798
6799 static tree
6800 fold_sign_changed_comparison (enum tree_code code, tree type,
6801 tree arg0, tree arg1)
6802 {
6803 tree arg0_inner;
6804 tree inner_type, outer_type;
6805
6806 if (!CONVERT_EXPR_P (arg0))
6807 return NULL_TREE;
6808
6809 outer_type = TREE_TYPE (arg0);
6810 arg0_inner = TREE_OPERAND (arg0, 0);
6811 inner_type = TREE_TYPE (arg0_inner);
6812
6813 #ifdef HAVE_canonicalize_funcptr_for_compare
6814 /* Disable this optimization if we're casting a function pointer
6815 type on targets that require function pointer canonicalization. */
6816 if (HAVE_canonicalize_funcptr_for_compare
6817 && TREE_CODE (inner_type) == POINTER_TYPE
6818 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6819 return NULL_TREE;
6820 #endif
6821
6822 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6823 return NULL_TREE;
6824
6825 /* If the conversion is from an integral subtype to its basetype
6826 leave it alone. */
6827 if (TREE_TYPE (inner_type) == outer_type)
6828 return NULL_TREE;
6829
6830 if (TREE_CODE (arg1) != INTEGER_CST
6831 && !(CONVERT_EXPR_P (arg1)
6832 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6833 return NULL_TREE;
6834
6835 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6836 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6837 && code != NE_EXPR
6838 && code != EQ_EXPR)
6839 return NULL_TREE;
6840
6841 if (TREE_CODE (arg1) == INTEGER_CST)
6842 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6843 TREE_INT_CST_HIGH (arg1), 0,
6844 TREE_OVERFLOW (arg1));
6845 else
6846 arg1 = fold_convert (inner_type, arg1);
6847
6848 return fold_build2 (code, type, arg0_inner, arg1);
6849 }
6850
6851 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6852 step of the array. Reconstructs s and delta in the case of s * delta
6853 being an integer constant (and thus already folded).
6854 ADDR is the address.  OP1 is the multiplicative expression.
6855 If the function succeeds, the new address expression is returned. Otherwise
6856 NULL_TREE is returned. */
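/* A sketch of the transformation, assuming 4-byte int: for int a[N],
   "&a[i] p+ 4 * j" has s = 4 matching the array step, so the result is
   &a[i + j]; a bare constant offset such as "&a[i] p+ 8" is instead
   divided by the step, giving &a[i + 2].  */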
6857
6858 static tree
6859 try_move_mult_to_index (tree addr, tree op1)
6860 {
6861 tree s, delta, step;
6862 tree ref = TREE_OPERAND (addr, 0), pref;
6863 tree ret, pos;
6864 tree itype;
6865 bool mdim = false;
6866
6867 /* Strip the nops that might be added when converting op1 to sizetype. */
6868 STRIP_NOPS (op1);
6869
6870 /* Canonicalize op1 into a possibly non-constant delta
6871 and an INTEGER_CST s. */
6872 if (TREE_CODE (op1) == MULT_EXPR)
6873 {
6874 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6875
6876 STRIP_NOPS (arg0);
6877 STRIP_NOPS (arg1);
6878
6879 if (TREE_CODE (arg0) == INTEGER_CST)
6880 {
6881 s = arg0;
6882 delta = arg1;
6883 }
6884 else if (TREE_CODE (arg1) == INTEGER_CST)
6885 {
6886 s = arg1;
6887 delta = arg0;
6888 }
6889 else
6890 return NULL_TREE;
6891 }
6892 else if (TREE_CODE (op1) == INTEGER_CST)
6893 {
6894 delta = op1;
6895 s = NULL_TREE;
6896 }
6897 else
6898 {
6899       /* Pretend op1 is delta * 1.  */
6900 delta = op1;
6901 s = integer_one_node;
6902 }
6903
6904 for (;; ref = TREE_OPERAND (ref, 0))
6905 {
6906 if (TREE_CODE (ref) == ARRAY_REF)
6907 {
6908 /* Remember if this was a multi-dimensional array. */
6909 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6910 mdim = true;
6911
6912 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6913 if (! itype)
6914 continue;
6915
6916 step = array_ref_element_size (ref);
6917 if (TREE_CODE (step) != INTEGER_CST)
6918 continue;
6919
6920 if (s)
6921 {
6922 if (! tree_int_cst_equal (step, s))
6923 continue;
6924 }
6925 else
6926 {
6927 	      /* See if delta is a multiple of step.  */
6928 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6929 if (! tmp)
6930 continue;
6931 delta = tmp;
6932 }
6933
6934 /* Only fold here if we can verify we do not overflow one
6935 dimension of a multi-dimensional array. */
6936 if (mdim)
6937 {
6938 tree tmp;
6939
6940 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6941 || !INTEGRAL_TYPE_P (itype)
6942 || !TYPE_MAX_VALUE (itype)
6943 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6944 continue;
6945
6946 tmp = fold_binary (PLUS_EXPR, itype,
6947 fold_convert (itype,
6948 TREE_OPERAND (ref, 1)),
6949 fold_convert (itype, delta));
6950 if (!tmp
6951 || TREE_CODE (tmp) != INTEGER_CST
6952 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6953 continue;
6954 }
6955
6956 break;
6957 }
6958 else
6959 mdim = false;
6960
6961 if (!handled_component_p (ref))
6962 return NULL_TREE;
6963 }
6964
6965   /* We found a suitable array reference.  So copy everything up to it,
6966 and replace the index. */
6967
6968 pref = TREE_OPERAND (addr, 0);
6969 ret = copy_node (pref);
6970 pos = ret;
6971
6972 while (pref != ref)
6973 {
6974 pref = TREE_OPERAND (pref, 0);
6975 TREE_OPERAND (pos, 0) = copy_node (pref);
6976 pos = TREE_OPERAND (pos, 0);
6977 }
6978
6979 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6980 fold_convert (itype,
6981 TREE_OPERAND (pos, 1)),
6982 fold_convert (itype, delta));
6983
6984 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6985 }
6986
6987
6988 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6989 means A >= Y && A != MAX, but in this case we know that
6990 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6991
6992 static tree
6993 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6994 {
6995 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6996
6997 if (TREE_CODE (bound) == LT_EXPR)
6998 a = TREE_OPERAND (bound, 0);
6999 else if (TREE_CODE (bound) == GT_EXPR)
7000 a = TREE_OPERAND (bound, 1);
7001 else
7002 return NULL_TREE;
7003
7004 typea = TREE_TYPE (a);
7005 if (!INTEGRAL_TYPE_P (typea)
7006 && !POINTER_TYPE_P (typea))
7007 return NULL_TREE;
7008
7009 if (TREE_CODE (ineq) == LT_EXPR)
7010 {
7011 a1 = TREE_OPERAND (ineq, 1);
7012 y = TREE_OPERAND (ineq, 0);
7013 }
7014 else if (TREE_CODE (ineq) == GT_EXPR)
7015 {
7016 a1 = TREE_OPERAND (ineq, 0);
7017 y = TREE_OPERAND (ineq, 1);
7018 }
7019 else
7020 return NULL_TREE;
7021
7022 if (TREE_TYPE (a1) != typea)
7023 return NULL_TREE;
7024
7025 if (POINTER_TYPE_P (typea))
7026 {
7027 /* Convert the pointer types into integer before taking the difference. */
7028 tree ta = fold_convert (ssizetype, a);
7029 tree ta1 = fold_convert (ssizetype, a1);
7030 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7031 }
7032 else
7033 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7034
7035 if (!diff || !integer_onep (diff))
7036 return NULL_TREE;
7037
7038 return fold_build2 (GE_EXPR, type, a, y);
7039 }
7040
7041 /* Fold a sum or difference of at least one multiplication.
7042 Returns the folded tree or NULL if no simplification could be made. */
7043
7044 static tree
7045 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7046 {
7047 tree arg00, arg01, arg10, arg11;
7048 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7049
7050 /* (A * C) +- (B * C) -> (A+-B) * C.
7051 (A * C) +- A -> A * (C+-1).
7052 We are most concerned about the case where C is a constant,
7053 but other combinations show up during loop reduction. Since
7054 it is not difficult, try all four possibilities. */
7055
7056 if (TREE_CODE (arg0) == MULT_EXPR)
7057 {
7058 arg00 = TREE_OPERAND (arg0, 0);
7059 arg01 = TREE_OPERAND (arg0, 1);
7060 }
7061 else if (TREE_CODE (arg0) == INTEGER_CST)
7062 {
7063 arg00 = build_one_cst (type);
7064 arg01 = arg0;
7065 }
7066 else
7067 {
7068 /* We cannot generate constant 1 for fract. */
7069 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7070 return NULL_TREE;
7071 arg00 = arg0;
7072 arg01 = build_one_cst (type);
7073 }
7074 if (TREE_CODE (arg1) == MULT_EXPR)
7075 {
7076 arg10 = TREE_OPERAND (arg1, 0);
7077 arg11 = TREE_OPERAND (arg1, 1);
7078 }
7079 else if (TREE_CODE (arg1) == INTEGER_CST)
7080 {
7081 arg10 = build_one_cst (type);
7082 arg11 = arg1;
7083 }
7084 else
7085 {
7086 /* We cannot generate constant 1 for fract. */
7087 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7088 return NULL_TREE;
7089 arg10 = arg1;
7090 arg11 = build_one_cst (type);
7091 }
7092 same = NULL_TREE;
7093
7094 if (operand_equal_p (arg01, arg11, 0))
7095 same = arg01, alt0 = arg00, alt1 = arg10;
7096 else if (operand_equal_p (arg00, arg10, 0))
7097 same = arg00, alt0 = arg01, alt1 = arg11;
7098 else if (operand_equal_p (arg00, arg11, 0))
7099 same = arg00, alt0 = arg01, alt1 = arg10;
7100 else if (operand_equal_p (arg01, arg10, 0))
7101 same = arg01, alt0 = arg00, alt1 = arg11;
7102
7103 /* No identical multiplicands; see if we can find a common
7104 power-of-two factor in non-power-of-two multiplies. This
7105 can help in multi-dimensional array access. */
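  /* For example, x*12 + y*4 can become (x*3 + y) * 4 once the common
     power-of-two factor 4 is found below.  */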
7106 else if (host_integerp (arg01, 0)
7107 && host_integerp (arg11, 0))
7108 {
7109 HOST_WIDE_INT int01, int11, tmp;
7110 bool swap = false;
7111 tree maybe_same;
7112 int01 = TREE_INT_CST_LOW (arg01);
7113 int11 = TREE_INT_CST_LOW (arg11);
7114
7115 /* Move min of absolute values to int11. */
7116 if ((int01 >= 0 ? int01 : -int01)
7117 < (int11 >= 0 ? int11 : -int11))
7118 {
7119 tmp = int01, int01 = int11, int11 = tmp;
7120 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7121 maybe_same = arg01;
7122 swap = true;
7123 }
7124 else
7125 maybe_same = arg11;
7126
7127 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7128 {
7129 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7130 build_int_cst (TREE_TYPE (arg00),
7131 int01 / int11));
7132 alt1 = arg10;
7133 same = maybe_same;
7134 if (swap)
7135 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7136 }
7137 }
7138
7139 if (same)
7140 return fold_build2 (MULT_EXPR, type,
7141 fold_build2 (code, type,
7142 fold_convert (type, alt0),
7143 fold_convert (type, alt1)),
7144 fold_convert (type, same));
7145
7146 return NULL_TREE;
7147 }
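/* A minimal standalone sketch, not part of this file, of the rewrites
   performed above: factoring out an identical multiplicand, and factoring
   a common power-of-two out of two constant multipliers.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 7, b = -3, c = 5, i = 11, j = 2;
  /* (A * C) + (B * C) -> (A + B) * C.  */
  assert (a * c + b * c == (a + b) * c);
  /* (A * C) + A -> A * (C + 1).  */
  assert (a * c + a == a * (c + 1));
  /* i * 12 + j * 4 -> (i * 3 + j) * 4: 4 is a power of two and
     12 % 4 == 0, matching the common-factor case above.  */
  assert (i * 12 + j * 4 == (i * 3 + j) * 4);
  return 0;
}
#endif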
7148
7149 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7150 specified by EXPR into the buffer PTR of length LEN bytes.
7151 Return the number of bytes placed in the buffer, or zero
7152 upon failure. */
7153
7154 static int
7155 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7156 {
7157 tree type = TREE_TYPE (expr);
7158 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7159 int byte, offset, word, words;
7160 unsigned char value;
7161
7162 if (total_bytes > len)
7163 return 0;
7164 words = total_bytes / UNITS_PER_WORD;
7165
7166 for (byte = 0; byte < total_bytes; byte++)
7167 {
7168 int bitpos = byte * BITS_PER_UNIT;
7169 if (bitpos < HOST_BITS_PER_WIDE_INT)
7170 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7171 else
7172 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7173 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7174
7175 if (total_bytes > UNITS_PER_WORD)
7176 {
7177 word = byte / UNITS_PER_WORD;
7178 if (WORDS_BIG_ENDIAN)
7179 word = (words - 1) - word;
7180 offset = word * UNITS_PER_WORD;
7181 if (BYTES_BIG_ENDIAN)
7182 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7183 else
7184 offset += byte % UNITS_PER_WORD;
7185 }
7186 else
7187 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7188 ptr[offset] = value;
7189 }
7190 return total_bytes;
7191 }
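/* As a hedged standalone sketch (not part of this file), the per-byte
   shifting above reduces to the following routine, which encodes a 32-bit
   value in a caller-chosen byte order on an 8-bit-byte host; the function
   name and the big_endian flag are hypothetical.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
encode_u32 (uint32_t value, unsigned char *ptr, int big_endian)
{
  int byte;
  /* Extract each byte by shifting, then place it according to the
     requested byte order.  */
  for (byte = 0; byte < 4; byte++)
    ptr[big_endian ? 3 - byte : byte] = (unsigned char) (value >> (byte * 8));
}

int
main (void)
{
  unsigned char buf[4];
  encode_u32 (0x11223344, buf, 1);
  assert (buf[0] == 0x11 && buf[3] == 0x44);
  encode_u32 (0x11223344, buf, 0);
  assert (buf[0] == 0x44 && buf[3] == 0x11);
  return 0;
}
#endif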
7192
7193
7194 /* Subroutine of native_encode_expr. Encode the REAL_CST
7195 specified by EXPR into the buffer PTR of length LEN bytes.
7196 Return the number of bytes placed in the buffer, or zero
7197 upon failure. */
7198
7199 static int
7200 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7201 {
7202 tree type = TREE_TYPE (expr);
7203 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7204 int byte, offset, word, words, bitpos;
7205 unsigned char value;
7206
7207 /* There are always 32 bits in each long, no matter the size of
7208 the host's long. We handle floating-point representations with
7209 up to 192 bits. */
7210 long tmp[6];
7211
7212 if (total_bytes > len)
7213 return 0;
7214 words = 32 / UNITS_PER_WORD;
7215
7216 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7217
7218 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7219 bitpos += BITS_PER_UNIT)
7220 {
7221 byte = (bitpos / BITS_PER_UNIT) & 3;
7222 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7223
7224 if (UNITS_PER_WORD < 4)
7225 {
7226 word = byte / UNITS_PER_WORD;
7227 if (WORDS_BIG_ENDIAN)
7228 word = (words - 1) - word;
7229 offset = word * UNITS_PER_WORD;
7230 if (BYTES_BIG_ENDIAN)
7231 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7232 else
7233 offset += byte % UNITS_PER_WORD;
7234 }
7235 else
7236 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7237 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7238 }
7239 return total_bytes;
7240 }
7241
7242 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7243 specified by EXPR into the buffer PTR of length LEN bytes.
7244 Return the number of bytes placed in the buffer, or zero
7245 upon failure. */
7246
7247 static int
7248 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7249 {
7250 int rsize, isize;
7251 tree part;
7252
7253 part = TREE_REALPART (expr);
7254 rsize = native_encode_expr (part, ptr, len);
7255 if (rsize == 0)
7256 return 0;
7257 part = TREE_IMAGPART (expr);
7258 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7259 if (isize != rsize)
7260 return 0;
7261 return rsize + isize;
7262 }
7263
7264
7265 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7266 specified by EXPR into the buffer PTR of length LEN bytes.
7267 Return the number of bytes placed in the buffer, or zero
7268 upon failure. */
7269
7270 static int
7271 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7272 {
7273 int i, size, offset, count;
7274 tree itype, elem, elements;
7275
7276 offset = 0;
7277 elements = TREE_VECTOR_CST_ELTS (expr);
7278 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7279 itype = TREE_TYPE (TREE_TYPE (expr));
7280 size = GET_MODE_SIZE (TYPE_MODE (itype));
7281 for (i = 0; i < count; i++)
7282 {
7283 if (elements)
7284 {
7285 elem = TREE_VALUE (elements);
7286 elements = TREE_CHAIN (elements);
7287 }
7288 else
7289 elem = NULL_TREE;
7290
7291 if (elem)
7292 {
7293 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7294 return 0;
7295 }
7296 else
7297 {
7298 if (offset + size > len)
7299 return 0;
7300 memset (ptr+offset, 0, size);
7301 }
7302 offset += size;
7303 }
7304 return offset;
7305 }
7306
7307
7308 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7309 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7310 buffer PTR of length LEN bytes. Return the number of bytes
7311 placed in the buffer, or zero upon failure. */
7312
7313 int
7314 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7315 {
7316 switch (TREE_CODE (expr))
7317 {
7318 case INTEGER_CST:
7319 return native_encode_int (expr, ptr, len);
7320
7321 case REAL_CST:
7322 return native_encode_real (expr, ptr, len);
7323
7324 case COMPLEX_CST:
7325 return native_encode_complex (expr, ptr, len);
7326
7327 case VECTOR_CST:
7328 return native_encode_vector (expr, ptr, len);
7329
7330 default:
7331 return 0;
7332 }
7333 }
7334
7335
7336 /* Subroutine of native_interpret_expr. Interpret the contents of
7337 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7338 If the buffer cannot be interpreted, return NULL_TREE. */
7339
7340 static tree
7341 native_interpret_int (tree type, const unsigned char *ptr, int len)
7342 {
7343 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7344 int byte, offset, word, words;
7345 unsigned char value;
7346 unsigned HOST_WIDE_INT lo = 0;
7347 HOST_WIDE_INT hi = 0;
7348
7349 if (total_bytes > len)
7350 return NULL_TREE;
7351 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7352 return NULL_TREE;
7353 words = total_bytes / UNITS_PER_WORD;
7354
7355 for (byte = 0; byte < total_bytes; byte++)
7356 {
7357 int bitpos = byte * BITS_PER_UNIT;
7358 if (total_bytes > UNITS_PER_WORD)
7359 {
7360 word = byte / UNITS_PER_WORD;
7361 if (WORDS_BIG_ENDIAN)
7362 word = (words - 1) - word;
7363 offset = word * UNITS_PER_WORD;
7364 if (BYTES_BIG_ENDIAN)
7365 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7366 else
7367 offset += byte % UNITS_PER_WORD;
7368 }
7369 else
7370 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7371 value = ptr[offset];
7372
7373 if (bitpos < HOST_BITS_PER_WIDE_INT)
7374 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7375 else
7376 hi |= (unsigned HOST_WIDE_INT) value
7377 << (bitpos - HOST_BITS_PER_WIDE_INT);
7378 }
7379
7380 return build_int_cst_wide_type (type, lo, hi);
7381 }
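/* The inverse of the encoding sketch earlier: an illustrative standalone
   decoder (not part of this file) that rebuilds a 32-bit value from a byte
   image by OR-ing shifted bytes, as the loop above does per byte; the
   names are hypothetical.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
interpret_u32 (const unsigned char *ptr, int big_endian)
{
  uint32_t value = 0;
  int byte;
  for (byte = 0; byte < 4; byte++)
    value |= (uint32_t) ptr[big_endian ? 3 - byte : byte] << (byte * 8);
  return value;
}

int
main (void)
{
  unsigned char buf[4] = { 0x11, 0x22, 0x33, 0x44 };
  assert (interpret_u32 (buf, 1) == 0x11223344u);
  assert (interpret_u32 (buf, 0) == 0x44332211u);
  return 0;
}
#endif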
7382
7383
7384 /* Subroutine of native_interpret_expr. Interpret the contents of
7385 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7386 If the buffer cannot be interpreted, return NULL_TREE. */
7387
7388 static tree
7389 native_interpret_real (tree type, const unsigned char *ptr, int len)
7390 {
7391 enum machine_mode mode = TYPE_MODE (type);
7392 int total_bytes = GET_MODE_SIZE (mode);
7393 int byte, offset, word, words, bitpos;
7394 unsigned char value;
7395 /* There are always 32 bits in each long, no matter the size of
7396 the host's long. We handle floating-point representations with
7397 up to 192 bits. */
7398 REAL_VALUE_TYPE r;
7399 long tmp[6];
7400
7401 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7402 if (total_bytes > len || total_bytes > 24)
7403 return NULL_TREE;
7404 words = 32 / UNITS_PER_WORD;
7405
7406 memset (tmp, 0, sizeof (tmp));
7407 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7408 bitpos += BITS_PER_UNIT)
7409 {
7410 byte = (bitpos / BITS_PER_UNIT) & 3;
7411 if (UNITS_PER_WORD < 4)
7412 {
7413 word = byte / UNITS_PER_WORD;
7414 if (WORDS_BIG_ENDIAN)
7415 word = (words - 1) - word;
7416 offset = word * UNITS_PER_WORD;
7417 if (BYTES_BIG_ENDIAN)
7418 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7419 else
7420 offset += byte % UNITS_PER_WORD;
7421 }
7422 else
7423 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7424 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7425
7426 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7427 }
7428
7429 real_from_target (&r, tmp, mode);
7430 return build_real (type, r);
7431 }
7432
7433
7434 /* Subroutine of native_interpret_expr. Interpret the contents of
7435 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7436 If the buffer cannot be interpreted, return NULL_TREE. */
7437
7438 static tree
7439 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7440 {
7441 tree etype, rpart, ipart;
7442 int size;
7443
7444 etype = TREE_TYPE (type);
7445 size = GET_MODE_SIZE (TYPE_MODE (etype));
7446 if (size * 2 > len)
7447 return NULL_TREE;
7448 rpart = native_interpret_expr (etype, ptr, size);
7449 if (!rpart)
7450 return NULL_TREE;
7451 ipart = native_interpret_expr (etype, ptr+size, size);
7452 if (!ipart)
7453 return NULL_TREE;
7454 return build_complex (type, rpart, ipart);
7455 }
7456
7457
7458 /* Subroutine of native_interpret_expr. Interpret the contents of
7459 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7460 If the buffer cannot be interpreted, return NULL_TREE. */
7461
7462 static tree
7463 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7464 {
7465 tree etype, elem, elements;
7466 int i, size, count;
7467
7468 etype = TREE_TYPE (type);
7469 size = GET_MODE_SIZE (TYPE_MODE (etype));
7470 count = TYPE_VECTOR_SUBPARTS (type);
7471 if (size * count > len)
7472 return NULL_TREE;
7473
7474 elements = NULL_TREE;
7475 for (i = count - 1; i >= 0; i--)
7476 {
7477 elem = native_interpret_expr (etype, ptr+(i*size), size);
7478 if (!elem)
7479 return NULL_TREE;
7480 elements = tree_cons (NULL_TREE, elem, elements);
7481 }
7482 return build_vector (type, elements);
7483 }
7484
7485
7486 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7487 the buffer PTR of length LEN as a constant of type TYPE. For
7488 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7489 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7490 return NULL_TREE. */
7491
7492 tree
7493 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7494 {
7495 switch (TREE_CODE (type))
7496 {
7497 case INTEGER_TYPE:
7498 case ENUMERAL_TYPE:
7499 case BOOLEAN_TYPE:
7500 return native_interpret_int (type, ptr, len);
7501
7502 case REAL_TYPE:
7503 return native_interpret_real (type, ptr, len);
7504
7505 case COMPLEX_TYPE:
7506 return native_interpret_complex (type, ptr, len);
7507
7508 case VECTOR_TYPE:
7509 return native_interpret_vector (type, ptr, len);
7510
7511 default:
7512 return NULL_TREE;
7513 }
7514 }
7515
7516
7517 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7518 TYPE at compile-time. If we're unable to perform the conversion
7519 return NULL_TREE. */
7520
7521 static tree
7522 fold_view_convert_expr (tree type, tree expr)
7523 {
7524 /* We support up to 512-bit values (for V8DFmode). */
7525 unsigned char buffer[64];
7526 int len;
7527
7528 /* Check that the host and target are sane. */
7529 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7530 return NULL_TREE;
7531
7532 len = native_encode_expr (expr, buffer, sizeof (buffer));
7533 if (len == 0)
7534 return NULL_TREE;
7535
7536 return native_interpret_expr (type, buffer, len);
7537 }
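/* A hedged standalone sketch, not part of this file, of what
   fold_view_convert_expr computes for scalar constants: re-reading the
   byte image of one value as another type.  The example assumes an
   IEEE-754 host where float occupies 4 bytes.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  unsigned char buffer[sizeof (float)];
  uint32_t bits;

  /* "Encode" the constant into its byte image, then "interpret" the
     image as a different type, mirroring the encode/interpret pair.  */
  memcpy (buffer, &f, sizeof buffer);
  memcpy (&bits, buffer, sizeof bits);
  assert (bits == 0x3f800000u);  /* IEEE-754 single-precision 1.0.  */
  return 0;
}
#endif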
7538
7539 /* Build an expression for the address of T. Folds away INDIRECT_REF
7540 to avoid confusing the gimplify process. When IN_FOLD is true
7541 avoid modifications of T. */
7542
7543 static tree
7544 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7545 {
7546 /* The size of the object is not relevant when talking about its address. */
7547 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7548 t = TREE_OPERAND (t, 0);
7549
7550 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
7551 if (TREE_CODE (t) == INDIRECT_REF
7552 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7553 {
7554 t = TREE_OPERAND (t, 0);
7555
7556 if (TREE_TYPE (t) != ptrtype)
7557 t = build1 (NOP_EXPR, ptrtype, t);
7558 }
7559 else if (!in_fold)
7560 {
7561 tree base = t;
7562
7563 while (handled_component_p (base))
7564 base = TREE_OPERAND (base, 0);
7565
7566 if (DECL_P (base))
7567 TREE_ADDRESSABLE (base) = 1;
7568
7569 t = build1 (ADDR_EXPR, ptrtype, t);
7570 }
7571 else
7572 t = build1 (ADDR_EXPR, ptrtype, t);
7573
7574 return t;
7575 }
7576
7577 /* Build an expression for the address of T with type PTRTYPE. This
7578 function modifies the input parameter 'T' by sometimes setting the
7579 TREE_ADDRESSABLE flag. */
7580
7581 tree
7582 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7583 {
7584 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7585 }
7586
7587 /* Build an expression for the address of T. This function modifies
7588 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7589 flag. When called from fold functions, use fold_addr_expr instead. */
7590
7591 tree
7592 build_fold_addr_expr (tree t)
7593 {
7594 return build_fold_addr_expr_with_type_1 (t,
7595 build_pointer_type (TREE_TYPE (t)),
7596 false);
7597 }
7598
7599 /* Same as build_fold_addr_expr, builds an expression for the address
7600 of T, but avoids touching the input node 't'. Fold functions
7601 should use this version. */
7602
7603 static tree
7604 fold_addr_expr (tree t)
7605 {
7606 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7607
7608 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7609 }
7610
7611 /* Fold a unary expression of code CODE and type TYPE with operand
7612 OP0. Return the folded expression if folding is successful.
7613 Otherwise, return NULL_TREE. */
7614
7615 tree
7616 fold_unary (enum tree_code code, tree type, tree op0)
7617 {
7618 tree tem;
7619 tree arg0;
7620 enum tree_code_class kind = TREE_CODE_CLASS (code);
7621
7622 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7623 && TREE_CODE_LENGTH (code) == 1);
7624
7625 arg0 = op0;
7626 if (arg0)
7627 {
7628 if (code == NOP_EXPR || code == CONVERT_EXPR
7629 || code == FLOAT_EXPR || code == ABS_EXPR)
7630 {
7631 /* Don't use STRIP_NOPS, because signedness of argument type
7632 matters. */
7633 STRIP_SIGN_NOPS (arg0);
7634 }
7635 else
7636 {
7637 /* Strip any conversions that don't change the mode. This
7638 is safe for every expression, except for a comparison
7639 expression because its signedness is derived from its
7640 operands.
7641
7642 Note that this is done as an internal manipulation within
7643 the constant folder, in order to find the simplest
7644 representation of the arguments so that their form can be
7645 studied. In any case, the appropriate type conversions
7646 should be put back in the tree that will get out of the
7647 constant folder. */
7648 STRIP_NOPS (arg0);
7649 }
7650 }
7651
7652 if (TREE_CODE_CLASS (code) == tcc_unary)
7653 {
7654 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7655 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7656 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7657 else if (TREE_CODE (arg0) == COND_EXPR)
7658 {
7659 tree arg01 = TREE_OPERAND (arg0, 1);
7660 tree arg02 = TREE_OPERAND (arg0, 2);
7661 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7662 arg01 = fold_build1 (code, type, arg01);
7663 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7664 arg02 = fold_build1 (code, type, arg02);
7665 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7666 arg01, arg02);
7667
7668 /* If this was a conversion, and all we did was to move it
7669 inside the COND_EXPR, bring it back out. But leave it if
7670 it is a conversion from integer to integer and the
7671 result precision is no wider than a word since such a
7672 conversion is cheap and may be optimized away by combine,
7673 while it couldn't if it were outside the COND_EXPR. Then return
7674 so we don't get into an infinite recursion loop taking the
7675 conversion out and then back in. */
7676
7677 if ((code == NOP_EXPR || code == CONVERT_EXPR
7678 || code == NON_LVALUE_EXPR)
7679 && TREE_CODE (tem) == COND_EXPR
7680 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7681 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7682 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7683 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7684 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7685 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7686 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7687 && (INTEGRAL_TYPE_P
7688 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7689 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7690 || flag_syntax_only))
7691 tem = build1 (code, type,
7692 build3 (COND_EXPR,
7693 TREE_TYPE (TREE_OPERAND
7694 (TREE_OPERAND (tem, 1), 0)),
7695 TREE_OPERAND (tem, 0),
7696 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7697 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7698 return tem;
7699 }
7700 else if (COMPARISON_CLASS_P (arg0))
7701 {
7702 if (TREE_CODE (type) == BOOLEAN_TYPE)
7703 {
7704 arg0 = copy_node (arg0);
7705 TREE_TYPE (arg0) = type;
7706 return arg0;
7707 }
7708 else if (TREE_CODE (type) != INTEGER_TYPE)
7709 return fold_build3 (COND_EXPR, type, arg0,
7710 fold_build1 (code, type,
7711 integer_one_node),
7712 fold_build1 (code, type,
7713 integer_zero_node));
7714 }
7715 }
7716
7717 switch (code)
7718 {
7719 case PAREN_EXPR:
7720 /* Re-association barriers around constants and other re-association
7721 barriers can be removed. */
7722 if (CONSTANT_CLASS_P (op0)
7723 || TREE_CODE (op0) == PAREN_EXPR)
7724 return fold_convert (type, op0);
7725 return NULL_TREE;
7726
7727 CASE_CONVERT:
7728 case FLOAT_EXPR:
7729 case FIX_TRUNC_EXPR:
7730 if (TREE_TYPE (op0) == type)
7731 return op0;
7732
7733 /* If we have (type) (a CMP b) and type is an integral type, return
7734 new expression involving the new type. */
7735 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7736 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7737 TREE_OPERAND (op0, 1));
7738
7739 /* Handle cases of two conversions in a row. */
7740 if (CONVERT_EXPR_P (op0))
7741 {
7742 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7743 tree inter_type = TREE_TYPE (op0);
7744 int inside_int = INTEGRAL_TYPE_P (inside_type);
7745 int inside_ptr = POINTER_TYPE_P (inside_type);
7746 int inside_float = FLOAT_TYPE_P (inside_type);
7747 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7748 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7749 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7750 int inter_int = INTEGRAL_TYPE_P (inter_type);
7751 int inter_ptr = POINTER_TYPE_P (inter_type);
7752 int inter_float = FLOAT_TYPE_P (inter_type);
7753 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7754 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7755 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7756 int final_int = INTEGRAL_TYPE_P (type);
7757 int final_ptr = POINTER_TYPE_P (type);
7758 int final_float = FLOAT_TYPE_P (type);
7759 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7760 unsigned int final_prec = TYPE_PRECISION (type);
7761 int final_unsignedp = TYPE_UNSIGNED (type);
7762
7763 /* In addition to the cases of two conversions in a row
7764 handled below, if we are converting something to its own
7765 type via an object of identical or wider precision, neither
7766 conversion is needed. */
7767 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7768 && (((inter_int || inter_ptr) && final_int)
7769 || (inter_float && final_float))
7770 && inter_prec >= final_prec)
7771 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7772
7773 /* Likewise, if the intermediate and final types are either both
7774 float or both integer, we don't need the middle conversion if
7775 it is wider than the final type and doesn't change the signedness
7776 (for integers). Avoid this if the final type is a pointer
7777 since then we sometimes need the inner conversion. Likewise if
7778 the outer has a precision not equal to the size of its mode. */
7779 if (((inter_int && inside_int)
7780 || (inter_float && inside_float)
7781 || (inter_vec && inside_vec))
7782 && inter_prec >= inside_prec
7783 && (inter_float || inter_vec
7784 || inter_unsignedp == inside_unsignedp)
7785 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7786 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7787 && ! final_ptr
7788 && (! final_vec || inter_prec == inside_prec))
7789 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7790
7791 /* If we have a sign-extension of a zero-extended value, we can
7792 replace that by a single zero-extension. */
7793 if (inside_int && inter_int && final_int
7794 && inside_prec < inter_prec && inter_prec < final_prec
7795 && inside_unsignedp && !inter_unsignedp)
7796 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7797
7798 /* Two conversions in a row are not needed unless:
7799 - some conversion is floating-point (overstrict for now), or
7800 - some conversion is a vector (overstrict for now), or
7801 - the intermediate type is narrower than both initial and
7802 final, or
7803 - the intermediate type and innermost type differ in signedness,
7804 and the outermost type is wider than the intermediate, or
7805 - the initial type is a pointer type and the precisions of the
7806 intermediate and final types differ, or
7807 - the final type is a pointer type and the precisions of the
7808 initial and intermediate types differ. */
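	 /* For example, with 32-bit int and 64-bit long, (int)(long)X for X
	    of type int meets none of the exceptions above, so the
	    intermediate widening conversion to long is dropped.  */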
7809 if (! inside_float && ! inter_float && ! final_float
7810 && ! inside_vec && ! inter_vec && ! final_vec
7811 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7812 && ! (inside_int && inter_int
7813 && inter_unsignedp != inside_unsignedp
7814 && inter_prec < final_prec)
7815 && ((inter_unsignedp && inter_prec > inside_prec)
7816 == (final_unsignedp && final_prec > inter_prec))
7817 && ! (inside_ptr && inter_prec != final_prec)
7818 && ! (final_ptr && inside_prec != inter_prec)
7819 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7820 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7821 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7822 }
7823
7824 /* Handle (T *)&A.B.C for A being of type T and B and C
7825 living at offset zero. This occurs frequently in
7826 C++ upcasting and then accessing the base. */
7827 if (TREE_CODE (op0) == ADDR_EXPR
7828 && POINTER_TYPE_P (type)
7829 && handled_component_p (TREE_OPERAND (op0, 0)))
7830 {
7831 HOST_WIDE_INT bitsize, bitpos;
7832 tree offset;
7833 enum machine_mode mode;
7834 int unsignedp, volatilep;
7835 tree base = TREE_OPERAND (op0, 0);
7836 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7837 &mode, &unsignedp, &volatilep, false);
7838 /* If the reference was to a (constant) zero offset, we can use
7839 the address of the base if it has the same base type
7840 as the result type. */
7841 if (! offset && bitpos == 0
7842 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7843 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7844 return fold_convert (type, fold_addr_expr (base));
7845 }
7846
7847 if (TREE_CODE (op0) == MODIFY_EXPR
7848 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7849 /* Detect assigning a bitfield. */
7850 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7851 && DECL_BIT_FIELD
7852 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7853 {
7854 /* Don't leave an assignment inside a conversion
7855 unless assigning a bitfield. */
7856 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7857 /* First do the assignment, then return the converted constant. */
7858 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7859 TREE_NO_WARNING (tem) = 1;
7860 TREE_USED (tem) = 1;
7861 return tem;
7862 }
7863
7864 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7865 constant (if x has signed type, the sign bit cannot be set
7866 in c). This folds extension into the BIT_AND_EXPR.
7867 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7868 very likely don't have maximal range for their precision and this
7869 transformation effectively doesn't preserve non-maximal ranges. */
7870 if (TREE_CODE (type) == INTEGER_TYPE
7871 && TREE_CODE (op0) == BIT_AND_EXPR
7872 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7873 {
7874 tree and = op0;
7875 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7876 int change = 0;
7877
7878 if (TYPE_UNSIGNED (TREE_TYPE (and))
7879 || (TYPE_PRECISION (type)
7880 <= TYPE_PRECISION (TREE_TYPE (and))))
7881 change = 1;
7882 else if (TYPE_PRECISION (TREE_TYPE (and1))
7883 <= HOST_BITS_PER_WIDE_INT
7884 && host_integerp (and1, 1))
7885 {
7886 unsigned HOST_WIDE_INT cst;
7887
7888 cst = tree_low_cst (and1, 1);
7889 cst &= (HOST_WIDE_INT) -1
7890 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7891 change = (cst == 0);
7892 #ifdef LOAD_EXTEND_OP
7893 if (change
7894 && !flag_syntax_only
7895 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7896 == ZERO_EXTEND))
7897 {
7898 tree uns = unsigned_type_for (TREE_TYPE (and0));
7899 and0 = fold_convert (uns, and0);
7900 and1 = fold_convert (uns, and1);
7901 }
7902 #endif
7903 }
7904 if (change)
7905 {
7906 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7907 TREE_INT_CST_HIGH (and1), 0,
7908 TREE_OVERFLOW (and1));
7909 return fold_build2 (BIT_AND_EXPR, type,
7910 fold_convert (type, and0), tem);
7911 }
7912 }
7913
7914 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7915 when one of the new casts will fold away. Conservatively we assume
7916 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7917 if (POINTER_TYPE_P (type)
7918 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7919 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7920 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7921 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7922 {
7923 tree arg00 = TREE_OPERAND (arg0, 0);
7924 tree arg01 = TREE_OPERAND (arg0, 1);
7925
7926 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7927 fold_convert (sizetype, arg01));
7928 }
7929
7930 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7931 of the same precision, and X is of an integer type not narrower than
7932 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7933 if (INTEGRAL_TYPE_P (type)
7934 && TREE_CODE (op0) == BIT_NOT_EXPR
7935 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7936 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7937 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7938 {
7939 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7940 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7941 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7942 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7943 }
7944
7945 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7946 type of X and Y (integer types only). */
7947 if (INTEGRAL_TYPE_P (type)
7948 && TREE_CODE (op0) == MULT_EXPR
7949 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7950 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7951 {
7952 /* Be careful not to introduce new overflows. */
7953 tree mult_type;
7954 if (TYPE_OVERFLOW_WRAPS (type))
7955 mult_type = type;
7956 else
7957 mult_type = unsigned_type_for (type);
7958
7959 tem = fold_build2 (MULT_EXPR, mult_type,
7960 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7961 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7962 return fold_convert (type, tem);
7963 }
7964
7965 tem = fold_convert_const (code, type, op0);
7966 return tem ? tem : NULL_TREE;
7967
7968 case FIXED_CONVERT_EXPR:
7969 tem = fold_convert_const (code, type, arg0);
7970 return tem ? tem : NULL_TREE;
7971
7972 case VIEW_CONVERT_EXPR:
7973 if (TREE_TYPE (op0) == type)
7974 return op0;
7975 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7976 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7977
7978 /* For integral conversions with the same precision or pointer
7979 conversions use a NOP_EXPR instead. */
7980 if ((INTEGRAL_TYPE_P (type)
7981 || POINTER_TYPE_P (type))
7982 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7983 || POINTER_TYPE_P (TREE_TYPE (op0)))
7984 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7985 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7986 a sub-type to its base type as generated by the Ada FE. */
7987 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7988 && TREE_TYPE (TREE_TYPE (op0))))
7989 return fold_convert (type, op0);
7990
7991 /* Strip inner integral conversions that do not change the precision. */
7992 if (CONVERT_EXPR_P (op0)
7993 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7994 || POINTER_TYPE_P (TREE_TYPE (op0)))
7995 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7996 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7997 && (TYPE_PRECISION (TREE_TYPE (op0))
7998 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7999 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8000
8001 return fold_view_convert_expr (type, op0);
8002
8003 case NEGATE_EXPR:
8004 tem = fold_negate_expr (arg0);
8005 if (tem)
8006 return fold_convert (type, tem);
8007 return NULL_TREE;
8008
8009 case ABS_EXPR:
8010 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8011 return fold_abs_const (arg0, type);
8012 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8013 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8014 /* Convert fabs((double)float) into (double)fabsf(float). */
8015 else if (TREE_CODE (arg0) == NOP_EXPR
8016 && TREE_CODE (type) == REAL_TYPE)
8017 {
8018 tree targ0 = strip_float_extensions (arg0);
8019 if (targ0 != arg0)
8020 return fold_convert (type, fold_build1 (ABS_EXPR,
8021 TREE_TYPE (targ0),
8022 targ0));
8023 }
8024 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8025 else if (TREE_CODE (arg0) == ABS_EXPR)
8026 return arg0;
8027 else if (tree_expr_nonnegative_p (arg0))
8028 return arg0;
8029
8030 /* Strip sign ops from argument. */
8031 if (TREE_CODE (type) == REAL_TYPE)
8032 {
8033 tem = fold_strip_sign_ops (arg0);
8034 if (tem)
8035 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8036 }
8037 return NULL_TREE;
8038
8039 case CONJ_EXPR:
8040 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8041 return fold_convert (type, arg0);
8042 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8043 {
8044 tree itype = TREE_TYPE (type);
8045 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8046 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8047 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8048 }
8049 if (TREE_CODE (arg0) == COMPLEX_CST)
8050 {
8051 tree itype = TREE_TYPE (type);
8052 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8053 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8054 return build_complex (type, rpart, negate_expr (ipart));
8055 }
8056 if (TREE_CODE (arg0) == CONJ_EXPR)
8057 return fold_convert (type, TREE_OPERAND (arg0, 0));
8058 return NULL_TREE;
8059
8060 case BIT_NOT_EXPR:
8061 if (TREE_CODE (arg0) == INTEGER_CST)
8062 return fold_not_const (arg0, type);
8063 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8064 return fold_convert (type, TREE_OPERAND (arg0, 0));
8065 /* Convert ~ (-A) to A - 1. */
8066 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8067 return fold_build2 (MINUS_EXPR, type,
8068 fold_convert (type, TREE_OPERAND (arg0, 0)),
8069 build_int_cst (type, 1));
8070 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8071 else if (INTEGRAL_TYPE_P (type)
8072 && ((TREE_CODE (arg0) == MINUS_EXPR
8073 && integer_onep (TREE_OPERAND (arg0, 1)))
8074 || (TREE_CODE (arg0) == PLUS_EXPR
8075 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8076 return fold_build1 (NEGATE_EXPR, type,
8077 fold_convert (type, TREE_OPERAND (arg0, 0)));
8078 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8079 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8080 && (tem = fold_unary (BIT_NOT_EXPR, type,
8081 fold_convert (type,
8082 TREE_OPERAND (arg0, 0)))))
8083 return fold_build2 (BIT_XOR_EXPR, type, tem,
8084 fold_convert (type, TREE_OPERAND (arg0, 1)));
8085 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8086 && (tem = fold_unary (BIT_NOT_EXPR, type,
8087 fold_convert (type,
8088 TREE_OPERAND (arg0, 1)))))
8089 return fold_build2 (BIT_XOR_EXPR, type,
8090 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8091 /* Perform BIT_NOT_EXPR on each element individually. */
8092 else if (TREE_CODE (arg0) == VECTOR_CST)
8093 {
8094 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8095 int count = TYPE_VECTOR_SUBPARTS (type), i;
8096
8097 for (i = 0; i < count; i++)
8098 {
8099 if (elements)
8100 {
8101 elem = TREE_VALUE (elements);
8102 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8103 if (elem == NULL_TREE)
8104 break;
8105 elements = TREE_CHAIN (elements);
8106 }
8107 else
8108 elem = build_int_cst (TREE_TYPE (type), -1);
8109 list = tree_cons (NULL_TREE, elem, list);
8110 }
8111 if (i == count)
8112 return build_vector (type, nreverse (list));
8113 }
8114
8115 return NULL_TREE;
8116
8117 case TRUTH_NOT_EXPR:
8118 /* The argument to invert_truthvalue must have Boolean type. */
8119 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8120 arg0 = fold_convert (boolean_type_node, arg0);
8121
8122 /* Note that the operand of this must be an int
8123 and its value must be 0 or 1.
8124 ("true" is a fixed value perhaps depending on the language,
8125 but we don't handle values other than 1 correctly yet.) */
8126 tem = fold_truth_not_expr (arg0);
8127 if (!tem)
8128 return NULL_TREE;
8129 return fold_convert (type, tem);
8130
8131 case REALPART_EXPR:
8132 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8133 return fold_convert (type, arg0);
8134 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8135 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8136 TREE_OPERAND (arg0, 1));
8137 if (TREE_CODE (arg0) == COMPLEX_CST)
8138 return fold_convert (type, TREE_REALPART (arg0));
8139 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8140 {
8141 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8142 tem = fold_build2 (TREE_CODE (arg0), itype,
8143 fold_build1 (REALPART_EXPR, itype,
8144 TREE_OPERAND (arg0, 0)),
8145 fold_build1 (REALPART_EXPR, itype,
8146 TREE_OPERAND (arg0, 1)));
8147 return fold_convert (type, tem);
8148 }
8149 if (TREE_CODE (arg0) == CONJ_EXPR)
8150 {
8151 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8152 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8153 return fold_convert (type, tem);
8154 }
8155 if (TREE_CODE (arg0) == CALL_EXPR)
8156 {
8157 tree fn = get_callee_fndecl (arg0);
8158 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8159 switch (DECL_FUNCTION_CODE (fn))
8160 {
8161 CASE_FLT_FN (BUILT_IN_CEXPI):
8162 fn = mathfn_built_in (type, BUILT_IN_COS);
8163 if (fn)
8164 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8165 break;
8166
8167 default:
8168 break;
8169 }
8170 }
8171 return NULL_TREE;
8172
8173 case IMAGPART_EXPR:
8174 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8175 return fold_convert (type, integer_zero_node);
8176 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8177 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8178 TREE_OPERAND (arg0, 0));
8179 if (TREE_CODE (arg0) == COMPLEX_CST)
8180 return fold_convert (type, TREE_IMAGPART (arg0));
8181 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8182 {
8183 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8184 tem = fold_build2 (TREE_CODE (arg0), itype,
8185 fold_build1 (IMAGPART_EXPR, itype,
8186 TREE_OPERAND (arg0, 0)),
8187 fold_build1 (IMAGPART_EXPR, itype,
8188 TREE_OPERAND (arg0, 1)));
8189 return fold_convert (type, tem);
8190 }
8191 if (TREE_CODE (arg0) == CONJ_EXPR)
8192 {
8193 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8194 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8195 return fold_convert (type, negate_expr (tem));
8196 }
8197 if (TREE_CODE (arg0) == CALL_EXPR)
8198 {
8199 tree fn = get_callee_fndecl (arg0);
8200 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8201 switch (DECL_FUNCTION_CODE (fn))
8202 {
8203 CASE_FLT_FN (BUILT_IN_CEXPI):
8204 fn = mathfn_built_in (type, BUILT_IN_SIN);
8205 if (fn)
8206 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8207 break;
8208
8209 default:
8210 break;
8211 }
8212 }
8213 return NULL_TREE;
8214
8215 default:
8216 return NULL_TREE;
8217 } /* switch (code) */
8218 }
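/* An illustrative standalone check, not part of this file, of the
   BIT_NOT_EXPR identities folded above; unsigned arithmetic is used so
   that the wrap-around behavior is well defined.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a = 42u;
  assert (~(-a) == a - 1u);    /* ~ (-A) -> A - 1.  */
  assert (~(a - 1u) == -a);    /* ~ (A - 1) -> -A.  */
  assert (~~a == a);           /* ~ ~A -> A.  */
  return 0;
}
#endif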
8219
8220 /* Fold a binary expression of code CODE and type TYPE with operands
8221 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8222 Return the folded expression if folding is successful. Otherwise,
8223 return NULL_TREE. */
8224
8225 static tree
8226 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8227 {
8228 enum tree_code compl_code;
8229
8230 if (code == MIN_EXPR)
8231 compl_code = MAX_EXPR;
8232 else if (code == MAX_EXPR)
8233 compl_code = MIN_EXPR;
8234 else
8235 gcc_unreachable ();
8236
8237 /* MIN (MAX (a, b), b) == b. */
8238 if (TREE_CODE (op0) == compl_code
8239 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8240 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8241
8242 /* MIN (MAX (b, a), b) == b. */
8243 if (TREE_CODE (op0) == compl_code
8244 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8245 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8246 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8247
8248 /* MIN (a, MAX (a, b)) == a. */
8249 if (TREE_CODE (op1) == compl_code
8250 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8251 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8252 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8253
8254 /* MIN (a, MAX (b, a)) == a. */
8255 if (TREE_CODE (op1) == compl_code
8256 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8257 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8258 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8259
8260 return NULL_TREE;
8261 }
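/* A small standalone sketch, not part of this file, of the MIN/MAX
   absorption identities folded above; the helper macros are hypothetical
   stand-ins for MIN_EXPR and MAX_EXPR.  */
#if 0
#include <assert.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))

int
main (void)
{
  int a = 3, b = 8;
  assert (MIN (MAX (a, b), b) == b);  /* MIN (MAX (a, b), b) == b.  */
  assert (MIN (a, MAX (a, b)) == a);  /* MIN (a, MAX (a, b)) == a.  */
  return 0;
}
#endif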
8262
8263 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8264 by changing CODE to reduce the magnitude of constants involved in
8265 ARG0 of the comparison.
8266 Returns a canonicalized comparison tree if a simplification was
8267 possible, otherwise returns NULL_TREE.
8268 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8269 valid if signed overflow is undefined. */
8270
8271 static tree
8272 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8273 tree arg0, tree arg1,
8274 bool *strict_overflow_p)
8275 {
8276 enum tree_code code0 = TREE_CODE (arg0);
8277 tree t, cst0 = NULL_TREE;
8278 int sgn0;
8279 bool swap = false;
8280
8281 /* Match A +- CST code arg1 and CST code arg1. */
8282 if (!(((code0 == MINUS_EXPR
8283 || code0 == PLUS_EXPR)
8284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8285 || code0 == INTEGER_CST))
8286 return NULL_TREE;
8287
8288 /* Identify the constant in arg0 and its sign. */
8289 if (code0 == INTEGER_CST)
8290 cst0 = arg0;
8291 else
8292 cst0 = TREE_OPERAND (arg0, 1);
8293 sgn0 = tree_int_cst_sgn (cst0);
8294
8295 /* Overflowed constants and zero will cause problems. */
8296 if (integer_zerop (cst0)
8297 || TREE_OVERFLOW (cst0))
8298 return NULL_TREE;
8299
8300 /* See if we can reduce the magnitude of the constant in
8301 arg0 by changing the comparison code. */
8302 if (code0 == INTEGER_CST)
8303 {
8304 /* CST <= arg1 -> CST-1 < arg1. */
8305 if (code == LE_EXPR && sgn0 == 1)
8306 code = LT_EXPR;
8307 /* -CST < arg1 -> -CST-1 <= arg1. */
8308 else if (code == LT_EXPR && sgn0 == -1)
8309 code = LE_EXPR;
8310 /* CST > arg1 -> CST-1 >= arg1. */
8311 else if (code == GT_EXPR && sgn0 == 1)
8312 code = GE_EXPR;
8313 /* -CST >= arg1 -> -CST-1 > arg1. */
8314 else if (code == GE_EXPR && sgn0 == -1)
8315 code = GT_EXPR;
8316 else
8317 return NULL_TREE;
8318 /* arg1 code' CST' might be more canonical. */
8319 swap = true;
8320 }
8321 else
8322 {
8323 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8324 if (code == LT_EXPR
8325 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8326 code = LE_EXPR;
8327 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8328 else if (code == GT_EXPR
8329 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8330 code = GE_EXPR;
8331 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8332 else if (code == LE_EXPR
8333 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8334 code = LT_EXPR;
8335 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8336 else if (code == GE_EXPR
8337 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8338 code = GT_EXPR;
8339 else
8340 return NULL_TREE;
8341 *strict_overflow_p = true;
8342 }
8343
8344 /* Now build the constant reduced in magnitude. */
8345 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8346 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8347 if (code0 != INTEGER_CST)
8348 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8349
8350 /* If swapping might yield a more canonical form, do so. */
8351 if (swap)
8352 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8353 else
8354 return fold_build2 (code, type, t, arg1);
8355 }
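/* An illustrative standalone check, not part of this file, of the
   magnitude reductions above: for integers, CST <= X equals CST-1 < X,
   CST > X equals CST-1 >= X, and X + CST > Y equals X + (CST-1) >= Y,
   provided the adjusted constant does not overflow.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x, y;
  for (x = -50; x <= 50; x++)
    {
      assert ((5 <= x) == (4 < x));
      assert ((5 > x) == (4 >= x));
      for (y = -50; y <= 50; y++)
        assert ((x + 3 > y) == (x + 2 >= y));
    }
  return 0;
}
#endif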
8356
8357 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8358 overflow further. Try to decrease the magnitude of constants involved
8359 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8360 and put sole constants at the second argument position.
8361 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8362
8363 static tree
8364 maybe_canonicalize_comparison (enum tree_code code, tree type,
8365 tree arg0, tree arg1)
8366 {
8367 tree t;
8368 bool strict_overflow_p;
8369 const char * const warnmsg = G_("assuming signed overflow does not occur "
8370 "when reducing constant in comparison");
8371
8372 /* In principle pointers also have undefined overflow behavior,
8373 but that causes problems elsewhere. */
8374 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8375 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8376 return NULL_TREE;
8377
8378 /* Try canonicalization by simplifying arg0. */
8379 strict_overflow_p = false;
8380 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8381 &strict_overflow_p);
8382 if (t)
8383 {
8384 if (strict_overflow_p)
8385 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8386 return t;
8387 }
8388
8389 /* Try canonicalization by simplifying arg1 using the swapped
8390 comparison. */
8391 code = swap_tree_comparison (code);
8392 strict_overflow_p = false;
8393 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8394 &strict_overflow_p);
8395 if (t && strict_overflow_p)
8396 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8397 return t;
8398 }
8399
8400 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8401 space. This is used to avoid issuing overflow warnings for
8402 expressions like &p->x which cannot wrap. */
8403
8404 static bool
8405 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8406 {
8407 unsigned HOST_WIDE_INT offset_low, total_low;
8408 HOST_WIDE_INT size, offset_high, total_high;
8409
8410 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8411 return true;
8412
8413 if (bitpos < 0)
8414 return true;
8415
8416 if (offset == NULL_TREE)
8417 {
8418 offset_low = 0;
8419 offset_high = 0;
8420 }
8421 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8422 return true;
8423 else
8424 {
8425 offset_low = TREE_INT_CST_LOW (offset);
8426 offset_high = TREE_INT_CST_HIGH (offset);
8427 }
8428
8429 if (add_double_with_sign (offset_low, offset_high,
8430 bitpos / BITS_PER_UNIT, 0,
8431 &total_low, &total_high,
8432 true))
8433 return true;
8434
8435 if (total_high != 0)
8436 return true;
8437
8438 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8439 if (size <= 0)
8440 return true;
8441
8442 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8443 array. */
8444 if (TREE_CODE (base) == ADDR_EXPR)
8445 {
8446 HOST_WIDE_INT base_size;
8447
8448 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8449 if (base_size > 0 && size < base_size)
8450 size = base_size;
8451 }
8452
8453 return total_low > (unsigned HOST_WIDE_INT) size;
8454 }
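/* A hedged single-word analogue, not part of this file, of the double-word
   overflow test used above: an unsigned sum wraps exactly when the result
   is smaller than one of the addends.  */
#if 0
#include <assert.h>
#include <stdint.h>

static int
add_overflows (uint64_t a, uint64_t b, uint64_t *sum)
{
  *sum = a + b;        /* Well defined modulo 2^64.  */
  return *sum < a;     /* Wrapped iff the sum dropped below an addend.  */
}

int
main (void)
{
  uint64_t s;
  assert (!add_overflows (1, 2, &s) && s == 3);
  assert (add_overflows (UINT64_MAX, 1, &s) && s == 0);
  return 0;
}
#endif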
8455
8456 /* Subroutine of fold_binary. This routine performs all of the
8457 transformations that are common to the equality/inequality
8458 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8459 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8460 fold_binary should call fold_binary rather than this routine directly.
8461 Fold a comparison with tree code CODE and type TYPE with operands OP0
8462 and OP1; return the folded comparison or NULL_TREE. */
8463
8464 static tree
8465 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8466 {
8467 tree arg0, arg1, tem;
8468
8469 arg0 = op0;
8470 arg1 = op1;
8471
8472 STRIP_SIGN_NOPS (arg0);
8473 STRIP_SIGN_NOPS (arg1);
8474
8475 tem = fold_relational_const (code, type, arg0, arg1);
8476 if (tem != NULL_TREE)
8477 return tem;
8478
8479 /* If one arg is a real or integer constant, put it last. */
8480 if (tree_swap_operands_p (arg0, arg1, true))
8481 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8482
8483 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8484 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8485 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8486 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8487 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8488 && (TREE_CODE (arg1) == INTEGER_CST
8489 && !TREE_OVERFLOW (arg1)))
8490 {
8491 tree const1 = TREE_OPERAND (arg0, 1);
8492 tree const2 = arg1;
8493 tree variable = TREE_OPERAND (arg0, 0);
8494 tree lhs;
8495 int lhs_add;
8496 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8497
8498 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8499 TREE_TYPE (arg1), const2, const1);
8500
8501 /* If the constant operation overflowed this can be
8502 simplified as a comparison against INT_MAX/INT_MIN. */
8503 if (TREE_CODE (lhs) == INTEGER_CST
8504 && TREE_OVERFLOW (lhs))
8505 {
8506 int const1_sgn = tree_int_cst_sgn (const1);
8507 enum tree_code code2 = code;
8508
8509 /* Get the sign of the constant on the lhs if the
8510 operation were VARIABLE + CONST1. */
8511 if (TREE_CODE (arg0) == MINUS_EXPR)
8512 const1_sgn = -const1_sgn;
8513
8514 /* The sign of the constant determines if we overflowed
8515 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8516 Canonicalize to the INT_MIN overflow by swapping the comparison
8517 if necessary. */
8518 if (const1_sgn == -1)
8519 code2 = swap_tree_comparison (code);
8520
8521 /* We now can look at the canonicalized case
8522 VARIABLE + 1 CODE2 INT_MIN
8523 and decide on the result. */
8524 if (code2 == LT_EXPR
8525 || code2 == LE_EXPR
8526 || code2 == EQ_EXPR)
8527 return omit_one_operand (type, boolean_false_node, variable);
8528 else if (code2 == NE_EXPR
8529 || code2 == GE_EXPR
8530 || code2 == GT_EXPR)
8531 return omit_one_operand (type, boolean_true_node, variable);
8532 }
8533
8534 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8535 && (TREE_CODE (lhs) != INTEGER_CST
8536 || !TREE_OVERFLOW (lhs)))
8537 {
8538 fold_overflow_warning (("assuming signed overflow does not occur "
8539 "when changing X +- C1 cmp C2 to "
8540 "X cmp C1 +- C2"),
8541 WARN_STRICT_OVERFLOW_COMPARISON);
8542 return fold_build2 (code, type, variable, lhs);
8543 }
8544 }
8545
8546 /* Comparisons of pointers can be decomposed into a compile-time
8547 comparison of the base objects and the offsets into the object.
8548 This requires at least one operand being an ADDR_EXPR or a
8549 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8550 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8551 && (TREE_CODE (arg0) == ADDR_EXPR
8552 || TREE_CODE (arg1) == ADDR_EXPR
8553 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8554 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8555 {
8556 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8557 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8558 enum machine_mode mode;
8559 int volatilep, unsignedp;
8560 bool indirect_base0 = false, indirect_base1 = false;
8561
8562 /* Get base and offset for the access. Strip ADDR_EXPR for
8563 get_inner_reference, but put it back by stripping INDIRECT_REF
8564 off the base object if possible. indirect_baseN will be true
8565 if baseN is not an address but refers to the object itself. */
8566 base0 = arg0;
8567 if (TREE_CODE (arg0) == ADDR_EXPR)
8568 {
8569 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8570 &bitsize, &bitpos0, &offset0, &mode,
8571 &unsignedp, &volatilep, false);
8572 if (TREE_CODE (base0) == INDIRECT_REF)
8573 base0 = TREE_OPERAND (base0, 0);
8574 else
8575 indirect_base0 = true;
8576 }
8577 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8578 {
8579 base0 = TREE_OPERAND (arg0, 0);
8580 offset0 = TREE_OPERAND (arg0, 1);
8581 }
8582
8583 base1 = arg1;
8584 if (TREE_CODE (arg1) == ADDR_EXPR)
8585 {
8586 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8587 &bitsize, &bitpos1, &offset1, &mode,
8588 &unsignedp, &volatilep, false);
8589 if (TREE_CODE (base1) == INDIRECT_REF)
8590 base1 = TREE_OPERAND (base1, 0);
8591 else
8592 indirect_base1 = true;
8593 }
8594 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8595 {
8596 base1 = TREE_OPERAND (arg1, 0);
8597 offset1 = TREE_OPERAND (arg1, 1);
8598 }
8599
8600 /* If we have equivalent bases we might be able to simplify. */
8601 if (indirect_base0 == indirect_base1
8602 && operand_equal_p (base0, base1, 0))
8603 {
8604 /* We can fold this expression to a constant if the non-constant
8605 offset parts are equal. */
8606 if ((offset0 == offset1
8607 || (offset0 && offset1
8608 && operand_equal_p (offset0, offset1, 0)))
8609 && (code == EQ_EXPR
8610 || code == NE_EXPR
8611 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8612
8613 {
8614 if (code != EQ_EXPR
8615 && code != NE_EXPR
8616 && bitpos0 != bitpos1
8617 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8618 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8619 fold_overflow_warning (("assuming pointer wraparound does not "
8620 "occur when comparing P +- C1 with "
8621 "P +- C2"),
8622 WARN_STRICT_OVERFLOW_CONDITIONAL);
8623
8624 switch (code)
8625 {
8626 case EQ_EXPR:
8627 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8628 case NE_EXPR:
8629 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8630 case LT_EXPR:
8631 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8632 case LE_EXPR:
8633 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8634 case GE_EXPR:
8635 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8636 case GT_EXPR:
8637 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8638 default:;
8639 }
8640 }
8641 /* We can simplify the comparison to a comparison of the variable
8642 offset parts if the constant offset parts are equal.
8643 Be careful to use signed size type here because otherwise we
8644 mess with array offsets in the wrong way. This is possible
8645 because pointer arithmetic is restricted to remain within an
8646 object and overflow on pointer differences is undefined as of
8647 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8648 else if (bitpos0 == bitpos1
8649 && ((code == EQ_EXPR || code == NE_EXPR)
8650 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8651 {
8652 tree signed_size_type_node;
8653 signed_size_type_node = signed_type_for (size_type_node);
8654
8655 /* By converting to signed size type we cover middle-end pointer
8656 arithmetic which operates on unsigned pointer types of size
8657 type size and ARRAY_REF offsets which are properly sign or
8658 zero extended from their type in case it is narrower than
8659 size type. */
8660 if (offset0 == NULL_TREE)
8661 offset0 = build_int_cst (signed_size_type_node, 0);
8662 else
8663 offset0 = fold_convert (signed_size_type_node, offset0);
8664 if (offset1 == NULL_TREE)
8665 offset1 = build_int_cst (signed_size_type_node, 0);
8666 else
8667 offset1 = fold_convert (signed_size_type_node, offset1);
8668
8669 if (code != EQ_EXPR
8670 && code != NE_EXPR
8671 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8672 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8673 fold_overflow_warning (("assuming pointer wraparound does not "
8674 "occur when comparing P +- C1 with "
8675 "P +- C2"),
8676 WARN_STRICT_OVERFLOW_COMPARISON);
8677
8678 return fold_build2 (code, type, offset0, offset1);
8679 }
8680 }
8681 /* For non-equal bases we can simplify if they are addresses
8682 of local binding decls or constants. */
8683 else if (indirect_base0 && indirect_base1
8684 /* We know that !operand_equal_p (base0, base1, 0)
8685 because the if condition was false. But make
8686 sure two decls are not the same. */
8687 && base0 != base1
8688 && TREE_CODE (arg0) == ADDR_EXPR
8689 && TREE_CODE (arg1) == ADDR_EXPR
8690 && (((TREE_CODE (base0) == VAR_DECL
8691 || TREE_CODE (base0) == PARM_DECL)
8692 && (targetm.binds_local_p (base0)
8693 || CONSTANT_CLASS_P (base1)))
8694 || CONSTANT_CLASS_P (base0))
8695 && (((TREE_CODE (base1) == VAR_DECL
8696 || TREE_CODE (base1) == PARM_DECL)
8697 && (targetm.binds_local_p (base1)
8698 || CONSTANT_CLASS_P (base0)))
8699 || CONSTANT_CLASS_P (base1)))
8700 {
8701 if (code == EQ_EXPR)
8702 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8703 else if (code == NE_EXPR)
8704 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8705 }
8706 /* For equal offsets we can simplify to a comparison of the
8707 base addresses. */
8708 else if (bitpos0 == bitpos1
8709 && (indirect_base0
8710 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8711 && (indirect_base1
8712 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8713 && ((offset0 == offset1)
8714 || (offset0 && offset1
8715 && operand_equal_p (offset0, offset1, 0))))
8716 {
8717 if (indirect_base0)
8718 base0 = fold_addr_expr (base0);
8719 if (indirect_base1)
8720 base1 = fold_addr_expr (base1);
8721 return fold_build2 (code, type, base0, base1);
8722 }
8723 }
8724
8725 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8726 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8727 the resulting offset is smaller in absolute value than the
8728 original one. */
8729 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8730 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8731 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8732 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8733 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8734 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8735 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8736 {
8737 tree const1 = TREE_OPERAND (arg0, 1);
8738 tree const2 = TREE_OPERAND (arg1, 1);
8739 tree variable1 = TREE_OPERAND (arg0, 0);
8740 tree variable2 = TREE_OPERAND (arg1, 0);
8741 tree cst;
8742 const char * const warnmsg = G_("assuming signed overflow does not "
8743 "occur when combining constants around "
8744 "a comparison");
8745
8746 /* Put the constant on the side where it doesn't overflow and is
8747 of lower absolute value than before. */
8748 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8749 ? MINUS_EXPR : PLUS_EXPR,
8750 const2, const1, 0);
8751 if (!TREE_OVERFLOW (cst)
8752 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8753 {
8754 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8755 return fold_build2 (code, type,
8756 variable1,
8757 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8758 variable2, cst));
8759 }
8760
8761 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8762 ? MINUS_EXPR : PLUS_EXPR,
8763 const1, const2, 0);
8764 if (!TREE_OVERFLOW (cst)
8765 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8766 {
8767 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8768 return fold_build2 (code, type,
8769 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8770 variable1, cst),
8771 variable2);
8772 }
8773 }
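/* Example: with signed x and y, (x + 5) < (y + 7) is rewritten as
   x < y + 2; the combined constant 2 is smaller in absolute value
   than the original 7, so no overflow is possible that the original
   expression could not already exhibit (signed overflow being taken
   as undefined here). */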
8774
8775 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8776 signed arithmetic case. That form is created by the compiler
8777 often enough for folding it to be of value. One example is in
8778 computing loop trip counts after Operator Strength Reduction. */
8779 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8780 && TREE_CODE (arg0) == MULT_EXPR
8781 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8782 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8783 && integer_zerop (arg1))
8784 {
8785 tree const1 = TREE_OPERAND (arg0, 1);
8786 tree const2 = arg1; /* zero */
8787 tree variable1 = TREE_OPERAND (arg0, 0);
8788 enum tree_code cmp_code = code;
8789
8790 gcc_assert (!integer_zerop (const1));
8791
8792 fold_overflow_warning (("assuming signed overflow does not occur when "
8793 "eliminating multiplication in comparison "
8794 "with zero"),
8795 WARN_STRICT_OVERFLOW_COMPARISON);
8796
8797 /* If const1 is negative we swap the sense of the comparison. */
8798 if (tree_int_cst_sgn (const1) < 0)
8799 cmp_code = swap_tree_comparison (cmp_code);
8800
8801 return fold_build2 (cmp_code, type, variable1, const2);
8802 }
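/* Example: with signed x, x * 4 < 0 folds to x < 0, while x * -4 < 0
   folds to x > 0, the sense of the comparison being swapped for the
   negative multiplier. */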
8803
8804 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8805 if (tem)
8806 return tem;
8807
8808 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8809 {
8810 tree targ0 = strip_float_extensions (arg0);
8811 tree targ1 = strip_float_extensions (arg1);
8812 tree newtype = TREE_TYPE (targ0);
8813
8814 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8815 newtype = TREE_TYPE (targ1);
8816
8817 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8818 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8819 return fold_build2 (code, type, fold_convert (newtype, targ0),
8820 fold_convert (newtype, targ1));
8821
8822 /* (-a) CMP (-b) -> b CMP a */
8823 if (TREE_CODE (arg0) == NEGATE_EXPR
8824 && TREE_CODE (arg1) == NEGATE_EXPR)
8825 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8826 TREE_OPERAND (arg0, 0));
8827
8828 if (TREE_CODE (arg1) == REAL_CST)
8829 {
8830 REAL_VALUE_TYPE cst;
8831 cst = TREE_REAL_CST (arg1);
8832
8833 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8834 if (TREE_CODE (arg0) == NEGATE_EXPR)
8835 return fold_build2 (swap_tree_comparison (code), type,
8836 TREE_OPERAND (arg0, 0),
8837 build_real (TREE_TYPE (arg1),
8838 REAL_VALUE_NEGATE (cst)));
8839
8840 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8841 /* a CMP (-0) -> a CMP 0 */
8842 if (REAL_VALUE_MINUS_ZERO (cst))
8843 return fold_build2 (code, type, arg0,
8844 build_real (TREE_TYPE (arg1), dconst0));
8845
8846 /* x != NaN is always true, other ops are always false. */
8847 if (REAL_VALUE_ISNAN (cst)
8848 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8849 {
8850 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8851 return omit_one_operand (type, tem, arg0);
8852 }
8853
8854 /* Fold comparisons against infinity. */
8855 if (REAL_VALUE_ISINF (cst))
8856 {
8857 tem = fold_inf_compare (code, type, arg0, arg1);
8858 if (tem != NULL_TREE)
8859 return tem;
8860 }
8861 }
8862
8863 /* If this is a comparison of a real constant with a PLUS_EXPR
8864 or a MINUS_EXPR of a real constant, we can convert it into a
8865 comparison with a revised real constant as long as no overflow
8866 occurs when unsafe_math_optimizations are enabled. */
8867 if (flag_unsafe_math_optimizations
8868 && TREE_CODE (arg1) == REAL_CST
8869 && (TREE_CODE (arg0) == PLUS_EXPR
8870 || TREE_CODE (arg0) == MINUS_EXPR)
8871 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8872 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8873 ? MINUS_EXPR : PLUS_EXPR,
8874 arg1, TREE_OPERAND (arg0, 1), 0))
8875 && !TREE_OVERFLOW (tem))
8876 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8877
8878 /* Likewise, we can simplify a comparison of a real constant with
8879 a MINUS_EXPR whose first operand is also a real constant, i.e.
8880 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8881 floating-point types only if -fassociative-math is set. */
8882 if (flag_associative_math
8883 && TREE_CODE (arg1) == REAL_CST
8884 && TREE_CODE (arg0) == MINUS_EXPR
8885 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8886 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8887 arg1, 0))
8888 && !TREE_OVERFLOW (tem))
8889 return fold_build2 (swap_tree_comparison (code), type,
8890 TREE_OPERAND (arg0, 1), tem);
8891
8892 /* Fold comparisons against built-in math functions. */
8893 if (TREE_CODE (arg1) == REAL_CST
8894 && flag_unsafe_math_optimizations
8895 && ! flag_errno_math)
8896 {
8897 enum built_in_function fcode = builtin_mathfn_code (arg0);
8898
8899 if (fcode != END_BUILTINS)
8900 {
8901 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8902 if (tem != NULL_TREE)
8903 return tem;
8904 }
8905 }
8906 }
8907
8908 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8909 && CONVERT_EXPR_P (arg0))
8910 {
8911 /* If we are widening one operand of an integer comparison,
8912 see if the other operand is similarly being widened. Perhaps we
8913 can do the comparison in the narrower type. */
8914 tem = fold_widened_comparison (code, type, arg0, arg1);
8915 if (tem)
8916 return tem;
8917
8918 /* Or if we are changing signedness. */
8919 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8920 if (tem)
8921 return tem;
8922 }
8923
8924 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8925 constant, we can simplify it. */
8926 if (TREE_CODE (arg1) == INTEGER_CST
8927 && (TREE_CODE (arg0) == MIN_EXPR
8928 || TREE_CODE (arg0) == MAX_EXPR)
8929 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8930 {
8931 tem = optimize_minmax_comparison (code, type, op0, op1);
8932 if (tem)
8933 return tem;
8934 }
8935
8936 /* Simplify comparison of something with itself. (For IEEE
8937 floating-point, we can only do some of these simplifications.) */
8938 if (operand_equal_p (arg0, arg1, 0))
8939 {
8940 switch (code)
8941 {
8942 case EQ_EXPR:
8943 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8944 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8945 return constant_boolean_node (1, type);
8946 break;
8947
8948 case GE_EXPR:
8949 case LE_EXPR:
8950 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8951 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8952 return constant_boolean_node (1, type);
8953 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8954
8955 case NE_EXPR:
8956 /* For NE, we can only do this simplification if the operands
8957 are integral or we don't honor IEEE floating-point NaNs. */
8958 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8959 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8960 break;
8961 /* ... fall through ... */
8962 case GT_EXPR:
8963 case LT_EXPR:
8964 return constant_boolean_node (0, type);
8965 default:
8966 gcc_unreachable ();
8967 }
8968 }
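/* Example: for integral x, x == x, x <= x and x >= x fold to true
   and x < x folds to false; for an IEEE float x honoring NaNs,
   x <= x instead folds to x == x, which is false exactly when x is
   a NaN. */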
8969
8970 /* If we are comparing an expression that just has comparisons
8971 of two integer values, arithmetic expressions of those comparisons,
8972 and constants, we can simplify it. There are only three cases
8973 to check: the two values can either be equal, the first can be
8974 greater, or the second can be greater. Fold the expression for
8975 those three values. Since each value must be 0 or 1, we have
8976 eight possibilities, each of which corresponds to the constant 0
8977 or 1 or one of the six possible comparisons.
8978
8979 This handles common cases like (a > b) == 0 but also handles
8980 expressions like ((x > y) - (y > x)) > 0, which supposedly
8981 occur in macroized code. */
8982
8983 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8984 {
8985 tree cval1 = 0, cval2 = 0;
8986 int save_p = 0;
8987
8988 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8989 /* Don't handle degenerate cases here; they should already
8990 have been handled anyway. */
8991 && cval1 != 0 && cval2 != 0
8992 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8993 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8994 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8995 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8996 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8997 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8998 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8999 {
9000 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9001 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9002
9003 /* We can't just pass T to eval_subst in case cval1 or cval2
9004 was the same as ARG1. */
9005
9006 tree high_result
9007 = fold_build2 (code, type,
9008 eval_subst (arg0, cval1, maxval,
9009 cval2, minval),
9010 arg1);
9011 tree equal_result
9012 = fold_build2 (code, type,
9013 eval_subst (arg0, cval1, maxval,
9014 cval2, maxval),
9015 arg1);
9016 tree low_result
9017 = fold_build2 (code, type,
9018 eval_subst (arg0, cval1, minval,
9019 cval2, maxval),
9020 arg1);
9021
9022 /* All three of these results should be 0 or 1. Confirm they are.
9023 Then use those values to select the proper code to use. */
9024
9025 if (TREE_CODE (high_result) == INTEGER_CST
9026 && TREE_CODE (equal_result) == INTEGER_CST
9027 && TREE_CODE (low_result) == INTEGER_CST)
9028 {
9029 /* Make a 3-bit mask with the high-order bit being the
9030 value for `>', the next for `=', and the low for `<'. */
9031 switch ((integer_onep (high_result) * 4)
9032 + (integer_onep (equal_result) * 2)
9033 + integer_onep (low_result))
9034 {
9035 case 0:
9036 /* Always false. */
9037 return omit_one_operand (type, integer_zero_node, arg0);
9038 case 1:
9039 code = LT_EXPR;
9040 break;
9041 case 2:
9042 code = EQ_EXPR;
9043 break;
9044 case 3:
9045 code = LE_EXPR;
9046 break;
9047 case 4:
9048 code = GT_EXPR;
9049 break;
9050 case 5:
9051 code = NE_EXPR;
9052 break;
9053 case 6:
9054 code = GE_EXPR;
9055 break;
9056 case 7:
9057 /* Always true. */
9058 return omit_one_operand (type, integer_one_node, arg0);
9059 }
9060
9061 if (save_p)
9062 return save_expr (build2 (code, type, cval1, cval2));
9063 return fold_build2 (code, type, cval1, cval2);
9064 }
9065 }
9066 }
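/* Example: ((x > y) - (y > x)) > 0 evaluates, for the three possible
   orderings of x and y, to (1 - 0) > 0, (0 - 0) > 0 and (0 - 1) > 0;
   only the "greater" case is true, giving mask value 4 above, so the
   whole expression folds to x > y. */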
9067
9068 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9069 into a single range test. */
9070 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9071 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9072 && TREE_CODE (arg1) == INTEGER_CST
9073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9074 && !integer_zerop (TREE_OPERAND (arg0, 1))
9075 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9076 && !TREE_OVERFLOW (arg1))
9077 {
9078 tem = fold_div_compare (code, type, arg0, arg1);
9079 if (tem != NULL_TREE)
9080 return tem;
9081 }
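/* Example: for unsigned x, x / 4 == 2 holds exactly for
   8 <= x && x <= 11, so fold_div_compare turns the division into a
   single range test on x. */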
9082
9083 /* Fold ~X op ~Y as Y op X. */
9084 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9085 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9086 {
9087 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9088 return fold_build2 (code, type,
9089 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9090 TREE_OPERAND (arg0, 0));
9091 }
9092
9093 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9094 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9095 && TREE_CODE (arg1) == INTEGER_CST)
9096 {
9097 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9098 return fold_build2 (swap_tree_comparison (code), type,
9099 TREE_OPERAND (arg0, 0),
9100 fold_build1 (BIT_NOT_EXPR, cmp_type,
9101 fold_convert (cmp_type, arg1)));
9102 }
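/* Examples of the two folds above: ~x < ~y folds to y < x, and
   ~x < 5 folds to x > ~5, i.e. x > -6 in two's-complement
   arithmetic. */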
9103
9104 return NULL_TREE;
9105 }
9106
9107
9108 /* Subroutine of fold_binary. Optimize complex multiplications of the
9109 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9110 argument EXPR represents the expression "z" of type TYPE. */
9111
9112 static tree
9113 fold_mult_zconjz (tree type, tree expr)
9114 {
9115 tree itype = TREE_TYPE (type);
9116 tree rpart, ipart, tem;
9117
9118 if (TREE_CODE (expr) == COMPLEX_EXPR)
9119 {
9120 rpart = TREE_OPERAND (expr, 0);
9121 ipart = TREE_OPERAND (expr, 1);
9122 }
9123 else if (TREE_CODE (expr) == COMPLEX_CST)
9124 {
9125 rpart = TREE_REALPART (expr);
9126 ipart = TREE_IMAGPART (expr);
9127 }
9128 else
9129 {
9130 expr = save_expr (expr);
9131 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9132 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9133 }
9134
9135 rpart = save_expr (rpart);
9136 ipart = save_expr (ipart);
9137 tem = fold_build2 (PLUS_EXPR, itype,
9138 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9139 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9140 return fold_build2 (COMPLEX_EXPR, type, tem,
9141 fold_convert (itype, integer_zero_node));
9142 }
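/* Example: for z = 3 + 4i this computes (3*3 + 4*4) + 0i = 25 + 0i;
   the imaginary part of z * conj(z) is always exactly zero, which is
   why the result is built as the sum of squares paired with zero. */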
9143
9144
9145 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9146 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9147 guarantees that P and N have the same least significant log2(M) bits.
9148 N is not otherwise constrained. In particular, N is not normalized to
9149 0 <= N < M as is common. In general, the precise value of P is unknown.
9150 M is chosen as large as possible such that constant N can be determined.
9151
9152 Returns M and sets *RESIDUE to N. */
9153
9154 static unsigned HOST_WIDE_INT
9155 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9156 {
9157 enum tree_code code;
9158
9159 *residue = 0;
9160
9161 code = TREE_CODE (expr);
9162 if (code == ADDR_EXPR)
9163 {
9164 expr = TREE_OPERAND (expr, 0);
9165 if (handled_component_p (expr))
9166 {
9167 HOST_WIDE_INT bitsize, bitpos;
9168 tree offset;
9169 enum machine_mode mode;
9170 int unsignedp, volatilep;
9171
9172 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9173 &mode, &unsignedp, &volatilep, false);
9174 *residue = bitpos / BITS_PER_UNIT;
9175 if (offset)
9176 {
9177 if (TREE_CODE (offset) == INTEGER_CST)
9178 *residue += TREE_INT_CST_LOW (offset);
9179 else
9180 /* We don't handle more complicated offset expressions. */
9181 return 1;
9182 }
9183 }
9184
9185 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9186 return DECL_ALIGN_UNIT (expr);
9187 }
9188 else if (code == POINTER_PLUS_EXPR)
9189 {
9190 tree op0, op1;
9191 unsigned HOST_WIDE_INT modulus;
9192 enum tree_code inner_code;
9193
9194 op0 = TREE_OPERAND (expr, 0);
9195 STRIP_NOPS (op0);
9196 modulus = get_pointer_modulus_and_residue (op0, residue);
9197
9198 op1 = TREE_OPERAND (expr, 1);
9199 STRIP_NOPS (op1);
9200 inner_code = TREE_CODE (op1);
9201 if (inner_code == INTEGER_CST)
9202 {
9203 *residue += TREE_INT_CST_LOW (op1);
9204 return modulus;
9205 }
9206 else if (inner_code == MULT_EXPR)
9207 {
9208 op1 = TREE_OPERAND (op1, 1);
9209 if (TREE_CODE (op1) == INTEGER_CST)
9210 {
9211 unsigned HOST_WIDE_INT align;
9212
9213 /* Compute the greatest power-of-2 divisor of op1. */
9214 align = TREE_INT_CST_LOW (op1);
9215 align &= -align;
9216
9217 /* If align is non-zero and less than modulus, replace
9218 modulus with align. If align is 0, then either op1 is 0
9219 or the greatest power-of-2 divisor of op1 doesn't fit in an
9220 unsigned HOST_WIDE_INT. In either case, no additional
9221 constraint is imposed. */
9222 if (align)
9223 modulus = MIN (modulus, align);
9224
9225 return modulus;
9226 }
9227 }
9228 }
9229
9230 /* If we get here, we were unable to determine anything useful about the
9231 expression. */
9232 return 1;
9233 }
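/* Example: for "struct S { int a; int b; } s;" with 8-byte alignment,
   &s.b yields modulus 8 and residue 4: every possible value of the
   address is congruent to 4 modulo 8. For p +p i * 4 with p of
   modulus 8 and residue 0, the MULT_EXPR case above yields modulus 4,
   residue 0. */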
9234
9235
9236 /* Fold a binary expression of code CODE and type TYPE with operands
9237 OP0 and OP1. Return the folded expression if folding is
9238 successful. Otherwise, return NULL_TREE. */
9239
9240 tree
9241 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9242 {
9243 enum tree_code_class kind = TREE_CODE_CLASS (code);
9244 tree arg0, arg1, tem;
9245 tree t1 = NULL_TREE;
9246 bool strict_overflow_p;
9247
9248 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9249 && TREE_CODE_LENGTH (code) == 2
9250 && op0 != NULL_TREE
9251 && op1 != NULL_TREE);
9252
9253 arg0 = op0;
9254 arg1 = op1;
9255
9256 /* Strip any conversions that don't change the mode. This is
9257 safe for every expression, except for a comparison expression
9258 because its signedness is derived from its operands. So, in
9259 the latter case, only strip conversions that don't change the
9260 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9261 preserved.
9262
9263 Note that this is done as an internal manipulation within the
9264 constant folder, in order to find the simplest representation
9265 of the arguments so that their form can be studied. In any
9266 case, the appropriate type conversions should be put back in
9267 the tree that will get out of the constant folder. */
9268
9269 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9270 {
9271 STRIP_SIGN_NOPS (arg0);
9272 STRIP_SIGN_NOPS (arg1);
9273 }
9274 else
9275 {
9276 STRIP_NOPS (arg0);
9277 STRIP_NOPS (arg1);
9278 }
9279
9280 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9281 constant but we can't do arithmetic on them. */
9282 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9283 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9284 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9285 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9286 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9287 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9288 {
9289 if (kind == tcc_binary)
9290 {
9291 /* Make sure type and arg0 have the same saturating flag. */
9292 gcc_assert (TYPE_SATURATING (type)
9293 == TYPE_SATURATING (TREE_TYPE (arg0)));
9294 tem = const_binop (code, arg0, arg1, 0);
9295 }
9296 else if (kind == tcc_comparison)
9297 tem = fold_relational_const (code, type, arg0, arg1);
9298 else
9299 tem = NULL_TREE;
9300
9301 if (tem != NULL_TREE)
9302 {
9303 if (TREE_TYPE (tem) != type)
9304 tem = fold_convert (type, tem);
9305 return tem;
9306 }
9307 }
9308
9309 /* If this is a commutative operation, and ARG0 is a constant, move it
9310 to ARG1 to reduce the number of tests below. */
9311 if (commutative_tree_code (code)
9312 && tree_swap_operands_p (arg0, arg1, true))
9313 return fold_build2 (code, type, op1, op0);
9314
9315 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9316
9317 First check for cases where an arithmetic operation is applied to a
9318 compound, conditional, or comparison operation. Push the arithmetic
9319 operation inside the compound or conditional to see if any folding
9320 can then be done. Convert comparison to conditional for this purpose.
9321 This also optimizes non-constant cases that used to be done in
9322 expand_expr.
9323
9324 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9325 one of the operands is a comparison and the other is a comparison, a
9326 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9327 code below would make the expression more complex. Change it to a
9328 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9329 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9330
9331 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9332 || code == EQ_EXPR || code == NE_EXPR)
9333 && ((truth_value_p (TREE_CODE (arg0))
9334 && (truth_value_p (TREE_CODE (arg1))
9335 || (TREE_CODE (arg1) == BIT_AND_EXPR
9336 && integer_onep (TREE_OPERAND (arg1, 1)))))
9337 || (truth_value_p (TREE_CODE (arg1))
9338 && (truth_value_p (TREE_CODE (arg0))
9339 || (TREE_CODE (arg0) == BIT_AND_EXPR
9340 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9341 {
9342 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9343 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9344 : TRUTH_XOR_EXPR,
9345 boolean_type_node,
9346 fold_convert (boolean_type_node, arg0),
9347 fold_convert (boolean_type_node, arg1));
9348
9349 if (code == EQ_EXPR)
9350 tem = invert_truthvalue (tem);
9351
9352 return fold_convert (type, tem);
9353 }
9354
9355 if (TREE_CODE_CLASS (code) == tcc_binary
9356 || TREE_CODE_CLASS (code) == tcc_comparison)
9357 {
9358 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9359 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9360 fold_build2 (code, type,
9361 fold_convert (TREE_TYPE (op0),
9362 TREE_OPERAND (arg0, 1)),
9363 op1));
9364 if (TREE_CODE (arg1) == COMPOUND_EXPR
9365 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9366 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9367 fold_build2 (code, type, op0,
9368 fold_convert (TREE_TYPE (op1),
9369 TREE_OPERAND (arg1, 1))));
9370
9371 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9372 {
9373 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9374 arg0, arg1,
9375 /*cond_first_p=*/1);
9376 if (tem != NULL_TREE)
9377 return tem;
9378 }
9379
9380 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9381 {
9382 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9383 arg1, arg0,
9384 /*cond_first_p=*/0);
9385 if (tem != NULL_TREE)
9386 return tem;
9387 }
9388 }
9389
9390 switch (code)
9391 {
9392 case POINTER_PLUS_EXPR:
9393 /* 0 +p index -> (type)index */
9394 if (integer_zerop (arg0))
9395 return non_lvalue (fold_convert (type, arg1));
9396
9397 /* PTR +p 0 -> PTR */
9398 if (integer_zerop (arg1))
9399 return non_lvalue (fold_convert (type, arg0));
9400
9401 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9402 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9403 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9404 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9405 fold_convert (sizetype, arg1),
9406 fold_convert (sizetype, arg0)));
9407
9408 /* index +p PTR -> PTR +p index */
9409 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9410 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9411 return fold_build2 (POINTER_PLUS_EXPR, type,
9412 fold_convert (type, arg1),
9413 fold_convert (sizetype, arg0));
9414
9415 /* (PTR +p B) +p A -> PTR +p (B + A) */
9416 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9417 {
9418 tree inner;
9419 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9420 tree arg00 = TREE_OPERAND (arg0, 0);
9421 inner = fold_build2 (PLUS_EXPR, sizetype,
9422 arg01, fold_convert (sizetype, arg1));
9423 return fold_convert (type,
9424 fold_build2 (POINTER_PLUS_EXPR,
9425 TREE_TYPE (arg00), arg00, inner));
9426 }
9427
9428 /* PTR_CST +p CST -> CST1 */
9429 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9430 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9431
9432 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9433 of the array. The loop optimizer sometimes produces expressions
9434 of this form. */
9435 if (TREE_CODE (arg0) == ADDR_EXPR)
9436 {
9437 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9438 if (tem)
9439 return fold_convert (type, tem);
9440 }
9441
9442 return NULL_TREE;
9443
9444 case PLUS_EXPR:
9445 /* PTR + INT -> (INT)(PTR p+ INT) */
9446 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9447 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9448 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9449 TREE_TYPE (arg0),
9450 arg0,
9451 fold_convert (sizetype, arg1)));
9452 /* INT + PTR -> (INT)(PTR p+ INT) */
9453 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9454 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9455 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9456 TREE_TYPE (arg1),
9457 arg1,
9458 fold_convert (sizetype, arg0)));
9459 /* A + (-B) -> A - B */
9460 if (TREE_CODE (arg1) == NEGATE_EXPR)
9461 return fold_build2 (MINUS_EXPR, type,
9462 fold_convert (type, arg0),
9463 fold_convert (type, TREE_OPERAND (arg1, 0)));
9464 /* (-A) + B -> B - A */
9465 if (TREE_CODE (arg0) == NEGATE_EXPR
9466 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9467 return fold_build2 (MINUS_EXPR, type,
9468 fold_convert (type, arg1),
9469 fold_convert (type, TREE_OPERAND (arg0, 0)));
9470
9471 if (INTEGRAL_TYPE_P (type))
9472 {
9473 /* Convert ~A + 1 to -A. */
9474 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9475 && integer_onep (arg1))
9476 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9477
9478 /* ~X + X is -1. */
9479 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9480 && !TYPE_OVERFLOW_TRAPS (type))
9481 {
9482 tree tem = TREE_OPERAND (arg0, 0);
9483
9484 STRIP_NOPS (tem);
9485 if (operand_equal_p (tem, arg1, 0))
9486 {
9487 t1 = build_int_cst_type (type, -1);
9488 return omit_one_operand (type, t1, arg1);
9489 }
9490 }
9491
9492 /* X + ~X is -1. */
9493 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9494 && !TYPE_OVERFLOW_TRAPS (type))
9495 {
9496 tree tem = TREE_OPERAND (arg1, 0);
9497
9498 STRIP_NOPS (tem);
9499 if (operand_equal_p (arg0, tem, 0))
9500 {
9501 t1 = build_int_cst_type (type, -1);
9502 return omit_one_operand (type, t1, arg0);
9503 }
9504 }
9505
9506 /* X + (X / CST) * -CST is X % CST. */
9507 if (TREE_CODE (arg1) == MULT_EXPR
9508 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9509 && operand_equal_p (arg0,
9510 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9511 {
9512 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9513 tree cst1 = TREE_OPERAND (arg1, 1);
9514 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9515 if (sum && integer_zerop (sum))
9516 return fold_convert (type,
9517 fold_build2 (TRUNC_MOD_EXPR,
9518 TREE_TYPE (arg0), arg0, cst0));
9519 }
9520 }
9521
9522 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9523 same or one. Make sure type is not saturating.
9524 fold_plusminus_mult_expr will re-associate. */
9525 if ((TREE_CODE (arg0) == MULT_EXPR
9526 || TREE_CODE (arg1) == MULT_EXPR)
9527 && !TYPE_SATURATING (type)
9528 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9529 {
9530 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9531 if (tem)
9532 return tem;
9533 }
9534
9535 if (! FLOAT_TYPE_P (type))
9536 {
9537 if (integer_zerop (arg1))
9538 return non_lvalue (fold_convert (type, arg0));
9539
9540 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9541 with a constant, and the two constants have no bits in common,
9542 we should treat this as a BIT_IOR_EXPR since this may produce more
9543 simplifications. */
9544 if (TREE_CODE (arg0) == BIT_AND_EXPR
9545 && TREE_CODE (arg1) == BIT_AND_EXPR
9546 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9547 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9548 && integer_zerop (const_binop (BIT_AND_EXPR,
9549 TREE_OPERAND (arg0, 1),
9550 TREE_OPERAND (arg1, 1), 0)))
9551 {
9552 code = BIT_IOR_EXPR;
9553 goto bit_ior;
9554 }
9555
9556 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9557 (plus (plus (mult) (mult)) (foo)) so that we can
9558 take advantage of the factoring cases below. */
9559 if (((TREE_CODE (arg0) == PLUS_EXPR
9560 || TREE_CODE (arg0) == MINUS_EXPR)
9561 && TREE_CODE (arg1) == MULT_EXPR)
9562 || ((TREE_CODE (arg1) == PLUS_EXPR
9563 || TREE_CODE (arg1) == MINUS_EXPR)
9564 && TREE_CODE (arg0) == MULT_EXPR))
9565 {
9566 tree parg0, parg1, parg, marg;
9567 enum tree_code pcode;
9568
9569 if (TREE_CODE (arg1) == MULT_EXPR)
9570 parg = arg0, marg = arg1;
9571 else
9572 parg = arg1, marg = arg0;
9573 pcode = TREE_CODE (parg);
9574 parg0 = TREE_OPERAND (parg, 0);
9575 parg1 = TREE_OPERAND (parg, 1);
9576 STRIP_NOPS (parg0);
9577 STRIP_NOPS (parg1);
9578
9579 if (TREE_CODE (parg0) == MULT_EXPR
9580 && TREE_CODE (parg1) != MULT_EXPR)
9581 return fold_build2 (pcode, type,
9582 fold_build2 (PLUS_EXPR, type,
9583 fold_convert (type, parg0),
9584 fold_convert (type, marg)),
9585 fold_convert (type, parg1));
9586 if (TREE_CODE (parg0) != MULT_EXPR
9587 && TREE_CODE (parg1) == MULT_EXPR)
9588 return fold_build2 (PLUS_EXPR, type,
9589 fold_convert (type, parg0),
9590 fold_build2 (pcode, type,
9591 fold_convert (type, marg),
9592 fold_convert (type,
9593 parg1)));
9594 }
9595 }
9596 else
9597 {
9598 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9599 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9600 return non_lvalue (fold_convert (type, arg0));
9601
9602 /* Likewise if the operands are reversed. */
9603 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9604 return non_lvalue (fold_convert (type, arg1));
9605
9606 /* Convert X + -C into X - C. */
9607 if (TREE_CODE (arg1) == REAL_CST
9608 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9609 {
9610 tem = fold_negate_const (arg1, type);
9611 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9612 return fold_build2 (MINUS_EXPR, type,
9613 fold_convert (type, arg0),
9614 fold_convert (type, tem));
9615 }
9616
9617 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9618 to __complex__ ( x, y ). This is not the same for SNaNs or
9619 if signed zeros are involved. */
9620 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9621 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9622 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9623 {
9624 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9625 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9626 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9627 bool arg0rz = false, arg0iz = false;
9628 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9629 || (arg0i && (arg0iz = real_zerop (arg0i))))
9630 {
9631 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9632 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9633 if (arg0rz && arg1i && real_zerop (arg1i))
9634 {
9635 tree rp = arg1r ? arg1r
9636 : build1 (REALPART_EXPR, rtype, arg1);
9637 tree ip = arg0i ? arg0i
9638 : build1 (IMAGPART_EXPR, rtype, arg0);
9639 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9640 }
9641 else if (arg0iz && arg1r && real_zerop (arg1r))
9642 {
9643 tree rp = arg0r ? arg0r
9644 : build1 (REALPART_EXPR, rtype, arg0);
9645 tree ip = arg1i ? arg1i
9646 : build1 (IMAGPART_EXPR, rtype, arg1);
9647 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9648 }
9649 }
9650 }
9651
9652 if (flag_unsafe_math_optimizations
9653 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9654 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9655 && (tem = distribute_real_division (code, type, arg0, arg1)))
9656 return tem;
9657
9658 /* Convert x+x into x*2.0. */
9659 if (operand_equal_p (arg0, arg1, 0)
9660 && SCALAR_FLOAT_TYPE_P (type))
9661 return fold_build2 (MULT_EXPR, type, arg0,
9662 build_real (type, dconst2));
9663
9664 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9665 We associate floats only if the user has specified
9666 -fassociative-math. */
9667 if (flag_associative_math
9668 && TREE_CODE (arg1) == PLUS_EXPR
9669 && TREE_CODE (arg0) != MULT_EXPR)
9670 {
9671 tree tree10 = TREE_OPERAND (arg1, 0);
9672 tree tree11 = TREE_OPERAND (arg1, 1);
9673 if (TREE_CODE (tree11) == MULT_EXPR
9674 && TREE_CODE (tree10) == MULT_EXPR)
9675 {
9676 tree tree0;
9677 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9678 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9679 }
9680 }
9681 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9682 We associate floats only if the user has specified
9683 -fassociative-math. */
9684 if (flag_associative_math
9685 && TREE_CODE (arg0) == PLUS_EXPR
9686 && TREE_CODE (arg1) != MULT_EXPR)
9687 {
9688 tree tree00 = TREE_OPERAND (arg0, 0);
9689 tree tree01 = TREE_OPERAND (arg0, 1);
9690 if (TREE_CODE (tree01) == MULT_EXPR
9691 && TREE_CODE (tree00) == MULT_EXPR)
9692 {
9693 tree tree0;
9694 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9695 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9696 }
9697 }
9698 }
9699
9700 bit_rotate:
9701 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
9702 is a rotate of A by C1 bits. */
9703 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
9704 is a rotate of A by B bits. */
9705 {
9706 enum tree_code code0, code1;
9707 tree rtype;
9708 code0 = TREE_CODE (arg0);
9709 code1 = TREE_CODE (arg1);
9710 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9711 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9712 && operand_equal_p (TREE_OPERAND (arg0, 0),
9713 TREE_OPERAND (arg1, 0), 0)
9714 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9715 TYPE_UNSIGNED (rtype))
9716 /* Only create rotates in complete modes. Other cases are not
9717 expanded properly. */
9718 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9719 {
9720 tree tree01, tree11;
9721 enum tree_code code01, code11;
9722
9723 tree01 = TREE_OPERAND (arg0, 1);
9724 tree11 = TREE_OPERAND (arg1, 1);
9725 STRIP_NOPS (tree01);
9726 STRIP_NOPS (tree11);
9727 code01 = TREE_CODE (tree01);
9728 code11 = TREE_CODE (tree11);
9729 if (code01 == INTEGER_CST
9730 && code11 == INTEGER_CST
9731 && TREE_INT_CST_HIGH (tree01) == 0
9732 && TREE_INT_CST_HIGH (tree11) == 0
9733 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9734 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9735 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9736 code0 == LSHIFT_EXPR ? tree01 : tree11);
9737 else if (code11 == MINUS_EXPR)
9738 {
9739 tree tree110, tree111;
9740 tree110 = TREE_OPERAND (tree11, 0);
9741 tree111 = TREE_OPERAND (tree11, 1);
9742 STRIP_NOPS (tree110);
9743 STRIP_NOPS (tree111);
9744 if (TREE_CODE (tree110) == INTEGER_CST
9745 && 0 == compare_tree_int (tree110,
9746 TYPE_PRECISION
9747 (TREE_TYPE (TREE_OPERAND
9748 (arg0, 0))))
9749 && operand_equal_p (tree01, tree111, 0))
9750 return build2 ((code0 == LSHIFT_EXPR
9751 ? LROTATE_EXPR
9752 : RROTATE_EXPR),
9753 type, TREE_OPERAND (arg0, 0), tree01);
9754 }
9755 else if (code01 == MINUS_EXPR)
9756 {
9757 tree tree010, tree011;
9758 tree010 = TREE_OPERAND (tree01, 0);
9759 tree011 = TREE_OPERAND (tree01, 1);
9760 STRIP_NOPS (tree010);
9761 STRIP_NOPS (tree011);
9762 if (TREE_CODE (tree010) == INTEGER_CST
9763 && 0 == compare_tree_int (tree010,
9764 TYPE_PRECISION
9765 (TREE_TYPE (TREE_OPERAND
9766 (arg0, 0))))
9767 && operand_equal_p (tree11, tree011, 0))
9768 return build2 ((code0 != LSHIFT_EXPR
9769 ? LROTATE_EXPR
9770 : RROTATE_EXPR),
9771 type, TREE_OPERAND (arg0, 0), tree11);
9772 }
9773 }
9774 }
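/* Example: for unsigned 32-bit a, (a << 3) + (a >> 29) is recognized
   as a left rotate of a by 3, and (a << b) + (a >> (32 - b)) as a
   left rotate by b, since in each case the shift counts sum to the
   precision of a. */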
9775
9776 associate:
9777 /* In most languages, we can't associate operations on floats through
9778 parentheses. Rather than remember where the parentheses were, we
9779 don't associate floats at all, unless the user has specified
9780 -fassociative-math.
9781 And, we need to make sure type is not saturating. */
9782
9783 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9784 && !TYPE_SATURATING (type))
9785 {
9786 tree var0, con0, lit0, minus_lit0;
9787 tree var1, con1, lit1, minus_lit1;
9788 bool ok = true;
9789
9790 /* Split both trees into variables, constants, and literals. Then
9791 associate each group together, the constants with literals,
9792 then the result with variables. This increases the chances of
9793 literals being recombined later and of generating relocatable
9794 expressions for the sum of a constant and literal. */
9795 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9796 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9797 code == MINUS_EXPR);
9798
9799 /* With undefined overflow we can only associate constants
9800 with one variable. */
9801 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9802 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9803 && var0 && var1)
9804 {
9805 tree tmp0 = var0;
9806 tree tmp1 = var1;
9807
9808 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9809 tmp0 = TREE_OPERAND (tmp0, 0);
9810 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9811 tmp1 = TREE_OPERAND (tmp1, 0);
9812 /* The only case we can still associate with two variables
9813 is if they are the same, modulo negation. */
9814 if (!operand_equal_p (tmp0, tmp1, 0))
9815 ok = false;
9816 }
9817
9818 /* Only do something if we found more than two objects. Otherwise,
9819 nothing has changed and we risk infinite recursion. */
9820 if (ok
9821 && (2 < ((var0 != 0) + (var1 != 0)
9822 + (con0 != 0) + (con1 != 0)
9823 + (lit0 != 0) + (lit1 != 0)
9824 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9825 {
9826 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9827 if (code == MINUS_EXPR)
9828 code = PLUS_EXPR;
9829
9830 var0 = associate_trees (var0, var1, code, type);
9831 con0 = associate_trees (con0, con1, code, type);
9832 lit0 = associate_trees (lit0, lit1, code, type);
9833 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9834
9835 /* Preserve the MINUS_EXPR if the negative part of the literal is
9836 greater than the positive part. Otherwise, the multiplicative
9837 folding code (i.e. extract_muldiv) may be fooled in case
9838 unsigned constants are subtracted, like in the following
9839 example: ((X*2 + 4) - 8U)/2. */
9840 if (minus_lit0 && lit0)
9841 {
9842 if (TREE_CODE (lit0) == INTEGER_CST
9843 && TREE_CODE (minus_lit0) == INTEGER_CST
9844 && tree_int_cst_lt (lit0, minus_lit0))
9845 {
9846 minus_lit0 = associate_trees (minus_lit0, lit0,
9847 MINUS_EXPR, type);
9848 lit0 = 0;
9849 }
9850 else
9851 {
9852 lit0 = associate_trees (lit0, minus_lit0,
9853 MINUS_EXPR, type);
9854 minus_lit0 = 0;
9855 }
9856 }
9857 if (minus_lit0)
9858 {
9859 if (con0 == 0)
9860 return fold_convert (type,
9861 associate_trees (var0, minus_lit0,
9862 MINUS_EXPR, type));
9863 else
9864 {
9865 con0 = associate_trees (con0, minus_lit0,
9866 MINUS_EXPR, type);
9867 return fold_convert (type,
9868 associate_trees (var0, con0,
9869 PLUS_EXPR, type));
9870 }
9871 }
9872
9873 con0 = associate_trees (con0, lit0, code, type);
9874 return fold_convert (type, associate_trees (var0, con0,
9875 code, type));
9876 }
9877 }
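/* Example: (x + 4) + 3 splits into the variable x and the literals
   4 and 3, which recombine to x + 7; likewise (x - 4) + 3 becomes
   x - 1 via the minus_lit handling above. */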
9878
9879 return NULL_TREE;
9880
9881 case MINUS_EXPR:
9882 /* Pointer simplifications for subtraction, simple reassociations. */
9883 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9884 {
9885 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9886 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9887 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9888 {
9889 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9890 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9891 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9892 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9893 return fold_build2 (PLUS_EXPR, type,
9894 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9895 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9896 }
9897 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9898 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9899 {
9900 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9901 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9902 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9903 if (tmp)
9904 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9905 }
9906 }
9907 /* A - (-B) -> A + B */
9908 if (TREE_CODE (arg1) == NEGATE_EXPR)
9909 return fold_build2 (PLUS_EXPR, type, op0,
9910 fold_convert (type, TREE_OPERAND (arg1, 0)));
9911 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9912 if (TREE_CODE (arg0) == NEGATE_EXPR
9913 && (FLOAT_TYPE_P (type)
9914 || INTEGRAL_TYPE_P (type))
9915 && negate_expr_p (arg1)
9916 && reorder_operands_p (arg0, arg1))
9917 return fold_build2 (MINUS_EXPR, type,
9918 fold_convert (type, negate_expr (arg1)),
9919 fold_convert (type, TREE_OPERAND (arg0, 0)));
9920 /* Convert -A - 1 to ~A. */
9921 if (INTEGRAL_TYPE_P (type)
9922 && TREE_CODE (arg0) == NEGATE_EXPR
9923 && integer_onep (arg1)
9924 && !TYPE_OVERFLOW_TRAPS (type))
9925 return fold_build1 (BIT_NOT_EXPR, type,
9926 fold_convert (type, TREE_OPERAND (arg0, 0)));
9927
9928 /* Convert -1 - A to ~A. */
9929 if (INTEGRAL_TYPE_P (type)
9930 && integer_all_onesp (arg0))
9931 return fold_build1 (BIT_NOT_EXPR, type, op1);
9932
9933
9934 /* X - (X / CST) * CST is X % CST. */
9935 if (INTEGRAL_TYPE_P (type)
9936 && TREE_CODE (arg1) == MULT_EXPR
9937 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9938 && operand_equal_p (arg0,
9939 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9940 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9941 TREE_OPERAND (arg1, 1), 0))
9942 return fold_convert (type,
9943 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9944 arg0, TREE_OPERAND (arg1, 1)));
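/* Example: x - (x / 16) * 16 folds to x % 16; this holds for
   negative x as well because TRUNC_DIV_EXPR and TRUNC_MOD_EXPR
   satisfy x == (x / c) * c + x % c. */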
9945
9946 if (! FLOAT_TYPE_P (type))
9947 {
9948 if (integer_zerop (arg0))
9949 return negate_expr (fold_convert (type, arg1));
9950 if (integer_zerop (arg1))
9951 return non_lvalue (fold_convert (type, arg0));
9952
9953 /* Fold A - (A & B) into ~B & A. */
9954 if (!TREE_SIDE_EFFECTS (arg0)
9955 && TREE_CODE (arg1) == BIT_AND_EXPR)
9956 {
9957 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9958 {
9959 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9960 return fold_build2 (BIT_AND_EXPR, type,
9961 fold_build1 (BIT_NOT_EXPR, type, arg10),
9962 fold_convert (type, arg0));
9963 }
9964 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9965 {
9966 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9967 return fold_build2 (BIT_AND_EXPR, type,
9968 fold_build1 (BIT_NOT_EXPR, type, arg11),
9969 fold_convert (type, arg0));
9970 }
9971 }
9972
9973 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9974 any power of 2 minus 1. */
9975 if (TREE_CODE (arg0) == BIT_AND_EXPR
9976 && TREE_CODE (arg1) == BIT_AND_EXPR
9977 && operand_equal_p (TREE_OPERAND (arg0, 0),
9978 TREE_OPERAND (arg1, 0), 0))
9979 {
9980 tree mask0 = TREE_OPERAND (arg0, 1);
9981 tree mask1 = TREE_OPERAND (arg1, 1);
9982 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9983
9984 if (operand_equal_p (tem, mask1, 0))
9985 {
9986 tem = fold_build2 (BIT_XOR_EXPR, type,
9987 TREE_OPERAND (arg0, 0), mask1);
9988 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9989 }
9990 }
9991 }
9992
9993 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9994 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9995 return non_lvalue (fold_convert (type, arg0));
9996
9997 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9998 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9999 (-ARG1 + ARG0) reduces to -ARG1. */
10000 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10001 return negate_expr (fold_convert (type, arg1));
10002
10003 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10004 __complex__ ( x, -y ). This is not the same for SNaNs or if
10005 signed zeros are involved. */
10006 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10007 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10008 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10009 {
10010 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10011 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10012 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10013 bool arg0rz = false, arg0iz = false;
10014 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10015 || (arg0i && (arg0iz = real_zerop (arg0i))))
10016 {
10017 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10018 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10019 if (arg0rz && arg1i && real_zerop (arg1i))
10020 {
10021 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10022 arg1r ? arg1r
10023 : build1 (REALPART_EXPR, rtype, arg1));
10024 tree ip = arg0i ? arg0i
10025 : build1 (IMAGPART_EXPR, rtype, arg0);
10026 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10027 }
10028 else if (arg0iz && arg1r && real_zerop (arg1r))
10029 {
10030 tree rp = arg0r ? arg0r
10031 : build1 (REALPART_EXPR, rtype, arg0);
10032 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10033 arg1i ? arg1i
10034 : build1 (IMAGPART_EXPR, rtype, arg1));
10035 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10036 }
10037 }
10038 }
10039
10040 /* Fold &x - &x. This can happen from &x.foo - &x.
10041 This is unsafe for certain floats even in non-IEEE formats.
10042 In IEEE, it is unsafe because it does wrong for NaNs.
10043 Also note that operand_equal_p is always false if an operand
10044 is volatile. */
10045
10046 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10047 && operand_equal_p (arg0, arg1, 0))
10048 return fold_convert (type, integer_zero_node);
10049
10050 /* A - B -> A + (-B) if B is easily negatable. */
10051 if (negate_expr_p (arg1)
10052 && ((FLOAT_TYPE_P (type)
10053 /* Avoid this transformation if B is a positive REAL_CST. */
10054 && (TREE_CODE (arg1) != REAL_CST
10055 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10056 || INTEGRAL_TYPE_P (type)))
10057 return fold_build2 (PLUS_EXPR, type,
10058 fold_convert (type, arg0),
10059 fold_convert (type, negate_expr (arg1)));
10060
10061 /* Try folding difference of addresses. */
10062 {
10063 HOST_WIDE_INT diff;
10064
10065 if ((TREE_CODE (arg0) == ADDR_EXPR
10066 || TREE_CODE (arg1) == ADDR_EXPR)
10067 && ptr_difference_const (arg0, arg1, &diff))
10068 return build_int_cst_type (type, diff);
10069 }
10070
10071 /* Fold &a[i] - &a[j] to i-j. */
10072 if (TREE_CODE (arg0) == ADDR_EXPR
10073 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10074 && TREE_CODE (arg1) == ADDR_EXPR
10075 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10076 {
10077 tree aref0 = TREE_OPERAND (arg0, 0);
10078 tree aref1 = TREE_OPERAND (arg1, 0);
10079 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10080 TREE_OPERAND (aref1, 0), 0))
10081 {
10082 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10083 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10084 tree esz = array_ref_element_size (aref0);
10085 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10086 return fold_build2 (MULT_EXPR, type, diff,
10087 fold_convert (type, esz));
10088
10089 }
10090 }
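/* Example: assuming 4-byte int, the byte-address difference
   &a[i] - &a[j] for "int a[10]" folds to (i - j) * 4; the division
   by the element size that C pointer subtraction later applies then
   cancels, leaving i - j. */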
10091
10092 if (flag_unsafe_math_optimizations
10093 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10094 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10095 && (tem = distribute_real_division (code, type, arg0, arg1)))
10096 return tem;
10097
10098 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10099 same or one. Make sure type is not saturating.
10100 fold_plusminus_mult_expr will re-associate. */
10101 if ((TREE_CODE (arg0) == MULT_EXPR
10102 || TREE_CODE (arg1) == MULT_EXPR)
10103 && !TYPE_SATURATING (type)
10104 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10105 {
10106 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10107 if (tem)
10108 return tem;
10109 }
10110
10111 goto associate;
10112
10113 case MULT_EXPR:
10114 /* (-A) * (-B) -> A * B */
10115 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10116 return fold_build2 (MULT_EXPR, type,
10117 fold_convert (type, TREE_OPERAND (arg0, 0)),
10118 fold_convert (type, negate_expr (arg1)));
10119 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10120 return fold_build2 (MULT_EXPR, type,
10121 fold_convert (type, negate_expr (arg0)),
10122 fold_convert (type, TREE_OPERAND (arg1, 0)));
10123
10124 if (! FLOAT_TYPE_P (type))
10125 {
10126 if (integer_zerop (arg1))
10127 return omit_one_operand (type, arg1, arg0);
10128 if (integer_onep (arg1))
10129 return non_lvalue (fold_convert (type, arg0));
10130 /* Transform x * -1 into -x. Make sure to do the negation
10131 on the original operand with conversions not stripped
10132 because we can only strip non-sign-changing conversions. */
10133 if (integer_all_onesp (arg1))
10134 return fold_convert (type, negate_expr (op0));
10135 /* Transform x * -C into -x * C if x is easily negatable. */
10136 if (TREE_CODE (arg1) == INTEGER_CST
10137 && tree_int_cst_sgn (arg1) == -1
10138 && negate_expr_p (arg0)
10139 && (tem = negate_expr (arg1)) != arg1
10140 && !TREE_OVERFLOW (tem))
10141 return fold_build2 (MULT_EXPR, type,
10142 fold_convert (type, negate_expr (arg0)), tem);
10143
10144 /* (a * (1 << b)) is (a << b) */
10145 if (TREE_CODE (arg1) == LSHIFT_EXPR
10146 && integer_onep (TREE_OPERAND (arg1, 0)))
10147 return fold_build2 (LSHIFT_EXPR, type, op0,
10148 TREE_OPERAND (arg1, 1));
10149 if (TREE_CODE (arg0) == LSHIFT_EXPR
10150 && integer_onep (TREE_OPERAND (arg0, 0)))
10151 return fold_build2 (LSHIFT_EXPR, type, op1,
10152 TREE_OPERAND (arg0, 1));
10153
10154 /* (A + A) * C -> A * 2 * C */
10155 if (TREE_CODE (arg0) == PLUS_EXPR
10156 && TREE_CODE (arg1) == INTEGER_CST
10157 && operand_equal_p (TREE_OPERAND (arg0, 0),
10158 TREE_OPERAND (arg0, 1), 0))
10159 return fold_build2 (MULT_EXPR, type,
10160 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10161 TREE_OPERAND (arg0, 1)),
10162 fold_build2 (MULT_EXPR, type,
10163 build_int_cst (type, 2) , arg1));
10164
10165 strict_overflow_p = false;
10166 if (TREE_CODE (arg1) == INTEGER_CST
10167 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10168 &strict_overflow_p)))
10169 {
10170 if (strict_overflow_p)
10171 fold_overflow_warning (("assuming signed overflow does not "
10172 "occur when simplifying "
10173 "multiplication"),
10174 WARN_STRICT_OVERFLOW_MISC);
10175 return fold_convert (type, tem);
10176 }
10177
10178 /* Optimize z * conj(z) for integer complex numbers. */
10179 if (TREE_CODE (arg0) == CONJ_EXPR
10180 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10181 return fold_mult_zconjz (type, arg1);
10182 if (TREE_CODE (arg1) == CONJ_EXPR
10183 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10184 return fold_mult_zconjz (type, arg0);
10185 }
10186 else
10187 {
10188 /* Maybe fold x * 0 to 0. The expressions aren't the same
10189 when x is NaN, since x * 0 is also NaN. Nor are they the
10190 same in modes with signed zeros, since multiplying a
10191 negative value by 0 gives -0, not +0. */
10192 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10193 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10194 && real_zerop (arg1))
10195 return omit_one_operand (type, arg1, arg0);
10196 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10197 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10198 && real_onep (arg1))
10199 return non_lvalue (fold_convert (type, arg0));
10200
10201 /* Transform x * -1.0 into -x. */
10202 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10203 && real_minus_onep (arg1))
10204 return fold_convert (type, negate_expr (arg0));
10205
10206 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10207 the result for floating point types due to rounding, so it is applied
10208 only if -fassociative-math was specified. */
10209 if (flag_associative_math
10210 && TREE_CODE (arg0) == RDIV_EXPR
10211 && TREE_CODE (arg1) == REAL_CST
10212 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10213 {
10214 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10215 arg1, 0);
10216 if (tem)
10217 return fold_build2 (RDIV_EXPR, type, tem,
10218 TREE_OPERAND (arg0, 1));
10219 }
10220
10221 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10222 if (operand_equal_p (arg0, arg1, 0))
10223 {
10224 tree tem = fold_strip_sign_ops (arg0);
10225 if (tem != NULL_TREE)
10226 {
10227 tem = fold_convert (type, tem);
10228 return fold_build2 (MULT_EXPR, type, tem, tem);
10229 }
10230 }
10231
10232 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10233 This is not the same for NaNs or if signed zeros are
10234 involved. */
10235 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10236 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10237 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10238 && TREE_CODE (arg1) == COMPLEX_CST
10239 && real_zerop (TREE_REALPART (arg1)))
10240 {
10241 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10242 if (real_onep (TREE_IMAGPART (arg1)))
10243 return fold_build2 (COMPLEX_EXPR, type,
10244 negate_expr (fold_build1 (IMAGPART_EXPR,
10245 rtype, arg0)),
10246 fold_build1 (REALPART_EXPR, rtype, arg0));
10247 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10248 return fold_build2 (COMPLEX_EXPR, type,
10249 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10250 negate_expr (fold_build1 (REALPART_EXPR,
10251 rtype, arg0)));
10252 }
10253
10254 /* Optimize z * conj(z) for floating point complex numbers.
10255 Guarded by flag_unsafe_math_optimizations as non-finite
10256 imaginary components don't produce scalar results. */
10257 if (flag_unsafe_math_optimizations
10258 && TREE_CODE (arg0) == CONJ_EXPR
10259 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10260 return fold_mult_zconjz (type, arg1);
10261 if (flag_unsafe_math_optimizations
10262 && TREE_CODE (arg1) == CONJ_EXPR
10263 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10264 return fold_mult_zconjz (type, arg0);
10265
10266 if (flag_unsafe_math_optimizations)
10267 {
10268 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10269 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10270
10271 /* Optimizations of root(...)*root(...). */
10272 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10273 {
10274 tree rootfn, arg;
10275 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10276 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10277
10278 /* Optimize sqrt(x)*sqrt(x) as x. */
10279 if (BUILTIN_SQRT_P (fcode0)
10280 && operand_equal_p (arg00, arg10, 0)
10281 && ! HONOR_SNANS (TYPE_MODE (type)))
10282 return arg00;
10283
10284 /* Optimize root(x)*root(y) as root(x*y). */
10285 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10286 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10287 return build_call_expr (rootfn, 1, arg);
10288 }
10289
10290 /* Optimize expN(x)*expN(y) as expN(x+y). */
10291 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10292 {
10293 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10294 tree arg = fold_build2 (PLUS_EXPR, type,
10295 CALL_EXPR_ARG (arg0, 0),
10296 CALL_EXPR_ARG (arg1, 0));
10297 return build_call_expr (expfn, 1, arg);
10298 }
10299
10300 /* Optimizations of pow(...)*pow(...). */
10301 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10302 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10303 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10304 {
10305 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10306 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10307 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10308 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10309
10310 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10311 if (operand_equal_p (arg01, arg11, 0))
10312 {
10313 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10314 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10315 return build_call_expr (powfn, 2, arg, arg01);
10316 }
10317
10318 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10319 if (operand_equal_p (arg00, arg10, 0))
10320 {
10321 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10322 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10323 return build_call_expr (powfn, 2, arg00, arg);
10324 }
10325 }
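/* Examples (all under -funsafe-math-optimizations): sqrt(x)*sqrt(x)
   becomes x, exp(x)*exp(y) becomes exp(x+y), pow(x,y)*pow(z,y)
   becomes pow(x*z,y), and pow(x,y)*pow(x,z) becomes pow(x,y+z). */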
10326
10327 /* Optimize tan(x)*cos(x) as sin(x). */
10328 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10329 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10330 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10331 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10332 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10333 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10334 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10335 CALL_EXPR_ARG (arg1, 0), 0))
10336 {
10337 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10338
10339 if (sinfn != NULL_TREE)
10340 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10341 }
10342
10343 /* Optimize x*pow(x,c) as pow(x,c+1). */
10344 if (fcode1 == BUILT_IN_POW
10345 || fcode1 == BUILT_IN_POWF
10346 || fcode1 == BUILT_IN_POWL)
10347 {
10348 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10349 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10350 if (TREE_CODE (arg11) == REAL_CST
10351 && !TREE_OVERFLOW (arg11)
10352 && operand_equal_p (arg0, arg10, 0))
10353 {
10354 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10355 REAL_VALUE_TYPE c;
10356 tree arg;
10357
10358 c = TREE_REAL_CST (arg11);
10359 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10360 arg = build_real (type, c);
10361 return build_call_expr (powfn, 2, arg0, arg);
10362 }
10363 }
10364
10365 /* Optimize pow(x,c)*x as pow(x,c+1). */
10366 if (fcode0 == BUILT_IN_POW
10367 || fcode0 == BUILT_IN_POWF
10368 || fcode0 == BUILT_IN_POWL)
10369 {
10370 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10371 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10372 if (TREE_CODE (arg01) == REAL_CST
10373 && !TREE_OVERFLOW (arg01)
10374 && operand_equal_p (arg1, arg00, 0))
10375 {
10376 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10377 REAL_VALUE_TYPE c;
10378 tree arg;
10379
10380 c = TREE_REAL_CST (arg01);
10381 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10382 arg = build_real (type, c);
10383 return build_call_expr (powfn, 2, arg1, arg);
10384 }
10385 }
10386
10387 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10388 if (! optimize_size
10389 && operand_equal_p (arg0, arg1, 0))
10390 {
10391 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10392
10393 if (powfn)
10394 {
10395 tree arg = build_real (type, dconst2);
10396 return build_call_expr (powfn, 2, arg0, arg);
10397 }
10398 }
10399 }
10400 }
10401 goto associate;
10402
10403 case BIT_IOR_EXPR:
10404 bit_ior:
10405 if (integer_all_onesp (arg1))
10406 return omit_one_operand (type, arg1, arg0);
10407 if (integer_zerop (arg1))
10408 return non_lvalue (fold_convert (type, arg0));
10409 if (operand_equal_p (arg0, arg1, 0))
10410 return non_lvalue (fold_convert (type, arg0));
10411
10412 /* ~X | X is -1. */
10413 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10414 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10415 {
10416 t1 = fold_convert (type, integer_zero_node);
10417 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10418 return omit_one_operand (type, t1, arg1);
10419 }
10420
10421 /* X | ~X is -1. */
10422 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10423 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10424 {
10425 t1 = fold_convert (type, integer_zero_node);
10426 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10427 return omit_one_operand (type, t1, arg0);
10428 }
10429
10430 /* Canonicalize (X & C1) | C2. */
10431 if (TREE_CODE (arg0) == BIT_AND_EXPR
10432 && TREE_CODE (arg1) == INTEGER_CST
10433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10434 {
10435 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10436 int width = TYPE_PRECISION (type), w;
10437 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10438 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10439 hi2 = TREE_INT_CST_HIGH (arg1);
10440 lo2 = TREE_INT_CST_LOW (arg1);
10441
10442 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10443 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10444 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10445
10446 if (width > HOST_BITS_PER_WIDE_INT)
10447 {
10448 mhi = (unsigned HOST_WIDE_INT) -1
10449 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10450 mlo = -1;
10451 }
10452 else
10453 {
10454 mhi = 0;
10455 mlo = (unsigned HOST_WIDE_INT) -1
10456 >> (HOST_BITS_PER_WIDE_INT - width);
10457 }
10458
10459 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10460 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10461 return fold_build2 (BIT_IOR_EXPR, type,
10462 TREE_OPERAND (arg0, 0), arg1);
10463
10464 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10465 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10466 mode which allows further optimizations. */
10467 hi1 &= mhi;
10468 lo1 &= mlo;
10469 hi2 &= mhi;
10470 lo2 &= mlo;
10471 hi3 = hi1 & ~hi2;
10472 lo3 = lo1 & ~lo2;
10473 for (w = BITS_PER_UNIT;
10474 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10475 w <<= 1)
10476 {
10477 unsigned HOST_WIDE_INT mask
10478 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10479 if (((lo1 | lo2) & mask) == mask
10480 && (lo1 & ~mask) == 0 && hi1 == 0)
10481 {
10482 hi3 = 0;
10483 lo3 = mask;
10484 break;
10485 }
10486 }
10487 if (hi3 != hi1 || lo3 != lo1)
10488 return fold_build2 (BIT_IOR_EXPR, type,
10489 fold_build2 (BIT_AND_EXPR, type,
10490 TREE_OPERAND (arg0, 0),
10491 build_int_cst_wide (type,
10492 lo3, hi3)),
10493 arg1);
10494 }
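/* Editor's note: two hypothetical worked examples of the (X & C1) | C2
   canonicalization above; the function names are invented.  */
#if 0
unsigned int
minimize_c1 (unsigned int x)
{
  /* C1 = 0x3f, C2 = 0x2c: C1 & ~C2 == 0x13, so this folds to
     (x & 0x13) | 0x2c.  */
  return (x & 0x3f) | 0x2c;
}

unsigned int
widen_to_mode_mask (unsigned int x)
{
  /* C1 = 0xff00, C2 = 0x00ff: C1 & ~C2 == 0xff00, but the loop over
     submode widths notices that (x & 0xffff) | 0xff exposes a HImode
     mask, so that form is used instead.  */
  return (x & 0xff00) | 0x00ff;
}
#endif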
10495
10496 /* (X & Y) | Y is (X, Y). */
10497 if (TREE_CODE (arg0) == BIT_AND_EXPR
10498 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10499 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10500 /* (X & Y) | X is (Y, X). */
10501 if (TREE_CODE (arg0) == BIT_AND_EXPR
10502 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10503 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10504 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10505 /* X | (X & Y) is (Y, X). */
10506 if (TREE_CODE (arg1) == BIT_AND_EXPR
10507 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10508 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10509 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10510 /* X | (Y & X) is (Y, X). */
10511 if (TREE_CODE (arg1) == BIT_AND_EXPR
10512 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10513 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10514 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10515
10516 t1 = distribute_bit_expr (code, type, arg0, arg1);
10517 if (t1 != NULL_TREE)
10518 return t1;
10519
10520 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10521
10522 This results in more efficient code for machines without a NAND
10523 instruction. Combine will canonicalize to the first form
10524 which will allow use of NAND instructions provided by the
10525 backend if they exist. */
10526 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10527 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10528 {
10529 return fold_build1 (BIT_NOT_EXPR, type,
10530 build2 (BIT_AND_EXPR, type,
10531 fold_convert (type,
10532 TREE_OPERAND (arg0, 0)),
10533 fold_convert (type,
10534 TREE_OPERAND (arg1, 0))));
10535 }
10536
10537 /* See if this can be simplified into a rotate first. If that
10538 is unsuccessful, continue in the association code. */
10539 goto bit_rotate;
10540
10541 case BIT_XOR_EXPR:
10542 if (integer_zerop (arg1))
10543 return non_lvalue (fold_convert (type, arg0));
10544 if (integer_all_onesp (arg1))
10545 return fold_build1 (BIT_NOT_EXPR, type, op0);
10546 if (operand_equal_p (arg0, arg1, 0))
10547 return omit_one_operand (type, integer_zero_node, arg0);
10548
10549 /* ~X ^ X is -1. */
10550 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10551 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10552 {
10553 t1 = fold_convert (type, integer_zero_node);
10554 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10555 return omit_one_operand (type, t1, arg1);
10556 }
10557
10558 /* X ^ ~X is -1. */
10559 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10560 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10561 {
10562 t1 = fold_convert (type, integer_zero_node);
10563 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10564 return omit_one_operand (type, t1, arg0);
10565 }
10566
10567 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10568 with a constant, and the two constants have no bits in common,
10569 we should treat this as a BIT_IOR_EXPR since this may produce more
10570 simplifications. */
10571 if (TREE_CODE (arg0) == BIT_AND_EXPR
10572 && TREE_CODE (arg1) == BIT_AND_EXPR
10573 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10574 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10575 && integer_zerop (const_binop (BIT_AND_EXPR,
10576 TREE_OPERAND (arg0, 1),
10577 TREE_OPERAND (arg1, 1), 0)))
10578 {
10579 code = BIT_IOR_EXPR;
10580 goto bit_ior;
10581 }
10582
10583 /* (X | Y) ^ X -> Y & ~X. */
10584 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10585 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10586 {
10587 tree t2 = TREE_OPERAND (arg0, 1);
10588 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10589 arg1);
10590 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10591 fold_convert (type, t1));
10592 return t1;
10593 }
10594
10595 /* (Y | X) ^ X -> Y & ~X. */
10596 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10597 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10598 {
10599 tree t2 = TREE_OPERAND (arg0, 0);
10600 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10601 arg1);
10602 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10603 fold_convert (type, t1));
10604 return t1;
10605 }
10606
10607 /* X ^ (X | Y) -> Y & ~X. */
10608 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10609 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10610 {
10611 tree t2 = TREE_OPERAND (arg1, 1);
10612 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10613 arg0);
10614 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10615 fold_convert (type, t1));
10616 return t1;
10617 }
10618
10619 /* X ^ (Y | X) -> Y & ~X. */
10620 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10621 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10622 {
10623 tree t2 = TREE_OPERAND (arg1, 0);
10624 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10625 arg0);
10626 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10627 fold_convert (type, t1));
10628 return t1;
10629 }
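/* Editor's note: a hypothetical sketch covering the four cases above;
   whatever the operand order, XORing X into an IOR that contains X
   clears X's bits from the other operand.  */
#if 0
unsigned int
xor_of_ior (unsigned int x, unsigned int y)
{
  return (x | y) ^ x;		/* folds to y & ~x */
}
#endif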
10630
10631 /* Convert ~X ^ ~Y to X ^ Y. */
10632 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10633 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10634 return fold_build2 (code, type,
10635 fold_convert (type, TREE_OPERAND (arg0, 0)),
10636 fold_convert (type, TREE_OPERAND (arg1, 0)));
10637
10638 /* Convert ~X ^ C to X ^ ~C. */
10639 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10640 && TREE_CODE (arg1) == INTEGER_CST)
10641 return fold_build2 (code, type,
10642 fold_convert (type, TREE_OPERAND (arg0, 0)),
10643 fold_build1 (BIT_NOT_EXPR, type, arg1));
10644
10645 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10646 if (TREE_CODE (arg0) == BIT_AND_EXPR
10647 && integer_onep (TREE_OPERAND (arg0, 1))
10648 && integer_onep (arg1))
10649 return fold_build2 (EQ_EXPR, type, arg0,
10650 build_int_cst (TREE_TYPE (arg0), 0));
10651
10652 /* Fold (X & Y) ^ Y as ~X & Y. */
10653 if (TREE_CODE (arg0) == BIT_AND_EXPR
10654 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10655 {
10656 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10657 return fold_build2 (BIT_AND_EXPR, type,
10658 fold_build1 (BIT_NOT_EXPR, type, tem),
10659 fold_convert (type, arg1));
10660 }
10661 /* Fold (X & Y) ^ X as ~Y & X. */
10662 if (TREE_CODE (arg0) == BIT_AND_EXPR
10663 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10664 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10665 {
10666 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10667 return fold_build2 (BIT_AND_EXPR, type,
10668 fold_build1 (BIT_NOT_EXPR, type, tem),
10669 fold_convert (type, arg1));
10670 }
10671 /* Fold X ^ (X & Y) as X & ~Y. */
10672 if (TREE_CODE (arg1) == BIT_AND_EXPR
10673 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10674 {
10675 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10676 return fold_build2 (BIT_AND_EXPR, type,
10677 fold_convert (type, arg0),
10678 fold_build1 (BIT_NOT_EXPR, type, tem));
10679 }
10680 /* Fold X ^ (Y & X) as ~Y & X. */
10681 if (TREE_CODE (arg1) == BIT_AND_EXPR
10682 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10683 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10684 {
10685 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10686 return fold_build2 (BIT_AND_EXPR, type,
10687 fold_build1 (BIT_NOT_EXPR, type, tem),
10688 fold_convert (type, arg0));
10689 }
10690
10691 /* See if this can be simplified into a rotate first. If that
10692 is unsuccessful, continue in the association code. */
10693 goto bit_rotate;
10694
10695 case BIT_AND_EXPR:
10696 if (integer_all_onesp (arg1))
10697 return non_lvalue (fold_convert (type, arg0));
10698 if (integer_zerop (arg1))
10699 return omit_one_operand (type, arg1, arg0);
10700 if (operand_equal_p (arg0, arg1, 0))
10701 return non_lvalue (fold_convert (type, arg0));
10702
10703 /* ~X & X is always zero. */
10704 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10705 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10706 return omit_one_operand (type, integer_zero_node, arg1);
10707
10708 /* X & ~X is always zero. */
10709 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10710 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10711 return omit_one_operand (type, integer_zero_node, arg0);
10712
10713 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10714 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10715 && TREE_CODE (arg1) == INTEGER_CST
10716 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10717 {
10718 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10719 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10720 TREE_OPERAND (arg0, 0), tmp1);
10721 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10722 TREE_OPERAND (arg0, 1), tmp1);
10723 return fold_convert (type,
10724 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10725 tmp2, tmp3));
10726 }
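/* Editor's note: a hypothetical worked example of the canonicalization
   above, with C1 = 0x0c and C2 = 0x0a.  */
#if 0
unsigned int
ior_then_and (unsigned int x)
{
  /* Folds to (x & 0x0a) | (0x0c & 0x0a), i.e. (x & 0x0a) | 0x08.  */
  return (x | 0x0c) & 0x0a;
}
#endif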
10727
10728 /* (X | Y) & Y is (X, Y). */
10729 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10730 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10731 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10732 /* (X | Y) & X is (Y, X). */
10733 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10734 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10735 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10736 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10737 /* X & (X | Y) is (Y, X). */
10738 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10739 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10740 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10741 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10742 /* X & (Y | X) is (Y, X). */
10743 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10744 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10745 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10746 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10747
10748 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10749 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10750 && integer_onep (TREE_OPERAND (arg0, 1))
10751 && integer_onep (arg1))
10752 {
10753 tem = TREE_OPERAND (arg0, 0);
10754 return fold_build2 (EQ_EXPR, type,
10755 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10756 build_int_cst (TREE_TYPE (tem), 1)),
10757 build_int_cst (TREE_TYPE (tem), 0));
10758 }
10759 /* Fold ~X & 1 as (X & 1) == 0. */
10760 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10761 && integer_onep (arg1))
10762 {
10763 tem = TREE_OPERAND (arg0, 0);
10764 return fold_build2 (EQ_EXPR, type,
10765 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10766 build_int_cst (TREE_TYPE (tem), 1)),
10767 build_int_cst (TREE_TYPE (tem), 0));
10768 }
10769
10770 /* Fold (X ^ Y) & Y as ~X & Y. */
10771 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10772 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10773 {
10774 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10775 return fold_build2 (BIT_AND_EXPR, type,
10776 fold_build1 (BIT_NOT_EXPR, type, tem),
10777 fold_convert (type, arg1));
10778 }
10779 /* Fold (X ^ Y) & X as ~Y & X. */
10780 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10781 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10782 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10783 {
10784 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10785 return fold_build2 (BIT_AND_EXPR, type,
10786 fold_build1 (BIT_NOT_EXPR, type, tem),
10787 fold_convert (type, arg1));
10788 }
10789 /* Fold X & (X ^ Y) as X & ~Y. */
10790 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10791 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10792 {
10793 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10794 return fold_build2 (BIT_AND_EXPR, type,
10795 fold_convert (type, arg0),
10796 fold_build1 (BIT_NOT_EXPR, type, tem));
10797 }
10798 /* Fold X & (Y ^ X) as ~Y & X. */
10799 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10800 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10801 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10802 {
10803 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10804 return fold_build2 (BIT_AND_EXPR, type,
10805 fold_build1 (BIT_NOT_EXPR, type, tem),
10806 fold_convert (type, arg0));
10807 }
10808
10809 t1 = distribute_bit_expr (code, type, arg0, arg1);
10810 if (t1 != NULL_TREE)
10811 return t1;
10812 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10813 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10814 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10815 {
10816 unsigned int prec
10817 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10818
10819 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10820 && (~TREE_INT_CST_LOW (arg1)
10821 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10822 return fold_convert (type, TREE_OPERAND (arg0, 0));
10823 }
10824
10825 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10826
10827 This results in more efficient code for machines without a NOR
10828 instruction. Combine will canonicalize to the first form
10829 which will allow use of NOR instructions provided by the
10830 backend if they exist. */
10831 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10832 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10833 {
10834 return fold_build1 (BIT_NOT_EXPR, type,
10835 build2 (BIT_IOR_EXPR, type,
10836 fold_convert (type,
10837 TREE_OPERAND (arg0, 0)),
10838 fold_convert (type,
10839 TREE_OPERAND (arg1, 0))));
10840 }
10841
10842 /* If arg0 is derived from the address of an object or function, we may
10843 be able to fold this expression using the object or function's
10844 alignment. */
10845 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10846 {
10847 unsigned HOST_WIDE_INT modulus, residue;
10848 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10849
10850 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10851
10852 /* This works because modulus is a power of 2. If this weren't the
10853 case, we'd have to replace it by its greatest power-of-2
10854 divisor: modulus & -modulus. */
10855 if (low < modulus)
10856 return build_int_cst (type, residue & low);
10857 }
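/* Editor's note: a hypothetical sketch of the alignment fold above.
   The pointer-typed AND normally arises internally (e.g. when lowering
   alignment computations), but conceptually: if `buf' is known to be
   16-byte aligned, its address has residue 0 modulo 16, so masking the
   low bits yields a constant.  */
#if 0
static char buf[64] __attribute__ ((aligned (16)));

unsigned long
low_address_bits (void)
{
  return (unsigned long) &buf[0] & 15;	/* folds to 0 */
}
#endif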
10858
10859 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10860 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10861 if the new mask might be further optimized. */
10862 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10863 || TREE_CODE (arg0) == RSHIFT_EXPR)
10864 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10865 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10866 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10867 < TYPE_PRECISION (TREE_TYPE (arg0))
10868 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10869 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10870 {
10871 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10872 unsigned HOST_WIDE_INT mask
10873 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10874 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10875 tree shift_type = TREE_TYPE (arg0);
10876
10877 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10878 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10879 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10880 && TYPE_PRECISION (TREE_TYPE (arg0))
10881 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10882 {
10883 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10884 tree arg00 = TREE_OPERAND (arg0, 0);
10885 /* See if more bits can be proven to be zero because of
10886 zero extension. */
10887 if (TREE_CODE (arg00) == NOP_EXPR
10888 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10889 {
10890 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10891 if (TYPE_PRECISION (inner_type)
10892 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10893 && TYPE_PRECISION (inner_type) < prec)
10894 {
10895 prec = TYPE_PRECISION (inner_type);
10896 /* See if we can shorten the right shift. */
10897 if (shiftc < prec)
10898 shift_type = inner_type;
10899 }
10900 }
10901 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10902 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10903 zerobits <<= prec - shiftc;
10904 /* For an arithmetic shift, if the sign bit could be set, zerobits
10905 can actually contain sign bits, so no transformation is
10906 possible unless MASK masks them all away. In that
10907 case the shift needs to be converted into a logical shift. */
10908 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10909 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10910 {
10911 if ((mask & zerobits) == 0)
10912 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10913 else
10914 zerobits = 0;
10915 }
10916 }
10917
10918 /* ((X << 16) & 0xff00) is (X, 0). */
10919 if ((mask & zerobits) == mask)
10920 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10921
10922 newmask = mask | zerobits;
10923 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10924 {
10925 unsigned int prec;
10926
10927 /* Only do the transformation if NEWMASK is some integer
10928 mode's mask. */
10929 for (prec = BITS_PER_UNIT;
10930 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10931 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10932 break;
10933 if (prec < HOST_BITS_PER_WIDE_INT
10934 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10935 {
10936 if (shift_type != TREE_TYPE (arg0))
10937 {
10938 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10939 fold_convert (shift_type,
10940 TREE_OPERAND (arg0, 0)),
10941 TREE_OPERAND (arg0, 1));
10942 tem = fold_convert (type, tem);
10943 }
10944 else
10945 tem = op0;
10946 return fold_build2 (BIT_AND_EXPR, type, tem,
10947 build_int_cst_type (TREE_TYPE (op1),
10948 newmask));
10949 }
10950 }
10951 }
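/* Editor's note: hypothetical worked examples of the shift/mask folds
   above, assuming 32-bit unsigned int.  */
#if 0
unsigned int
mask_selects_only_zero_bits (unsigned int x)
{
  /* The low 16 bits of x << 16 are zero and 0xff00 selects only such
     bits, so this is (x, 0): it folds to the constant 0.  */
  return (x << 16) & 0xff00;
}

unsigned int
mask_can_be_widened (unsigned int x)
{
  /* Bits 8..31 of x >> 24 are known zero, so the mask may be widened
     to all-ones and the AND dropped entirely.  */
  return (x >> 24) & 0xff;
}
#endif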
10952
10953 goto associate;
10954
10955 case RDIV_EXPR:
10956 /* Don't touch a floating-point divide by zero unless the mode
10957 of the constant can represent infinity. */
10958 if (TREE_CODE (arg1) == REAL_CST
10959 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10960 && real_zerop (arg1))
10961 return NULL_TREE;
10962
10963 /* Optimize A / A to 1.0 if we don't care about
10964 NaNs or Infinities. Skip the transformation
10965 for non-real operands. */
10966 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10967 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10968 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10969 && operand_equal_p (arg0, arg1, 0))
10970 {
10971 tree r = build_real (TREE_TYPE (arg0), dconst1);
10972
10973 return omit_two_operands (type, r, arg0, arg1);
10974 }
10975
10976 /* The complex version of the above A / A optimization. */
10977 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10978 && operand_equal_p (arg0, arg1, 0))
10979 {
10980 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10981 if (! HONOR_NANS (TYPE_MODE (elem_type))
10982 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10983 {
10984 tree r = build_real (elem_type, dconst1);
10985 /* omit_two_operands will call fold_convert for us. */
10986 return omit_two_operands (type, r, arg0, arg1);
10987 }
10988 }
10989
10990 /* (-A) / (-B) -> A / B */
10991 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10992 return fold_build2 (RDIV_EXPR, type,
10993 TREE_OPERAND (arg0, 0),
10994 negate_expr (arg1));
10995 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10996 return fold_build2 (RDIV_EXPR, type,
10997 negate_expr (arg0),
10998 TREE_OPERAND (arg1, 0));
10999
11000 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11001 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11002 && real_onep (arg1))
11003 return non_lvalue (fold_convert (type, arg0));
11004
11005 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11006 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11007 && real_minus_onep (arg1))
11008 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11009
11010 /* If ARG1 is a constant, we can convert this to a multiply by the
11011 reciprocal. This does not have the same rounding properties,
11012 so only do this if -freciprocal-math. We can actually
11013 always safely do it if ARG1 is a power of two, but it's hard to
11014 tell if it is or not in a portable manner. */
11015 if (TREE_CODE (arg1) == REAL_CST)
11016 {
11017 if (flag_reciprocal_math
11018 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11019 arg1, 0)))
11020 return fold_build2 (MULT_EXPR, type, arg0, tem);
11021 /* Find the reciprocal if optimizing and the result is exact. */
11022 if (optimize)
11023 {
11024 REAL_VALUE_TYPE r;
11025 r = TREE_REAL_CST (arg1);
11026 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11027 {
11028 tem = build_real (type, r);
11029 return fold_build2 (MULT_EXPR, type,
11030 fold_convert (type, arg0), tem);
11031 }
11032 }
11033 }
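/* Editor's note: a hypothetical sketch of the two reciprocal paths
   above.  2.0 has an exact reciprocal, so that division is rewritten
   whenever we are optimizing; 1.0/3.0 rounds, so that rewrite needs
   -freciprocal-math.  */
#if 0
double
div_exact (double x)
{
  return x / 2.0;		/* folds to x * 0.5 */
}

double
div_inexact (double x)
{
  return x / 3.0;		/* x * (1.0/3.0) only with -freciprocal-math */
}
#endif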
11034 /* Convert A/B/C to A/(B*C). */
11035 if (flag_reciprocal_math
11036 && TREE_CODE (arg0) == RDIV_EXPR)
11037 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11038 fold_build2 (MULT_EXPR, type,
11039 TREE_OPERAND (arg0, 1), arg1));
11040
11041 /* Convert A/(B/C) to (A/B)*C. */
11042 if (flag_reciprocal_math
11043 && TREE_CODE (arg1) == RDIV_EXPR)
11044 return fold_build2 (MULT_EXPR, type,
11045 fold_build2 (RDIV_EXPR, type, arg0,
11046 TREE_OPERAND (arg1, 0)),
11047 TREE_OPERAND (arg1, 1));
11048
11049 /* Convert C1/(X*C2) into (C1/C2)/X. */
11050 if (flag_reciprocal_math
11051 && TREE_CODE (arg1) == MULT_EXPR
11052 && TREE_CODE (arg0) == REAL_CST
11053 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11054 {
11055 tree tem = const_binop (RDIV_EXPR, arg0,
11056 TREE_OPERAND (arg1, 1), 0);
11057 if (tem)
11058 return fold_build2 (RDIV_EXPR, type, tem,
11059 TREE_OPERAND (arg1, 0));
11060 }
11061
11062 if (flag_unsafe_math_optimizations)
11063 {
11064 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11065 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11066
11067 /* Optimize sin(x)/cos(x) as tan(x). */
11068 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11069 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11070 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11071 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11072 CALL_EXPR_ARG (arg1, 0), 0))
11073 {
11074 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11075
11076 if (tanfn != NULL_TREE)
11077 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11078 }
11079
11080 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11081 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11082 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11083 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11084 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11085 CALL_EXPR_ARG (arg1, 0), 0))
11086 {
11087 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11088
11089 if (tanfn != NULL_TREE)
11090 {
11091 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11092 return fold_build2 (RDIV_EXPR, type,
11093 build_real (type, dconst1), tmp);
11094 }
11095 }
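/* Editor's note: a hypothetical sketch of the two folds above, under
   -funsafe-math-optimizations.  */
#if 0
#include <math.h>

double
sin_over_cos (double x)
{
  return sin (x) / cos (x);	/* folds to tan (x) */
}

double
cos_over_sin (double x)
{
  return cos (x) / sin (x);	/* folds to 1.0 / tan (x) */
}
#endif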
11096
11097 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11098 NaNs or Infinities. */
11099 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11100 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11101 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11102 {
11103 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11104 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11105
11106 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11107 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11108 && operand_equal_p (arg00, arg01, 0))
11109 {
11110 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11111
11112 if (cosfn != NULL_TREE)
11113 return build_call_expr (cosfn, 1, arg00);
11114 }
11115 }
11116
11117 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11118 NaNs or Infinities. */
11119 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11120 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11121 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11122 {
11123 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11124 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11125
11126 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11127 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11128 && operand_equal_p (arg00, arg01, 0))
11129 {
11130 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11131
11132 if (cosfn != NULL_TREE)
11133 {
11134 tree tmp = build_call_expr (cosfn, 1, arg00);
11135 return fold_build2 (RDIV_EXPR, type,
11136 build_real (type, dconst1),
11137 tmp);
11138 }
11139 }
11140 }
11141
11142 /* Optimize pow(x,c)/x as pow(x,c-1). */
11143 if (fcode0 == BUILT_IN_POW
11144 || fcode0 == BUILT_IN_POWF
11145 || fcode0 == BUILT_IN_POWL)
11146 {
11147 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11148 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11149 if (TREE_CODE (arg01) == REAL_CST
11150 && !TREE_OVERFLOW (arg01)
11151 && operand_equal_p (arg1, arg00, 0))
11152 {
11153 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11154 REAL_VALUE_TYPE c;
11155 tree arg;
11156
11157 c = TREE_REAL_CST (arg01);
11158 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11159 arg = build_real (type, c);
11160 return build_call_expr (powfn, 2, arg1, arg);
11161 }
11162 }
11163
11164 /* Optimize a/root(b/c) into a*root(c/b). */
11165 if (BUILTIN_ROOT_P (fcode1))
11166 {
11167 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11168
11169 if (TREE_CODE (rootarg) == RDIV_EXPR)
11170 {
11171 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11172 tree b = TREE_OPERAND (rootarg, 0);
11173 tree c = TREE_OPERAND (rootarg, 1);
11174
11175 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11176
11177 tmp = build_call_expr (rootfn, 1, tmp);
11178 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11179 }
11180 }
11181
11182 /* Optimize x/expN(y) into x*expN(-y). */
11183 if (BUILTIN_EXPONENT_P (fcode1))
11184 {
11185 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11186 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11187 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11188 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11189 }
11190
11191 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11192 if (fcode1 == BUILT_IN_POW
11193 || fcode1 == BUILT_IN_POWF
11194 || fcode1 == BUILT_IN_POWL)
11195 {
11196 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11197 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11198 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11199 tree neg11 = fold_convert (type, negate_expr (arg11));
11200 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11201 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11202 }
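/* Editor's note: a hypothetical sketch of the last two folds; under
   -funsafe-math-optimizations a division by an exponential becomes a
   multiplication by the exponential of the negated argument.  */
#if 0
#include <math.h>

double
div_by_exp (double x, double y)
{
  return x / exp (y);		/* folds to x * exp (-y) */
}

double
div_by_pow (double x, double y, double z)
{
  return x / pow (y, z);	/* folds to x * pow (y, -z) */
}
#endif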
11203 }
11204 return NULL_TREE;
11205
11206 case TRUNC_DIV_EXPR:
11207 case FLOOR_DIV_EXPR:
11208 /* Simplify A / (B << N) where A and B are positive and B is
11209 a power of 2, to A >> (N + log2(B)). */
11210 strict_overflow_p = false;
11211 if (TREE_CODE (arg1) == LSHIFT_EXPR
11212 && (TYPE_UNSIGNED (type)
11213 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11214 {
11215 tree sval = TREE_OPERAND (arg1, 0);
11216 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11217 {
11218 tree sh_cnt = TREE_OPERAND (arg1, 1);
11219 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11220
11221 if (strict_overflow_p)
11222 fold_overflow_warning (("assuming signed overflow does not "
11223 "occur when simplifying A / (B << N)"),
11224 WARN_STRICT_OVERFLOW_MISC);
11225
11226 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11227 sh_cnt, build_int_cst (NULL_TREE, pow2));
11228 return fold_build2 (RSHIFT_EXPR, type,
11229 fold_convert (type, arg0), sh_cnt);
11230 }
11231 }
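/* Editor's note: a hypothetical worked example of the fold above; with
   B == 4, log2(B) == 2, so an unsigned division by 4u << n becomes a
   right shift by n + 2.  */
#if 0
unsigned int
div_by_shifted_pow2 (unsigned int a, int n)
{
  return a / (4u << n);		/* folds to a >> (n + 2) */
}
#endif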
11232
11233 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11234 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11235 if (INTEGRAL_TYPE_P (type)
11236 && TYPE_UNSIGNED (type)
11237 && code == FLOOR_DIV_EXPR)
11238 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11239
11240 /* Fall through. */
11241
11242 case ROUND_DIV_EXPR:
11243 case CEIL_DIV_EXPR:
11244 case EXACT_DIV_EXPR:
11245 if (integer_onep (arg1))
11246 return non_lvalue (fold_convert (type, arg0));
11247 if (integer_zerop (arg1))
11248 return NULL_TREE;
11249 /* X / -1 is -X. */
11250 if (!TYPE_UNSIGNED (type)
11251 && TREE_CODE (arg1) == INTEGER_CST
11252 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11253 && TREE_INT_CST_HIGH (arg1) == -1)
11254 return fold_convert (type, negate_expr (arg0));
11255
11256 /* Convert -A / -B to A / B when the type is signed and overflow is
11257 undefined. */
11258 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11259 && TREE_CODE (arg0) == NEGATE_EXPR
11260 && negate_expr_p (arg1))
11261 {
11262 if (INTEGRAL_TYPE_P (type))
11263 fold_overflow_warning (("assuming signed overflow does not occur "
11264 "when distributing negation across "
11265 "division"),
11266 WARN_STRICT_OVERFLOW_MISC);
11267 return fold_build2 (code, type,
11268 fold_convert (type, TREE_OPERAND (arg0, 0)),
11269 negate_expr (arg1));
11270 }
11271 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11272 && TREE_CODE (arg1) == NEGATE_EXPR
11273 && negate_expr_p (arg0))
11274 {
11275 if (INTEGRAL_TYPE_P (type))
11276 fold_overflow_warning (("assuming signed overflow does not occur "
11277 "when distributing negation across "
11278 "division"),
11279 WARN_STRICT_OVERFLOW_MISC);
11280 return fold_build2 (code, type, negate_expr (arg0),
11281 TREE_OPERAND (arg1, 0));
11282 }
11283
11284 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11285 operation, EXACT_DIV_EXPR.
11286
11287 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11288 At one time others generated faster code; it's not clear if they do
11289 after the last round of changes to the DIV code in expmed.c. */
11290 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11291 && multiple_of_p (type, arg0, arg1))
11292 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11293
11294 strict_overflow_p = false;
11295 if (TREE_CODE (arg1) == INTEGER_CST
11296 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11297 &strict_overflow_p)))
11298 {
11299 if (strict_overflow_p)
11300 fold_overflow_warning (("assuming signed overflow does not occur "
11301 "when simplifying division"),
11302 WARN_STRICT_OVERFLOW_MISC);
11303 return fold_convert (type, tem);
11304 }
11305
11306 return NULL_TREE;
11307
11308 case CEIL_MOD_EXPR:
11309 case FLOOR_MOD_EXPR:
11310 case ROUND_MOD_EXPR:
11311 case TRUNC_MOD_EXPR:
11312 /* X % 1 is always zero, but be sure to preserve any side
11313 effects in X. */
11314 if (integer_onep (arg1))
11315 return omit_one_operand (type, integer_zero_node, arg0);
11316
11317 /* X % 0: return X % 0 unchanged so that we can get the
11318 proper warnings and errors. */
11319 if (integer_zerop (arg1))
11320 return NULL_TREE;
11321
11322 /* 0 % X is always zero, but be sure to preserve any side
11323 effects in X. Place this after checking for X == 0. */
11324 if (integer_zerop (arg0))
11325 return omit_one_operand (type, integer_zero_node, arg1);
11326
11327 /* X % -1 is zero. */
11328 if (!TYPE_UNSIGNED (type)
11329 && TREE_CODE (arg1) == INTEGER_CST
11330 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11331 && TREE_INT_CST_HIGH (arg1) == -1)
11332 return omit_one_operand (type, integer_zero_node, arg0);
11333
11334 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11335 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11336 strict_overflow_p = false;
11337 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11338 && (TYPE_UNSIGNED (type)
11339 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11340 {
11341 tree c = arg1;
11342 /* Also optimize A % (C << N) where C is a power of 2,
11343 to A & ((C << N) - 1). */
11344 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11345 c = TREE_OPERAND (arg1, 0);
11346
11347 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11348 {
11349 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11350 build_int_cst (TREE_TYPE (arg1), 1));
11351 if (strict_overflow_p)
11352 fold_overflow_warning (("assuming signed overflow does not "
11353 "occur when simplifying "
11354 "X % (power of two)"),
11355 WARN_STRICT_OVERFLOW_MISC);
11356 return fold_build2 (BIT_AND_EXPR, type,
11357 fold_convert (type, arg0),
11358 fold_convert (type, mask));
11359 }
11360 }
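/* Editor's note: hypothetical worked examples of the fold above for
   unsigned operands.  */
#if 0
unsigned int
mod_pow2 (unsigned int x)
{
  return x % 8;			/* folds to x & 7 */
}

unsigned int
mod_shifted_pow2 (unsigned int x, int n)
{
  return x % (2u << n);		/* folds to x & ((2u << n) - 1) */
}
#endif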
11361
11362 /* X % -C is the same as X % C. */
11363 if (code == TRUNC_MOD_EXPR
11364 && !TYPE_UNSIGNED (type)
11365 && TREE_CODE (arg1) == INTEGER_CST
11366 && !TREE_OVERFLOW (arg1)
11367 && TREE_INT_CST_HIGH (arg1) < 0
11368 && !TYPE_OVERFLOW_TRAPS (type)
11369 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11370 && !sign_bit_p (arg1, arg1))
11371 return fold_build2 (code, type, fold_convert (type, arg0),
11372 fold_convert (type, negate_expr (arg1)));
11373
11374 /* X % -Y is the same as X % Y. */
11375 if (code == TRUNC_MOD_EXPR
11376 && !TYPE_UNSIGNED (type)
11377 && TREE_CODE (arg1) == NEGATE_EXPR
11378 && !TYPE_OVERFLOW_TRAPS (type))
11379 return fold_build2 (code, type, fold_convert (type, arg0),
11380 fold_convert (type, TREE_OPERAND (arg1, 0)));
11381
11382 if (TREE_CODE (arg1) == INTEGER_CST
11383 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11384 &strict_overflow_p)))
11385 {
11386 if (strict_overflow_p)
11387 fold_overflow_warning (("assuming signed overflow does not occur "
11388 "when simplifying modulus"),
11389 WARN_STRICT_OVERFLOW_MISC);
11390 return fold_convert (type, tem);
11391 }
11392
11393 return NULL_TREE;
11394
11395 case LROTATE_EXPR:
11396 case RROTATE_EXPR:
11397 if (integer_all_onesp (arg0))
11398 return omit_one_operand (type, arg0, arg1);
11399 goto shift;
11400
11401 case RSHIFT_EXPR:
11402 /* Optimize -1 >> x for arithmetic right shifts. */
11403 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11404 return omit_one_operand (type, arg0, arg1);
11405 /* ... fall through ... */
11406
11407 case LSHIFT_EXPR:
11408 shift:
11409 if (integer_zerop (arg1))
11410 return non_lvalue (fold_convert (type, arg0));
11411 if (integer_zerop (arg0))
11412 return omit_one_operand (type, arg0, arg1);
11413
11414 /* Since a negative shift count is not well-defined,
11415 don't try to compute it in the compiler. */
11416 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11417 return NULL_TREE;
11418
11419 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11420 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11421 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11422 && host_integerp (TREE_OPERAND (arg0, 1), false)
11423 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11424 {
11425 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11426 + TREE_INT_CST_LOW (arg1));
11427
11428 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11429 being well defined. */
11430 if (low >= TYPE_PRECISION (type))
11431 {
11432 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11433 low = low % TYPE_PRECISION (type);
11434 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11435 return build_int_cst (type, 0);
11436 else
11437 low = TYPE_PRECISION (type) - 1;
11438 }
11439
11440 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11441 build_int_cst (type, low));
11442 }
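/* Editor's note: hypothetical worked examples of the shift-combining
   fold above, assuming 32-bit unsigned int.  */
#if 0
unsigned int
combine_shift_counts (unsigned int x)
{
  return (x << 3) << 5;		/* folds to x << 8 */
}

unsigned int
combined_count_too_large (unsigned int x)
{
  return (x << 20) << 20;	/* 40 >= 32, so folds to 0 */
}
#endif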
11443
11444 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11445 into x & ((unsigned)-1 >> c) for unsigned types. */
11446 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11447 || (TYPE_UNSIGNED (type)
11448 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11449 && host_integerp (arg1, false)
11450 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11451 && host_integerp (TREE_OPERAND (arg0, 1), false)
11452 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11453 {
11454 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11455 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11456 tree lshift;
11457 tree arg00;
11458
11459 if (low0 == low1)
11460 {
11461 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11462
11463 lshift = build_int_cst (type, -1);
11464 lshift = int_const_binop (code, lshift, arg1, 0);
11465
11466 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11467 }
11468 }
11469
11470 /* Rewrite an LROTATE_EXPR by a constant into an
11471 RROTATE_EXPR by a new constant. */
11472 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11473 {
11474 tree tem = build_int_cst (TREE_TYPE (arg1),
11475 TYPE_PRECISION (type));
11476 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11477 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11478 }
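/* Editor's note: C has no rotate operator, so LROTATE_EXPR usually
   comes from idiom recognition; a hypothetical sketch, assuming 32-bit
   unsigned int:  */
#if 0
unsigned int
rotl8 (unsigned int x)
{
  /* Once recognized as a left rotate by 8, the rewrite above turns it
     into a right rotate by 32 - 8 == 24.  */
  return (x << 8) | (x >> 24);
}
#endif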
11479
11480 /* If we have a rotate of a bit operation with the rotate count and
11481 the second operand of the bit operation both constant,
11482 permute the two operations. */
11483 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11484 && (TREE_CODE (arg0) == BIT_AND_EXPR
11485 || TREE_CODE (arg0) == BIT_IOR_EXPR
11486 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11487 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11488 return fold_build2 (TREE_CODE (arg0), type,
11489 fold_build2 (code, type,
11490 TREE_OPERAND (arg0, 0), arg1),
11491 fold_build2 (code, type,
11492 TREE_OPERAND (arg0, 1), arg1));
11493
11494 /* Two consecutive rotates adding up to the precision of the
11495 type can be ignored. */
11496 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11497 && TREE_CODE (arg0) == RROTATE_EXPR
11498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11499 && TREE_INT_CST_HIGH (arg1) == 0
11500 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11501 && ((TREE_INT_CST_LOW (arg1)
11502 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11503 == (unsigned int) TYPE_PRECISION (type)))
11504 return TREE_OPERAND (arg0, 0);
11505
11506 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11507 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11508 if the latter can be further optimized. */
11509 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11510 && TREE_CODE (arg0) == BIT_AND_EXPR
11511 && TREE_CODE (arg1) == INTEGER_CST
11512 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11513 {
11514 tree mask = fold_build2 (code, type,
11515 fold_convert (type, TREE_OPERAND (arg0, 1)),
11516 arg1);
11517 tree shift = fold_build2 (code, type,
11518 fold_convert (type, TREE_OPERAND (arg0, 0)),
11519 arg1);
11520 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11521 if (tem)
11522 return tem;
11523 }
11524
11525 return NULL_TREE;
11526
11527 case MIN_EXPR:
11528 if (operand_equal_p (arg0, arg1, 0))
11529 return omit_one_operand (type, arg0, arg1);
11530 if (INTEGRAL_TYPE_P (type)
11531 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11532 return omit_one_operand (type, arg1, arg0);
11533 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11534 if (tem)
11535 return tem;
11536 goto associate;
11537
11538 case MAX_EXPR:
11539 if (operand_equal_p (arg0, arg1, 0))
11540 return omit_one_operand (type, arg0, arg1);
11541 if (INTEGRAL_TYPE_P (type)
11542 && TYPE_MAX_VALUE (type)
11543 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11544 return omit_one_operand (type, arg1, arg0);
11545 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11546 if (tem)
11547 return tem;
11548 goto associate;
11549
11550 case TRUTH_ANDIF_EXPR:
11551 /* Note that the operands of this must be ints
11552 and their values must be 0 or 1.
11553 ("true" is a fixed value perhaps depending on the language.) */
11554 /* If first arg is constant zero, return it. */
11555 if (integer_zerop (arg0))
11556 return fold_convert (type, arg0);
11557 case TRUTH_AND_EXPR:
11558 /* If either arg is constant true, drop it. */
11559 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11560 return non_lvalue (fold_convert (type, arg1));
11561 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11562 /* Preserve sequence points. */
11563 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11564 return non_lvalue (fold_convert (type, arg0));
11565 /* If second arg is constant zero, result is zero, but first arg
11566 must be evaluated. */
11567 if (integer_zerop (arg1))
11568 return omit_one_operand (type, arg1, arg0);
11569 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11570 case will be handled here. */
11571 if (integer_zerop (arg0))
11572 return omit_one_operand (type, arg0, arg1);
11573
11574 /* !X && X is always false. */
11575 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11576 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11577 return omit_one_operand (type, integer_zero_node, arg1);
11578 /* X && !X is always false. */
11579 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11580 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11581 return omit_one_operand (type, integer_zero_node, arg0);
11582
11583 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11584 means A >= Y && A != MAX, but in this case we know that
11585 A < X <= MAX. */
11586
11587 if (!TREE_SIDE_EFFECTS (arg0)
11588 && !TREE_SIDE_EFFECTS (arg1))
11589 {
11590 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11591 if (tem && !operand_equal_p (tem, arg0, 0))
11592 return fold_build2 (code, type, tem, arg1);
11593
11594 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11595 if (tem && !operand_equal_p (tem, arg1, 0))
11596 return fold_build2 (code, type, arg0, tem);
11597 }
11598
11599 truth_andor:
11600 /* We only do these simplifications if we are optimizing. */
11601 if (!optimize)
11602 return NULL_TREE;
11603
11604 /* Check for things like (A || B) && (A || C). We can convert this
11605 to A || (B && C). Note that either operator can be any of the four
11606 truth and/or operations and the transformation will still be
11607 valid. Also note that we only care about order for the
11608 ANDIF and ORIF operators. If B contains side effects, this
11609 might change the truth-value of A. */
11610 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11611 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11612 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11613 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11614 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11615 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11616 {
11617 tree a00 = TREE_OPERAND (arg0, 0);
11618 tree a01 = TREE_OPERAND (arg0, 1);
11619 tree a10 = TREE_OPERAND (arg1, 0);
11620 tree a11 = TREE_OPERAND (arg1, 1);
11621 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11622 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11623 && (code == TRUTH_AND_EXPR
11624 || code == TRUTH_OR_EXPR));
11625
11626 if (operand_equal_p (a00, a10, 0))
11627 return fold_build2 (TREE_CODE (arg0), type, a00,
11628 fold_build2 (code, type, a01, a11));
11629 else if (commutative && operand_equal_p (a00, a11, 0))
11630 return fold_build2 (TREE_CODE (arg0), type, a00,
11631 fold_build2 (code, type, a01, a10));
11632 else if (commutative && operand_equal_p (a01, a10, 0))
11633 return fold_build2 (TREE_CODE (arg0), type, a01,
11634 fold_build2 (code, type, a00, a11));
11635
11636 /* This case is tricky because we must either have commutative
11637 operators or else A10 must not have side-effects. */
11638
11639 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11640 && operand_equal_p (a01, a11, 0))
11641 return fold_build2 (TREE_CODE (arg0), type,
11642 fold_build2 (code, type, a00, a10),
11643 a01);
11644 }
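/* Editor's note: a hypothetical sketch of the distribution above.  */
#if 0
int
common_disjunct (int a, int b, int c)
{
  return (a || b) && (a || c);	/* folds to a || (b && c) */
}
#endif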
11645
11646 /* See if we can build a range comparison. */
11647 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11648 return tem;
11649
11650 /* Check for the possibility of merging component references. If our
11651 lhs is another similar operation, try to merge its rhs with our
11652 rhs. Then try to merge our lhs and rhs. */
11653 if (TREE_CODE (arg0) == code
11654 && 0 != (tem = fold_truthop (code, type,
11655 TREE_OPERAND (arg0, 1), arg1)))
11656 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11657
11658 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11659 return tem;
11660
11661 return NULL_TREE;
11662
11663 case TRUTH_ORIF_EXPR:
11664 /* Note that the operands of this must be ints
11665 and their values must be 0 or true.
11666 ("true" is a fixed value perhaps depending on the language.) */
11667 /* If first arg is constant true, return it. */
11668 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11669 return fold_convert (type, arg0);
11670 case TRUTH_OR_EXPR:
11671 /* If either arg is constant zero, drop it. */
11672 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11673 return non_lvalue (fold_convert (type, arg1));
11674 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11675 /* Preserve sequence points. */
11676 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11677 return non_lvalue (fold_convert (type, arg0));
11678 /* If second arg is constant true, result is true, but we must
11679 evaluate first arg. */
11680 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11681 return omit_one_operand (type, arg1, arg0);
11682 /* Likewise for first arg, but note this only occurs here for
11683 TRUTH_OR_EXPR. */
11684 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11685 return omit_one_operand (type, arg0, arg1);
11686
11687 /* !X || X is always true. */
11688 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11689 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11690 return omit_one_operand (type, integer_one_node, arg1);
11691 /* X || !X is always true. */
11692 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11693 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11694 return omit_one_operand (type, integer_one_node, arg0);
11695
11696 goto truth_andor;
11697
11698 case TRUTH_XOR_EXPR:
11699 /* If the second arg is constant zero, drop it. */
11700 if (integer_zerop (arg1))
11701 return non_lvalue (fold_convert (type, arg0));
11702 /* If the second arg is constant true, this is a logical inversion. */
11703 if (integer_onep (arg1))
11704 {
11705 /* Only call invert_truthvalue if operand is a truth value. */
11706 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11707 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11708 else
11709 tem = invert_truthvalue (arg0);
11710 return non_lvalue (fold_convert (type, tem));
11711 }
11712 /* Identical arguments cancel to zero. */
11713 if (operand_equal_p (arg0, arg1, 0))
11714 return omit_one_operand (type, integer_zero_node, arg0);
11715
11716 /* !X ^ X is always true. */
11717 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11718 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11719 return omit_one_operand (type, integer_one_node, arg1);
11720
11721 /* X ^ !X is always true. */
11722 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11723 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11724 return omit_one_operand (type, integer_one_node, arg0);
11725
11726 return NULL_TREE;
11727
11728 case EQ_EXPR:
11729 case NE_EXPR:
11730 tem = fold_comparison (code, type, op0, op1);
11731 if (tem != NULL_TREE)
11732 return tem;
11733
11734 /* bool_var != 0 becomes bool_var. */
11735 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11736 && code == NE_EXPR)
11737 return non_lvalue (fold_convert (type, arg0));
11738
11739 /* bool_var == 1 becomes bool_var. */
11740 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11741 && code == EQ_EXPR)
11742 return non_lvalue (fold_convert (type, arg0));
11743
11744 /* bool_var != 1 becomes !bool_var. */
11745 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11746 && code == NE_EXPR)
11747 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11748
11749 /* bool_var == 0 becomes !bool_var. */
11750 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11751 && code == EQ_EXPR)
11752 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11753
11754 /* If this is an equality comparison of the address of two non-weak,
11755 unaliased symbols, neither of which is extern (since we do not
11756 have access to attributes for externs), then we know the result. */
11757 if (TREE_CODE (arg0) == ADDR_EXPR
11758 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11759 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11760 && ! lookup_attribute ("alias",
11761 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11762 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11763 && TREE_CODE (arg1) == ADDR_EXPR
11764 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11765 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11766 && ! lookup_attribute ("alias",
11767 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11768 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11769 {
11770 /* We know that we're looking at the address of two
11771 non-weak, unaliased, static _DECL nodes.
11772
11773 It is both wasteful and incorrect to call operand_equal_p
11774 to compare the two ADDR_EXPR nodes. It is wasteful in that
11775 all we need to do is test pointer equality for the arguments
11776 to the two ADDR_EXPR nodes. It is incorrect to use
11777 operand_equal_p as that function is NOT equivalent to a
11778 C equality test. It can in fact return false for two
11779 objects which would test as equal using the C equality
11780 operator. */
11781 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11782 return constant_boolean_node (equal
11783 ? code == EQ_EXPR : code != EQ_EXPR,
11784 type);
11785 }
11786
11787 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11788 a MINUS_EXPR of a constant, we can convert it into a comparison with
11789 a revised constant as long as no overflow occurs. */
11790 if (TREE_CODE (arg1) == INTEGER_CST
11791 && (TREE_CODE (arg0) == PLUS_EXPR
11792 || TREE_CODE (arg0) == MINUS_EXPR)
11793 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11794 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11795 ? MINUS_EXPR : PLUS_EXPR,
11796 fold_convert (TREE_TYPE (arg0), arg1),
11797 TREE_OPERAND (arg0, 1), 0))
11798 && !TREE_OVERFLOW (tem))
11799 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11800
11801 /* Similarly for a NEGATE_EXPR. */
11802 if (TREE_CODE (arg0) == NEGATE_EXPR
11803 && TREE_CODE (arg1) == INTEGER_CST
11804 && 0 != (tem = negate_expr (arg1))
11805 && TREE_CODE (tem) == INTEGER_CST
11806 && !TREE_OVERFLOW (tem))
11807 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11808
11809 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11810 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11811 && TREE_CODE (arg1) == INTEGER_CST
11812 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11813 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11814 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11815 fold_convert (TREE_TYPE (arg0), arg1),
11816 TREE_OPERAND (arg0, 1)));
11817
11818 /* Transform comparisons of the form X +- C CMP X. */
11819 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11820 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11821 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11822 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11823 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11824 {
11825 tree cst = TREE_OPERAND (arg0, 1);
11826
11827 if (code == EQ_EXPR
11828 && !integer_zerop (cst))
11829 return omit_two_operands (type, boolean_false_node,
11830 TREE_OPERAND (arg0, 0), arg1);
11831 else
11832 return omit_two_operands (type, boolean_true_node,
11833 TREE_OPERAND (arg0, 0), arg1);
11834 }
11835
11836 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11837 for !=. Don't do this for ordered comparisons due to overflow. */
11838 if (TREE_CODE (arg0) == MINUS_EXPR
11839 && integer_zerop (arg1))
11840 return fold_build2 (code, type,
11841 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11842
11843 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11844 if (TREE_CODE (arg0) == ABS_EXPR
11845 && (integer_zerop (arg1) || real_zerop (arg1)))
11846 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11847
11848 /* If this is an EQ or NE comparison with zero and ARG0 is
11849 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11850 two operations, but the latter can be done in one less insn
11851 on machines that have only two-operand insns or on which a
11852 constant cannot be the first operand. */
11853 if (TREE_CODE (arg0) == BIT_AND_EXPR
11854 && integer_zerop (arg1))
11855 {
11856 tree arg00 = TREE_OPERAND (arg0, 0);
11857 tree arg01 = TREE_OPERAND (arg0, 1);
11858 if (TREE_CODE (arg00) == LSHIFT_EXPR
11859 && integer_onep (TREE_OPERAND (arg00, 0)))
11860 {
11861 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11862 arg01, TREE_OPERAND (arg00, 1));
11863 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11864 build_int_cst (TREE_TYPE (arg0), 1));
11865 return fold_build2 (code, type,
11866 fold_convert (TREE_TYPE (arg1), tem), arg1);
11867 }
11868 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11869 && integer_onep (TREE_OPERAND (arg01, 0)))
11870 {
11871 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11872 arg00, TREE_OPERAND (arg01, 1));
11873 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11874 build_int_cst (TREE_TYPE (arg0), 1));
11875 return fold_build2 (code, type,
11876 fold_convert (TREE_TYPE (arg1), tem), arg1);
11877 }
11878 }
11879
11880 /* If this is an NE or EQ comparison of zero against the result of a
11881 signed MOD operation whose second operand is a power of 2, make
11882 the MOD operation unsigned since it is simpler and equivalent. */
11883 if (integer_zerop (arg1)
11884 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11885 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11886 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11887 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11888 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11889 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11890 {
11891 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11892 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11893 fold_convert (newtype,
11894 TREE_OPERAND (arg0, 0)),
11895 fold_convert (newtype,
11896 TREE_OPERAND (arg0, 1)));
11897
11898 return fold_build2 (code, type, newmod,
11899 fold_convert (newtype, arg1));
11900 }
11901
11902 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11903 C1 is a valid shift constant, and C2 is a power of two, i.e.
11904 a single bit. */
11905 if (TREE_CODE (arg0) == BIT_AND_EXPR
11906 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11907 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11908 == INTEGER_CST
11909 && integer_pow2p (TREE_OPERAND (arg0, 1))
11910 && integer_zerop (arg1))
11911 {
11912 tree itype = TREE_TYPE (arg0);
11913 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11914 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11915
11916 /* Check for a valid shift count. */
11917 if (TREE_INT_CST_HIGH (arg001) == 0
11918 && TREE_INT_CST_LOW (arg001) < prec)
11919 {
11920 tree arg01 = TREE_OPERAND (arg0, 1);
11921 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11922 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11923 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11924 can be rewritten as (X & (C2 << C1)) != 0. */
11925 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11926 {
11927 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11928 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11929 return fold_build2 (code, type, tem, arg1);
11930 }
11931 /* Otherwise, for signed (arithmetic) shifts,
11932 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11933 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11934 else if (!TYPE_UNSIGNED (itype))
11935 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11936 arg000, build_int_cst (itype, 0));
11937 /* Otherwise, for unsigned (logical) shifts,
11938 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11939 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11940 else
11941 return omit_one_operand (type,
11942 code == EQ_EXPR ? integer_one_node
11943 : integer_zero_node,
11944 arg000);
11945 }
11946 }
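/* Illustrative examples (added note): with 32-bit unsigned x,
   "((x >> 3) & 4) != 0" becomes "(x & 32) != 0", since 4 << 3
   fits in the precision; with 32-bit signed x,
   "((x >> 30) & 4) != 0" becomes "x < 0", because the tested
   bit is a sign-extension copy of the sign bit.  */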
11947
11948 /* If this is an NE comparison of zero with an AND of one, remove the
11949 comparison since the AND will give the correct value. */
11950 if (code == NE_EXPR
11951 && integer_zerop (arg1)
11952 && TREE_CODE (arg0) == BIT_AND_EXPR
11953 && integer_onep (TREE_OPERAND (arg0, 1)))
11954 return fold_convert (type, arg0);
11955
11956 /* If we have (A & C) == C where C is a power of 2, convert this into
11957 (A & C) != 0. Similarly for NE_EXPR. */
11958 if (TREE_CODE (arg0) == BIT_AND_EXPR
11959 && integer_pow2p (TREE_OPERAND (arg0, 1))
11960 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11961 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11962 arg0, fold_convert (TREE_TYPE (arg0),
11963 integer_zero_node));
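/* Illustrative example (added note): "(a & 8) == 8" becomes
   "(a & 8) != 0"; for a single-bit mask the two are equivalent
   and the latter is the canonical form.  */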
11964
11965 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11966 bit, then fold the expression into A < 0 or A >= 0. */
11967 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11968 if (tem)
11969 return tem;
11970
11971 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11972 Similarly for NE_EXPR. */
11973 if (TREE_CODE (arg0) == BIT_AND_EXPR
11974 && TREE_CODE (arg1) == INTEGER_CST
11975 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11976 {
11977 tree notc = fold_build1 (BIT_NOT_EXPR,
11978 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11979 TREE_OPERAND (arg0, 1));
11980 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11981 arg1, notc);
11982 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11983 if (integer_nonzerop (dandnotc))
11984 return omit_one_operand (type, rslt, arg0);
11985 }
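/* Illustrative example (added note): "(x & 6) == 1" folds to
   false, since 1 & ~6 == 1 is nonzero, i.e. D has a bit set
   that the mask C can never produce.  */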
11986
11987 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11988 Similarly for NE_EXPR. */
11989 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11990 && TREE_CODE (arg1) == INTEGER_CST
11991 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11992 {
11993 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11994 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11995 TREE_OPERAND (arg0, 1), notd);
11996 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11997 if (integer_nonzerop (candnotd))
11998 return omit_one_operand (type, rslt, arg0);
11999 }
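/* Illustrative example (added note): "(x | 4) == 3" folds to
   false, since 4 & ~3 == 4 is nonzero, i.e. the OR always sets
   a bit that D lacks.  */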
12000
12001 /* Optimize comparisons of strlen vs zero to a compare of the
12002 first character of the string vs zero. To wit,
12003 strlen(ptr) == 0 => *ptr == 0
12004 strlen(ptr) != 0 => *ptr != 0
12005 Other cases should reduce to one of these two (or a constant)
12006 due to the return value of strlen being unsigned. */
12007 if (TREE_CODE (arg0) == CALL_EXPR
12008 && integer_zerop (arg1))
12009 {
12010 tree fndecl = get_callee_fndecl (arg0);
12011
12012 if (fndecl
12013 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12014 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12015 && call_expr_nargs (arg0) == 1
12016 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12017 {
12018 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12019 return fold_build2 (code, type, iref,
12020 build_int_cst (TREE_TYPE (iref), 0));
12021 }
12022 }
12023
12024 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12025 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12026 if (TREE_CODE (arg0) == RSHIFT_EXPR
12027 && integer_zerop (arg1)
12028 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12029 {
12030 tree arg00 = TREE_OPERAND (arg0, 0);
12031 tree arg01 = TREE_OPERAND (arg0, 1);
12032 tree itype = TREE_TYPE (arg00);
12033 if (TREE_INT_CST_HIGH (arg01) == 0
12034 && TREE_INT_CST_LOW (arg01)
12035 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12036 {
12037 if (TYPE_UNSIGNED (itype))
12038 {
12039 itype = signed_type_for (itype);
12040 arg00 = fold_convert (itype, arg00);
12041 }
12042 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12043 type, arg00, build_int_cst (itype, 0));
12044 }
12045 }
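/* Illustrative example (added note): with 32-bit int x,
   "(x >> 31) != 0" becomes "x < 0"; with 32-bit unsigned x,
   "(x >> 31) == 0" becomes "(int) x >= 0".  */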
12046
12047 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12048 if (integer_zerop (arg1)
12049 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12050 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12051 TREE_OPERAND (arg0, 1));
12052
12053 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12054 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12055 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12056 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12057 build_int_cst (TREE_TYPE (arg1), 0));
12058 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12059 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12060 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12061 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12062 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12063 build_int_cst (TREE_TYPE (arg1), 0));
12064
12065 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12066 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12067 && TREE_CODE (arg1) == INTEGER_CST
12068 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12069 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12070 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12071 TREE_OPERAND (arg0, 1), arg1));
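/* Illustrative example (added note): "(x ^ 5) == 3" becomes
   "x == (5 ^ 3)", i.e. "x == 6", since XOR with a constant is
   its own inverse.  */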
12072
12073 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12074 (X & C) == 0 when C is a single bit. */
12075 if (TREE_CODE (arg0) == BIT_AND_EXPR
12076 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12077 && integer_zerop (arg1)
12078 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12079 {
12080 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12081 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12082 TREE_OPERAND (arg0, 1));
12083 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12084 type, tem, arg1);
12085 }
12086
12087 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12088 constant C is a power of two, i.e. a single bit. */
12089 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12090 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12091 && integer_zerop (arg1)
12092 && integer_pow2p (TREE_OPERAND (arg0, 1))
12093 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12094 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12095 {
12096 tree arg00 = TREE_OPERAND (arg0, 0);
12097 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12098 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12099 }
12100
12101 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12102 when C is a power of two, i.e. a single bit. */
12103 if (TREE_CODE (arg0) == BIT_AND_EXPR
12104 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12105 && integer_zerop (arg1)
12106 && integer_pow2p (TREE_OPERAND (arg0, 1))
12107 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12108 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12109 {
12110 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12111 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12112 arg000, TREE_OPERAND (arg0, 1));
12113 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12114 tem, build_int_cst (TREE_TYPE (tem), 0));
12115 }
12116
12117 if (integer_zerop (arg1)
12118 && tree_expr_nonzero_p (arg0))
12119 {
12120 tree res = constant_boolean_node (code == NE_EXPR, type);
12121 return omit_one_operand (type, res, arg0);
12122 }
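/* Illustrative example (added note): if arg0 is known nonzero,
   e.g. the address of a declared object, "&obj == 0" folds to
   false and "&obj != 0" folds to true.  */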
12123
12124 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12125 if (TREE_CODE (arg0) == NEGATE_EXPR
12126 && TREE_CODE (arg1) == NEGATE_EXPR)
12127 return fold_build2 (code, type,
12128 TREE_OPERAND (arg0, 0),
12129 TREE_OPERAND (arg1, 0));
12130
12131 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12132 if (TREE_CODE (arg0) == BIT_AND_EXPR
12133 && TREE_CODE (arg1) == BIT_AND_EXPR)
12134 {
12135 tree arg00 = TREE_OPERAND (arg0, 0);
12136 tree arg01 = TREE_OPERAND (arg0, 1);
12137 tree arg10 = TREE_OPERAND (arg1, 0);
12138 tree arg11 = TREE_OPERAND (arg1, 1);
12139 tree itype = TREE_TYPE (arg0);
12140
12141 if (operand_equal_p (arg01, arg11, 0))
12142 return fold_build2 (code, type,
12143 fold_build2 (BIT_AND_EXPR, itype,
12144 fold_build2 (BIT_XOR_EXPR, itype,
12145 arg00, arg10),
12146 arg01),
12147 build_int_cst (itype, 0));
12148
12149 if (operand_equal_p (arg01, arg10, 0))
12150 return fold_build2 (code, type,
12151 fold_build2 (BIT_AND_EXPR, itype,
12152 fold_build2 (BIT_XOR_EXPR, itype,
12153 arg00, arg11),
12154 arg01),
12155 build_int_cst (itype, 0));
12156
12157 if (operand_equal_p (arg00, arg11, 0))
12158 return fold_build2 (code, type,
12159 fold_build2 (BIT_AND_EXPR, itype,
12160 fold_build2 (BIT_XOR_EXPR, itype,
12161 arg01, arg10),
12162 arg00),
12163 build_int_cst (itype, 0));
12164
12165 if (operand_equal_p (arg00, arg10, 0))
12166 return fold_build2 (code, type,
12167 fold_build2 (BIT_AND_EXPR, itype,
12168 fold_build2 (BIT_XOR_EXPR, itype,
12169 arg01, arg11),
12170 arg00),
12171 build_int_cst (itype, 0));
12172 }
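/* Illustrative example (added note): "(x & c) == (y & c)"
   becomes "((x ^ y) & c) == 0"; the masked values are equal
   exactly when x and y do not differ within the mask.  */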
12173
12174 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12175 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12176 {
12177 tree arg00 = TREE_OPERAND (arg0, 0);
12178 tree arg01 = TREE_OPERAND (arg0, 1);
12179 tree arg10 = TREE_OPERAND (arg1, 0);
12180 tree arg11 = TREE_OPERAND (arg1, 1);
12181 tree itype = TREE_TYPE (arg0);
12182
12183 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12184 operand_equal_p guarantees no side-effects so we don't need
12185 to use omit_one_operand on Z. */
12186 if (operand_equal_p (arg01, arg11, 0))
12187 return fold_build2 (code, type, arg00, arg10);
12188 if (operand_equal_p (arg01, arg10, 0))
12189 return fold_build2 (code, type, arg00, arg11);
12190 if (operand_equal_p (arg00, arg11, 0))
12191 return fold_build2 (code, type, arg01, arg10);
12192 if (operand_equal_p (arg00, arg10, 0))
12193 return fold_build2 (code, type, arg01, arg11);
12194
12195 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12196 if (TREE_CODE (arg01) == INTEGER_CST
12197 && TREE_CODE (arg11) == INTEGER_CST)
12198 return fold_build2 (code, type,
12199 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12200 fold_build2 (BIT_XOR_EXPR, itype,
12201 arg01, arg11)),
12202 arg10);
12203 }
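/* Illustrative example (added note): "(x ^ z) == (y ^ z)"
   becomes "x == y", and "(x ^ 5) == (y ^ 3)" becomes
   "(x ^ (5 ^ 3)) == y", i.e. "(x ^ 6) == y".  */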
12204
12205 /* Attempt to simplify equality/inequality comparisons of complex
12206 values. Only lower the comparison if the result is known or
12207 can be simplified to a single scalar comparison. */
12208 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12209 || TREE_CODE (arg0) == COMPLEX_CST)
12210 && (TREE_CODE (arg1) == COMPLEX_EXPR
12211 || TREE_CODE (arg1) == COMPLEX_CST))
12212 {
12213 tree real0, imag0, real1, imag1;
12214 tree rcond, icond;
12215
12216 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12217 {
12218 real0 = TREE_OPERAND (arg0, 0);
12219 imag0 = TREE_OPERAND (arg0, 1);
12220 }
12221 else
12222 {
12223 real0 = TREE_REALPART (arg0);
12224 imag0 = TREE_IMAGPART (arg0);
12225 }
12226
12227 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12228 {
12229 real1 = TREE_OPERAND (arg1, 0);
12230 imag1 = TREE_OPERAND (arg1, 1);
12231 }
12232 else
12233 {
12234 real1 = TREE_REALPART (arg1);
12235 imag1 = TREE_IMAGPART (arg1);
12236 }
12237
12238 rcond = fold_binary (code, type, real0, real1);
12239 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12240 {
12241 if (integer_zerop (rcond))
12242 {
12243 if (code == EQ_EXPR)
12244 return omit_two_operands (type, boolean_false_node,
12245 imag0, imag1);
12246 return fold_build2 (NE_EXPR, type, imag0, imag1);
12247 }
12248 else
12249 {
12250 if (code == NE_EXPR)
12251 return omit_two_operands (type, boolean_true_node,
12252 imag0, imag1);
12253 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12254 }
12255 }
12256
12257 icond = fold_binary (code, type, imag0, imag1);
12258 if (icond && TREE_CODE (icond) == INTEGER_CST)
12259 {
12260 if (integer_zerop (icond))
12261 {
12262 if (code == EQ_EXPR)
12263 return omit_two_operands (type, boolean_false_node,
12264 real0, real1);
12265 return fold_build2 (NE_EXPR, type, real0, real1);
12266 }
12267 else
12268 {
12269 if (code == NE_EXPR)
12270 return omit_two_operands (type, boolean_true_node,
12271 real0, real1);
12272 return fold_build2 (EQ_EXPR, type, real0, real1);
12273 }
12274 }
12275 }
12276
12277 return NULL_TREE;
12278
12279 case LT_EXPR:
12280 case GT_EXPR:
12281 case LE_EXPR:
12282 case GE_EXPR:
12283 tem = fold_comparison (code, type, op0, op1);
12284 if (tem != NULL_TREE)
12285 return tem;
12286
12287 /* Transform comparisons of the form X +- C CMP X. */
12288 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12289 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12290 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12291 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12292 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12293 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12294 {
12295 tree arg01 = TREE_OPERAND (arg0, 1);
12296 enum tree_code code0 = TREE_CODE (arg0);
12297 int is_positive;
12298
12299 if (TREE_CODE (arg01) == REAL_CST)
12300 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12301 else
12302 is_positive = tree_int_cst_sgn (arg01);
12303
12304 /* (X - c) > X becomes false. */
12305 if (code == GT_EXPR
12306 && ((code0 == MINUS_EXPR && is_positive >= 0)
12307 || (code0 == PLUS_EXPR && is_positive <= 0)))
12308 {
12309 if (TREE_CODE (arg01) == INTEGER_CST
12310 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12311 fold_overflow_warning (("assuming signed overflow does not "
12312 "occur when assuming that (X - c) > X "
12313 "is always false"),
12314 WARN_STRICT_OVERFLOW_ALL);
12315 return constant_boolean_node (0, type);
12316 }
12317
12318 /* Likewise (X + c) < X becomes false. */
12319 if (code == LT_EXPR
12320 && ((code0 == PLUS_EXPR && is_positive >= 0)
12321 || (code0 == MINUS_EXPR && is_positive <= 0)))
12322 {
12323 if (TREE_CODE (arg01) == INTEGER_CST
12324 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12325 fold_overflow_warning (("assuming signed overflow does not "
12326 "occur when assuming that "
12327 "(X + c) < X is always false"),
12328 WARN_STRICT_OVERFLOW_ALL);
12329 return constant_boolean_node (0, type);
12330 }
12331
12332 /* Convert (X - c) <= X to true. */
12333 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12334 && code == LE_EXPR
12335 && ((code0 == MINUS_EXPR && is_positive >= 0)
12336 || (code0 == PLUS_EXPR && is_positive <= 0)))
12337 {
12338 if (TREE_CODE (arg01) == INTEGER_CST
12339 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12340 fold_overflow_warning (("assuming signed overflow does not "
12341 "occur when assuming that "
12342 "(X - c) <= X is always true"),
12343 WARN_STRICT_OVERFLOW_ALL);
12344 return constant_boolean_node (1, type);
12345 }
12346
12347 /* Convert (X + c) >= X to true. */
12348 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12349 && code == GE_EXPR
12350 && ((code0 == PLUS_EXPR && is_positive >= 0)
12351 || (code0 == MINUS_EXPR && is_positive <= 0)))
12352 {
12353 if (TREE_CODE (arg01) == INTEGER_CST
12354 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12355 fold_overflow_warning (("assuming signed overflow does not "
12356 "occur when assuming that "
12357 "(X + c) >= X is always true"),
12358 WARN_STRICT_OVERFLOW_ALL);
12359 return constant_boolean_node (1, type);
12360 }
12361
12362 if (TREE_CODE (arg01) == INTEGER_CST)
12363 {
12364 /* Convert X + c > X and X - c < X to true for integers. */
12365 if (code == GT_EXPR
12366 && ((code0 == PLUS_EXPR && is_positive > 0)
12367 || (code0 == MINUS_EXPR && is_positive < 0)))
12368 {
12369 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12370 fold_overflow_warning (("assuming signed overflow does "
12371 "not occur when assuming that "
12372 "(X + c) > X is always true"),
12373 WARN_STRICT_OVERFLOW_ALL);
12374 return constant_boolean_node (1, type);
12375 }
12376
12377 if (code == LT_EXPR
12378 && ((code0 == MINUS_EXPR && is_positive > 0)
12379 || (code0 == PLUS_EXPR && is_positive < 0)))
12380 {
12381 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12382 fold_overflow_warning (("assuming signed overflow does "
12383 "not occur when assuming that "
12384 "(X - c) < X is always true"),
12385 WARN_STRICT_OVERFLOW_ALL);
12386 return constant_boolean_node (1, type);
12387 }
12388
12389 /* Convert X + c <= X and X - c >= X to false for integers. */
12390 if (code == LE_EXPR
12391 && ((code0 == PLUS_EXPR && is_positive > 0)
12392 || (code0 == MINUS_EXPR && is_positive < 0)))
12393 {
12394 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12395 fold_overflow_warning (("assuming signed overflow does "
12396 "not occur when assuming that "
12397 "(X + c) <= X is always false"),
12398 WARN_STRICT_OVERFLOW_ALL);
12399 return constant_boolean_node (0, type);
12400 }
12401
12402 if (code == GE_EXPR
12403 && ((code0 == MINUS_EXPR && is_positive > 0)
12404 || (code0 == PLUS_EXPR && is_positive < 0)))
12405 {
12406 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12407 fold_overflow_warning (("assuming signed overflow does "
12408 "not occur when assuming that "
12409 "(X - c) >= X is always false"),
12410 WARN_STRICT_OVERFLOW_ALL);
12411 return constant_boolean_node (0, type);
12412 }
12413 }
12414 }
12415
12416 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12417 This transformation affects the cases which are handled in later
12418 optimizations involving comparisons with non-negative constants. */
12419 if (TREE_CODE (arg1) == INTEGER_CST
12420 && TREE_CODE (arg0) != INTEGER_CST
12421 && tree_int_cst_sgn (arg1) > 0)
12422 {
12423 if (code == GE_EXPR)
12424 {
12425 arg1 = const_binop (MINUS_EXPR, arg1,
12426 build_int_cst (TREE_TYPE (arg1), 1), 0);
12427 return fold_build2 (GT_EXPR, type, arg0,
12428 fold_convert (TREE_TYPE (arg0), arg1));
12429 }
12430 if (code == LT_EXPR)
12431 {
12432 arg1 = const_binop (MINUS_EXPR, arg1,
12433 build_int_cst (TREE_TYPE (arg1), 1), 0);
12434 return fold_build2 (LE_EXPR, type, arg0,
12435 fold_convert (TREE_TYPE (arg0), arg1));
12436 }
12437 }
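/* Illustrative example (added note): "x >= 5" becomes "x > 4"
   and "x < 5" becomes "x <= 4", so later folds only need to
   handle GT_EXPR and LE_EXPR against non-negative constants.  */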
12438
12439 /* Comparisons with the highest or lowest possible integer of
12440 the specified precision will have known values. */
12441 {
12442 tree arg1_type = TREE_TYPE (arg1);
12443 unsigned int width = TYPE_PRECISION (arg1_type);
12444
12445 if (TREE_CODE (arg1) == INTEGER_CST
12446 && !TREE_OVERFLOW (arg1)
12447 && width <= 2 * HOST_BITS_PER_WIDE_INT
12448 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12449 {
12450 HOST_WIDE_INT signed_max_hi;
12451 unsigned HOST_WIDE_INT signed_max_lo;
12452 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12453
12454 if (width <= HOST_BITS_PER_WIDE_INT)
12455 {
12456 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12457 - 1;
12458 signed_max_hi = 0;
12459 max_hi = 0;
12460
12461 if (TYPE_UNSIGNED (arg1_type))
12462 {
12463 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12464 min_lo = 0;
12465 min_hi = 0;
12466 }
12467 else
12468 {
12469 max_lo = signed_max_lo;
12470 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12471 min_hi = -1;
12472 }
12473 }
12474 else
12475 {
12476 width -= HOST_BITS_PER_WIDE_INT;
12477 signed_max_lo = -1;
12478 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12479 - 1;
12480 max_lo = -1;
12481 min_lo = 0;
12482
12483 if (TYPE_UNSIGNED (arg1_type))
12484 {
12485 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12486 min_hi = 0;
12487 }
12488 else
12489 {
12490 max_hi = signed_max_hi;
12491 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12492 }
12493 }
12494
12495 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12496 && TREE_INT_CST_LOW (arg1) == max_lo)
12497 switch (code)
12498 {
12499 case GT_EXPR:
12500 return omit_one_operand (type, integer_zero_node, arg0);
12501
12502 case GE_EXPR:
12503 return fold_build2 (EQ_EXPR, type, op0, op1);
12504
12505 case LE_EXPR:
12506 return omit_one_operand (type, integer_one_node, arg0);
12507
12508 case LT_EXPR:
12509 return fold_build2 (NE_EXPR, type, op0, op1);
12510
12511 /* The GE_EXPR and LT_EXPR cases above are not normally
12512 reached because of previous transformations. */
12513
12514 default:
12515 break;
12516 }
12517 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12518 == max_hi
12519 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12520 switch (code)
12521 {
12522 case GT_EXPR:
12523 arg1 = const_binop (PLUS_EXPR, arg1,
12524 build_int_cst (TREE_TYPE (arg1), 1), 0);
12525 return fold_build2 (EQ_EXPR, type,
12526 fold_convert (TREE_TYPE (arg1), arg0),
12527 arg1);
12528 case LE_EXPR:
12529 arg1 = const_binop (PLUS_EXPR, arg1,
12530 build_int_cst (TREE_TYPE (arg1), 1), 0);
12531 return fold_build2 (NE_EXPR, type,
12532 fold_convert (TREE_TYPE (arg1), arg0),
12533 arg1);
12534 default:
12535 break;
12536 }
12537 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12538 == min_hi
12539 && TREE_INT_CST_LOW (arg1) == min_lo)
12540 switch (code)
12541 {
12542 case LT_EXPR:
12543 return omit_one_operand (type, integer_zero_node, arg0);
12544
12545 case LE_EXPR:
12546 return fold_build2 (EQ_EXPR, type, op0, op1);
12547
12548 case GE_EXPR:
12549 return omit_one_operand (type, integer_one_node, arg0);
12550
12551 case GT_EXPR:
12552 return fold_build2 (NE_EXPR, type, op0, op1);
12553
12554 default:
12555 break;
12556 }
12557 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12558 == min_hi
12559 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12560 switch (code)
12561 {
12562 case GE_EXPR:
12563 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12564 return fold_build2 (NE_EXPR, type,
12565 fold_convert (TREE_TYPE (arg1), arg0),
12566 arg1);
12567 case LT_EXPR:
12568 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12569 return fold_build2 (EQ_EXPR, type,
12570 fold_convert (TREE_TYPE (arg1), arg0),
12571 arg1);
12572 default:
12573 break;
12574 }
12575
12576 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12577 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12578 && TYPE_UNSIGNED (arg1_type)
12579 /* We will flip the signedness of the comparison operator
12580 associated with the mode of arg1, so the sign bit is
12581 specified by this mode. Check that arg1 is the signed
12582 max associated with this sign bit. */
12583 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12584 /* signed_type does not work on pointer types. */
12585 && INTEGRAL_TYPE_P (arg1_type))
12586 {
12587 /* The following case also applies to X < signed_max+1
12588 and X >= signed_max+1 because of previous transformations. */
12589 if (code == LE_EXPR || code == GT_EXPR)
12590 {
12591 tree st;
12592 st = signed_type_for (TREE_TYPE (arg1));
12593 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12594 type, fold_convert (st, arg0),
12595 build_int_cst (st, 0));
12596 }
12597 }
12598 }
12599 }
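/* Illustrative examples (added note): for unsigned char x,
   "x > 255" folds to false, "x <= 255" to true, "x >= 255" to
   "x == 255" and "x < 255" to "x != 255"; and "x > 127" becomes
   "(signed char) x < 0", flipping the signedness to test the
   sign bit directly.  */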
12600
12601 /* If we are comparing an ABS_EXPR with a constant, we can
12602 convert all the cases into explicit comparisons, but they may
12603 well not be faster than doing the ABS and one comparison.
12604 But ABS (X) <= C is a range comparison, which becomes a subtraction
12605 and a comparison, and is probably faster. */
12606 if (code == LE_EXPR
12607 && TREE_CODE (arg1) == INTEGER_CST
12608 && TREE_CODE (arg0) == ABS_EXPR
12609 && ! TREE_SIDE_EFFECTS (arg0)
12610 && (0 != (tem = negate_expr (arg1)))
12611 && TREE_CODE (tem) == INTEGER_CST
12612 && !TREE_OVERFLOW (tem))
12613 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12614 build2 (GE_EXPR, type,
12615 TREE_OPERAND (arg0, 0), tem),
12616 build2 (LE_EXPR, type,
12617 TREE_OPERAND (arg0, 0), arg1));
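/* Illustrative example (added note): "abs (x) <= 5" becomes
   "x >= -5 && x <= 5" (built with TRUTH_ANDIF_EXPR), a range
   test that avoids computing the absolute value.  */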
12618
12619 /* Convert ABS_EXPR<x> >= 0 to true. */
12620 strict_overflow_p = false;
12621 if (code == GE_EXPR
12622 && (integer_zerop (arg1)
12623 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12624 && real_zerop (arg1)))
12625 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12626 {
12627 if (strict_overflow_p)
12628 fold_overflow_warning (("assuming signed overflow does not occur "
12629 "when simplifying comparison of "
12630 "absolute value and zero"),
12631 WARN_STRICT_OVERFLOW_CONDITIONAL);
12632 return omit_one_operand (type, integer_one_node, arg0);
12633 }
12634
12635 /* Convert ABS_EXPR<x> < 0 to false. */
12636 strict_overflow_p = false;
12637 if (code == LT_EXPR
12638 && (integer_zerop (arg1) || real_zerop (arg1))
12639 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12640 {
12641 if (strict_overflow_p)
12642 fold_overflow_warning (("assuming signed overflow does not occur "
12643 "when simplifying comparison of "
12644 "absolute value and zero"),
12645 WARN_STRICT_OVERFLOW_CONDITIONAL);
12646 return omit_one_operand (type, integer_zero_node, arg0);
12647 }
12648
12649 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12650 and similarly for >= into !=. */
12651 if ((code == LT_EXPR || code == GE_EXPR)
12652 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12653 && TREE_CODE (arg1) == LSHIFT_EXPR
12654 && integer_onep (TREE_OPERAND (arg1, 0)))
12655 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12656 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12657 TREE_OPERAND (arg1, 1)),
12658 build_int_cst (TREE_TYPE (arg0), 0));
12659
12660 if ((code == LT_EXPR || code == GE_EXPR)
12661 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12662 && CONVERT_EXPR_P (arg1)
12663 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12664 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12665 return
12666 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12667 fold_convert (TREE_TYPE (arg0),
12668 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12669 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12670 1))),
12671 build_int_cst (TREE_TYPE (arg0), 0));
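/* Illustrative example (added note): for unsigned x,
   "x < (1 << y)" becomes "(x >> y) == 0" and "x >= (1 << y)"
   becomes "(x >> y) != 0".  */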
12672
12673 return NULL_TREE;
12674
12675 case UNORDERED_EXPR:
12676 case ORDERED_EXPR:
12677 case UNLT_EXPR:
12678 case UNLE_EXPR:
12679 case UNGT_EXPR:
12680 case UNGE_EXPR:
12681 case UNEQ_EXPR:
12682 case LTGT_EXPR:
12683 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12684 {
12685 t1 = fold_relational_const (code, type, arg0, arg1);
12686 if (t1 != NULL_TREE)
12687 return t1;
12688 }
12689
12690 /* If the first operand is NaN, the result is constant. */
12691 if (TREE_CODE (arg0) == REAL_CST
12692 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12693 && (code != LTGT_EXPR || ! flag_trapping_math))
12694 {
12695 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12696 ? integer_zero_node
12697 : integer_one_node;
12698 return omit_one_operand (type, t1, arg1);
12699 }
12700
12701 /* If the second operand is NaN, the result is constant. */
12702 if (TREE_CODE (arg1) == REAL_CST
12703 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12704 && (code != LTGT_EXPR || ! flag_trapping_math))
12705 {
12706 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12707 ? integer_zero_node
12708 : integer_one_node;
12709 return omit_one_operand (type, t1, arg0);
12710 }
12711
12712 /* Simplify unordered comparison of something with itself. */
12713 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12714 && operand_equal_p (arg0, arg1, 0))
12715 return constant_boolean_node (1, type);
12716
12717 if (code == LTGT_EXPR
12718 && !flag_trapping_math
12719 && operand_equal_p (arg0, arg1, 0))
12720 return constant_boolean_node (0, type);
12721
12722 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12723 {
12724 tree targ0 = strip_float_extensions (arg0);
12725 tree targ1 = strip_float_extensions (arg1);
12726 tree newtype = TREE_TYPE (targ0);
12727
12728 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12729 newtype = TREE_TYPE (targ1);
12730
12731 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12732 return fold_build2 (code, type, fold_convert (newtype, targ0),
12733 fold_convert (newtype, targ1));
12734 }
12735
12736 return NULL_TREE;
12737
12738 case COMPOUND_EXPR:
12739 /* When pedantic, a compound expression can be neither an lvalue
12740 nor an integer constant expression. */
12741 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12742 return NULL_TREE;
12743 /* Don't let (0, 0) be a null pointer constant. */
12744 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12745 : fold_convert (type, arg1);
12746 return pedantic_non_lvalue (tem);
12747
12748 case COMPLEX_EXPR:
12749 if ((TREE_CODE (arg0) == REAL_CST
12750 && TREE_CODE (arg1) == REAL_CST)
12751 || (TREE_CODE (arg0) == INTEGER_CST
12752 && TREE_CODE (arg1) == INTEGER_CST))
12753 return build_complex (type, arg0, arg1);
12754 return NULL_TREE;
12755
12756 case ASSERT_EXPR:
12757 /* An ASSERT_EXPR should never be passed to fold_binary. */
12758 gcc_unreachable ();
12759
12760 default:
12761 return NULL_TREE;
12762 } /* switch (code) */
12763 }
12764
12765 /* Callback for walk_tree, looking for LABEL_EXPR.
12766 Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
12767 Does not check the sub-tree of a GOTO_EXPR. */
12768
12769 static tree
12770 contains_label_1 (tree *tp,
12771 int *walk_subtrees,
12772 void *data ATTRIBUTE_UNUSED)
12773 {
12774 switch (TREE_CODE (*tp))
12775 {
12776 case LABEL_EXPR:
12777 return *tp;
12778 case GOTO_EXPR:
12779 *walk_subtrees = 0;
12780 /* no break */
12781 default:
12782 return NULL_TREE;
12783 }
12784 }
12785
12786 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12787 accessible from outside the sub-tree. Returns true if such a label
12788 is found, false otherwise. */
12789
12790 static bool
12791 contains_label_p (tree st)
12792 {
12793 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
12794 }
12795
12796 /* Fold a ternary expression of code CODE and type TYPE with operands
12797 OP0, OP1, and OP2. Return the folded expression if folding is
12798 successful. Otherwise, return NULL_TREE. */
12799
12800 tree
12801 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12802 {
12803 tree tem;
12804 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12805 enum tree_code_class kind = TREE_CODE_CLASS (code);
12806
12807 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12808 && TREE_CODE_LENGTH (code) == 3);
12809
12810 /* Strip any conversions that don't change the mode. This is safe
12811 for every expression, except for a comparison expression because
12812 its signedness is derived from its operands. So, in the latter
12813 case, only strip conversions that don't change the signedness.
12814
12815 Note that this is done as an internal manipulation within the
12816 constant folder, in order to find the simplest representation of
12817 the arguments so that their form can be studied. In any case,
12818 the appropriate type conversions should be put back in the tree
12819 that will get out of the constant folder. */
12820 if (op0)
12821 {
12822 arg0 = op0;
12823 STRIP_NOPS (arg0);
12824 }
12825
12826 if (op1)
12827 {
12828 arg1 = op1;
12829 STRIP_NOPS (arg1);
12830 }
12831
12832 switch (code)
12833 {
12834 case COMPONENT_REF:
12835 if (TREE_CODE (arg0) == CONSTRUCTOR
12836 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12837 {
12838 unsigned HOST_WIDE_INT idx;
12839 tree field, value;
12840 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12841 if (field == arg1)
12842 return value;
12843 }
12844 return NULL_TREE;
12845
12846 case COND_EXPR:
12847 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12848 so all simple results must be passed through pedantic_non_lvalue. */
12849 if (TREE_CODE (arg0) == INTEGER_CST)
12850 {
12851 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12852 tem = integer_zerop (arg0) ? op2 : op1;
12853 /* Only optimize constant conditions when the selected branch
12854 has the same type as the COND_EXPR. This avoids optimizing
12855 away "c ? x : throw", where the throw has a void type.
12856 Avoid throwing away the operand that contains a label. */
12857 if ((!TREE_SIDE_EFFECTS (unused_op)
12858 || !contains_label_p (unused_op))
12859 && (! VOID_TYPE_P (TREE_TYPE (tem))
12860 || VOID_TYPE_P (type)))
12861 return pedantic_non_lvalue (tem);
12862 return NULL_TREE;
12863 }
12864 if (operand_equal_p (arg1, op2, 0))
12865 return pedantic_omit_one_operand (type, arg1, arg0);
12866
12867 /* If we have A op B ? A : C, we may be able to convert this to a
12868 simpler expression, depending on the operation and the values
12869 of B and C. Signed zeros prevent all of these transformations,
12870 for reasons given above each one.
12871
12872 Also try swapping the arguments and inverting the conditional. */
12873 if (COMPARISON_CLASS_P (arg0)
12874 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12875 arg1, TREE_OPERAND (arg0, 1))
12876 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12877 {
12878 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12879 if (tem)
12880 return tem;
12881 }
12882
12883 if (COMPARISON_CLASS_P (arg0)
12884 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12885 op2,
12886 TREE_OPERAND (arg0, 1))
12887 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12888 {
12889 tem = fold_truth_not_expr (arg0);
12890 if (tem && COMPARISON_CLASS_P (tem))
12891 {
12892 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12893 if (tem)
12894 return tem;
12895 }
12896 }
12897
12898 /* If the second operand is simpler than the third, swap them
12899 since that produces better jump optimization results. */
12900 if (truth_value_p (TREE_CODE (arg0))
12901 && tree_swap_operands_p (op1, op2, false))
12902 {
12903 /* See if this can be inverted. If it can't, possibly because
12904 it was a floating-point inequality comparison, don't do
12905 anything. */
12906 tem = fold_truth_not_expr (arg0);
12907 if (tem)
12908 return fold_build3 (code, type, tem, op2, op1);
12909 }
12910
12911 /* Convert A ? 1 : 0 to simply A. */
12912 if (integer_onep (op1)
12913 && integer_zerop (op2)
12914 /* If we try to convert OP0 to our type, the
12915 call to fold will try to move the conversion inside
12916 a COND, which will recurse. In that case, the COND_EXPR
12917 is probably the best choice, so leave it alone. */
12918 && type == TREE_TYPE (arg0))
12919 return pedantic_non_lvalue (arg0);
12920
12921 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12922 over COND_EXPR in cases such as floating point comparisons. */
12923 if (integer_zerop (op1)
12924 && integer_onep (op2)
12925 && truth_value_p (TREE_CODE (arg0)))
12926 return pedantic_non_lvalue (fold_convert (type,
12927 invert_truthvalue (arg0)));
12928
12929 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12930 if (TREE_CODE (arg0) == LT_EXPR
12931 && integer_zerop (TREE_OPERAND (arg0, 1))
12932 && integer_zerop (op2)
12933 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12934 {
12935 /* sign_bit_p only checks ARG1 bits within A's precision.
12936 If <sign bit of A> has wider type than A, bits outside
12937 of A's precision in <sign bit of A> need to be checked.
12938 If they are all 0, this optimization needs to be done
12939 in unsigned A's type; if they are all 1, in signed A's
12940 type; otherwise this can't be done. */
12941 if (TYPE_PRECISION (TREE_TYPE (tem))
12942 < TYPE_PRECISION (TREE_TYPE (arg1))
12943 && TYPE_PRECISION (TREE_TYPE (tem))
12944 < TYPE_PRECISION (type))
12945 {
12946 unsigned HOST_WIDE_INT mask_lo;
12947 HOST_WIDE_INT mask_hi;
12948 int inner_width, outer_width;
12949 tree tem_type;
12950
12951 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12952 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12953 if (outer_width > TYPE_PRECISION (type))
12954 outer_width = TYPE_PRECISION (type);
12955
12956 if (outer_width > HOST_BITS_PER_WIDE_INT)
12957 {
12958 mask_hi = ((unsigned HOST_WIDE_INT) -1
12959 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12960 mask_lo = -1;
12961 }
12962 else
12963 {
12964 mask_hi = 0;
12965 mask_lo = ((unsigned HOST_WIDE_INT) -1
12966 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12967 }
12968 if (inner_width > HOST_BITS_PER_WIDE_INT)
12969 {
12970 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12971 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12972 mask_lo = 0;
12973 }
12974 else
12975 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12976 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12977
12978 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12979 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12980 {
12981 tem_type = signed_type_for (TREE_TYPE (tem));
12982 tem = fold_convert (tem_type, tem);
12983 }
12984 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12985 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12986 {
12987 tem_type = unsigned_type_for (TREE_TYPE (tem));
12988 tem = fold_convert (tem_type, tem);
12989 }
12990 else
12991 tem = NULL;
12992 }
12993
12994 if (tem)
12995 return fold_convert (type,
12996 fold_build2 (BIT_AND_EXPR,
12997 TREE_TYPE (tem), tem,
12998 fold_convert (TREE_TYPE (tem),
12999 arg1)));
13000 }
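/* Illustrative example (added note): with 32-bit int a,
   "a < 0 ? INT_MIN : 0" folds to "a & INT_MIN", selecting the
   sign bit of a without a branch.  */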
13001
13002 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13003 already handled above. */
13004 if (TREE_CODE (arg0) == BIT_AND_EXPR
13005 && integer_onep (TREE_OPERAND (arg0, 1))
13006 && integer_zerop (op2)
13007 && integer_pow2p (arg1))
13008 {
13009 tree tem = TREE_OPERAND (arg0, 0);
13010 STRIP_NOPS (tem);
13011 if (TREE_CODE (tem) == RSHIFT_EXPR
13012 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13013 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13014 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13015 return fold_build2 (BIT_AND_EXPR, type,
13016 TREE_OPERAND (tem, 0), arg1);
13017 }
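/* Illustrative example (added note): "((a >> 3) & 1) ? 8 : 0"
   folds to "a & 8", since the condition tests exactly the bit
   that the arms select.  */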
13018
13019 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13020 is probably obsolete because the first operand should be a
13021 truth value (that's why we have the two cases above), but let's
13022 leave it in until we can confirm this for all front-ends. */
13023 if (integer_zerop (op2)
13024 && TREE_CODE (arg0) == NE_EXPR
13025 && integer_zerop (TREE_OPERAND (arg0, 1))
13026 && integer_pow2p (arg1)
13027 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13028 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13029 arg1, OEP_ONLY_CONST))
13030 return pedantic_non_lvalue (fold_convert (type,
13031 TREE_OPERAND (arg0, 0)));
13032
13033 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13034 if (integer_zerop (op2)
13035 && truth_value_p (TREE_CODE (arg0))
13036 && truth_value_p (TREE_CODE (arg1)))
13037 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13038 fold_convert (type, arg0),
13039 arg1);
13040
13041 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13042 if (integer_onep (op2)
13043 && truth_value_p (TREE_CODE (arg0))
13044 && truth_value_p (TREE_CODE (arg1)))
13045 {
13046 /* Only perform transformation if ARG0 is easily inverted. */
13047 tem = fold_truth_not_expr (arg0);
13048 if (tem)
13049 return fold_build2 (TRUTH_ORIF_EXPR, type,
13050 fold_convert (type, tem),
13051 arg1);
13052 }
13053
13054 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13055 if (integer_zerop (arg1)
13056 && truth_value_p (TREE_CODE (arg0))
13057 && truth_value_p (TREE_CODE (op2)))
13058 {
13059 /* Only perform transformation if ARG0 is easily inverted. */
13060 tem = fold_truth_not_expr (arg0);
13061 if (tem)
13062 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13063 fold_convert (type, tem),
13064 op2);
13065 }
13066
13067 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13068 if (integer_onep (arg1)
13069 && truth_value_p (TREE_CODE (arg0))
13070 && truth_value_p (TREE_CODE (op2)))
13071 return fold_build2 (TRUTH_ORIF_EXPR, type,
13072 fold_convert (type, arg0),
13073 op2);
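/* Illustrative examples (added note) for the four truth-value
   folds above: "a ? b : 0" becomes "a && b", "a ? b : 1"
   becomes "!a || b", "a ? 0 : b" becomes "!a && b" and
   "a ? 1 : b" becomes "a || b".  */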
13074
13075 return NULL_TREE;
13076
13077 case CALL_EXPR:
13078 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13079 of fold_ternary on them. */
13080 gcc_unreachable ();
13081
13082 case BIT_FIELD_REF:
13083 if ((TREE_CODE (arg0) == VECTOR_CST
13084 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13085 && type == TREE_TYPE (TREE_TYPE (arg0)))
13086 {
13087 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13088 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13089
13090 if (width != 0
13091 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13092 && (idx % width) == 0
13093 && (idx = idx / width)
13094 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13095 {
13096 tree elements = NULL_TREE;
13097
13098 if (TREE_CODE (arg0) == VECTOR_CST)
13099 elements = TREE_VECTOR_CST_ELTS (arg0);
13100 else
13101 {
13102 unsigned HOST_WIDE_INT idx;
13103 tree value;
13104
13105 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13106 elements = tree_cons (NULL_TREE, value, elements);
13107 }
13108 while (idx-- > 0 && elements)
13109 elements = TREE_CHAIN (elements);
13110 if (elements)
13111 return TREE_VALUE (elements);
13112 else
13113 return fold_convert (type, integer_zero_node);
13114 }
13115 }
13116
13117 /* A bit-field-ref that referenced the full argument can be stripped. */
13118 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13119 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13120 && integer_zerop (op2))
13121 return fold_convert (type, arg0);
13122
13123 return NULL_TREE;
13124
13125 default:
13126 return NULL_TREE;
13127 } /* switch (code) */
13128 }
13129
13130 /* Perform constant folding and related simplification of EXPR.
13131 The related simplifications include x*1 => x, x*0 => 0, etc.,
13132 and application of the associative law.
13133 NOP_EXPR conversions may be removed freely (as long as we
13134 are careful not to change the type of the overall expression).
13135 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13136 but we can constant-fold them if they have constant operands. */
13137
13138 #ifdef ENABLE_FOLD_CHECKING
13139 # define fold(x) fold_1 (x)
13140 static tree fold_1 (tree);
13141 static
13142 #endif
13143 tree
13144 fold (tree expr)
13145 {
13146 const tree t = expr;
13147 enum tree_code code = TREE_CODE (t);
13148 enum tree_code_class kind = TREE_CODE_CLASS (code);
13149 tree tem;
13150
13151 /* Return right away if a constant. */
13152 if (kind == tcc_constant)
13153 return t;
13154
13155 /* CALL_EXPR-like objects with variable numbers of operands are
13156 treated specially. */
13157 if (kind == tcc_vl_exp)
13158 {
13159 if (code == CALL_EXPR)
13160 {
13161 tem = fold_call_expr (expr, false);
13162 return tem ? tem : expr;
13163 }
13164 return expr;
13165 }
13166
13167 if (IS_EXPR_CODE_CLASS (kind))
13168 {
13169 tree type = TREE_TYPE (t);
13170 tree op0, op1, op2;
13171
13172 switch (TREE_CODE_LENGTH (code))
13173 {
13174 case 1:
13175 op0 = TREE_OPERAND (t, 0);
13176 tem = fold_unary (code, type, op0);
13177 return tem ? tem : expr;
13178 case 2:
13179 op0 = TREE_OPERAND (t, 0);
13180 op1 = TREE_OPERAND (t, 1);
13181 tem = fold_binary (code, type, op0, op1);
13182 return tem ? tem : expr;
13183 case 3:
13184 op0 = TREE_OPERAND (t, 0);
13185 op1 = TREE_OPERAND (t, 1);
13186 op2 = TREE_OPERAND (t, 2);
13187 tem = fold_ternary (code, type, op0, op1, op2);
13188 return tem ? tem : expr;
13189 default:
13190 break;
13191 }
13192 }
13193
13194 switch (code)
13195 {
13196 case ARRAY_REF:
13197 {
13198 tree op0 = TREE_OPERAND (t, 0);
13199 tree op1 = TREE_OPERAND (t, 1);
13200
13201 if (TREE_CODE (op1) == INTEGER_CST
13202 && TREE_CODE (op0) == CONSTRUCTOR
13203 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13204 {
13205 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13206 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13207 unsigned HOST_WIDE_INT begin = 0;
13208
13209 /* Find a matching index by means of a binary search. */
13210 while (begin != end)
13211 {
13212 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13213 tree index = VEC_index (constructor_elt, elts, middle)->index;
13214
13215 if (TREE_CODE (index) == INTEGER_CST
13216 && tree_int_cst_lt (index, op1))
13217 begin = middle + 1;
13218 else if (TREE_CODE (index) == INTEGER_CST
13219 && tree_int_cst_lt (op1, index))
13220 end = middle;
13221 else if (TREE_CODE (index) == RANGE_EXPR
13222 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13223 begin = middle + 1;
13224 else if (TREE_CODE (index) == RANGE_EXPR
13225 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13226 end = middle;
13227 else
13228 return VEC_index (constructor_elt, elts, middle)->value;
13229 }
13230 }
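/* Illustrative example (added note): an ARRAY_REF of a constant
   CONSTRUCTOR, e.g. the element at index 1 of {10, 20, 30},
   folds to 20 once the binary search above locates the matching
   (possibly RANGE_EXPR) index.  */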
13231
13232 return t;
13233 }
13234
13235 case CONST_DECL:
13236 return fold (DECL_INITIAL (t));
13237
13238 default:
13239 return t;
13240 } /* switch (code) */
13241 }
13242
13243 #ifdef ENABLE_FOLD_CHECKING
13244 #undef fold
13245
13246 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13247 static void fold_check_failed (const_tree, const_tree);
13248 void print_fold_checksum (const_tree);
13249
13250 /* When --enable-checking=fold, compute a digest of expr before
13251 and after the actual fold call, to verify that fold did not
13252 accidentally change the original expr. */
13253
13254 tree
13255 fold (tree expr)
13256 {
13257 tree ret;
13258 struct md5_ctx ctx;
13259 unsigned char checksum_before[16], checksum_after[16];
13260 htab_t ht;
13261
13262 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13263 md5_init_ctx (&ctx);
13264 fold_checksum_tree (expr, &ctx, ht);
13265 md5_finish_ctx (&ctx, checksum_before);
13266 htab_empty (ht);
13267
13268 ret = fold_1 (expr);
13269
13270 md5_init_ctx (&ctx);
13271 fold_checksum_tree (expr, &ctx, ht);
13272 md5_finish_ctx (&ctx, checksum_after);
13273 htab_delete (ht);
13274
13275 if (memcmp (checksum_before, checksum_after, 16))
13276 fold_check_failed (expr, ret);
13277
13278 return ret;
13279 }
13280
13281 void
13282 print_fold_checksum (const_tree expr)
13283 {
13284 struct md5_ctx ctx;
13285 unsigned char checksum[16], cnt;
13286 htab_t ht;
13287
13288 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13289 md5_init_ctx (&ctx);
13290 fold_checksum_tree (expr, &ctx, ht);
13291 md5_finish_ctx (&ctx, checksum);
13292 htab_delete (ht);
13293 for (cnt = 0; cnt < 16; ++cnt)
13294 fprintf (stderr, "%02x", checksum[cnt]);
13295 putc ('\n', stderr);
13296 }
13297
13298 static void
13299 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13300 {
13301 internal_error ("fold check: original tree changed by fold");
13302 }
13303
13304 static void
13305 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13306 {
13307 const void **slot;
13308 enum tree_code code;
13309 struct tree_function_decl buf;
13310 int i, len;
13311
13312 recursive_label:
13313
13314 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13315 <= sizeof (struct tree_function_decl))
13316 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13317 if (expr == NULL)
13318 return;
13319 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13320 if (*slot != NULL)
13321 return;
13322 *slot = expr;
13323 code = TREE_CODE (expr);
13324 if (TREE_CODE_CLASS (code) == tcc_declaration
13325 && DECL_ASSEMBLER_NAME_SET_P (expr))
13326 {
13327 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13328 memcpy ((char *) &buf, expr, tree_size (expr));
13329 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13330 expr = (tree) &buf;
13331 }
13332 else if (TREE_CODE_CLASS (code) == tcc_type
13333 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13334 || TYPE_CACHED_VALUES_P (expr)
13335 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13336 {
13337 /* Allow these fields to be modified. */
13338 tree tmp;
13339 memcpy ((char *) &buf, expr, tree_size (expr));
13340 expr = tmp = (tree) &buf;
13341 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13342 TYPE_POINTER_TO (tmp) = NULL;
13343 TYPE_REFERENCE_TO (tmp) = NULL;
13344 if (TYPE_CACHED_VALUES_P (tmp))
13345 {
13346 TYPE_CACHED_VALUES_P (tmp) = 0;
13347 TYPE_CACHED_VALUES (tmp) = NULL;
13348 }
13349 }
13350 md5_process_bytes (expr, tree_size (expr), ctx);
13351 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13352 if (TREE_CODE_CLASS (code) != tcc_type
13353 && TREE_CODE_CLASS (code) != tcc_declaration
13354 && code != TREE_LIST
13355 && code != SSA_NAME)
13356 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13357 switch (TREE_CODE_CLASS (code))
13358 {
13359 case tcc_constant:
13360 switch (code)
13361 {
13362 case STRING_CST:
13363 md5_process_bytes (TREE_STRING_POINTER (expr),
13364 TREE_STRING_LENGTH (expr), ctx);
13365 break;
13366 case COMPLEX_CST:
13367 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13368 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13369 break;
13370 case VECTOR_CST:
13371 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13372 break;
13373 default:
13374 break;
13375 }
13376 break;
13377 case tcc_exceptional:
13378 switch (code)
13379 {
13380 case TREE_LIST:
13381 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13382 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13383 expr = TREE_CHAIN (expr);
13384 goto recursive_label;
13385 break;
13386 case TREE_VEC:
13387 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13388 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13389 break;
13390 default:
13391 break;
13392 }
13393 break;
13394 case tcc_expression:
13395 case tcc_reference:
13396 case tcc_comparison:
13397 case tcc_unary:
13398 case tcc_binary:
13399 case tcc_statement:
13400 case tcc_vl_exp:
13401 len = TREE_OPERAND_LENGTH (expr);
13402 for (i = 0; i < len; ++i)
13403 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13404 break;
13405 case tcc_declaration:
13406 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13407 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13408 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13409 {
13410 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13411 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13412 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13413 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13414 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13415 }
13416 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13417 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13418
13419 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13420 {
13421 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13422 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13423 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13424 }
13425 break;
13426 case tcc_type:
13427 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13428 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13429 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13430 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13431 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13432 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13433 if (INTEGRAL_TYPE_P (expr)
13434 || SCALAR_FLOAT_TYPE_P (expr))
13435 {
13436 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13437 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13438 }
13439 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13440 if (TREE_CODE (expr) == RECORD_TYPE
13441 || TREE_CODE (expr) == UNION_TYPE
13442 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13443 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13444 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13445 break;
13446 default:
13447 break;
13448 }
13449 }
13450
13451 /* Helper function for outputting the checksum of a tree T. When
13452 debugging with gdb, you can "define mynext" to be "next" followed
13453 by "call debug_fold_checksum (op0)", then just trace down till the
13454 outputs differ. */
13455
13456 void
13457 debug_fold_checksum (const_tree t)
13458 {
13459 int i;
13460 unsigned char checksum[16];
13461 struct md5_ctx ctx;
13462 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13463
13464 md5_init_ctx (&ctx);
13465 fold_checksum_tree (t, &ctx, ht);
13466 md5_finish_ctx (&ctx, checksum);
13467 htab_empty (ht);
13468
13469 for (i = 0; i < 16; i++)
13470 fprintf (stderr, "%d ", checksum[i]);
13471
13472 fprintf (stderr, "\n");
13473 }
13474
13475 #endif
13476
13477 /* Fold a unary tree expression with code CODE of type TYPE with an
13478 operand OP0. Return a folded expression if successful. Otherwise,
13479 return a tree expression with code CODE of type TYPE with an
13480 operand OP0. */
13481
13482 tree
13483 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13484 {
13485 tree tem;
13486 #ifdef ENABLE_FOLD_CHECKING
13487 unsigned char checksum_before[16], checksum_after[16];
13488 struct md5_ctx ctx;
13489 htab_t ht;
13490
13491 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13492 md5_init_ctx (&ctx);
13493 fold_checksum_tree (op0, &ctx, ht);
13494 md5_finish_ctx (&ctx, checksum_before);
13495 htab_empty (ht);
13496 #endif
13497
13498 tem = fold_unary (code, type, op0);
13499 if (!tem)
13500 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13501
13502 #ifdef ENABLE_FOLD_CHECKING
13503 md5_init_ctx (&ctx);
13504 fold_checksum_tree (op0, &ctx, ht);
13505 md5_finish_ctx (&ctx, checksum_after);
13506 htab_delete (ht);
13507
13508 if (memcmp (checksum_before, checksum_after, 16))
13509 fold_check_failed (op0, tem);
13510 #endif
13511 return tem;
13512 }
13513
13514 /* Fold a binary tree expression with code CODE of type TYPE with
13515 operands OP0 and OP1. Return a folded expression if successful.
13516 Otherwise, return a tree expression with code CODE of type TYPE
13517 with operands OP0 and OP1. */
13518
13519 tree
13520 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13521 MEM_STAT_DECL)
13522 {
13523 tree tem;
13524 #ifdef ENABLE_FOLD_CHECKING
13525 unsigned char checksum_before_op0[16],
13526 checksum_before_op1[16],
13527 checksum_after_op0[16],
13528 checksum_after_op1[16];
13529 struct md5_ctx ctx;
13530 htab_t ht;
13531
13532 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13533 md5_init_ctx (&ctx);
13534 fold_checksum_tree (op0, &ctx, ht);
13535 md5_finish_ctx (&ctx, checksum_before_op0);
13536 htab_empty (ht);
13537
13538 md5_init_ctx (&ctx);
13539 fold_checksum_tree (op1, &ctx, ht);
13540 md5_finish_ctx (&ctx, checksum_before_op1);
13541 htab_empty (ht);
13542 #endif
13543
13544 tem = fold_binary (code, type, op0, op1);
13545 if (!tem)
13546 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13547
13548 #ifdef ENABLE_FOLD_CHECKING
13549 md5_init_ctx (&ctx);
13550 fold_checksum_tree (op0, &ctx, ht);
13551 md5_finish_ctx (&ctx, checksum_after_op0);
13552 htab_empty (ht);
13553
13554 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13555 fold_check_failed (op0, tem);
13556
13557 md5_init_ctx (&ctx);
13558 fold_checksum_tree (op1, &ctx, ht);
13559 md5_finish_ctx (&ctx, checksum_after_op1);
13560 htab_delete (ht);
13561
13562 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13563 fold_check_failed (op1, tem);
13564 #endif
13565 return tem;
13566 }
13567
13568 /* Fold a ternary tree expression with code CODE of type TYPE with
13569 operands OP0, OP1, and OP2. Return a folded expression if
13570 successful. Otherwise, return a tree expression with code CODE of
13571 type TYPE with operands OP0, OP1, and OP2. */
13572
13573 tree
13574 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13575 MEM_STAT_DECL)
13576 {
13577 tree tem;
13578 #ifdef ENABLE_FOLD_CHECKING
13579 unsigned char checksum_before_op0[16],
13580 checksum_before_op1[16],
13581 checksum_before_op2[16],
13582 checksum_after_op0[16],
13583 checksum_after_op1[16],
13584 checksum_after_op2[16];
13585 struct md5_ctx ctx;
13586 htab_t ht;
13587
13588 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13589 md5_init_ctx (&ctx);
13590 fold_checksum_tree (op0, &ctx, ht);
13591 md5_finish_ctx (&ctx, checksum_before_op0);
13592 htab_empty (ht);
13593
13594 md5_init_ctx (&ctx);
13595 fold_checksum_tree (op1, &ctx, ht);
13596 md5_finish_ctx (&ctx, checksum_before_op1);
13597 htab_empty (ht);
13598
13599 md5_init_ctx (&ctx);
13600 fold_checksum_tree (op2, &ctx, ht);
13601 md5_finish_ctx (&ctx, checksum_before_op2);
13602 htab_empty (ht);
13603 #endif
13604
13605 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13606 tem = fold_ternary (code, type, op0, op1, op2);
13607 if (!tem)
13608 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13609
13610 #ifdef ENABLE_FOLD_CHECKING
13611 md5_init_ctx (&ctx);
13612 fold_checksum_tree (op0, &ctx, ht);
13613 md5_finish_ctx (&ctx, checksum_after_op0);
13614 htab_empty (ht);
13615
13616 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13617 fold_check_failed (op0, tem);
13618
13619 md5_init_ctx (&ctx);
13620 fold_checksum_tree (op1, &ctx, ht);
13621 md5_finish_ctx (&ctx, checksum_after_op1);
13622 htab_empty (ht);
13623
13624 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13625 fold_check_failed (op1, tem);
13626
13627 md5_init_ctx (&ctx);
13628 fold_checksum_tree (op2, &ctx, ht);
13629 md5_finish_ctx (&ctx, checksum_after_op2);
13630 htab_delete (ht);
13631
13632 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13633 fold_check_failed (op2, tem);
13634 #endif
13635 return tem;
13636 }
13637
13638 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
13639    arguments in ARGARRAY, and a null static chain.
13640 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13641 of type TYPE from the given operands as constructed by build_call_array. */
13642
13643 tree
13644 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13645 {
13646 tree tem;
13647 #ifdef ENABLE_FOLD_CHECKING
13648 unsigned char checksum_before_fn[16],
13649 checksum_before_arglist[16],
13650 checksum_after_fn[16],
13651 checksum_after_arglist[16];
13652 struct md5_ctx ctx;
13653 htab_t ht;
13654 int i;
13655
13656 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13657 md5_init_ctx (&ctx);
13658 fold_checksum_tree (fn, &ctx, ht);
13659 md5_finish_ctx (&ctx, checksum_before_fn);
13660 htab_empty (ht);
13661
13662 md5_init_ctx (&ctx);
13663 for (i = 0; i < nargs; i++)
13664 fold_checksum_tree (argarray[i], &ctx, ht);
13665 md5_finish_ctx (&ctx, checksum_before_arglist);
13666 htab_empty (ht);
13667 #endif
13668
13669 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13670
13671 #ifdef ENABLE_FOLD_CHECKING
13672 md5_init_ctx (&ctx);
13673 fold_checksum_tree (fn, &ctx, ht);
13674 md5_finish_ctx (&ctx, checksum_after_fn);
13675 htab_empty (ht);
13676
13677 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13678 fold_check_failed (fn, tem);
13679
13680 md5_init_ctx (&ctx);
13681 for (i = 0; i < nargs; i++)
13682 fold_checksum_tree (argarray[i], &ctx, ht);
13683 md5_finish_ctx (&ctx, checksum_after_arglist);
13684 htab_delete (ht);
13685
13686 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13687 fold_check_failed (NULL_TREE, tem);
13688 #endif
13689 return tem;
13690 }
13691
13692 /* Perform constant folding and related simplification of an initializer
13693    expression.  The functions below behave identically to "fold_buildN" but
13694    ignore potential run-time traps and exceptions that fold must preserve.  */
13695
13696 #define START_FOLD_INIT \
13697 int saved_signaling_nans = flag_signaling_nans;\
13698 int saved_trapping_math = flag_trapping_math;\
13699 int saved_rounding_math = flag_rounding_math;\
13700 int saved_trapv = flag_trapv;\
13701 int saved_folding_initializer = folding_initializer;\
13702 flag_signaling_nans = 0;\
13703 flag_trapping_math = 0;\
13704 flag_rounding_math = 0;\
13705 flag_trapv = 0;\
13706 folding_initializer = 1;
13707
13708 #define END_FOLD_INIT \
13709 flag_signaling_nans = saved_signaling_nans;\
13710 flag_trapping_math = saved_trapping_math;\
13711 flag_rounding_math = saved_rounding_math;\
13712 flag_trapv = saved_trapv;\
13713 folding_initializer = saved_folding_initializer;
13714
13715 tree
13716 fold_build1_initializer (enum tree_code code, tree type, tree op)
13717 {
13718 tree result;
13719 START_FOLD_INIT;
13720
13721 result = fold_build1 (code, type, op);
13722
13723 END_FOLD_INIT;
13724 return result;
13725 }
13726
13727 tree
13728 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13729 {
13730 tree result;
13731 START_FOLD_INIT;
13732
13733 result = fold_build2 (code, type, op0, op1);
13734
13735 END_FOLD_INIT;
13736 return result;
13737 }
13738
13739 tree
13740 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13741 tree op2)
13742 {
13743 tree result;
13744 START_FOLD_INIT;
13745
13746 result = fold_build3 (code, type, op0, op1, op2);
13747
13748 END_FOLD_INIT;
13749 return result;
13750 }
13751
13752 tree
13753 fold_build_call_array_initializer (tree type, tree fn,
13754 int nargs, tree *argarray)
13755 {
13756 tree result;
13757 START_FOLD_INIT;
13758
13759 result = fold_build_call_array (type, fn, nargs, argarray);
13760
13761 END_FOLD_INIT;
13762 return result;
13763 }
13764
13765 #undef START_FOLD_INIT
13766 #undef END_FOLD_INIT
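
/* Added example (illustrative, not part of the original source): the
   initializer variants above let front ends fold expressions in static
   initializers that plain fold must leave alone.  With -ftrapping-math,
   folding

       static double d = 1.0 / 0.0;

   to +Inf would normally be suppressed because the division could trap
   at run time; in a static initializer there is no run time, so the
   wrappers temporarily clear flag_trapping_math and related flags
   around the call to fold_buildN and the folding may proceed.  */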
13767
13768 /* Determine whether the first argument is a multiple of the second.
13769    Return 0 if it is not, or if we cannot easily determine it to be.
13770
13771 An example of the sort of thing we care about (at this point; this routine
13772 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13773 fold cases do now) is discovering that
13774
13775 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13776
13777 is a multiple of
13778
13779 SAVE_EXPR (J * 8)
13780
13781 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13782
13783 This code also handles discovering that
13784
13785 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13786
13787 is a multiple of 8 so we don't have to worry about dealing with a
13788 possible remainder.
13789
13790 Note that we *look* inside a SAVE_EXPR only to determine how it was
13791 calculated; it is not safe for fold to do much of anything else with the
13792 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13793 at run time. For example, the latter example above *cannot* be implemented
13794 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13795 evaluation time of the original SAVE_EXPR is not necessarily the same at
13796 the time the new expression is evaluated. The only optimization of this
13797 sort that would be valid is changing
13798
13799 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13800
13801 divided by 8 to
13802
13803 SAVE_EXPR (I) * SAVE_EXPR (J)
13804
13805 (where the same SAVE_EXPR (J) is used in the original and the
13806 transformed version). */
13807
13808 int
13809 multiple_of_p (tree type, const_tree top, const_tree bottom)
13810 {
13811 if (operand_equal_p (top, bottom, 0))
13812 return 1;
13813
13814 if (TREE_CODE (type) != INTEGER_TYPE)
13815 return 0;
13816
13817 switch (TREE_CODE (top))
13818 {
13819 case BIT_AND_EXPR:
13820       /* Bitwise AND preserves a multiple only for a power-of-two BOTTOM:
13821 	 if either operand is a multiple of BOTTOM then so is TOP.  */
13822 if (!integer_pow2p (bottom))
13823 return 0;
13824 /* FALLTHRU */
13825
13826 case MULT_EXPR:
13827 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13828 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13829
13830 case PLUS_EXPR:
13831 case MINUS_EXPR:
13832 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13833 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13834
13835 case LSHIFT_EXPR:
13836 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13837 {
13838 tree op1, t1;
13839
13840 op1 = TREE_OPERAND (top, 1);
13841 /* const_binop may not detect overflow correctly,
13842 so check for it explicitly here. */
13843 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13844 > TREE_INT_CST_LOW (op1)
13845 && TREE_INT_CST_HIGH (op1) == 0
13846 && 0 != (t1 = fold_convert (type,
13847 const_binop (LSHIFT_EXPR,
13848 size_one_node,
13849 op1, 0)))
13850 && !TREE_OVERFLOW (t1))
13851 return multiple_of_p (type, t1, bottom);
13852 }
13853 return 0;
13854
13855 case NOP_EXPR:
13856 /* Can't handle conversions from non-integral or wider integral type. */
13857 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13858 || (TYPE_PRECISION (type)
13859 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13860 return 0;
13861
13862       /* ... fall through ...  */
13863
13864 case SAVE_EXPR:
13865 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13866
13867 case INTEGER_CST:
13868 if (TREE_CODE (bottom) != INTEGER_CST
13869 || integer_zerop (bottom)
13870 || (TYPE_UNSIGNED (type)
13871 && (tree_int_cst_sgn (top) < 0
13872 || tree_int_cst_sgn (bottom) < 0)))
13873 return 0;
13874 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13875 top, bottom, 0));
13876
13877 default:
13878 return 0;
13879 }
13880 }
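
/* Added worked example (illustrative, not part of the original source):
   for TOP = i * 8 + 24 and BOTTOM = 8, the PLUS_EXPR case requires both
   addends to be multiples of 8; the MULT_EXPR case accepts i * 8
   because one factor equals BOTTOM, and the INTEGER_CST case accepts 24
   because 24 % 8 == 0, so multiple_of_p returns 1.  For
   TOP = i * 8 + 4 the INTEGER_CST case fails and the result is 0.  */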
13881
13882 /* Return true if an expression with code CODE and type TYPE is known to be non-negative.  */
13883
13884 static bool
13885 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13886 {
13887 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13888 && truth_value_p (code))
13889 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13890        have a signed:1 type (where the values are -1 and 0).  */
13891 return true;
13892 return false;
13893 }
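
/* Added example for the signed:1 caveat above (illustrative, not part
   of the original source):

       struct s { signed int b : 1; };

   such a bit-field holds only the values 0 and -1, so a truth-valued
   expression of that type need not be non-negative; for every other
   type, comparisons and TRUTH_* operators yield only 0 or 1.  */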
13894
13895 /* Return true if (CODE OP0) is known to be non-negative. If the return
13896 value is based on the assumption that signed overflow is undefined,
13897 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13898 *STRICT_OVERFLOW_P. */
13899
13900 bool
13901 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13902 bool *strict_overflow_p)
13903 {
13904 if (TYPE_UNSIGNED (type))
13905 return true;
13906
13907 switch (code)
13908 {
13909 case ABS_EXPR:
13910       /* We can't return true when overflow wraps, because
13911 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
13912 if (!INTEGRAL_TYPE_P (type))
13913 return true;
13914 if (TYPE_OVERFLOW_UNDEFINED (type))
13915 {
13916 *strict_overflow_p = true;
13917 return true;
13918 }
13919 break;
13920
13921 case NON_LVALUE_EXPR:
13922 case FLOAT_EXPR:
13923 case FIX_TRUNC_EXPR:
13924 return tree_expr_nonnegative_warnv_p (op0,
13925 strict_overflow_p);
13926
13927 case NOP_EXPR:
13928 {
13929 tree inner_type = TREE_TYPE (op0);
13930 tree outer_type = type;
13931
13932 if (TREE_CODE (outer_type) == REAL_TYPE)
13933 {
13934 if (TREE_CODE (inner_type) == REAL_TYPE)
13935 return tree_expr_nonnegative_warnv_p (op0,
13936 strict_overflow_p);
13937 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13938 {
13939 if (TYPE_UNSIGNED (inner_type))
13940 return true;
13941 return tree_expr_nonnegative_warnv_p (op0,
13942 strict_overflow_p);
13943 }
13944 }
13945 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13946 {
13947 if (TREE_CODE (inner_type) == REAL_TYPE)
13948 return tree_expr_nonnegative_warnv_p (op0,
13949 strict_overflow_p);
13950 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13951 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13952 && TYPE_UNSIGNED (inner_type);
13953 }
13954 }
13955 break;
13956
13957 default:
13958 return tree_simple_nonnegative_warnv_p (code, type);
13959 }
13960
13961 /* We don't know sign of `t', so be conservative and return false. */
13962 return false;
13963 }
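
/* Added example (illustrative, not part of the original source): the
   ABS_EXPR case above cannot simply answer true for signed integral
   types because with wrapping overflow and 32-bit int

       ABS_EXPR <INT_MIN>  ==  INT_MIN,

   the negation wrapping back to the most negative value.  Only when
   signed overflow is undefined may ABS_EXPR be assumed non-negative,
   and *strict_overflow_p then records that the answer depends on that
   assumption.  */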
13964
13965 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13966 value is based on the assumption that signed overflow is undefined,
13967 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13968 *STRICT_OVERFLOW_P. */
13969
13970 bool
13971 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13972 tree op1, bool *strict_overflow_p)
13973 {
13974 if (TYPE_UNSIGNED (type))
13975 return true;
13976
13977 switch (code)
13978 {
13979 case POINTER_PLUS_EXPR:
13980 case PLUS_EXPR:
13981 if (FLOAT_TYPE_P (type))
13982 return (tree_expr_nonnegative_warnv_p (op0,
13983 strict_overflow_p)
13984 && tree_expr_nonnegative_warnv_p (op1,
13985 strict_overflow_p));
13986
13987 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13988 both unsigned and at least 2 bits shorter than the result. */
13989 if (TREE_CODE (type) == INTEGER_TYPE
13990 && TREE_CODE (op0) == NOP_EXPR
13991 && TREE_CODE (op1) == NOP_EXPR)
13992 {
13993 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13994 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13995 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13996 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13997 {
13998 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13999 TYPE_PRECISION (inner2)) + 1;
14000 return prec < TYPE_PRECISION (type);
14001 }
14002 }
14003 break;
14004
14005 case MULT_EXPR:
14006 if (FLOAT_TYPE_P (type))
14007 {
14008 /* x * x for floating point x is always non-negative. */
14009 if (operand_equal_p (op0, op1, 0))
14010 return true;
14011 return (tree_expr_nonnegative_warnv_p (op0,
14012 strict_overflow_p)
14013 && tree_expr_nonnegative_warnv_p (op1,
14014 strict_overflow_p));
14015 }
14016
14017 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14018 	 both unsigned and their combined precision is less than that of the result.  */
14019 if (TREE_CODE (type) == INTEGER_TYPE
14020 && TREE_CODE (op0) == NOP_EXPR
14021 && TREE_CODE (op1) == NOP_EXPR)
14022 {
14023 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14024 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14025 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14026 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14027 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14028 < TYPE_PRECISION (type);
14029 }
14030 return false;
14031
14032 case BIT_AND_EXPR:
14033 case MAX_EXPR:
14034 return (tree_expr_nonnegative_warnv_p (op0,
14035 strict_overflow_p)
14036 || tree_expr_nonnegative_warnv_p (op1,
14037 strict_overflow_p));
14038
14039 case BIT_IOR_EXPR:
14040 case BIT_XOR_EXPR:
14041 case MIN_EXPR:
14042 case RDIV_EXPR:
14043 case TRUNC_DIV_EXPR:
14044 case CEIL_DIV_EXPR:
14045 case FLOOR_DIV_EXPR:
14046 case ROUND_DIV_EXPR:
14047 return (tree_expr_nonnegative_warnv_p (op0,
14048 strict_overflow_p)
14049 && tree_expr_nonnegative_warnv_p (op1,
14050 strict_overflow_p));
14051
14052 case TRUNC_MOD_EXPR:
14053 case CEIL_MOD_EXPR:
14054 case FLOOR_MOD_EXPR:
14055 case ROUND_MOD_EXPR:
14056 return tree_expr_nonnegative_warnv_p (op0,
14057 strict_overflow_p);
14058 default:
14059 return tree_simple_nonnegative_warnv_p (code, type);
14060 }
14061
14062 /* We don't know sign of `t', so be conservative and return false. */
14063 return false;
14064 }
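
/* Added example (illustrative, not part of the original source): for
   the PLUS_EXPR zero-extension rule above, with 32-bit int and 16-bit
   unsigned short,

       (int) us1 + (int) us2

   is at most 65535 + 65535 = 131070, which needs only 17 bits, so the
   sum can neither overflow nor become negative.  The MULT_EXPR rule is
   analogous but stricter: 65535 * 65535 needs 32 bits, which is not
   less than the precision of int, so that product is rejected.  */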
14065
14066 /* Return true if T is known to be non-negative. If the return
14067 value is based on the assumption that signed overflow is undefined,
14068 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14069 *STRICT_OVERFLOW_P. */
14070
14071 bool
14072 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14073 {
14074 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14075 return true;
14076
14077 switch (TREE_CODE (t))
14078 {
14079 case INTEGER_CST:
14080 return tree_int_cst_sgn (t) >= 0;
14081
14082 case REAL_CST:
14083 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14084
14085 case FIXED_CST:
14086 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14087
14088 case COND_EXPR:
14089 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14090 strict_overflow_p)
14091 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14092 strict_overflow_p));
14093 default:
14094 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14095 TREE_TYPE (t));
14096 }
14097 /* We don't know sign of `t', so be conservative and return false. */
14098 return false;
14099 }
14100
14101 /* Return true if T is known to be non-negative. If the return
14102 value is based on the assumption that signed overflow is undefined,
14103 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14104 *STRICT_OVERFLOW_P. */
14105
14106 bool
14107 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14108 tree arg0, tree arg1, bool *strict_overflow_p)
14109 {
14110 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14111 switch (DECL_FUNCTION_CODE (fndecl))
14112 {
14113 CASE_FLT_FN (BUILT_IN_ACOS):
14114 CASE_FLT_FN (BUILT_IN_ACOSH):
14115 CASE_FLT_FN (BUILT_IN_CABS):
14116 CASE_FLT_FN (BUILT_IN_COSH):
14117 CASE_FLT_FN (BUILT_IN_ERFC):
14118 CASE_FLT_FN (BUILT_IN_EXP):
14119 CASE_FLT_FN (BUILT_IN_EXP10):
14120 CASE_FLT_FN (BUILT_IN_EXP2):
14121 CASE_FLT_FN (BUILT_IN_FABS):
14122 CASE_FLT_FN (BUILT_IN_FDIM):
14123 CASE_FLT_FN (BUILT_IN_HYPOT):
14124 CASE_FLT_FN (BUILT_IN_POW10):
14125 CASE_INT_FN (BUILT_IN_FFS):
14126 CASE_INT_FN (BUILT_IN_PARITY):
14127 CASE_INT_FN (BUILT_IN_POPCOUNT):
14128 case BUILT_IN_BSWAP32:
14129 case BUILT_IN_BSWAP64:
14130 /* Always true. */
14131 return true;
14132
14133 CASE_FLT_FN (BUILT_IN_SQRT):
14134 /* sqrt(-0.0) is -0.0. */
14135 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14136 return true;
14137 return tree_expr_nonnegative_warnv_p (arg0,
14138 strict_overflow_p);
14139
14140 CASE_FLT_FN (BUILT_IN_ASINH):
14141 CASE_FLT_FN (BUILT_IN_ATAN):
14142 CASE_FLT_FN (BUILT_IN_ATANH):
14143 CASE_FLT_FN (BUILT_IN_CBRT):
14144 CASE_FLT_FN (BUILT_IN_CEIL):
14145 CASE_FLT_FN (BUILT_IN_ERF):
14146 CASE_FLT_FN (BUILT_IN_EXPM1):
14147 CASE_FLT_FN (BUILT_IN_FLOOR):
14148 CASE_FLT_FN (BUILT_IN_FMOD):
14149 CASE_FLT_FN (BUILT_IN_FREXP):
14150 CASE_FLT_FN (BUILT_IN_LCEIL):
14151 CASE_FLT_FN (BUILT_IN_LDEXP):
14152 CASE_FLT_FN (BUILT_IN_LFLOOR):
14153 CASE_FLT_FN (BUILT_IN_LLCEIL):
14154 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14155 CASE_FLT_FN (BUILT_IN_LLRINT):
14156 CASE_FLT_FN (BUILT_IN_LLROUND):
14157 CASE_FLT_FN (BUILT_IN_LRINT):
14158 CASE_FLT_FN (BUILT_IN_LROUND):
14159 CASE_FLT_FN (BUILT_IN_MODF):
14160 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14161 CASE_FLT_FN (BUILT_IN_RINT):
14162 CASE_FLT_FN (BUILT_IN_ROUND):
14163 CASE_FLT_FN (BUILT_IN_SCALB):
14164 CASE_FLT_FN (BUILT_IN_SCALBLN):
14165 CASE_FLT_FN (BUILT_IN_SCALBN):
14166 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14167 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14168 CASE_FLT_FN (BUILT_IN_SINH):
14169 CASE_FLT_FN (BUILT_IN_TANH):
14170 CASE_FLT_FN (BUILT_IN_TRUNC):
14171 /* True if the 1st argument is nonnegative. */
14172 return tree_expr_nonnegative_warnv_p (arg0,
14173 strict_overflow_p);
14174
14175 CASE_FLT_FN (BUILT_IN_FMAX):
14176 /* True if the 1st OR 2nd arguments are nonnegative. */
14177 return (tree_expr_nonnegative_warnv_p (arg0,
14178 strict_overflow_p)
14179 || (tree_expr_nonnegative_warnv_p (arg1,
14180 strict_overflow_p)));
14181
14182 CASE_FLT_FN (BUILT_IN_FMIN):
14183 /* True if the 1st AND 2nd arguments are nonnegative. */
14184 return (tree_expr_nonnegative_warnv_p (arg0,
14185 strict_overflow_p)
14186 && (tree_expr_nonnegative_warnv_p (arg1,
14187 strict_overflow_p)));
14188
14189 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14190 /* True if the 2nd argument is nonnegative. */
14191 return tree_expr_nonnegative_warnv_p (arg1,
14192 strict_overflow_p);
14193
14194 CASE_FLT_FN (BUILT_IN_POWI):
14195 /* True if the 1st argument is nonnegative or the second
14196 argument is an even integer. */
14197 if (TREE_CODE (arg1) == INTEGER_CST
14198 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14199 return true;
14200 return tree_expr_nonnegative_warnv_p (arg0,
14201 strict_overflow_p);
14202
14203 CASE_FLT_FN (BUILT_IN_POW):
14204 /* True if the 1st argument is nonnegative or the second
14205 argument is an even integer valued real. */
14206 if (TREE_CODE (arg1) == REAL_CST)
14207 {
14208 REAL_VALUE_TYPE c;
14209 HOST_WIDE_INT n;
14210
14211 c = TREE_REAL_CST (arg1);
14212 n = real_to_integer (&c);
14213 if ((n & 1) == 0)
14214 {
14215 REAL_VALUE_TYPE cint;
14216 real_from_integer (&cint, VOIDmode, n,
14217 n < 0 ? -1 : 0, 0);
14218 if (real_identical (&c, &cint))
14219 return true;
14220 }
14221 }
14222 return tree_expr_nonnegative_warnv_p (arg0,
14223 strict_overflow_p);
14224
14225 default:
14226 break;
14227 }
14228 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14229 type);
14230 }
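
/* Added example (illustrative, not part of the original source): the
   BUILT_IN_POW case treats

       pow (x, 2.0)

   as non-negative for any x, because 2.0 is an even integer-valued
   real constant; pow (x, 2.5), by contrast, is considered non-negative
   only if x itself can be shown to be non-negative.  */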
14231
14232 /* Return true if T is known to be non-negative. If the return
14233 value is based on the assumption that signed overflow is undefined,
14234 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14235 *STRICT_OVERFLOW_P. */
14236
14237 bool
14238 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14239 {
14240 enum tree_code code = TREE_CODE (t);
14241 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14242 return true;
14243
14244 switch (code)
14245 {
14246 case TARGET_EXPR:
14247 {
14248 tree temp = TARGET_EXPR_SLOT (t);
14249 t = TARGET_EXPR_INITIAL (t);
14250
14251 /* If the initializer is non-void, then it's a normal expression
14252 that will be assigned to the slot. */
14253 if (!VOID_TYPE_P (t))
14254 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14255
14256 /* Otherwise, the initializer sets the slot in some way. One common
14257 way is an assignment statement at the end of the initializer. */
14258 while (1)
14259 {
14260 if (TREE_CODE (t) == BIND_EXPR)
14261 t = expr_last (BIND_EXPR_BODY (t));
14262 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14263 || TREE_CODE (t) == TRY_CATCH_EXPR)
14264 t = expr_last (TREE_OPERAND (t, 0));
14265 else if (TREE_CODE (t) == STATEMENT_LIST)
14266 t = expr_last (t);
14267 else
14268 break;
14269 }
14270 if (TREE_CODE (t) == MODIFY_EXPR
14271 && TREE_OPERAND (t, 0) == temp)
14272 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14273 strict_overflow_p);
14274
14275 return false;
14276 }
14277
14278 case CALL_EXPR:
14279 {
14280 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14281 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14282
14283 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14284 get_callee_fndecl (t),
14285 arg0,
14286 arg1,
14287 strict_overflow_p);
14288 }
14289 case COMPOUND_EXPR:
14290 case MODIFY_EXPR:
14291 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14292 strict_overflow_p);
14293 case BIND_EXPR:
14294 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14295 strict_overflow_p);
14296 case SAVE_EXPR:
14297 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14298 strict_overflow_p);
14299
14300 default:
14301 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14302 TREE_TYPE (t));
14303 }
14304
14305 /* We don't know sign of `t', so be conservative and return false. */
14306 return false;
14307 }
14308
14309 /* Return true if T is known to be non-negative. If the return
14310 value is based on the assumption that signed overflow is undefined,
14311 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14312 *STRICT_OVERFLOW_P. */
14313
14314 bool
14315 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14316 {
14317 enum tree_code code;
14318 if (t == error_mark_node)
14319 return false;
14320
14321 code = TREE_CODE (t);
14322 switch (TREE_CODE_CLASS (code))
14323 {
14324 case tcc_binary:
14325 case tcc_comparison:
14326 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14327 TREE_TYPE (t),
14328 TREE_OPERAND (t, 0),
14329 TREE_OPERAND (t, 1),
14330 strict_overflow_p);
14331
14332 case tcc_unary:
14333 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14334 TREE_TYPE (t),
14335 TREE_OPERAND (t, 0),
14336 strict_overflow_p);
14337
14338 case tcc_constant:
14339 case tcc_declaration:
14340 case tcc_reference:
14341 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14342
14343 default:
14344 break;
14345 }
14346
14347 switch (code)
14348 {
14349 case TRUTH_AND_EXPR:
14350 case TRUTH_OR_EXPR:
14351 case TRUTH_XOR_EXPR:
14352 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14353 TREE_TYPE (t),
14354 TREE_OPERAND (t, 0),
14355 TREE_OPERAND (t, 1),
14356 strict_overflow_p);
14357 case TRUTH_NOT_EXPR:
14358 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14359 TREE_TYPE (t),
14360 TREE_OPERAND (t, 0),
14361 strict_overflow_p);
14362
14363 case COND_EXPR:
14364 case CONSTRUCTOR:
14365 case OBJ_TYPE_REF:
14366 case ASSERT_EXPR:
14367 case ADDR_EXPR:
14368 case WITH_SIZE_EXPR:
14369 case EXC_PTR_EXPR:
14370 case SSA_NAME:
14371 case FILTER_EXPR:
14372 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14373
14374 default:
14375 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14376 }
14377 }
14378
14379 /* Return true if `t' is known to be non-negative. Handle warnings
14380 about undefined signed overflow. */
14381
14382 bool
14383 tree_expr_nonnegative_p (tree t)
14384 {
14385 bool ret, strict_overflow_p;
14386
14387 strict_overflow_p = false;
14388 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14389 if (strict_overflow_p)
14390 fold_overflow_warning (("assuming signed overflow does not occur when "
14391 "determining that expression is always "
14392 "non-negative"),
14393 WARN_STRICT_OVERFLOW_MISC);
14394 return ret;
14395 }
14396
14397
14398 /* Return true when (CODE OP0) is known to be nonzero.  For floating
14399    point we further ensure that it is not denormal.
14400    Similar logic is present in nonzero_address in rtlanal.c.
14401
14402 If the return value is based on the assumption that signed overflow
14403 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14404 change *STRICT_OVERFLOW_P. */
14405
14406 bool
14407 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14408 bool *strict_overflow_p)
14409 {
14410 switch (code)
14411 {
14412 case ABS_EXPR:
14413 return tree_expr_nonzero_warnv_p (op0,
14414 strict_overflow_p);
14415
14416 case NOP_EXPR:
14417 {
14418 tree inner_type = TREE_TYPE (op0);
14419 tree outer_type = type;
14420
14421 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14422 && tree_expr_nonzero_warnv_p (op0,
14423 strict_overflow_p));
14424 }
14425 break;
14426
14427 case NON_LVALUE_EXPR:
14428 return tree_expr_nonzero_warnv_p (op0,
14429 strict_overflow_p);
14430
14431 default:
14432 break;
14433 }
14434
14435 return false;
14436 }
14437
14438 /* Return true when (CODE OP0 OP1) is known to be nonzero.  For floating
14439    point we further ensure that it is not denormal.
14440    Similar logic is present in nonzero_address in rtlanal.c.
14441
14442 If the return value is based on the assumption that signed overflow
14443 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14444 change *STRICT_OVERFLOW_P. */
14445
14446 bool
14447 tree_binary_nonzero_warnv_p (enum tree_code code,
14448 tree type,
14449 tree op0,
14450 tree op1, bool *strict_overflow_p)
14451 {
14452 bool sub_strict_overflow_p;
14453 switch (code)
14454 {
14455 case POINTER_PLUS_EXPR:
14456 case PLUS_EXPR:
14457 if (TYPE_OVERFLOW_UNDEFINED (type))
14458 {
14459 	  /* In the presence of negative values it is hard
14460 	     to say anything definite.  */
14461 sub_strict_overflow_p = false;
14462 if (!tree_expr_nonnegative_warnv_p (op0,
14463 &sub_strict_overflow_p)
14464 || !tree_expr_nonnegative_warnv_p (op1,
14465 &sub_strict_overflow_p))
14466 return false;
14467 	  /* One of the operands must be positive and the other non-negative.  */
14468 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
14469 	     overflows, on a two's-complement machine the sum of a
14470 	     nonnegative and a positive value can never wrap to zero.  */
14471 return (tree_expr_nonzero_warnv_p (op0,
14472 strict_overflow_p)
14473 || tree_expr_nonzero_warnv_p (op1,
14474 strict_overflow_p));
14475 }
14476 break;
14477
14478 case MULT_EXPR:
14479 if (TYPE_OVERFLOW_UNDEFINED (type))
14480 {
14481 if (tree_expr_nonzero_warnv_p (op0,
14482 strict_overflow_p)
14483 && tree_expr_nonzero_warnv_p (op1,
14484 strict_overflow_p))
14485 {
14486 *strict_overflow_p = true;
14487 return true;
14488 }
14489 }
14490 break;
14491
14492 case MIN_EXPR:
14493 sub_strict_overflow_p = false;
14494 if (tree_expr_nonzero_warnv_p (op0,
14495 &sub_strict_overflow_p)
14496 && tree_expr_nonzero_warnv_p (op1,
14497 &sub_strict_overflow_p))
14498 {
14499 	  if (sub_strict_overflow_p)
14500 	    *strict_overflow_p = true;
	  /* MIN_EXPR of two nonzero operands yields one of the two
	     operands and is therefore itself nonzero.  */
	  return true;
14501 	}
14502 break;
14503
14504 case MAX_EXPR:
14505 sub_strict_overflow_p = false;
14506 if (tree_expr_nonzero_warnv_p (op0,
14507 &sub_strict_overflow_p))
14508 {
14509 if (sub_strict_overflow_p)
14510 *strict_overflow_p = true;
14511
14512 /* When both operands are nonzero, then MAX must be too. */
14513 if (tree_expr_nonzero_warnv_p (op1,
14514 strict_overflow_p))
14515 return true;
14516
14517 /* MAX where operand 0 is positive is positive. */
14518 return tree_expr_nonnegative_warnv_p (op0,
14519 strict_overflow_p);
14520 }
14521 /* MAX where operand 1 is positive is positive. */
14522 else if (tree_expr_nonzero_warnv_p (op1,
14523 &sub_strict_overflow_p)
14524 && tree_expr_nonnegative_warnv_p (op1,
14525 &sub_strict_overflow_p))
14526 {
14527 if (sub_strict_overflow_p)
14528 *strict_overflow_p = true;
14529 return true;
14530 }
14531 break;
14532
14533 case BIT_IOR_EXPR:
14534 return (tree_expr_nonzero_warnv_p (op1,
14535 strict_overflow_p)
14536 || tree_expr_nonzero_warnv_p (op0,
14537 strict_overflow_p));
14538
14539 default:
14540 break;
14541 }
14542
14543 return false;
14544 }
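
/* Added example (illustrative, not part of the original source): when
   signed overflow is undefined for the type, the PLUS_EXPR case shows

       (x & 0x7f) + 1

   to be nonzero: both operands are non-negative (the BIT_AND_EXPR rule
   makes x & 0x7f non-negative because the mask is), the constant 1 is
   nonzero, and the sum of a non-negative and a positive value cannot
   be zero.  */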
14545
14546 /* Return true when T is known to be nonzero.  For floating point we
14547    further ensure that T is not denormal.
14548    Similar logic is present in nonzero_address in rtlanal.c.
14549
14550 If the return value is based on the assumption that signed overflow
14551 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14552 change *STRICT_OVERFLOW_P. */
14553
14554 bool
14555 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14556 {
14557 bool sub_strict_overflow_p;
14558 switch (TREE_CODE (t))
14559 {
14560 case INTEGER_CST:
14561 return !integer_zerop (t);
14562
14563 case ADDR_EXPR:
14564 {
14565 tree base = get_base_address (TREE_OPERAND (t, 0));
14566
14567 if (!base)
14568 return false;
14569
14570 /* Weak declarations may link to NULL. */
14571 if (VAR_OR_FUNCTION_DECL_P (base))
14572 return !DECL_WEAK (base);
14573
14574 /* Constants are never weak. */
14575 if (CONSTANT_CLASS_P (base))
14576 return true;
14577
14578 return false;
14579 }
14580
14581 case COND_EXPR:
14582 sub_strict_overflow_p = false;
14583 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14584 &sub_strict_overflow_p)
14585 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14586 &sub_strict_overflow_p))
14587 {
14588 if (sub_strict_overflow_p)
14589 *strict_overflow_p = true;
14590 return true;
14591 }
14592 break;
14593
14594 default:
14595 break;
14596 }
14597 return false;
14598 }
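
/* Added example (illustrative, not part of the original source): the
   ADDR_EXPR case distinguishes

       extern int strong_var;
       extern int weak_var __attribute__ ((weak));

   &strong_var is known to be nonzero, but &weak_var is not: a weak
   symbol left undefined at link time resolves to a null address, so
   the function conservatively answers false for it.  */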
14599
14600 /* Return true when T is known to be nonzero.  For floating point we
14601    further ensure that T is not denormal.
14602    Similar logic is present in nonzero_address in rtlanal.c.
14603
14604 If the return value is based on the assumption that signed overflow
14605 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14606 change *STRICT_OVERFLOW_P. */
14607
14608 bool
14609 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14610 {
14611 tree type = TREE_TYPE (t);
14612 enum tree_code code;
14613
14614 /* Doing something useful for floating point would need more work. */
14615 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14616 return false;
14617
14618 code = TREE_CODE (t);
14619 switch (TREE_CODE_CLASS (code))
14620 {
14621 case tcc_unary:
14622 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14623 strict_overflow_p);
14624 case tcc_binary:
14625 case tcc_comparison:
14626 return tree_binary_nonzero_warnv_p (code, type,
14627 TREE_OPERAND (t, 0),
14628 TREE_OPERAND (t, 1),
14629 strict_overflow_p);
14630 case tcc_constant:
14631 case tcc_declaration:
14632 case tcc_reference:
14633 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14634
14635 default:
14636 break;
14637 }
14638
14639 switch (code)
14640 {
14641 case TRUTH_NOT_EXPR:
14642 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14643 strict_overflow_p);
14644
14645 case TRUTH_AND_EXPR:
14646 case TRUTH_OR_EXPR:
14647 case TRUTH_XOR_EXPR:
14648 return tree_binary_nonzero_warnv_p (code, type,
14649 TREE_OPERAND (t, 0),
14650 TREE_OPERAND (t, 1),
14651 strict_overflow_p);
14652
14653 case COND_EXPR:
14654 case CONSTRUCTOR:
14655 case OBJ_TYPE_REF:
14656 case ASSERT_EXPR:
14657 case ADDR_EXPR:
14658 case WITH_SIZE_EXPR:
14659 case EXC_PTR_EXPR:
14660 case SSA_NAME:
14661 case FILTER_EXPR:
14662 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14663
14664 case COMPOUND_EXPR:
14665 case MODIFY_EXPR:
14666 case BIND_EXPR:
14667 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14668 strict_overflow_p);
14669
14670 case SAVE_EXPR:
14671 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14672 strict_overflow_p);
14673
14674 case CALL_EXPR:
14675 return alloca_call_p (t);
14676
14677 default:
14678 break;
14679 }
14680 return false;
14681 }
14682
14683 /* Return true when T is known to be nonzero.  Handle warnings about
14684    undefined signed overflow.  */
14685
14686 bool
14687 tree_expr_nonzero_p (tree t)
14688 {
14689 bool ret, strict_overflow_p;
14690
14691 strict_overflow_p = false;
14692 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14693 if (strict_overflow_p)
14694 fold_overflow_warning (("assuming signed overflow does not occur when "
14695 "determining that expression is always "
14696 "non-zero"),
14697 WARN_STRICT_OVERFLOW_MISC);
14698 return ret;
14699 }
14700
14701 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14702 attempt to fold the expression to a constant without modifying TYPE,
14703 OP0 or OP1.
14704
14705 If the expression could be simplified to a constant, then return
14706 the constant. If the expression would not be simplified to a
14707 constant, then return NULL_TREE. */
14708
14709 tree
14710 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14711 {
14712 tree tem = fold_binary (code, type, op0, op1);
14713 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14714 }
14715
14716 /* Given the components of a unary expression CODE, TYPE and OP0,
14717 attempt to fold the expression to a constant without modifying
14718 TYPE or OP0.
14719
14720 If the expression could be simplified to a constant, then return
14721 the constant. If the expression would not be simplified to a
14722 constant, then return NULL_TREE. */
14723
14724 tree
14725 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14726 {
14727 tree tem = fold_unary (code, type, op0);
14728 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14729 }
14730
14731 /* If EXP represents referencing an element in a constant string
14732 (either via pointer arithmetic or array indexing), return the
14733 tree representing the value accessed, otherwise return NULL. */
14734
14735 tree
14736 fold_read_from_constant_string (tree exp)
14737 {
14738 if ((TREE_CODE (exp) == INDIRECT_REF
14739 || TREE_CODE (exp) == ARRAY_REF)
14740 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14741 {
14742 tree exp1 = TREE_OPERAND (exp, 0);
14743 tree index;
14744 tree string;
14745
14746 if (TREE_CODE (exp) == INDIRECT_REF)
14747 string = string_constant (exp1, &index);
14748 else
14749 {
14750 tree low_bound = array_ref_low_bound (exp);
14751 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14752
14753 /* Optimize the special-case of a zero lower bound.
14754
14755 We convert the low_bound to sizetype to avoid some problems
14756 with constant folding. (E.g. suppose the lower bound is 1,
14757 	     and its mode is QI.  Without the conversion, (ARRAY
14758 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14759 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14760 if (! integer_zerop (low_bound))
14761 index = size_diffop (index, fold_convert (sizetype, low_bound));
14762
14763 string = exp1;
14764 }
14765
14766 if (string
14767 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14768 && TREE_CODE (string) == STRING_CST
14769 && TREE_CODE (index) == INTEGER_CST
14770 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14771 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14772 == MODE_INT)
14773 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14774 return build_int_cst_type (TREE_TYPE (exp),
14775 (TREE_STRING_POINTER (string)
14776 [TREE_INT_CST_LOW (index)]));
14777 }
14778 return NULL;
14779 }
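
/* Added example (illustrative, not part of the original source): both
   access forms are handled here, so

       "hello"[1]   and   *("hello" + 1)

   each fold to the character constant 'e'.  The index must be a
   constant less than TREE_STRING_LENGTH and the element type a
   single-byte integer mode, so an out-of-range access such as
   "hello"[99] is left alone.  */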
14780
14781 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14782 an integer constant, real, or fixed-point constant.
14783
14784 TYPE is the type of the result. */
14785
14786 static tree
14787 fold_negate_const (tree arg0, tree type)
14788 {
14789 tree t = NULL_TREE;
14790
14791 switch (TREE_CODE (arg0))
14792 {
14793 case INTEGER_CST:
14794 {
14795 unsigned HOST_WIDE_INT low;
14796 HOST_WIDE_INT high;
14797 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14798 TREE_INT_CST_HIGH (arg0),
14799 &low, &high);
14800 t = force_fit_type_double (type, low, high, 1,
14801 (overflow | TREE_OVERFLOW (arg0))
14802 && !TYPE_UNSIGNED (type));
14803 break;
14804 }
14805
14806 case REAL_CST:
14807 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14808 break;
14809
14810 case FIXED_CST:
14811 {
14812 FIXED_VALUE_TYPE f;
14813 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14814 &(TREE_FIXED_CST (arg0)), NULL,
14815 TYPE_SATURATING (type));
14816 t = build_fixed (type, f);
14817 /* Propagate overflow flags. */
14818 if (overflow_p | TREE_OVERFLOW (arg0))
14819 {
14820 TREE_OVERFLOW (t) = 1;
14821 TREE_CONSTANT_OVERFLOW (t) = 1;
14822 }
14823 else if (TREE_CONSTANT_OVERFLOW (arg0))
14824 TREE_CONSTANT_OVERFLOW (t) = 1;
14825 break;
14826 }
14827
14828 default:
14829 gcc_unreachable ();
14830 }
14831
14832 return t;
14833 }
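
/* Added example (illustrative, not part of the original source): for
   32-bit int, negating the INTEGER_CST -2147483648 wraps back to the
   same value, so neg_double reports overflow and the result carries
   TREE_OVERFLOW; for unsigned types the !TYPE_UNSIGNED (type) test
   above keeps the flag clear, since unsigned arithmetic wraps by
   definition.  */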
14834
14835 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14836 an integer constant or real constant.
14837
14838 TYPE is the type of the result. */
14839
14840 tree
14841 fold_abs_const (tree arg0, tree type)
14842 {
14843 tree t = NULL_TREE;
14844
14845 switch (TREE_CODE (arg0))
14846 {
14847 case INTEGER_CST:
14848 /* If the value is unsigned, then the absolute value is
14849 the same as the ordinary value. */
14850 if (TYPE_UNSIGNED (type))
14851 t = arg0;
14852 /* Similarly, if the value is non-negative. */
14853 else if (INT_CST_LT (integer_minus_one_node, arg0))
14854 t = arg0;
14855 /* If the value is negative, then the absolute value is
14856 its negation. */
14857 else
14858 {
14859 unsigned HOST_WIDE_INT low;
14860 HOST_WIDE_INT high;
14861 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14862 TREE_INT_CST_HIGH (arg0),
14863 &low, &high);
14864 t = force_fit_type_double (type, low, high, -1,
14865 overflow | TREE_OVERFLOW (arg0));
14866 }
14867 break;
14868
14869 case REAL_CST:
14870 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14871 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14872 else
14873 t = arg0;
14874 break;
14875
14876 default:
14877 gcc_unreachable ();
14878 }
14879
14880 return t;
14881 }
14882
14883 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14884 constant. TYPE is the type of the result. */
14885
14886 static tree
14887 fold_not_const (tree arg0, tree type)
14888 {
14889 tree t = NULL_TREE;
14890
14891 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14892
14893 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14894 ~TREE_INT_CST_HIGH (arg0), 0,
14895 TREE_OVERFLOW (arg0));
14896
14897 return t;
14898 }
14899
14900 /* Given CODE, a relational operator, the target type TYPE, and two
14901    constant operands OP0 and OP1, return the result of the
14902    relational operation.  If the result is not a compile-time
14903    constant, then return NULL_TREE.  */
14904
14905 static tree
14906 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14907 {
14908 int result, invert;
14909
14910 /* From here on, the only cases we handle are when the result is
14911 known to be a constant. */
14912
14913 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14914 {
14915 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14916 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14917
14918 /* Handle the cases where either operand is a NaN. */
14919 if (real_isnan (c0) || real_isnan (c1))
14920 {
14921 switch (code)
14922 {
14923 case EQ_EXPR:
14924 case ORDERED_EXPR:
14925 result = 0;
14926 break;
14927
14928 case NE_EXPR:
14929 case UNORDERED_EXPR:
14930 case UNLT_EXPR:
14931 case UNLE_EXPR:
14932 case UNGT_EXPR:
14933 case UNGE_EXPR:
14934 case UNEQ_EXPR:
14935 result = 1;
14936 break;
14937
14938 case LT_EXPR:
14939 case LE_EXPR:
14940 case GT_EXPR:
14941 case GE_EXPR:
14942 case LTGT_EXPR:
14943 if (flag_trapping_math)
14944 return NULL_TREE;
14945 result = 0;
14946 break;
14947
14948 default:
14949 gcc_unreachable ();
14950 }
14951
14952 return constant_boolean_node (result, type);
14953 }
14954
14955 return constant_boolean_node (real_compare (code, c0, c1), type);
14956 }
14957
14958 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14959 {
14960 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14961 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14962 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14963 }
14964
14965 /* Handle equality/inequality of complex constants. */
14966 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14967 {
14968 tree rcond = fold_relational_const (code, type,
14969 TREE_REALPART (op0),
14970 TREE_REALPART (op1));
14971 tree icond = fold_relational_const (code, type,
14972 TREE_IMAGPART (op0),
14973 TREE_IMAGPART (op1));
14974 if (code == EQ_EXPR)
14975 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14976 else if (code == NE_EXPR)
14977 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14978 else
14979 return NULL_TREE;
14980 }
14981
14982 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14983
14984 To compute GT, swap the arguments and do LT.
14985 To compute GE, do LT and invert the result.
14986 To compute LE, swap the arguments, do LT and invert the result.
14987 To compute NE, do EQ and invert the result.
14988
14989 Therefore, the code below must handle only EQ and LT. */
14990
14991 if (code == LE_EXPR || code == GT_EXPR)
14992 {
14993 tree tem = op0;
14994 op0 = op1;
14995 op1 = tem;
14996 code = swap_tree_comparison (code);
14997 }
14998
14999 /* Note that it is safe to invert for real values here because we
15000    have already handled the one case where it matters.  */
15001
15002 invert = 0;
15003 if (code == NE_EXPR || code == GE_EXPR)
15004 {
15005 invert = 1;
15006 code = invert_tree_comparison (code, false);
15007 }
15008
15009 /* Compute a result for LT or EQ if args permit;
15010      otherwise return NULL_TREE.  */
15011 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15012 {
15013 if (code == EQ_EXPR)
15014 result = tree_int_cst_equal (op0, op1);
15015 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15016 result = INT_CST_LT_UNSIGNED (op0, op1);
15017 else
15018 result = INT_CST_LT (op0, op1);
15019 }
15020 else
15021 return NULL_TREE;
15022
15023 if (invert)
15024 result ^= 1;
15025 return constant_boolean_node (result, type);
15026 }
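
/* Added example (illustrative, not part of the original source): with
   -ftrapping-math, the NaN handling above folds

       __builtin_nan ("") == 1.0   to false
       __builtin_nan ("") != 1.0   to true

   but leaves 1.0 < __builtin_nan ("") unfolded, because the ordered
   comparison could raise an invalid-operand exception at run time;
   without trapping math it too folds, to false.  */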
15027
15028 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15029 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15030 itself. */
15031
15032 tree
15033 fold_build_cleanup_point_expr (tree type, tree expr)
15034 {
15035 /* If the expression does not have side effects then we don't have to wrap
15036 it with a cleanup point expression. */
15037 if (!TREE_SIDE_EFFECTS (expr))
15038 return expr;
15039
15040   /* If the expression is a return, check whether the expression inside the
15041      return, or the right-hand side of the modify expression inside the
15042      return, is free of side effects.  If either one is, we don't need to
15043      wrap the expression in a cleanup point expression.  Note we don't check
15044      the left-hand side of the modify because it should always be a result decl.  */
15045 if (TREE_CODE (expr) == RETURN_EXPR)
15046 {
15047 tree op = TREE_OPERAND (expr, 0);
15048 if (!op || !TREE_SIDE_EFFECTS (op))
15049 return expr;
15050 op = TREE_OPERAND (op, 1);
15051 if (!TREE_SIDE_EFFECTS (op))
15052 return expr;
15053 }
15054
15055 return build1 (CLEANUP_POINT_EXPR, type, expr);
15056 }
15057
15058 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15059 of an indirection through OP0, or NULL_TREE if no simplification is
15060 possible. */
15061
15062 tree
15063 fold_indirect_ref_1 (tree type, tree op0)
15064 {
15065 tree sub = op0;
15066 tree subtype;
15067
15068 STRIP_NOPS (sub);
15069 subtype = TREE_TYPE (sub);
15070 if (!POINTER_TYPE_P (subtype))
15071 return NULL_TREE;
15072
15073 if (TREE_CODE (sub) == ADDR_EXPR)
15074 {
15075 tree op = TREE_OPERAND (sub, 0);
15076 tree optype = TREE_TYPE (op);
15077       /* *&CONST_DECL -> the value of the const decl.  */
15078 if (TREE_CODE (op) == CONST_DECL)
15079 return DECL_INITIAL (op);
15080 /* *&p => p; make sure to handle *&"str"[cst] here. */
15081 if (type == optype)
15082 {
15083 tree fop = fold_read_from_constant_string (op);
15084 if (fop)
15085 return fop;
15086 else
15087 return op;
15088 }
15089 /* *(foo *)&fooarray => fooarray[0] */
15090 else if (TREE_CODE (optype) == ARRAY_TYPE
15091 && type == TREE_TYPE (optype))
15092 {
15093 tree type_domain = TYPE_DOMAIN (optype);
15094 tree min_val = size_zero_node;
15095 if (type_domain && TYPE_MIN_VALUE (type_domain))
15096 min_val = TYPE_MIN_VALUE (type_domain);
15097 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15098 }
15099 /* *(foo *)&complexfoo => __real__ complexfoo */
15100 else if (TREE_CODE (optype) == COMPLEX_TYPE
15101 && type == TREE_TYPE (optype))
15102 return fold_build1 (REALPART_EXPR, type, op);
15103 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15104 else if (TREE_CODE (optype) == VECTOR_TYPE
15105 && type == TREE_TYPE (optype))
15106 {
15107 tree part_width = TYPE_SIZE (type);
15108 tree index = bitsize_int (0);
15109 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15110 }
15111 }
15112
15113 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15114 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15115 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15116 {
15117 tree op00 = TREE_OPERAND (sub, 0);
15118 tree op01 = TREE_OPERAND (sub, 1);
15119 tree op00type;
15120
15121 STRIP_NOPS (op00);
15122 op00type = TREE_TYPE (op00);
15123 if (TREE_CODE (op00) == ADDR_EXPR
15124 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15125 && type == TREE_TYPE (TREE_TYPE (op00type)))
15126 {
15127 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15128 tree part_width = TYPE_SIZE (type);
15129 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15130 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15131 tree index = bitsize_int (indexi);
15132
	  /* The vector element index must lie strictly below the number
	     of subparts; an index equal to it would be out of bounds.  */
15133 	  if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15134 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15135 part_width, index);
15136
15137 }
15138 }
15139
15140
15141 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15142 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15143 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15144 {
15145 tree op00 = TREE_OPERAND (sub, 0);
15146 tree op01 = TREE_OPERAND (sub, 1);
15147 tree op00type;
15148
15149 STRIP_NOPS (op00);
15150 op00type = TREE_TYPE (op00);
15151 if (TREE_CODE (op00) == ADDR_EXPR
15152 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15153 && type == TREE_TYPE (TREE_TYPE (op00type)))
15154 {
15155 tree size = TYPE_SIZE_UNIT (type);
15156 if (tree_int_cst_equal (size, op01))
15157 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15158 }
15159 }
15160
15161 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15162 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15163 && type == TREE_TYPE (TREE_TYPE (subtype)))
15164 {
15165 tree type_domain;
15166 tree min_val = size_zero_node;
15167 sub = build_fold_indirect_ref (sub);
15168 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15169 if (type_domain && TYPE_MIN_VALUE (type_domain))
15170 min_val = TYPE_MIN_VALUE (type_domain);
15171 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15172 }
15173
15174 return NULL_TREE;
15175 }
15176
15177 /* Builds an expression for an indirection through T, simplifying some
15178 cases. */
15179
15180 tree
15181 build_fold_indirect_ref (tree t)
15182 {
15183 tree type = TREE_TYPE (TREE_TYPE (t));
15184 tree sub = fold_indirect_ref_1 (type, t);
15185
15186 if (sub)
15187 return sub;
15188 else
15189 return build1 (INDIRECT_REF, type, t);
15190 }
15191
15192 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15193
15194 tree
15195 fold_indirect_ref (tree t)
15196 {
15197 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15198
15199 if (sub)
15200 return sub;
15201 else
15202 return t;
15203 }
15204
15205 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15206 whose result is ignored. The type of the returned tree need not be
15207 the same as the original expression. */
15208
15209 tree
15210 fold_ignored_result (tree t)
15211 {
15212 if (!TREE_SIDE_EFFECTS (t))
15213 return integer_zero_node;
15214
15215 for (;;)
15216 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15217 {
15218 case tcc_unary:
15219 t = TREE_OPERAND (t, 0);
15220 break;
15221
15222 case tcc_binary:
15223 case tcc_comparison:
15224 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15225 t = TREE_OPERAND (t, 0);
15226 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15227 t = TREE_OPERAND (t, 1);
15228 else
15229 return t;
15230 break;
15231
15232 case tcc_expression:
15233 switch (TREE_CODE (t))
15234 {
15235 case COMPOUND_EXPR:
15236 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15237 return t;
15238 t = TREE_OPERAND (t, 0);
15239 break;
15240
15241 case COND_EXPR:
15242 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15243 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15244 return t;
15245 t = TREE_OPERAND (t, 0);
15246 break;
15247
15248 default:
15249 return t;
15250 }
15251 break;
15252
15253 default:
15254 return t;
15255 }
15256 }
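
/* Added example (illustrative, not part of the original source): when
   the value of

       (x = f (), x + 1)

   is ignored, the COMPOUND_EXPR case sees that the second operand
   x + 1 has no side effects and steps to the first operand, so the
   whole expression reduces to just x = f ().  */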
15257
15258 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15259 This can only be applied to objects of a sizetype. */
15260
15261 tree
15262 round_up (tree value, int divisor)
15263 {
15264 tree div = NULL_TREE;
15265
15266 gcc_assert (divisor > 0);
15267 if (divisor == 1)
15268 return value;
15269
15270   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15271      have to do anything.  Only do this when VALUE is not a constant,
15272      because for a constant this check is more expensive than just
15273      doing the rounding.  */
15274 if (TREE_CODE (value) != INTEGER_CST)
15275 {
15276 div = build_int_cst (TREE_TYPE (value), divisor);
15277
15278 if (multiple_of_p (TREE_TYPE (value), value, div))
15279 return value;
15280 }
15281
15282 /* If divisor is a power of two, simplify this to bit manipulation. */
15283 if (divisor == (divisor & -divisor))
15284 {
15285 if (TREE_CODE (value) == INTEGER_CST)
15286 {
15287 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15288 unsigned HOST_WIDE_INT high;
15289 bool overflow_p;
15290
15291 if ((low & (divisor - 1)) == 0)
15292 return value;
15293
15294 overflow_p = TREE_OVERFLOW (value);
15295 high = TREE_INT_CST_HIGH (value);
15296 low &= ~(divisor - 1);
15297 low += divisor;
15298 if (low == 0)
15299 {
15300 high++;
15301 if (high == 0)
15302 overflow_p = true;
15303 }
15304
15305 return force_fit_type_double (TREE_TYPE (value), low, high,
15306 -1, overflow_p);
15307 }
15308 else
15309 {
15310 tree t;
15311
15312 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15313 value = size_binop (PLUS_EXPR, value, t);
15314 t = build_int_cst (TREE_TYPE (value), -divisor);
15315 value = size_binop (BIT_AND_EXPR, value, t);
15316 }
15317 }
15318 else
15319 {
15320 if (!div)
15321 div = build_int_cst (TREE_TYPE (value), divisor);
15322 value = size_binop (CEIL_DIV_EXPR, value, div);
15323 value = size_binop (MULT_EXPR, value, div);
15324 }
15325
15326 return value;
15327 }
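
/* Added example (illustrative, not part of the original source): for a
   power-of-two divisor the non-constant path above computes

       value = (value + (divisor - 1)) & -divisor;

   so round_up (v, 8) maps 13 to 16 and leaves 16 unchanged; -8 is the
   mask ~7, which clears the low three bits once the bias has been
   added.  */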
15328
15329 /* Likewise, but round down. */
15330
15331 tree
15332 round_down (tree value, int divisor)
15333 {
15334 tree div = NULL_TREE;
15335
15336 gcc_assert (divisor > 0);
15337 if (divisor == 1)
15338 return value;
15339
15340   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15341      have to do anything.  Only do this when VALUE is not a constant,
15342      because for a constant this check is more expensive than just
15343      doing the rounding.  */
15344 if (TREE_CODE (value) != INTEGER_CST)
15345 {
15346 div = build_int_cst (TREE_TYPE (value), divisor);
15347
15348 if (multiple_of_p (TREE_TYPE (value), value, div))
15349 return value;
15350 }
15351
15352 /* If divisor is a power of two, simplify this to bit manipulation. */
15353 if (divisor == (divisor & -divisor))
15354 {
15355 tree t;
15356
15357 t = build_int_cst (TREE_TYPE (value), -divisor);
15358 value = size_binop (BIT_AND_EXPR, value, t);
15359 }
15360 else
15361 {
15362 if (!div)
15363 div = build_int_cst (TREE_TYPE (value), divisor);
15364 value = size_binop (FLOOR_DIV_EXPR, value, div);
15365 value = size_binop (MULT_EXPR, value, div);
15366 }
15367
15368 return value;
15369 }
15370
15371 /* Returns the pointer to the base of the object addressed by EXP and
15372 extracts the information about the offset of the access, storing it
15373    in *PBITPOS and *POFFSET.  */
15374
15375 static tree
15376 split_address_to_core_and_offset (tree exp,
15377 HOST_WIDE_INT *pbitpos, tree *poffset)
15378 {
15379 tree core;
15380 enum machine_mode mode;
15381 int unsignedp, volatilep;
15382 HOST_WIDE_INT bitsize;
15383
15384 if (TREE_CODE (exp) == ADDR_EXPR)
15385 {
15386 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15387 poffset, &mode, &unsignedp, &volatilep,
15388 false);
15389 core = fold_addr_expr (core);
15390 }
15391 else
15392 {
15393 core = exp;
15394 *pbitpos = 0;
15395 *poffset = NULL_TREE;
15396 }
15397
15398 return core;
15399 }
15400
15401 /* Returns true if addresses of E1 and E2 differ by a constant, false
15402 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15403
15404 bool
15405 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15406 {
15407 tree core1, core2;
15408 HOST_WIDE_INT bitpos1, bitpos2;
15409 tree toffset1, toffset2, tdiff, type;
15410
15411 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15412 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15413
15414 if (bitpos1 % BITS_PER_UNIT != 0
15415 || bitpos2 % BITS_PER_UNIT != 0
15416 || !operand_equal_p (core1, core2, 0))
15417 return false;
15418
15419 if (toffset1 && toffset2)
15420 {
15421 type = TREE_TYPE (toffset1);
15422 if (type != TREE_TYPE (toffset2))
15423 toffset2 = fold_convert (type, toffset2);
15424
15425 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15426 if (!cst_and_fits_in_hwi (tdiff))
15427 return false;
15428
15429 *diff = int_cst_value (tdiff);
15430 }
15431 else if (toffset1 || toffset2)
15432 {
15433 /* If only one of the offsets is non-constant, the difference cannot
15434 be a constant. */
15435 return false;
15436 }
15437 else
15438 *diff = 0;
15439
15440 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15441 return true;
15442 }
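
/* Added example (illustrative, not part of the original source): given

       int a[16];

   ptr_difference_const (&a[10], &a[4], &diff) decomposes both addresses
   to the common core a with constant bit positions 320 and 128 (with
   4-byte int), stores (320 - 128) / 8 == 24 in *diff and returns true;
   if the offsets differed by a non-constant amount, it would return
   false.  */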
15443
15444 /* Simplify the floating point expression EXP when the sign of the
15445 result is not significant. Return NULL_TREE if no simplification
15446 is possible. */
15447
15448 tree
15449 fold_strip_sign_ops (tree exp)
15450 {
15451 tree arg0, arg1;
15452
15453 switch (TREE_CODE (exp))
15454 {
15455 case ABS_EXPR:
15456 case NEGATE_EXPR:
15457 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15458 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15459
15460 case MULT_EXPR:
15461 case RDIV_EXPR:
15462 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15463 return NULL_TREE;
15464 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15465 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15466 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15467 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15468 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15469 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15470 break;
15471
15472 case COMPOUND_EXPR:
15473 arg0 = TREE_OPERAND (exp, 0);
15474 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15475 if (arg1)
15476 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15477 break;
15478
15479 case COND_EXPR:
15480 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15481 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15482 if (arg0 || arg1)
15483 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15484 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15485 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15486 break;
15487
15488 case CALL_EXPR:
15489 {
15490 const enum built_in_function fcode = builtin_mathfn_code (exp);
15491 switch (fcode)
15492 {
15493 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15494 /* Strip copysign function call, return the 1st argument. */
15495 arg0 = CALL_EXPR_ARG (exp, 0);
15496 arg1 = CALL_EXPR_ARG (exp, 1);
15497 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15498
15499 default:
15500 /* Strip sign ops from the argument of "odd" math functions. */
15501 if (negate_mathfn_p (fcode))
15502 {
15503 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15504 if (arg0)
15505 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15506 }
15507 break;
15508 }
15509 }
15510 break;
15511
15512 default:
15513 break;
15514 }
15515 return NULL_TREE;
15516 }
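
/* Added example (illustrative, not part of the original source): when
   only the magnitude of a result matters, for instance the argument of
   a call known to ignore the sign, this routine rewrites

       -x * ABS_EXPR <y>    into    x * y

   via the NEGATE_EXPR, ABS_EXPR and MULT_EXPR cases above, provided
   sign-dependent rounding need not be honored for the mode.  */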