re PR tree-optimization/35429 (ICE with complex arithmetic)
[gcc.git] / gcc / fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
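
/* Viewed as a bit-set, each code above combines four primitive
   predicates: bit 0 is "less", bit 1 is "equal", bit 2 is "greater"
   and bit 3 is "unordered".  For example, COMPCODE_LE (3) is
   COMPCODE_LT | COMPCODE_EQ, and COMPCODE_NE (13) holds when the
   operands compare less, greater or unordered.  The conjunction or
   disjunction of two comparisons is then just the bitwise AND or OR
   of their codes: (a <= b) && (a >= b) yields 3 & 6 == 2, i.e. a == b.  */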

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
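
/* For example, scaled down to 8-bit words, 0x50 + 0x50 = 0xa0
   overflows, and ~(0x50 ^ 0x50) & (0x50 ^ 0xa0) = 0xff & 0xf0 = 0xf0
   is negative as a signed value.  When A and B differ in sign, A ^ B
   has the sign bit set, ~(A ^ B) clears it, and the macro yields 0;
   such an addition can never overflow.  */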
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
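
/* For instance, with a 64-bit HOST_WIDE_INT, BASE is 2^32 and the
   word 0x123456789abcdef0 decomposes as LOWPART 0x9abcdef0 plus
   HIGHPART 0x12345678 times BASE.  encode and decode below round-trip
   a two-word integer through this four-word representation.  */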

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
\f
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
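
/* For example, forcing 0x8000 into a signed 16-bit type (with a 64-bit
   HOST_WIDE_INT) first masks the value to 16 bits and then sign
   extends it, yielding *LV = 0xffffffffffff8000 and *HV = -1.  Since
   32768 does not fit in the type, the result differs from the
   argument and the function reports overflow.  */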

/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.  OVERFLOWABLE indicates whether we are
   interested in overflow of the value: when >0 we are only interested
   in signed overflow, for <0 we are interested in any overflow.
   OVERFLOWED indicates whether overflow has already occurred.  We
   force the value to be within range of TYPE (by setting to 0 or 1
   all the bits outside the type's range), and set TREE_OVERFLOW if
   OVERFLOWED is nonzero,
   or OVERFLOWABLE is >0 and signed overflow occurs,
   or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
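
/* The low words are added first; an unsigned wraparound there (the
   sum compares less than L1) carries one into the high word.  For
   example, adding 1 to a doubleword whose low word is all ones clears
   the low word and increments the high word.  */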

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
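
/* Negation uses the two's complement identity -X == ~X + 1: with a
   nonzero low word, *LV = -L1 absorbs the "+ 1" and the high word is
   simply complemented.  Signed overflow occurs only for the most
   negative doubleword, whose low word is zero and whose high word is
   the minimum HOST_WIDE_INT; only there do H1 and -H1 both have the
   sign bit set, making (*hv & h1) negative.  */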
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
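
/* The bounds quoted in the inner loop above are written for 16-bit
   digits (a 32-bit HOST_WIDE_INT): two digits below 0x10000 multiply
   to at most 0xFFFE0001, and adding the incoming carry and the
   previous partial digit keeps the sum below 2^32, so the carry never
   wraps.  The same argument holds for any digit width of
   HOST_BITS_PER_WIDE_INT / 2 bits.  */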
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
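
/* As an illustration of the rounding modes, consider -8 / 3: the
   truncating quotient is -2 with remainder -2.  FLOOR_DIV_EXPR
   adjusts the quotient to -3 (remainder 1), CEIL_DIV_EXPR keeps -2,
   and ROUND_DIV_EXPR, seeing 2*|rem| > |den|, also rounds to the
   nearer -3.  In each case the remainder is recomputed afterward so
   that num == quo * den + rem still holds.  */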

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
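
/* For a 32-bit signed type the test above rejects exactly 0x80000000,
   since negating INT_MIN is not representable: the low PREC bits of T
   are compared against the lone sign-bit pattern 1 << (prec - 1).  */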

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.  If negate_expr_p would return true
   for T, NULL_TREE will never be returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
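
/* For example, splitting IN == x + 3 with CODE == PLUS_EXPR stores 3
   in *LITP and returns x, while splitting x - 3 stores 3 in
   *MINUS_LITP instead.  A TREE_CONSTANT but non-literal operand, such
   as the address of a static object, goes to *CONP.  */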
1552
1553 /* Re-associate trees split by the above function. T1 and T2 are either
1554 expressions to associate or null. Return the new expression, if any. If
1555 we build an operation, do it in TYPE and with CODE. */
1556
1557 static tree
1558 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1559 {
1560 if (t1 == 0)
1561 return t2;
1562 else if (t2 == 0)
1563 return t1;
1564
1565 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1566 try to fold this since we will have infinite recursion. But do
1567 deal with any NEGATE_EXPRs. */
1568 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1569 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1570 {
1571 if (code == PLUS_EXPR)
1572 {
1573 if (TREE_CODE (t1) == NEGATE_EXPR)
1574 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1575 fold_convert (type, TREE_OPERAND (t1, 0)));
1576 else if (TREE_CODE (t2) == NEGATE_EXPR)
1577 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1578 fold_convert (type, TREE_OPERAND (t2, 0)));
1579 else if (integer_zerop (t2))
1580 return fold_convert (type, t1);
1581 }
1582 else if (code == MINUS_EXPR)
1583 {
1584 if (integer_zerop (t2))
1585 return fold_convert (type, t1);
1586 }
1587
1588 return build2 (code, type, fold_convert (type, t1),
1589 fold_convert (type, t2));
1590 }
1591
1592 return fold_build2 (code, type, fold_convert (type, t1),
1593 fold_convert (type, t2));
1594 }
1595 \f
1596 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1597 for use in int_const_binop, size_binop and size_diffop. */
1598
1599 static bool
1600 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1601 {
1602 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1603 return false;
1604 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1605 return false;
1606
1607 switch (code)
1608 {
1609 case LSHIFT_EXPR:
1610 case RSHIFT_EXPR:
1611 case LROTATE_EXPR:
1612 case RROTATE_EXPR:
1613 return true;
1614
1615 default:
1616 break;
1617 }
1618
1619 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1620 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1621 && TYPE_MODE (type1) == TYPE_MODE (type2);
1622 }
1623
1624
1625 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1626 to produce a new constant. Return NULL_TREE if we don't know how
1627 to evaluate CODE at compile-time.
1628
1629 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1630
1631 tree
1632 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1633 {
1634 unsigned HOST_WIDE_INT int1l, int2l;
1635 HOST_WIDE_INT int1h, int2h;
1636 unsigned HOST_WIDE_INT low;
1637 HOST_WIDE_INT hi;
1638 unsigned HOST_WIDE_INT garbagel;
1639 HOST_WIDE_INT garbageh;
1640 tree t;
1641 tree type = TREE_TYPE (arg1);
1642 int uns = TYPE_UNSIGNED (type);
1643 int is_sizetype
1644 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1645 int overflow = 0;
1646
1647 int1l = TREE_INT_CST_LOW (arg1);
1648 int1h = TREE_INT_CST_HIGH (arg1);
1649 int2l = TREE_INT_CST_LOW (arg2);
1650 int2h = TREE_INT_CST_HIGH (arg2);
1651
1652 switch (code)
1653 {
1654 case BIT_IOR_EXPR:
1655 low = int1l | int2l, hi = int1h | int2h;
1656 break;
1657
1658 case BIT_XOR_EXPR:
1659 low = int1l ^ int2l, hi = int1h ^ int2h;
1660 break;
1661
1662 case BIT_AND_EXPR:
1663 low = int1l & int2l, hi = int1h & int2h;
1664 break;
1665
1666 case RSHIFT_EXPR:
1667 int2l = -int2l;
1668 case LSHIFT_EXPR:
1669 /* It's unclear from the C standard whether shifts can overflow.
1670 The following code ignores overflow; perhaps a C standard
1671 interpretation ruling is needed. */
1672 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1673 &low, &hi, !uns);
1674 break;
1675
1676 case RROTATE_EXPR:
1677 int2l = - int2l;
1678 case LROTATE_EXPR:
1679 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1680 &low, &hi);
1681 break;
1682
1683 case PLUS_EXPR:
1684 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1685 break;
1686
1687 case MINUS_EXPR:
1688 neg_double (int2l, int2h, &low, &hi);
1689 add_double (int1l, int1h, low, hi, &low, &hi);
1690 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1691 break;
1692
1693 case MULT_EXPR:
1694 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1695 break;
1696
1697 case TRUNC_DIV_EXPR:
1698 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1699 case EXACT_DIV_EXPR:
1700 /* This is a shortcut for a common special case. */
1701 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1702 && !TREE_OVERFLOW (arg1)
1703 && !TREE_OVERFLOW (arg2)
1704 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1705 {
1706 if (code == CEIL_DIV_EXPR)
1707 int1l += int2l - 1;
1708
1709 low = int1l / int2l, hi = 0;
1710 break;
1711 }
1712
1713 /* ... fall through ... */
1714
1715 case ROUND_DIV_EXPR:
1716 if (int2h == 0 && int2l == 0)
1717 return NULL_TREE;
1718 if (int2h == 0 && int2l == 1)
1719 {
1720 low = int1l, hi = int1h;
1721 break;
1722 }
1723 if (int1l == int2l && int1h == int2h
1724 && ! (int1l == 0 && int1h == 0))
1725 {
1726 low = 1, hi = 0;
1727 break;
1728 }
1729 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1730 &low, &hi, &garbagel, &garbageh);
1731 break;
1732
1733 case TRUNC_MOD_EXPR:
1734 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1735 /* This is a shortcut for a common special case. */
1736 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1737 && !TREE_OVERFLOW (arg1)
1738 && !TREE_OVERFLOW (arg2)
1739 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1740 {
1741 if (code == CEIL_MOD_EXPR)
1742 int1l += int2l - 1;
1743 low = int1l % int2l, hi = 0;
1744 break;
1745 }
1746
1747 /* ... fall through ... */
1748
1749 case ROUND_MOD_EXPR:
1750 if (int2h == 0 && int2l == 0)
1751 return NULL_TREE;
1752 overflow = div_and_round_double (code, uns,
1753 int1l, int1h, int2l, int2h,
1754 &garbagel, &garbageh, &low, &hi);
1755 break;
1756
1757 case MIN_EXPR:
1758 case MAX_EXPR:
1759 if (uns)
1760 low = (((unsigned HOST_WIDE_INT) int1h
1761 < (unsigned HOST_WIDE_INT) int2h)
1762 || (((unsigned HOST_WIDE_INT) int1h
1763 == (unsigned HOST_WIDE_INT) int2h)
1764 && int1l < int2l));
1765 else
1766 low = (int1h < int2h
1767 || (int1h == int2h && int1l < int2l));
1768
1769 if (low == (code == MIN_EXPR))
1770 low = int1l, hi = int1h;
1771 else
1772 low = int2l, hi = int2h;
1773 break;
1774
1775 default:
1776 return NULL_TREE;
1777 }
1778
1779 if (notrunc)
1780 {
1781 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1782
1783 /* Propagate overflow flags ourselves. */
1784 if (((!uns || is_sizetype) && overflow)
1785 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1786 {
1787 t = copy_node (t);
1788 TREE_OVERFLOW (t) = 1;
1789 }
1790 }
1791 else
1792 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1793 ((!uns || is_sizetype) && overflow)
1794 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1795
1796 return t;
1797 }
1798
1799 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1800 constant. We assume ARG1 and ARG2 have the same data type, or at least
1801 are the same kind of constant and the same machine mode. Return zero if
1802 combining the constants is not allowed in the current operating mode.
1803
1804 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1805
1806 static tree
1807 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1808 {
1809 /* Sanity check for the recursive cases. */
1810 if (!arg1 || !arg2)
1811 return NULL_TREE;
1812
1813 STRIP_NOPS (arg1);
1814 STRIP_NOPS (arg2);
1815
1816 if (TREE_CODE (arg1) == INTEGER_CST)
1817 return int_const_binop (code, arg1, arg2, notrunc);
1818
1819 if (TREE_CODE (arg1) == REAL_CST)
1820 {
1821 enum machine_mode mode;
1822 REAL_VALUE_TYPE d1;
1823 REAL_VALUE_TYPE d2;
1824 REAL_VALUE_TYPE value;
1825 REAL_VALUE_TYPE result;
1826 bool inexact;
1827 tree t, type;
1828
1829 /* The following codes are handled by real_arithmetic. */
1830 switch (code)
1831 {
1832 case PLUS_EXPR:
1833 case MINUS_EXPR:
1834 case MULT_EXPR:
1835 case RDIV_EXPR:
1836 case MIN_EXPR:
1837 case MAX_EXPR:
1838 break;
1839
1840 default:
1841 return NULL_TREE;
1842 }
1843
1844 d1 = TREE_REAL_CST (arg1);
1845 d2 = TREE_REAL_CST (arg2);
1846
1847 type = TREE_TYPE (arg1);
1848 mode = TYPE_MODE (type);
1849
1850 /* Don't perform the operation if we honor signaling NaNs and
1851 either operand is a NaN. */
1852 if (HONOR_SNANS (mode)
1853 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1854 return NULL_TREE;
1855
1856 /* Don't perform the operation if it would raise a division
1857 by zero exception. */
1858 if (code == RDIV_EXPR
1859 && REAL_VALUES_EQUAL (d2, dconst0)
1860 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1861 return NULL_TREE;
1862
1863 /* If either operand is a NaN, just return it. Otherwise, set up
1864 for floating-point trap; we return an overflow. */
1865 if (REAL_VALUE_ISNAN (d1))
1866 return arg1;
1867 else if (REAL_VALUE_ISNAN (d2))
1868 return arg2;
1869
1870 inexact = real_arithmetic (&value, code, &d1, &d2);
1871 real_convert (&result, mode, &value);
1872
1873 /* Don't constant fold this floating point operation if
1874 the result has overflowed and flag_trapping_math is set. */
1875 if (flag_trapping_math
1876 && MODE_HAS_INFINITIES (mode)
1877 && REAL_VALUE_ISINF (result)
1878 && !REAL_VALUE_ISINF (d1)
1879 && !REAL_VALUE_ISINF (d2))
1880 return NULL_TREE;
1881
1882 /* Don't constant fold this floating point operation if the
1883 result may depend upon the run-time rounding mode and
1884 flag_rounding_math is set, or if GCC's software emulation
1885 is unable to accurately represent the result. */
1886 if ((flag_rounding_math
1887 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1888 && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1890 return NULL_TREE;
1891
1892 t = build_real (type, result);
1893
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1895 return t;
1896 }
1897
1898 if (TREE_CODE (arg1) == FIXED_CST)
1899 {
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1903 tree t, type;
1904 int sat_p;
1905 bool overflow_p;
1906
1907 /* The following codes are handled by fixed_arithmetic. */
1908 switch (code)
1909 {
1910 case PLUS_EXPR:
1911 case MINUS_EXPR:
1912 case MULT_EXPR:
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1915 break;
1916
1917 case LSHIFT_EXPR:
1918 case RSHIFT_EXPR:
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1921 f2.mode = SImode;
1922 break;
1923
1924 default:
1925 return NULL_TREE;
1926 }
1927
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1935 {
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1938 }
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1941 return t;
1942 }
1943
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1945 {
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1951 tree real, imag;
1952
1953 switch (code)
1954 {
1955 case PLUS_EXPR:
1956 case MINUS_EXPR:
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1959 break;
1960
1961 case MULT_EXPR:
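/* (a + bi) * (c + di) = (ac - bd) + (ad + bc)i. */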
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1965 notrunc);
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1969 notrunc);
1970 break;
1971
1972 case RDIV_EXPR:
1973 {
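/* Textbook formula: (a + bi) / (c + di)
   = ((ac + bd) + (bc - ad)i) / (c*c + d*d). */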
1974 tree magsquared
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1978 notrunc);
1979 tree t1
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1983 notrunc);
1984 tree t2
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1988 notrunc);
1989
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1992
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
1995 }
1996 break;
1997
1998 default:
1999 return NULL_TREE;
2000 }
2001
2002 if (real && imag)
2003 return build_complex (type, real, imag);
2004 }
2005
2006 return NULL_TREE;
2007 }
2008
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2011
2012 tree
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2014 {
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2016 }
2017 \f
2018 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, ala int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
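/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds immediately to the sizetype constant 12. */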
2022
2023 tree
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2025 {
2026 tree type = TREE_TYPE (arg0);
2027
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2030
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2032 TREE_TYPE (arg1)));
2033
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2036 {
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2039 {
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2041 return arg1;
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2043 return arg0;
2044 }
2045 else if (code == MINUS_EXPR)
2046 {
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2048 return arg0;
2049 }
2050 else if (code == MULT_EXPR)
2051 {
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2053 return arg1;
2054 }
2055
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2058 }
2059
2060 return fold_build2 (code, type, arg0, arg1);
2061 }
2062
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in the signed type corresponding to the type of the operands. */
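/* For example, size_diffop (size_int (2), size_int (5)) computes
   -(5 - 2) and returns the ssizetype constant -3. */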
2066
2067 tree
2068 size_diffop (tree arg0, tree arg1)
2069 {
2070 tree type = TREE_TYPE (arg0);
2071 tree ctype;
2072
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2075
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2079
2080 if (type == sizetype)
2081 ctype = ssizetype;
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2084 else
2085 ctype = signed_type_for (type);
2086
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2093
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2102 else
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2105 arg1, arg0)));
2106 }
2107 \f
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2110
2111 static tree
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2113 {
2114 tree t;
2115
2116 /* Given an integer constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 /* or to a sizetype with the same signedness
2124 when the precision is unchanged.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2141
2142 return t;
2143 }
2144
2145 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2146 to an integer type. */
2147
2148 static tree
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2150 {
2151 int overflow = 0;
2152 tree t;
2153
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
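/* Consequently a value above TYPE_MAX_VALUE folds to TYPE_MAX_VALUE
   and a NaN folds to zero, in both cases with TREE_OVERFLOW set. */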
2162
2163 HOST_WIDE_INT high, low;
2164 REAL_VALUE_TYPE r;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2166
2167 switch (code)
2168 {
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2171 break;
2172
2173 default:
2174 gcc_unreachable ();
2175 }
2176
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2179 {
2180 overflow = 1;
2181 high = 0;
2182 low = 0;
2183 }
2184
2185 /* See if R is less than the lower bound or greater than the
2186 upper bound. */
2187
2188 if (! overflow)
2189 {
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2193 {
2194 overflow = 1;
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2197 }
2198 }
2199
2200 if (! overflow)
2201 {
2202 tree ut = TYPE_MAX_VALUE (type);
2203 if (ut)
2204 {
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2207 {
2208 overflow = 1;
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2211 }
2212 }
2213 }
2214
2215 if (! overflow)
2216 REAL_VALUE_TO_INT (&low, &high, r);
2217
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2220 return t;
2221 }
2222
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2225
2226 static tree
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2228 {
2229 tree t;
2230 double_int temp, temp_trunc;
2231 unsigned int mode;
2232
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2237 {
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2241
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2247 }
2248 else
2249 {
2250 temp.low = 0;
2251 temp.high = 0;
2252 temp_trunc.low = 0;
2253 temp_trunc.high = 0;
2254 }
2255
2256 /* If FIXED_CST is negative, we need to round the value toward 0;
2257 do this by adding 1 to temp when the discarded fractional bits are nonzero. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2260 {
2261 double_int one;
2262 one.low = 1;
2263 one.high = 0;
2264 temp = double_int_add (temp, one);
2265 }
2266
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2270 (temp.high < 0
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2274
2275 return t;
2276 }
2277
2278 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2279 to another floating point type. */
2280
2281 static tree
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2283 {
2284 REAL_VALUE_TYPE value;
2285 tree t;
2286
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2289
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2291 return t;
2292 }
2293
2294 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2295 to a floating point type. */
2296
2297 static tree
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2299 {
2300 REAL_VALUE_TYPE value;
2301 tree t;
2302
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2305
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2309 return t;
2310 }
2311
2312 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2313 to another fixed-point type. */
2314
2315 static tree
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2317 {
2318 FIXED_VALUE_TYPE value;
2319 tree t;
2320 bool overflow_p;
2321
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2325
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2328 {
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2331 }
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2334 return t;
2335 }
2336
2337 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2338 to a fixed-point type. */
2339
2340 static tree
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2342 {
2343 FIXED_VALUE_TYPE value;
2344 tree t;
2345 bool overflow_p;
2346
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2352
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2355 {
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2358 }
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2361 return t;
2362 }
2363
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2366
2367 static tree
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2369 {
2370 FIXED_VALUE_TYPE value;
2371 tree t;
2372 bool overflow_p;
2373
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2378
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2381 {
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2384 }
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2387 return t;
2388 }
2389
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2392
2393 static tree
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2395 {
2396 if (TREE_TYPE (arg1) == type)
2397 return arg1;
2398
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2400 {
2401 if (TREE_CODE (arg1) == INTEGER_CST)
2402 return fold_convert_const_int_from_int (type, arg1);
2403 else if (TREE_CODE (arg1) == REAL_CST)
2404 return fold_convert_const_int_from_real (code, type, arg1);
2405 else if (TREE_CODE (arg1) == FIXED_CST)
2406 return fold_convert_const_int_from_fixed (type, arg1);
2407 }
2408 else if (TREE_CODE (type) == REAL_TYPE)
2409 {
2410 if (TREE_CODE (arg1) == INTEGER_CST)
2411 return build_real_from_int_cst (type, arg1);
2412 else if (TREE_CODE (arg1) == REAL_CST)
2413 return fold_convert_const_real_from_real (type, arg1);
2414 else if (TREE_CODE (arg1) == FIXED_CST)
2415 return fold_convert_const_real_from_fixed (type, arg1);
2416 }
2417 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2418 {
2419 if (TREE_CODE (arg1) == FIXED_CST)
2420 return fold_convert_const_fixed_from_fixed (type, arg1);
2421 else if (TREE_CODE (arg1) == INTEGER_CST)
2422 return fold_convert_const_fixed_from_int (type, arg1);
2423 else if (TREE_CODE (arg1) == REAL_CST)
2424 return fold_convert_const_fixed_from_real (type, arg1);
2425 }
2426 return NULL_TREE;
2427 }
2428
2429 /* Construct a vector of zero elements of vector type TYPE. */
2430
2431 static tree
2432 build_zero_vector (tree type)
2433 {
2434 tree elem, list;
2435 int i, units;
2436
2437 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2438 units = TYPE_VECTOR_SUBPARTS (type);
2439
2440 list = NULL_TREE;
2441 for (i = 0; i < units; i++)
2442 list = tree_cons (NULL_TREE, elem, list);
2443 return build_vector (type, list);
2444 }
2445
2446 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2447
2448 bool
2449 fold_convertible_p (const_tree type, const_tree arg)
2450 {
2451 tree orig = TREE_TYPE (arg);
2452
2453 if (type == orig)
2454 return true;
2455
2456 if (TREE_CODE (arg) == ERROR_MARK
2457 || TREE_CODE (type) == ERROR_MARK
2458 || TREE_CODE (orig) == ERROR_MARK)
2459 return false;
2460
2461 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2462 return true;
2463
2464 switch (TREE_CODE (type))
2465 {
2466 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2467 case POINTER_TYPE: case REFERENCE_TYPE:
2468 case OFFSET_TYPE:
2469 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2470 || TREE_CODE (orig) == OFFSET_TYPE)
2471 return true;
2472 return (TREE_CODE (orig) == VECTOR_TYPE
2473 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2474
2475 case REAL_TYPE:
2476 case FIXED_POINT_TYPE:
2477 case COMPLEX_TYPE:
2478 case VECTOR_TYPE:
2479 case VOID_TYPE:
2480 return TREE_CODE (type) == TREE_CODE (orig);
2481
2482 default:
2483 return false;
2484 }
2485 }
2486
2487 /* Convert expression ARG to type TYPE. Used by the middle-end for
2488 simple conversions in preference to calling the front-end's convert. */
2489
2490 tree
2491 fold_convert (tree type, tree arg)
2492 {
2493 tree orig = TREE_TYPE (arg);
2494 tree tem;
2495
2496 if (type == orig)
2497 return arg;
2498
2499 if (TREE_CODE (arg) == ERROR_MARK
2500 || TREE_CODE (type) == ERROR_MARK
2501 || TREE_CODE (orig) == ERROR_MARK)
2502 return error_mark_node;
2503
2504 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2505 return fold_build1 (NOP_EXPR, type, arg);
2506
2507 switch (TREE_CODE (type))
2508 {
2509 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2510 case POINTER_TYPE: case REFERENCE_TYPE:
2511 case OFFSET_TYPE:
2512 if (TREE_CODE (arg) == INTEGER_CST)
2513 {
2514 tem = fold_convert_const (NOP_EXPR, type, arg);
2515 if (tem != NULL_TREE)
2516 return tem;
2517 }
2518 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2519 || TREE_CODE (orig) == OFFSET_TYPE)
2520 return fold_build1 (NOP_EXPR, type, arg);
2521 if (TREE_CODE (orig) == COMPLEX_TYPE)
2522 {
2523 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2524 return fold_convert (type, tem);
2525 }
2526 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2527 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2528 return fold_build1 (NOP_EXPR, type, arg);
2529
2530 case REAL_TYPE:
2531 if (TREE_CODE (arg) == INTEGER_CST)
2532 {
2533 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2534 if (tem != NULL_TREE)
2535 return tem;
2536 }
2537 else if (TREE_CODE (arg) == REAL_CST)
2538 {
2539 tem = fold_convert_const (NOP_EXPR, type, arg);
2540 if (tem != NULL_TREE)
2541 return tem;
2542 }
2543 else if (TREE_CODE (arg) == FIXED_CST)
2544 {
2545 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2546 if (tem != NULL_TREE)
2547 return tem;
2548 }
2549
2550 switch (TREE_CODE (orig))
2551 {
2552 case INTEGER_TYPE:
2553 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2554 case POINTER_TYPE: case REFERENCE_TYPE:
2555 return fold_build1 (FLOAT_EXPR, type, arg);
2556
2557 case REAL_TYPE:
2558 return fold_build1 (NOP_EXPR, type, arg);
2559
2560 case FIXED_POINT_TYPE:
2561 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2562
2563 case COMPLEX_TYPE:
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2566
2567 default:
2568 gcc_unreachable ();
2569 }
2570
2571 case FIXED_POINT_TYPE:
2572 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2573 || TREE_CODE (arg) == REAL_CST)
2574 {
2575 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2576 if (tem != NULL_TREE)
2577 return tem;
2578 }
2579
2580 switch (TREE_CODE (orig))
2581 {
2582 case FIXED_POINT_TYPE:
2583 case INTEGER_TYPE:
2584 case ENUMERAL_TYPE:
2585 case BOOLEAN_TYPE:
2586 case REAL_TYPE:
2587 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2588
2589 case COMPLEX_TYPE:
2590 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2591 return fold_convert (type, tem);
2592
2593 default:
2594 gcc_unreachable ();
2595 }
2596
2597 case COMPLEX_TYPE:
2598 switch (TREE_CODE (orig))
2599 {
2600 case INTEGER_TYPE:
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2603 case REAL_TYPE:
2604 case FIXED_POINT_TYPE:
2605 return build2 (COMPLEX_EXPR, type,
2606 fold_convert (TREE_TYPE (type), arg),
2607 fold_convert (TREE_TYPE (type), integer_zero_node));
2608 case COMPLEX_TYPE:
2609 {
2610 tree rpart, ipart;
2611
2612 if (TREE_CODE (arg) == COMPLEX_EXPR)
2613 {
2614 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2615 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2616 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2617 }
2618
2619 arg = save_expr (arg);
2620 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2621 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2622 rpart = fold_convert (TREE_TYPE (type), rpart);
2623 ipart = fold_convert (TREE_TYPE (type), ipart);
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2625 }
2626
2627 default:
2628 gcc_unreachable ();
2629 }
2630
2631 case VECTOR_TYPE:
2632 if (integer_zerop (arg))
2633 return build_zero_vector (type);
2634 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2636 || TREE_CODE (orig) == VECTOR_TYPE);
2637 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2638
2639 case VOID_TYPE:
2640 tem = fold_ignored_result (arg);
2641 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2642 return tem;
2643 return fold_build1 (NOP_EXPR, type, tem);
2644
2645 default:
2646 gcc_unreachable ();
2647 }
2648 }
2649 \f
2650 /* Return false if X can be assumed not to be an lvalue, true
2651 otherwise. */
2652
2653 static bool
2654 maybe_lvalue_p (const_tree x)
2655 {
2656 /* We only need to wrap lvalue tree codes. */
2657 switch (TREE_CODE (x))
2658 {
2659 case VAR_DECL:
2660 case PARM_DECL:
2661 case RESULT_DECL:
2662 case LABEL_DECL:
2663 case FUNCTION_DECL:
2664 case SSA_NAME:
2665
2666 case COMPONENT_REF:
2667 case INDIRECT_REF:
2668 case ALIGN_INDIRECT_REF:
2669 case MISALIGNED_INDIRECT_REF:
2670 case ARRAY_REF:
2671 case ARRAY_RANGE_REF:
2672 case BIT_FIELD_REF:
2673 case OBJ_TYPE_REF:
2674
2675 case REALPART_EXPR:
2676 case IMAGPART_EXPR:
2677 case PREINCREMENT_EXPR:
2678 case PREDECREMENT_EXPR:
2679 case SAVE_EXPR:
2680 case TRY_CATCH_EXPR:
2681 case WITH_CLEANUP_EXPR:
2682 case COMPOUND_EXPR:
2683 case MODIFY_EXPR:
2684 case GIMPLE_MODIFY_STMT:
2685 case TARGET_EXPR:
2686 case COND_EXPR:
2687 case BIND_EXPR:
2688 case MIN_EXPR:
2689 case MAX_EXPR:
2690 break;
2691
2692 default:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2695 break;
2696 return false;
2697 }
2698
2699 return true;
2700 }
2701
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2703
2704 tree
2705 non_lvalue (tree x)
2706 {
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2708 us. */
2709 if (in_gimple_form)
2710 return x;
2711
2712 if (! maybe_lvalue_p (x))
2713 return x;
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2715 }
2716
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2719
2720 int pedantic_lvalues;
2721
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2724
2725 static tree
2726 pedantic_non_lvalue (tree x)
2727 {
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2730 else
2731 return x;
2732 }
2733 \f
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS flag
2737 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2738
2739 enum tree_code
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2741 {
2742 if (honor_nans && flag_trapping_math)
2743 return ERROR_MARK;
2744
2745 switch (code)
2746 {
2747 case EQ_EXPR:
2748 return NE_EXPR;
2749 case NE_EXPR:
2750 return EQ_EXPR;
2751 case GT_EXPR:
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2753 case GE_EXPR:
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2755 case LT_EXPR:
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2757 case LE_EXPR:
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2759 case LTGT_EXPR:
2760 return UNEQ_EXPR;
2761 case UNEQ_EXPR:
2762 return LTGT_EXPR;
2763 case UNGT_EXPR:
2764 return LE_EXPR;
2765 case UNGE_EXPR:
2766 return LT_EXPR;
2767 case UNLT_EXPR:
2768 return GE_EXPR;
2769 case UNLE_EXPR:
2770 return GT_EXPR;
2771 case ORDERED_EXPR:
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2775 default:
2776 gcc_unreachable ();
2777 }
2778 }
2779
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2782
2783 enum tree_code
2784 swap_tree_comparison (enum tree_code code)
2785 {
2786 switch (code)
2787 {
2788 case EQ_EXPR:
2789 case NE_EXPR:
2790 case ORDERED_EXPR:
2791 case UNORDERED_EXPR:
2792 case LTGT_EXPR:
2793 case UNEQ_EXPR:
2794 return code;
2795 case GT_EXPR:
2796 return LT_EXPR;
2797 case GE_EXPR:
2798 return LE_EXPR;
2799 case LT_EXPR:
2800 return GT_EXPR;
2801 case LE_EXPR:
2802 return GE_EXPR;
2803 case UNGT_EXPR:
2804 return UNLT_EXPR;
2805 case UNGE_EXPR:
2806 return UNLE_EXPR;
2807 case UNLT_EXPR:
2808 return UNGT_EXPR;
2809 case UNLE_EXPR:
2810 return UNGE_EXPR;
2811 default:
2812 gcc_unreachable ();
2813 }
2814 }
2815
2816
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2820
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2823 {
2824 switch (code)
2825 {
2826 case LT_EXPR:
2827 return COMPCODE_LT;
2828 case EQ_EXPR:
2829 return COMPCODE_EQ;
2830 case LE_EXPR:
2831 return COMPCODE_LE;
2832 case GT_EXPR:
2833 return COMPCODE_GT;
2834 case NE_EXPR:
2835 return COMPCODE_NE;
2836 case GE_EXPR:
2837 return COMPCODE_GE;
2838 case ORDERED_EXPR:
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2842 case UNLT_EXPR:
2843 return COMPCODE_UNLT;
2844 case UNEQ_EXPR:
2845 return COMPCODE_UNEQ;
2846 case UNLE_EXPR:
2847 return COMPCODE_UNLE;
2848 case UNGT_EXPR:
2849 return COMPCODE_UNGT;
2850 case LTGT_EXPR:
2851 return COMPCODE_LTGT;
2852 case UNGE_EXPR:
2853 return COMPCODE_UNGE;
2854 default:
2855 gcc_unreachable ();
2856 }
2857 }
2858
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2862
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2865 {
2866 switch (code)
2867 {
2868 case COMPCODE_LT:
2869 return LT_EXPR;
2870 case COMPCODE_EQ:
2871 return EQ_EXPR;
2872 case COMPCODE_LE:
2873 return LE_EXPR;
2874 case COMPCODE_GT:
2875 return GT_EXPR;
2876 case COMPCODE_NE:
2877 return NE_EXPR;
2878 case COMPCODE_GE:
2879 return GE_EXPR;
2880 case COMPCODE_ORD:
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2884 case COMPCODE_UNLT:
2885 return UNLT_EXPR;
2886 case COMPCODE_UNEQ:
2887 return UNEQ_EXPR;
2888 case COMPCODE_UNLE:
2889 return UNLE_EXPR;
2890 case COMPCODE_UNGT:
2891 return UNGT_EXPR;
2892 case COMPCODE_LTGT:
2893 return LTGT_EXPR;
2894 case COMPCODE_UNGE:
2895 return UNGE_EXPR;
2896 default:
2897 gcc_unreachable ();
2898 }
2899 }
2900
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
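/* For example, when NaNs need not be honored, (a < b) || (a == b)
   combines as COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and is
   folded to a <= b. */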
2906
2907 tree
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2911 {
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2916
2917 switch (code)
2918 {
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2921 break;
2922
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2925 break;
2926
2927 default:
2928 return NULL_TREE;
2929 }
2930
2931 if (!honor_nans)
2932 {
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2940 }
2941 else if (flag_trapping_math)
2942 {
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2954
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2963 rtrap = false;
2964
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2967 if (rtrap && !ltrap
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2969 return NULL_TREE;
2970
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2973 return NULL_TREE;
2974 }
2975
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2980 else
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
2983 }
2984
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2986
2987 static int
2988 truth_value_p (enum tree_code code)
2989 {
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2994 }
2995 \f
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
2999
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3006
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3017
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
3021
3022 int
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3024 {
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3027 return 0;
3028
3029 /* If both types don't have the same signedness, then we can't consider
3030 them equal. We must check this before the STRIP_NOPS calls
3031 because they may change the signedness of the arguments. */
3032 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3033 return 0;
3034
3035 /* If both types don't have the same precision, then it is not safe
3036 to strip NOPs. */
3037 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3038 return 0;
3039
3040 STRIP_NOPS (arg0);
3041 STRIP_NOPS (arg1);
3042
3043 /* In case both args are comparisons but with different comparison
3044 code, try to swap the comparison operands of one arg to produce
3045 a match and compare that variant. */
3046 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3047 && COMPARISON_CLASS_P (arg0)
3048 && COMPARISON_CLASS_P (arg1))
3049 {
3050 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3051
3052 if (TREE_CODE (arg0) == swap_code)
3053 return operand_equal_p (TREE_OPERAND (arg0, 0),
3054 TREE_OPERAND (arg1, 1), flags)
3055 && operand_equal_p (TREE_OPERAND (arg0, 1),
3056 TREE_OPERAND (arg1, 0), flags);
3057 }
3058
3059 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3060 /* This is needed for conversions and for COMPONENT_REF.
3061 Might as well play it safe and always test this. */
3062 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3063 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3064 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3065 return 0;
3066
3067 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3068 We don't care about side effects in that case because the SAVE_EXPR
3069 takes care of that for us. In all other cases, two expressions are
3070 equal if they have no side effects. If we have two identical
3071 expressions with side effects that should be treated the same due
3072 to the only side effects being identical SAVE_EXPR's, that will
3073 be detected in the recursive calls below. */
3074 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3075 && (TREE_CODE (arg0) == SAVE_EXPR
3076 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3077 return 1;
3078
3079 /* Next handle constant cases, those for which we can return 1 even
3080 if ONLY_CONST is set. */
3081 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3082 switch (TREE_CODE (arg0))
3083 {
3084 case INTEGER_CST:
3085 return tree_int_cst_equal (arg0, arg1);
3086
3087 case FIXED_CST:
3088 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3089 TREE_FIXED_CST (arg1));
3090
3091 case REAL_CST:
3092 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3093 TREE_REAL_CST (arg1)))
3094 return 1;
3095
3096
3097 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3098 {
3099 /* If we do not distinguish between signed and unsigned zero,
3100 consider them equal. */
3101 if (real_zerop (arg0) && real_zerop (arg1))
3102 return 1;
3103 }
3104 return 0;
3105
3106 case VECTOR_CST:
3107 {
3108 tree v1, v2;
3109
3110 v1 = TREE_VECTOR_CST_ELTS (arg0);
3111 v2 = TREE_VECTOR_CST_ELTS (arg1);
3112 while (v1 && v2)
3113 {
3114 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3115 flags))
3116 return 0;
3117 v1 = TREE_CHAIN (v1);
3118 v2 = TREE_CHAIN (v2);
3119 }
3120
3121 return v1 == v2;
3122 }
3123
3124 case COMPLEX_CST:
3125 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3126 flags)
3127 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3128 flags));
3129
3130 case STRING_CST:
3131 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3132 && ! memcmp (TREE_STRING_POINTER (arg0),
3133 TREE_STRING_POINTER (arg1),
3134 TREE_STRING_LENGTH (arg0)));
3135
3136 case ADDR_EXPR:
3137 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3138 0);
3139 default:
3140 break;
3141 }
3142
3143 if (flags & OEP_ONLY_CONST)
3144 return 0;
3145
3146 /* Define macros to test an operand from arg0 and arg1 for equality and a
3147 variant that allows null and views null as being different from any
3148 non-null value. In the latter case, if either is null, both
3149 must be; otherwise, do the normal comparison. */
3150 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3151 TREE_OPERAND (arg1, N), flags)
3152
3153 #define OP_SAME_WITH_NULL(N) \
3154 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3155 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3156
3157 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3158 {
3159 case tcc_unary:
3160 /* Two conversions are equal only if signedness and modes match. */
3161 switch (TREE_CODE (arg0))
3162 {
3163 case NOP_EXPR:
3164 case CONVERT_EXPR:
3165 case FIX_TRUNC_EXPR:
3166 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3167 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3168 return 0;
3169 break;
3170 default:
3171 break;
3172 }
3173
3174 return OP_SAME (0);
3175
3176
3177 case tcc_comparison:
3178 case tcc_binary:
3179 if (OP_SAME (0) && OP_SAME (1))
3180 return 1;
3181
3182 /* For commutative ops, allow the other order. */
3183 return (commutative_tree_code (TREE_CODE (arg0))
3184 && operand_equal_p (TREE_OPERAND (arg0, 0),
3185 TREE_OPERAND (arg1, 1), flags)
3186 && operand_equal_p (TREE_OPERAND (arg0, 1),
3187 TREE_OPERAND (arg1, 0), flags));
3188
3189 case tcc_reference:
3190 /* If either of the pointer (or reference) expressions we are
3191 dereferencing contain a side effect, these cannot be equal. */
3192 if (TREE_SIDE_EFFECTS (arg0)
3193 || TREE_SIDE_EFFECTS (arg1))
3194 return 0;
3195
3196 switch (TREE_CODE (arg0))
3197 {
3198 case INDIRECT_REF:
3199 case ALIGN_INDIRECT_REF:
3200 case MISALIGNED_INDIRECT_REF:
3201 case REALPART_EXPR:
3202 case IMAGPART_EXPR:
3203 return OP_SAME (0);
3204
3205 case ARRAY_REF:
3206 case ARRAY_RANGE_REF:
3207 /* Operands 2 and 3 may be null.
3208 Compare the array index by value first if it is constant, as we
3209 may have different types but the same value here. */
3210 return (OP_SAME (0)
3211 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3212 TREE_OPERAND (arg1, 1))
3213 || OP_SAME (1))
3214 && OP_SAME_WITH_NULL (2)
3215 && OP_SAME_WITH_NULL (3));
3216
3217 case COMPONENT_REF:
3218 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3219 may be NULL when we're called to compare MEM_EXPRs. */
3220 return OP_SAME_WITH_NULL (0)
3221 && OP_SAME (1)
3222 && OP_SAME_WITH_NULL (2);
3223
3224 case BIT_FIELD_REF:
3225 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3226
3227 default:
3228 return 0;
3229 }
3230
3231 case tcc_expression:
3232 switch (TREE_CODE (arg0))
3233 {
3234 case ADDR_EXPR:
3235 case TRUTH_NOT_EXPR:
3236 return OP_SAME (0);
3237
3238 case TRUTH_ANDIF_EXPR:
3239 case TRUTH_ORIF_EXPR:
3240 return OP_SAME (0) && OP_SAME (1);
3241
3242 case TRUTH_AND_EXPR:
3243 case TRUTH_OR_EXPR:
3244 case TRUTH_XOR_EXPR:
3245 if (OP_SAME (0) && OP_SAME (1))
3246 return 1;
3247
3248 /* Otherwise take into account this is a commutative operation. */
3249 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3250 TREE_OPERAND (arg1, 1), flags)
3251 && operand_equal_p (TREE_OPERAND (arg0, 1),
3252 TREE_OPERAND (arg1, 0), flags));
3253
3254 default:
3255 return 0;
3256 }
3257
3258 case tcc_vl_exp:
3259 switch (TREE_CODE (arg0))
3260 {
3261 case CALL_EXPR:
3262 /* If the CALL_EXPRs call different functions, then they
3263 clearly cannot be equal. */
3264 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3265 flags))
3266 return 0;
3267
3268 {
3269 unsigned int cef = call_expr_flags (arg0);
3270 if (flags & OEP_PURE_SAME)
3271 cef &= ECF_CONST | ECF_PURE;
3272 else
3273 cef &= ECF_CONST;
3274 if (!cef)
3275 return 0;
3276 }
3277
3278 /* Now see if all the arguments are the same. */
3279 {
3280 const_call_expr_arg_iterator iter0, iter1;
3281 const_tree a0, a1;
3282 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3283 a1 = first_const_call_expr_arg (arg1, &iter1);
3284 a0 && a1;
3285 a0 = next_const_call_expr_arg (&iter0),
3286 a1 = next_const_call_expr_arg (&iter1))
3287 if (! operand_equal_p (a0, a1, flags))
3288 return 0;
3289
3290 /* If we get here and both argument lists are exhausted
3291 then the CALL_EXPRs are equal. */
3292 return ! (a0 || a1);
3293 }
3294 default:
3295 return 0;
3296 }
3297
3298 case tcc_declaration:
3299 /* Consider __builtin_sqrt equal to sqrt. */
3300 return (TREE_CODE (arg0) == FUNCTION_DECL
3301 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3302 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3303 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3304
3305 default:
3306 return 0;
3307 }
3308
3309 #undef OP_SAME
3310 #undef OP_SAME_WITH_NULL
3311 }
3312 \f
3313 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3314 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3315
3316 When in doubt, return 0. */
3317
3318 static int
3319 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3320 {
3321 int unsignedp1, unsignedpo;
3322 tree primarg0, primarg1, primother;
3323 unsigned int correct_width;
3324
3325 if (operand_equal_p (arg0, arg1, 0))
3326 return 1;
3327
3328 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3329 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3330 return 0;
3331
3332 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3333 and see if the inner values are the same. This removes any
3334 signedness comparison, which doesn't matter here. */
3335 primarg0 = arg0, primarg1 = arg1;
3336 STRIP_NOPS (primarg0);
3337 STRIP_NOPS (primarg1);
3338 if (operand_equal_p (primarg0, primarg1, 0))
3339 return 1;
3340
3341 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3342 actual comparison operand, ARG0.
3343
3344 First throw away any conversions to wider types
3345 already present in the operands. */
3346
3347 primarg1 = get_narrower (arg1, &unsignedp1);
3348 primother = get_narrower (other, &unsignedpo);
3349
3350 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3351 if (unsignedp1 == unsignedpo
3352 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3353 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3354 {
3355 tree type = TREE_TYPE (arg0);
3356
3357 /* Make sure shorter operand is extended the right way
3358 to match the longer operand. */
3359 primarg1 = fold_convert (signed_or_unsigned_type_for
3360 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3361
3362 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3363 return 1;
3364 }
3365
3366 return 0;
3367 }
3368 \f
3369 /* See if ARG is an expression that is either a comparison or is performing
3370 arithmetic on comparisons. The comparisons must only be comparing
3371 two different values, which will be stored in *CVAL1 and *CVAL2; if
3372 they are nonzero it means that some operands have already been found.
3373 No variables may be used anywhere else in the expression except in the
3374 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3375 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3376
3377 If this is true, return 1. Otherwise, return zero. */
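/* For example, given (a < b) | (a == b), *CVAL1 becomes a, *CVAL2
   becomes b, and 1 is returned. */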
3378
3379 static int
3380 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3381 {
3382 enum tree_code code = TREE_CODE (arg);
3383 enum tree_code_class class = TREE_CODE_CLASS (code);
3384
3385 /* We can handle some of the tcc_expression cases here. */
3386 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3387 class = tcc_unary;
3388 else if (class == tcc_expression
3389 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3390 || code == COMPOUND_EXPR))
3391 class = tcc_binary;
3392
3393 else if (class == tcc_expression && code == SAVE_EXPR
3394 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3395 {
3396 /* If we've already found a CVAL1 or CVAL2, this expression is
3397 too complex to handle. */
3398 if (*cval1 || *cval2)
3399 return 0;
3400
3401 class = tcc_unary;
3402 *save_p = 1;
3403 }
3404
3405 switch (class)
3406 {
3407 case tcc_unary:
3408 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3409
3410 case tcc_binary:
3411 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3412 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3413 cval1, cval2, save_p));
3414
3415 case tcc_constant:
3416 return 1;
3417
3418 case tcc_expression:
3419 if (code == COND_EXPR)
3420 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3421 cval1, cval2, save_p)
3422 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3423 cval1, cval2, save_p)
3424 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3425 cval1, cval2, save_p));
3426 return 0;
3427
3428 case tcc_comparison:
3429 /* First see if we can handle the first operand, then the second. For
3430 the second operand, we know *CVAL1 can't be zero. It must be that
3431 one side of the comparison is each of the values; test for the
3432 case where this isn't true by failing if the two operands
3433 are the same. */
3434
3435 if (operand_equal_p (TREE_OPERAND (arg, 0),
3436 TREE_OPERAND (arg, 1), 0))
3437 return 0;
3438
3439 if (*cval1 == 0)
3440 *cval1 = TREE_OPERAND (arg, 0);
3441 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3442 ;
3443 else if (*cval2 == 0)
3444 *cval2 = TREE_OPERAND (arg, 0);
3445 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3446 ;
3447 else
3448 return 0;
3449
3450 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3451 ;
3452 else if (*cval2 == 0)
3453 *cval2 = TREE_OPERAND (arg, 1);
3454 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3455 ;
3456 else
3457 return 0;
3458
3459 return 1;
3460
3461 default:
3462 return 0;
3463 }
3464 }
3465 \f
3466 /* ARG is a tree that is known to contain just arithmetic operations and
3467 comparisons. Evaluate the operations in the tree substituting NEW0 for
3468 any occurrence of OLD0 as an operand of a comparison and likewise for
3469 NEW1 and OLD1. */
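/* For example, with OLD0 = a, NEW0 = x, OLD1 = b and NEW1 = y,
   the tree (a < b) | (a == b) becomes (x < y) | (x == y). */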
3470
3471 static tree
3472 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3473 {
3474 tree type = TREE_TYPE (arg);
3475 enum tree_code code = TREE_CODE (arg);
3476 enum tree_code_class class = TREE_CODE_CLASS (code);
3477
3478 /* We can handle some of the tcc_expression cases here. */
3479 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3480 class = tcc_unary;
3481 else if (class == tcc_expression
3482 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3483 class = tcc_binary;
3484
3485 switch (class)
3486 {
3487 case tcc_unary:
3488 return fold_build1 (code, type,
3489 eval_subst (TREE_OPERAND (arg, 0),
3490 old0, new0, old1, new1));
3491
3492 case tcc_binary:
3493 return fold_build2 (code, type,
3494 eval_subst (TREE_OPERAND (arg, 0),
3495 old0, new0, old1, new1),
3496 eval_subst (TREE_OPERAND (arg, 1),
3497 old0, new0, old1, new1));
3498
3499 case tcc_expression:
3500 switch (code)
3501 {
3502 case SAVE_EXPR:
3503 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3504
3505 case COMPOUND_EXPR:
3506 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3507
3508 case COND_EXPR:
3509 return fold_build3 (code, type,
3510 eval_subst (TREE_OPERAND (arg, 0),
3511 old0, new0, old1, new1),
3512 eval_subst (TREE_OPERAND (arg, 1),
3513 old0, new0, old1, new1),
3514 eval_subst (TREE_OPERAND (arg, 2),
3515 old0, new0, old1, new1));
3516 default:
3517 break;
3518 }
3519 /* Fall through - ??? */
3520
3521 case tcc_comparison:
3522 {
3523 tree arg0 = TREE_OPERAND (arg, 0);
3524 tree arg1 = TREE_OPERAND (arg, 1);
3525
3526 /* We need to check both for exact equality and tree equality. The
3527 former will be true if the operand has a side-effect. In that
3528 case, we know the operand occurred exactly once. */
3529
3530 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3531 arg0 = new0;
3532 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3533 arg0 = new1;
3534
3535 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3536 arg1 = new0;
3537 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3538 arg1 = new1;
3539
3540 return fold_build2 (code, type, arg0, arg1);
3541 }
3542
3543 default:
3544 return arg;
3545 }
3546 }
3547 \f
3548 /* Return a tree for the case when the result of an expression is RESULT
3549 converted to TYPE and OMITTED was previously an operand of the expression
3550 but is now not needed (e.g., we folded OMITTED * 0).
3551
3552 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3553 the conversion of RESULT to TYPE. */
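/* For example, when f () * 0 is folded the result is the
   COMPOUND_EXPR (f (), 0), preserving the call's side effects. */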
3554
3555 tree
3556 omit_one_operand (tree type, tree result, tree omitted)
3557 {
3558 tree t = fold_convert (type, result);
3559
3560 /* If the resulting operand is an empty statement, just return the omitted
3561 statement cast to void. */
3562 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3563 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3564
3565 if (TREE_SIDE_EFFECTS (omitted))
3566 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3567
3568 return non_lvalue (t);
3569 }
3570
3571 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3572
3573 static tree
3574 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3575 {
3576 tree t = fold_convert (type, result);
3577
3578 /* If the resulting operand is an empty statement, just return the omitted
3579 statement cast to void. */
3580 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3581 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3582
3583 if (TREE_SIDE_EFFECTS (omitted))
3584 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3585
3586 return pedantic_non_lvalue (t);
3587 }
3588
3589 /* Return a tree for the case when the result of an expression is RESULT
3590 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3591 of the expression but are now not needed.
3592
3593 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3594 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3595 evaluated before OMITTED2. Otherwise, if neither has side effects,
3596 just do the conversion of RESULT to TYPE. */
3597
3598 tree
3599 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3600 {
3601 tree t = fold_convert (type, result);
3602
3603 if (TREE_SIDE_EFFECTS (omitted2))
3604 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3605 if (TREE_SIDE_EFFECTS (omitted1))
3606 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3607
3608 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3609 }
3610
3611 \f
3612 /* Return a simplified tree node for the truth-negation of ARG. This
3613 never alters ARG itself. We assume that ARG is an operation that
3614 returns a truth value (0 or 1).
3615
3616 FIXME: one would think we would fold the result, but it causes
3617 problems with the dominator optimizer. */
3618
3619 tree
3620 fold_truth_not_expr (tree arg)
3621 {
3622 tree type = TREE_TYPE (arg);
3623 enum tree_code code = TREE_CODE (arg);
3624
3625 /* If this is a comparison, we can simply invert it, except for
3626 floating-point non-equality comparisons, in which case we return
3627 NULL_TREE and let the caller enclose a TRUTH_NOT_EXPR around what we have. */
3628
3629 if (TREE_CODE_CLASS (code) == tcc_comparison)
3630 {
3631 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3632 if (FLOAT_TYPE_P (op_type)
3633 && flag_trapping_math
3634 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3635 && code != NE_EXPR && code != EQ_EXPR)
3636 return NULL_TREE;
3637 else
3638 {
3639 code = invert_tree_comparison (code,
3640 HONOR_NANS (TYPE_MODE (op_type)));
3641 if (code == ERROR_MARK)
3642 return NULL_TREE;
3643 else
3644 return build2 (code, type,
3645 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3646 }
3647 }
3648
3649 switch (code)
3650 {
3651 case INTEGER_CST:
3652 return constant_boolean_node (integer_zerop (arg), type);
3653
3654 case TRUTH_AND_EXPR:
3655 return build2 (TRUTH_OR_EXPR, type,
3656 invert_truthvalue (TREE_OPERAND (arg, 0)),
3657 invert_truthvalue (TREE_OPERAND (arg, 1)));
3658
3659 case TRUTH_OR_EXPR:
3660 return build2 (TRUTH_AND_EXPR, type,
3661 invert_truthvalue (TREE_OPERAND (arg, 0)),
3662 invert_truthvalue (TREE_OPERAND (arg, 1)));
3663
3664 case TRUTH_XOR_EXPR:
3665 /* Here we can invert either operand. We invert the first operand
3666 unless the second operand is a TRUTH_NOT_EXPR in which case our
3667 result is the XOR of the first operand with the inside of the
3668 negation of the second operand. */
3669
3670 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3671 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3672 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3673 else
3674 return build2 (TRUTH_XOR_EXPR, type,
3675 invert_truthvalue (TREE_OPERAND (arg, 0)),
3676 TREE_OPERAND (arg, 1));
3677
3678 case TRUTH_ANDIF_EXPR:
3679 return build2 (TRUTH_ORIF_EXPR, type,
3680 invert_truthvalue (TREE_OPERAND (arg, 0)),
3681 invert_truthvalue (TREE_OPERAND (arg, 1)));
3682
3683 case TRUTH_ORIF_EXPR:
3684 return build2 (TRUTH_ANDIF_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 invert_truthvalue (TREE_OPERAND (arg, 1)));
3687
3688 case TRUTH_NOT_EXPR:
3689 return TREE_OPERAND (arg, 0);
3690
3691 case COND_EXPR:
3692 {
3693 tree arg1 = TREE_OPERAND (arg, 1);
3694 tree arg2 = TREE_OPERAND (arg, 2);
3695 /* A COND_EXPR may have a throw as one operand, which
3696 then has void type. Just leave void operands
3697 as they are. */
3698 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3699 VOID_TYPE_P (TREE_TYPE (arg1))
3700 ? arg1 : invert_truthvalue (arg1),
3701 VOID_TYPE_P (TREE_TYPE (arg2))
3702 ? arg2 : invert_truthvalue (arg2));
3703 }
3704
3705 case COMPOUND_EXPR:
3706 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3707 invert_truthvalue (TREE_OPERAND (arg, 1)));
3708
3709 case NON_LVALUE_EXPR:
3710 return invert_truthvalue (TREE_OPERAND (arg, 0));
3711
3712 case NOP_EXPR:
3713 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3714 return build1 (TRUTH_NOT_EXPR, type, arg);
3715
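/* FALLTHROUGH */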
3716 case CONVERT_EXPR:
3717 case FLOAT_EXPR:
3718 return build1 (TREE_CODE (arg), type,
3719 invert_truthvalue (TREE_OPERAND (arg, 0)));
3720
3721 case BIT_AND_EXPR:
3722 if (!integer_onep (TREE_OPERAND (arg, 1)))
3723 break;
3724 return build2 (EQ_EXPR, type, arg,
3725 build_int_cst (type, 0));
3726
3727 case SAVE_EXPR:
3728 return build1 (TRUTH_NOT_EXPR, type, arg);
3729
3730 case CLEANUP_POINT_EXPR:
3731 return build1 (CLEANUP_POINT_EXPR, type,
3732 invert_truthvalue (TREE_OPERAND (arg, 0)));
3733
3734 default:
3735 break;
3736 }
3737
3738 return NULL_TREE;
3739 }
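
/* Illustrative results (a sketch, not an exhaustive list):
     a < b       becomes  a >= b   (when NaNs need not be honored)
     a && b      becomes  !a || !b
     c ? x : y   becomes  c ? !x : !y
   and for an unhandled ARG the function returns NULL_TREE, leaving the
   caller (e.g. invert_truthvalue below) to wrap a TRUTH_NOT_EXPR.  */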
3740
3741 /* Return a tree node for the truth-negation of ARG. This never
3742 alters ARG itself, and assumes ARG is an operation that returns
3743 a truth value (0 or 1).
3744
3745 Unlike fold_truth_not_expr above, this never fails: if no
3746 simplification is found, the result is wrapped in a TRUTH_NOT_EXPR. */
3747
3748 tree
3749 invert_truthvalue (tree arg)
3750 {
3751 tree tem;
3752
3753 if (TREE_CODE (arg) == ERROR_MARK)
3754 return arg;
3755
3756 tem = fold_truth_not_expr (arg);
3757 if (!tem)
3758 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3759
3760 return tem;
3761 }
3762
3763 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3764 operands are another bit-wise operation with a common input. If so,
3765 distribute the bit operations to save an operation and possibly two if
3766 constants are involved. For example, convert
3767 (A | B) & (A | C) into A | (B & C)
3768 Further simplification will occur if B and C are constants.
3769
3770 If this optimization cannot be done, 0 will be returned. */
3771
3772 static tree
3773 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3774 {
3775 tree common;
3776 tree left, right;
3777
3778 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3779 || TREE_CODE (arg0) == code
3780 || (TREE_CODE (arg0) != BIT_AND_EXPR
3781 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3782 return 0;
3783
3784 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3785 {
3786 common = TREE_OPERAND (arg0, 0);
3787 left = TREE_OPERAND (arg0, 1);
3788 right = TREE_OPERAND (arg1, 1);
3789 }
3790 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3791 {
3792 common = TREE_OPERAND (arg0, 0);
3793 left = TREE_OPERAND (arg0, 1);
3794 right = TREE_OPERAND (arg1, 0);
3795 }
3796 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3797 {
3798 common = TREE_OPERAND (arg0, 1);
3799 left = TREE_OPERAND (arg0, 0);
3800 right = TREE_OPERAND (arg1, 1);
3801 }
3802 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3803 {
3804 common = TREE_OPERAND (arg0, 1);
3805 left = TREE_OPERAND (arg0, 0);
3806 right = TREE_OPERAND (arg1, 0);
3807 }
3808 else
3809 return 0;
3810
3811 return fold_build2 (TREE_CODE (arg0), type, common,
3812 fold_build2 (code, type, left, right));
3813 }
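
/* For example (source-level sketch): (x | 3) & (x | 5) becomes
   x | (3 & 5), and the inner fold_build2 reduces the constant part,
   giving x | 1.  */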
3814
3815 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3816 with code CODE. This optimization is unsafe, as it may change rounding. */
3817 static tree
3818 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3819 {
3820 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3821 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3822
3823 /* (A / C) +- (B / C) -> (A +- B) / C. */
3824 if (mul0 == mul1
3825 && operand_equal_p (TREE_OPERAND (arg0, 1),
3826 TREE_OPERAND (arg1, 1), 0))
3827 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3828 fold_build2 (code, type,
3829 TREE_OPERAND (arg0, 0),
3830 TREE_OPERAND (arg1, 0)),
3831 TREE_OPERAND (arg0, 1));
3832
3833 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3834 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3835 TREE_OPERAND (arg1, 0), 0)
3836 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3837 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3838 {
3839 REAL_VALUE_TYPE r0, r1;
3840 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3841 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3842 if (!mul0)
3843 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3844 if (!mul1)
3845 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3846 real_arithmetic (&r0, code, &r0, &r1);
3847 return fold_build2 (MULT_EXPR, type,
3848 TREE_OPERAND (arg0, 0),
3849 build_real (type, r0));
3850 }
3851
3852 return NULL_TREE;
3853 }
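
/* Source-level sketch of the two cases (hypothetical operands):
     x/d + y/d      ->  (x + y) / d
     x/2.0 + x/4.0  ->  x * (0.5 + 0.25)  ->  x * 0.75
   Either form can change rounding, hence the "unsafe" caveat above.  */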
3854 \f
3855 /* Subroutine for fold_truthop: decode a field reference.
3856
3857 If EXP is a comparison reference, we return the innermost reference.
3858
3859 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3860 set to the starting bit number.
3861
3862 If the innermost field can be completely contained in a mode-sized
3863 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3864
3865 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3866 otherwise it is not changed.
3867
3868 *PUNSIGNEDP is set to the signedness of the field.
3869
3870 *PMASK is set to the mask used. This is either contained in a
3871 BIT_AND_EXPR or derived from the width of the field.
3872
3873 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3874
3875 Return 0 if this is not a component reference or is one that we can't
3876 do anything with. */
3877
3878 static tree
3879 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3880 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3881 int *punsignedp, int *pvolatilep,
3882 tree *pmask, tree *pand_mask)
3883 {
3884 tree outer_type = 0;
3885 tree and_mask = 0;
3886 tree mask, inner, offset;
3887 tree unsigned_type;
3888 unsigned int precision;
3889
3890 /* All the optimizations using this function assume integer fields.
3891 There are problems with FP fields since the type_for_size call
3892 below can fail for, e.g., XFmode. */
3893 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3894 return 0;
3895
3896 /* We are interested in the bare arrangement of bits, so strip everything
3897 that doesn't affect the machine mode. However, record the type of the
3898 outermost expression if it may matter below. */
3899 if (TREE_CODE (exp) == NOP_EXPR
3900 || TREE_CODE (exp) == CONVERT_EXPR
3901 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3902 outer_type = TREE_TYPE (exp);
3903 STRIP_NOPS (exp);
3904
3905 if (TREE_CODE (exp) == BIT_AND_EXPR)
3906 {
3907 and_mask = TREE_OPERAND (exp, 1);
3908 exp = TREE_OPERAND (exp, 0);
3909 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3910 if (TREE_CODE (and_mask) != INTEGER_CST)
3911 return 0;
3912 }
3913
3914 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3915 punsignedp, pvolatilep, false);
3916 if ((inner == exp && and_mask == 0)
3917 || *pbitsize < 0 || offset != 0
3918 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3919 return 0;
3920
3921 /* If the number of bits in the reference is the same as the bitsize of
3922 the outer type, then the outer type gives the signedness. Otherwise
3923 (in case of a small bitfield) the signedness is unchanged. */
3924 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3925 *punsignedp = TYPE_UNSIGNED (outer_type);
3926
3927 /* Compute the mask to access the bitfield. */
3928 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3929 precision = TYPE_PRECISION (unsigned_type);
3930
3931 mask = build_int_cst_type (unsigned_type, -1);
3932
3933 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3934 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3935
3936 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3937 if (and_mask != 0)
3938 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3939 fold_convert (unsigned_type, and_mask), mask);
3940
3941 *pmask = mask;
3942 *pand_mask = and_mask;
3943 return inner;
3944 }
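
/* Illustrative sketch (hypothetical layout): for EXP = s.f & 0x0f,
   where s.f is an unsigned 8-bit field starting at bit 8, this sets
   *PBITSIZE = 8, *PBITPOS = 8, *PUNSIGNEDP = 1, *PAND_MASK = 0x0f and
   *PMASK = 0x0f (the 8-bit field mask 0xff merged with the AND mask),
   and returns the innermost reference to S.  */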
3945
3946 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3947 represents the sign bit of EXP's type. If EXP represents a sign
3948 or zero extension, also test VAL against the unextended type.
3949 The return value is the (sub)expression whose sign bit is VAL,
3950 or NULL_TREE otherwise. */
3951
3952 static tree
3953 sign_bit_p (tree exp, const_tree val)
3954 {
3955 unsigned HOST_WIDE_INT mask_lo, lo;
3956 HOST_WIDE_INT mask_hi, hi;
3957 int width;
3958 tree t;
3959
3960 /* Tree EXP must have an integral type. */
3961 t = TREE_TYPE (exp);
3962 if (! INTEGRAL_TYPE_P (t))
3963 return NULL_TREE;
3964
3965 /* Tree VAL must be an integer constant. */
3966 if (TREE_CODE (val) != INTEGER_CST
3967 || TREE_OVERFLOW (val))
3968 return NULL_TREE;
3969
3970 width = TYPE_PRECISION (t);
3971 if (width > HOST_BITS_PER_WIDE_INT)
3972 {
3973 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3974 lo = 0;
3975
3976 mask_hi = ((unsigned HOST_WIDE_INT) -1
3977 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3978 mask_lo = -1;
3979 }
3980 else
3981 {
3982 hi = 0;
3983 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3984
3985 mask_hi = 0;
3986 mask_lo = ((unsigned HOST_WIDE_INT) -1
3987 >> (HOST_BITS_PER_WIDE_INT - width));
3988 }
3989
3990 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3991 treat VAL as if it were unsigned. */
3992 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3993 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3994 return exp;
3995
3996 /* Handle extension from a narrower type. */
3997 if (TREE_CODE (exp) == NOP_EXPR
3998 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3999 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4000
4001 return NULL_TREE;
4002 }
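
/* For example (32-bit EXP on a 64-bit host, a sketch): WIDTH = 32, so
   LO = 1 << 31 and MASK_LO = 0xffffffff; VAL = 0x80000000 matches and
   EXP is returned, whereas VAL = 0x40000000 yields NULL_TREE.  */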
4003
4004 /* Subroutine for fold_truthop: determine if an operand is simple enough
4005 to be evaluated unconditionally. */
4006
4007 static int
4008 simple_operand_p (const_tree exp)
4009 {
4010 /* Strip any conversions that don't change the machine mode. */
4011 STRIP_NOPS (exp);
4012
4013 return (CONSTANT_CLASS_P (exp)
4014 || TREE_CODE (exp) == SSA_NAME
4015 || (DECL_P (exp)
4016 && ! TREE_ADDRESSABLE (exp)
4017 && ! TREE_THIS_VOLATILE (exp)
4018 && ! DECL_NONLOCAL (exp)
4019 /* Don't regard global variables as simple. They may be
4020 allocated in ways unknown to the compiler (shared memory,
4021 #pragma weak, etc). */
4022 && ! TREE_PUBLIC (exp)
4023 && ! DECL_EXTERNAL (exp)
4024 /* Loading a static variable is unduly expensive, but global
4025 registers aren't expensive. */
4026 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4027 }
4028 \f
4029 /* The following functions are subroutines to fold_range_test and allow it to
4030 try to change a logical combination of comparisons into a range test.
4031
4032 For example, both
4033 X == 2 || X == 3 || X == 4 || X == 5
4034 and
4035 X >= 2 && X <= 5
4036 are converted to
4037 (unsigned) (X - 2) <= 3
4038
4039 We describe each set of comparisons as being either inside or outside
4040 a range, using a variable named like IN_P, and then describe the
4041 range with a lower and upper bound. If one of the bounds is omitted,
4042 it represents either the highest or lowest value of the type.
4043
4044 In the comments below, we represent a range by two numbers in brackets
4045 preceded by a "+" to designate being inside that range, or a "-" to
4046 designate being outside that range, so the condition can be inverted by
4047 flipping the prefix. An omitted bound is represented by a "-". For
4048 example, "- [-, 10]" means being outside the range starting at the lowest
4049 possible value and ending at 10, in other words, being greater than 10.
4050 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4051 always false.
4052
4053 We set up things so that the missing bounds are handled in a consistent
4054 manner; thus neither a missing bound nor "true" and "false" needs
4055 special-case handling. */
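
/* For instance, "X > 10" is described as - [-, 10], i.e. outside the
   range running from the lowest value up to 10; inverting the test
   merely flips the prefix to + [-, 10], meaning X <= 10, with no
   special handling for the missing lower bound.  */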
4056
4057 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4058 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4059 and UPPER1_P are nonzero if the respective argument is an upper bound
4060 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4061 must be specified for a comparison. ARG1 will be converted to ARG0's
4062 type if both are specified. */
4063
4064 static tree
4065 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4066 tree arg1, int upper1_p)
4067 {
4068 tree tem;
4069 int result;
4070 int sgn0, sgn1;
4071
4072 /* If neither arg represents infinity, do the normal operation.
4073 Else, if not a comparison, return 0, representing infinity. Else
4074 handle the special comparison rules. Note that most of the cases
4075 below won't occur, but are handled for consistency. */
4076
4077 if (arg0 != 0 && arg1 != 0)
4078 {
4079 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4080 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4081 STRIP_NOPS (tem);
4082 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4083 }
4084
4085 if (TREE_CODE_CLASS (code) != tcc_comparison)
4086 return 0;
4087
4088 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4089 for neither. In real maths, we cannot assume open ended ranges are
4090 the same. But, this is computer arithmetic, where numbers are finite.
4091 We can therefore replace any missing bound by a value Z that is
4092 greater than any representable number. This permits
4093 us to treat unbounded ranges as equal. */
4094 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4095 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4096 switch (code)
4097 {
4098 case EQ_EXPR:
4099 result = sgn0 == sgn1;
4100 break;
4101 case NE_EXPR:
4102 result = sgn0 != sgn1;
4103 break;
4104 case LT_EXPR:
4105 result = sgn0 < sgn1;
4106 break;
4107 case LE_EXPR:
4108 result = sgn0 <= sgn1;
4109 break;
4110 case GT_EXPR:
4111 result = sgn0 > sgn1;
4112 break;
4113 case GE_EXPR:
4114 result = sgn0 >= sgn1;
4115 break;
4116 default:
4117 gcc_unreachable ();
4118 }
4119
4120 return constant_boolean_node (result, type);
4121 }
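
/* For example (a sketch with integer bounds): a missing upper bound
   (SGN == 1, standing for a value above every representable number)
   compared against the constant 5 (SGN == 0) yields false for LT_EXPR
   and true for GE_EXPR; two missing upper bounds compare EQ_EXPR
   true.  */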
4122 \f
4123 /* Given EXP, a logical expression, set the range it is testing into
4124 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4125 actually being tested. *PLOW and *PHIGH will be made of the same
4126 type as the returned expression. If EXP is not a comparison, we
4127 will most likely not be returning a useful value and range. Set
4128 *STRICT_OVERFLOW_P to true if the return value is only valid
4129 because signed overflow is undefined; otherwise, do not change
4130 *STRICT_OVERFLOW_P. */
4131
4132 static tree
4133 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4134 bool *strict_overflow_p)
4135 {
4136 enum tree_code code;
4137 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4138 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4139 int in_p, n_in_p;
4140 tree low, high, n_low, n_high;
4141
4142 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4143 and see if we can refine the range. Some of the cases below may not
4144 happen, but it doesn't seem worth worrying about this. We "continue"
4145 the outer loop when we've changed something; otherwise we "break"
4146 the switch, which will "break" the while. */
4147
4148 in_p = 0;
4149 low = high = build_int_cst (TREE_TYPE (exp), 0);
4150
4151 while (1)
4152 {
4153 code = TREE_CODE (exp);
4154 exp_type = TREE_TYPE (exp);
4155
4156 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4157 {
4158 if (TREE_OPERAND_LENGTH (exp) > 0)
4159 arg0 = TREE_OPERAND (exp, 0);
4160 if (TREE_CODE_CLASS (code) == tcc_comparison
4161 || TREE_CODE_CLASS (code) == tcc_unary
4162 || TREE_CODE_CLASS (code) == tcc_binary)
4163 arg0_type = TREE_TYPE (arg0);
4164 if (TREE_CODE_CLASS (code) == tcc_binary
4165 || TREE_CODE_CLASS (code) == tcc_comparison
4166 || (TREE_CODE_CLASS (code) == tcc_expression
4167 && TREE_OPERAND_LENGTH (exp) > 1))
4168 arg1 = TREE_OPERAND (exp, 1);
4169 }
4170
4171 switch (code)
4172 {
4173 case TRUTH_NOT_EXPR:
4174 in_p = ! in_p, exp = arg0;
4175 continue;
4176
4177 case EQ_EXPR: case NE_EXPR:
4178 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4179 /* We can only do something if the range is testing for zero
4180 and if the second operand is an integer constant. Note that
4181 saying something is "in" the range we make is done by
4182 complementing IN_P, since IN_P was set for the initial case of
4183 being not equal to zero; "out" means leaving it alone. */
4184 if (low == 0 || high == 0
4185 || ! integer_zerop (low) || ! integer_zerop (high)
4186 || TREE_CODE (arg1) != INTEGER_CST)
4187 break;
4188
4189 switch (code)
4190 {
4191 case NE_EXPR: /* - [c, c] */
4192 low = high = arg1;
4193 break;
4194 case EQ_EXPR: /* + [c, c] */
4195 in_p = ! in_p, low = high = arg1;
4196 break;
4197 case GT_EXPR: /* - [-, c] */
4198 low = 0, high = arg1;
4199 break;
4200 case GE_EXPR: /* + [c, -] */
4201 in_p = ! in_p, low = arg1, high = 0;
4202 break;
4203 case LT_EXPR: /* - [c, -] */
4204 low = arg1, high = 0;
4205 break;
4206 case LE_EXPR: /* + [-, c] */
4207 in_p = ! in_p, low = 0, high = arg1;
4208 break;
4209 default:
4210 gcc_unreachable ();
4211 }
4212
4213 /* If this is an unsigned comparison, we also know that EXP is
4214 greater than or equal to zero. We base the range tests we make
4215 on that fact, so we record it here so we can parse existing
4216 range tests. We test arg0_type since often the return type
4217 of, e.g. EQ_EXPR, is boolean. */
4218 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4219 {
4220 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4221 in_p, low, high, 1,
4222 build_int_cst (arg0_type, 0),
4223 NULL_TREE))
4224 break;
4225
4226 in_p = n_in_p, low = n_low, high = n_high;
4227
4228 /* If the high bound is missing, but we have a nonzero low
4229 bound, reverse the range so it goes from zero to the low bound
4230 minus 1. */
4231 if (high == 0 && low && ! integer_zerop (low))
4232 {
4233 in_p = ! in_p;
4234 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4235 integer_one_node, 0);
4236 low = build_int_cst (arg0_type, 0);
4237 }
4238 }
4239
4240 exp = arg0;
4241 continue;
4242
4243 case NEGATE_EXPR:
4244 /* (-x) IN [a,b] -> x in [-b, -a] */
4245 n_low = range_binop (MINUS_EXPR, exp_type,
4246 build_int_cst (exp_type, 0),
4247 0, high, 1);
4248 n_high = range_binop (MINUS_EXPR, exp_type,
4249 build_int_cst (exp_type, 0),
4250 0, low, 0);
4251 low = n_low, high = n_high;
4252 exp = arg0;
4253 continue;
4254
4255 case BIT_NOT_EXPR:
4256 /* ~ X -> -X - 1 */
4257 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4258 build_int_cst (exp_type, 1));
4259 continue;
4260
4261 case PLUS_EXPR: case MINUS_EXPR:
4262 if (TREE_CODE (arg1) != INTEGER_CST)
4263 break;
4264
4265 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4266 move a constant to the other side. */
4267 if (!TYPE_UNSIGNED (arg0_type)
4268 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4269 break;
4270
4271 /* If EXP is signed, any overflow in the computation is undefined,
4272 so we don't worry about it so long as our computations on
4273 the bounds don't overflow. For unsigned, overflow is defined
4274 and this is exactly the right thing. */
4275 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4276 arg0_type, low, 0, arg1, 0);
4277 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4278 arg0_type, high, 1, arg1, 0);
4279 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4280 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4281 break;
4282
4283 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4284 *strict_overflow_p = true;
4285
4286 /* Check for an unsigned range which has wrapped around the maximum
4287 value thus making n_high < n_low, and normalize it. */
4288 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4289 {
4290 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4291 integer_one_node, 0);
4292 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4293 integer_one_node, 0);
4294
4295 /* If the range is of the form +/- [ x+1, x ], we won't
4296 be able to normalize it. But then, it represents the
4297 whole range or the empty set, so make it
4298 +/- [ -, - ]. */
4299 if (tree_int_cst_equal (n_low, low)
4300 && tree_int_cst_equal (n_high, high))
4301 low = high = 0;
4302 else
4303 in_p = ! in_p;
4304 }
4305 else
4306 low = n_low, high = n_high;
4307
4308 exp = arg0;
4309 continue;
4310
4311 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4312 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4313 break;
4314
4315 if (! INTEGRAL_TYPE_P (arg0_type)
4316 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4317 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4318 break;
4319
4320 n_low = low, n_high = high;
4321
4322 if (n_low != 0)
4323 n_low = fold_convert (arg0_type, n_low);
4324
4325 if (n_high != 0)
4326 n_high = fold_convert (arg0_type, n_high);
4327
4329 /* If we're converting arg0 from an unsigned type to exp's
4330 signed type, we will be doing the comparison as unsigned.
4331 The tests above have already verified that LOW and HIGH
4332 are both positive.
4333
4334 So we have to ensure that we will handle large unsigned
4335 values the same way that the current signed bounds treat
4336 negative values. */
4337
4338 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4339 {
4340 tree high_positive;
4341 tree equiv_type;
4342 /* For fixed-point modes, we need to pass the saturating flag
4343 as the 2nd parameter. */
4344 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4345 equiv_type = lang_hooks.types.type_for_mode
4346 (TYPE_MODE (arg0_type),
4347 TYPE_SATURATING (arg0_type));
4348 else
4349 equiv_type = lang_hooks.types.type_for_mode
4350 (TYPE_MODE (arg0_type), 1);
4351
4352 /* A range without an upper bound is, naturally, unbounded.
4353 Since convert would have cropped a very large value, use
4354 the max value for the destination type. */
4355 high_positive
4356 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4357 : TYPE_MAX_VALUE (arg0_type);
4358
4359 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4360 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4361 fold_convert (arg0_type,
4362 high_positive),
4363 build_int_cst (arg0_type, 1));
4364
4365 /* If the low bound is specified, "and" the range with the
4366 range for which the original unsigned value will be
4367 positive. */
4368 if (low != 0)
4369 {
4370 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4371 1, n_low, n_high, 1,
4372 fold_convert (arg0_type,
4373 integer_zero_node),
4374 high_positive))
4375 break;
4376
4377 in_p = (n_in_p == in_p);
4378 }
4379 else
4380 {
4381 /* Otherwise, "or" the range with the range of the input
4382 that will be interpreted as negative. */
4383 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4384 0, n_low, n_high, 1,
4385 fold_convert (arg0_type,
4386 integer_zero_node),
4387 high_positive))
4388 break;
4389
4390 in_p = (in_p != n_in_p);
4391 }
4392 }
4393
4394 exp = arg0;
4395 low = n_low, high = n_high;
4396 continue;
4397
4398 default:
4399 break;
4400 }
4401
4402 break;
4403 }
4404
4405 /* If EXP is a constant, we can evaluate whether this is true or false. */
4406 if (TREE_CODE (exp) == INTEGER_CST)
4407 {
4408 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4409 exp, 0, low, 0))
4410 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4411 exp, 1, high, 1)));
4412 low = high = 0;
4413 exp = 0;
4414 }
4415
4416 *pin_p = in_p, *plow = low, *phigh = high;
4417 return exp;
4418 }
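
/* A worked sketch (hypothetical input, signed X with undefined
   overflow): for EXP = (x + 10 <= 20), the comparison case records
   + [-, 20] for x + 10, and the PLUS_EXPR case then moves the
   constant across, returning X with the range + [-, 10] and setting
   *STRICT_OVERFLOW_P.  */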
4419 \f
4420 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4421 type, TYPE, return an expression to test if EXP is in (or out of, depending
4422 on IN_P) the range. Return 0 if the test couldn't be created. */
4423
4424 static tree
4425 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4426 {
4427 tree etype = TREE_TYPE (exp);
4428 tree value;
4429
4430 #ifdef HAVE_canonicalize_funcptr_for_compare
4431 /* Disable this optimization for function pointer expressions
4432 on targets that require function pointer canonicalization. */
4433 if (HAVE_canonicalize_funcptr_for_compare
4434 && TREE_CODE (etype) == POINTER_TYPE
4435 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4436 return NULL_TREE;
4437 #endif
4438
4439 if (! in_p)
4440 {
4441 value = build_range_check (type, exp, 1, low, high);
4442 if (value != 0)
4443 return invert_truthvalue (value);
4444
4445 return 0;
4446 }
4447
4448 if (low == 0 && high == 0)
4449 return build_int_cst (type, 1);
4450
4451 if (low == 0)
4452 return fold_build2 (LE_EXPR, type, exp,
4453 fold_convert (etype, high));
4454
4455 if (high == 0)
4456 return fold_build2 (GE_EXPR, type, exp,
4457 fold_convert (etype, low));
4458
4459 if (operand_equal_p (low, high, 0))
4460 return fold_build2 (EQ_EXPR, type, exp,
4461 fold_convert (etype, low));
4462
4463 if (integer_zerop (low))
4464 {
4465 if (! TYPE_UNSIGNED (etype))
4466 {
4467 etype = unsigned_type_for (etype);
4468 high = fold_convert (etype, high);
4469 exp = fold_convert (etype, exp);
4470 }
4471 return build_range_check (type, exp, 1, 0, high);
4472 }
4473
4474 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4475 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4476 {
4477 unsigned HOST_WIDE_INT lo;
4478 HOST_WIDE_INT hi;
4479 int prec;
4480
4481 prec = TYPE_PRECISION (etype);
4482 if (prec <= HOST_BITS_PER_WIDE_INT)
4483 {
4484 hi = 0;
4485 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4486 }
4487 else
4488 {
4489 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4490 lo = (unsigned HOST_WIDE_INT) -1;
4491 }
4492
4493 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4494 {
4495 if (TYPE_UNSIGNED (etype))
4496 {
4497 etype = signed_type_for (etype);
4498 exp = fold_convert (etype, exp);
4499 }
4500 return fold_build2 (GT_EXPR, type, exp,
4501 build_int_cst (etype, 0));
4502 }
4503 }
4504
4505 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4506 This requires wrap-around arithmetic for the type of the expression. */
4507 switch (TREE_CODE (etype))
4508 {
4509 case INTEGER_TYPE:
4510 /* There is no requirement that LOW be within the range of ETYPE
4511 if the latter is a subtype. It must, however, be within the base
4512 type of ETYPE. So be sure we do the subtraction in that type. */
4513 if (TREE_TYPE (etype))
4514 etype = TREE_TYPE (etype);
4515 break;
4516
4517 case ENUMERAL_TYPE:
4518 case BOOLEAN_TYPE:
4519 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4520 TYPE_UNSIGNED (etype));
4521 break;
4522
4523 default:
4524 break;
4525 }
4526
4527 /* If we don't have wrap-around arithmetic up front, try to force it. */
4528 if (TREE_CODE (etype) == INTEGER_TYPE
4529 && !TYPE_OVERFLOW_WRAPS (etype))
4530 {
4531 tree utype, minv, maxv;
4532
4533 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4534 for the type in question, as we rely on this here. */
4535 utype = unsigned_type_for (etype);
4536 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4537 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4538 integer_one_node, 1);
4539 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4540
4541 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4542 minv, 1, maxv, 1)))
4543 etype = utype;
4544 else
4545 return 0;
4546 }
4547
4548 high = fold_convert (etype, high);
4549 low = fold_convert (etype, low);
4550 exp = fold_convert (etype, exp);
4551
4552 value = const_binop (MINUS_EXPR, high, low, 0);
4553
4555 if (POINTER_TYPE_P (etype))
4556 {
4557 if (value != 0 && !TREE_OVERFLOW (value))
4558 {
4559 low = fold_convert (sizetype, low);
4560 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4561 return build_range_check (type,
4562 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4563 1, build_int_cst (etype, 0), value);
4564 }
4565 return 0;
4566 }
4567
4568 if (value != 0 && !TREE_OVERFLOW (value))
4569 return build_range_check (type,
4570 fold_build2 (MINUS_EXPR, etype, exp, low),
4571 1, build_int_cst (etype, 0), value);
4572
4573 return 0;
4574 }
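
/* Sketch of the common case (hypothetical signed X): testing X in
   + [2, 5] is rewritten, via the unsigned conversions above, into
   (unsigned) X - 2 <= 3, replacing two signed comparisons with one
   subtraction and one unsigned comparison.  */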
4575 \f
4576 /* Return the predecessor of VAL in its type, handling the infinite case. */
4577
4578 static tree
4579 range_predecessor (tree val)
4580 {
4581 tree type = TREE_TYPE (val);
4582
4583 if (INTEGRAL_TYPE_P (type)
4584 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4585 return 0;
4586 else
4587 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4588 }
4589
4590 /* Return the successor of VAL in its type, handling the infinite case. */
4591
4592 static tree
4593 range_successor (tree val)
4594 {
4595 tree type = TREE_TYPE (val);
4596
4597 if (INTEGRAL_TYPE_P (type)
4598 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4599 return 0;
4600 else
4601 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4602 }
4603
4604 /* Given two ranges, see if we can merge them into one. Return 1 if we
4605 can, 0 if we can't. Set the output range into the specified parameters. */
4606
4607 static int
4608 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4609 tree high0, int in1_p, tree low1, tree high1)
4610 {
4611 int no_overlap;
4612 int subset;
4613 int temp;
4614 tree tem;
4615 int in_p;
4616 tree low, high;
4617 int lowequal = ((low0 == 0 && low1 == 0)
4618 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4619 low0, 0, low1, 0)));
4620 int highequal = ((high0 == 0 && high1 == 0)
4621 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4622 high0, 1, high1, 1)));
4623
4624 /* Make range 0 be the range that starts first, or ends last if they
4625 start at the same value. Swap them if it isn't. */
4626 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4627 low0, 0, low1, 0))
4628 || (lowequal
4629 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4630 high1, 1, high0, 1))))
4631 {
4632 temp = in0_p, in0_p = in1_p, in1_p = temp;
4633 tem = low0, low0 = low1, low1 = tem;
4634 tem = high0, high0 = high1, high1 = tem;
4635 }
4636
4637 /* Now flag two cases, whether the ranges are disjoint or whether the
4638 second range is totally subsumed in the first. Note that the tests
4639 below are simplified by the ones above. */
4640 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4641 high0, 1, low1, 0));
4642 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4643 high1, 1, high0, 1));
4644
4645 /* We now have four cases, depending on whether we are including or
4646 excluding the two ranges. */
4647 if (in0_p && in1_p)
4648 {
4649 /* If they don't overlap, the result is false. If the second range
4650 is a subset it is the result. Otherwise, the range is from the start
4651 of the second to the end of the first. */
4652 if (no_overlap)
4653 in_p = 0, low = high = 0;
4654 else if (subset)
4655 in_p = 1, low = low1, high = high1;
4656 else
4657 in_p = 1, low = low1, high = high0;
4658 }
4659
4660 else if (in0_p && ! in1_p)
4661 {
4662 /* If they don't overlap, the result is the first range. If they are
4663 equal, the result is false. If the second range is a subset of the
4664 first, and the ranges begin at the same place, we go from just after
4665 the end of the second range to the end of the first. If the second
4666 range is not a subset of the first, or if it is a subset and both
4667 ranges end at the same place, the range starts at the start of the
4668 first range and ends just before the second range.
4669 Otherwise, we can't describe this as a single range. */
4670 if (no_overlap)
4671 in_p = 1, low = low0, high = high0;
4672 else if (lowequal && highequal)
4673 in_p = 0, low = high = 0;
4674 else if (subset && lowequal)
4675 {
4676 low = range_successor (high1);
4677 high = high0;
4678 in_p = 1;
4679 if (low == 0)
4680 {
4681 /* We are in the weird situation where high0 > high1 but
4682 high1 has no successor. Punt. */
4683 return 0;
4684 }
4685 }
4686 else if (! subset || highequal)
4687 {
4688 low = low0;
4689 high = range_predecessor (low1);
4690 in_p = 1;
4691 if (high == 0)
4692 {
4693 /* low0 < low1 but low1 has no predecessor. Punt. */
4694 return 0;
4695 }
4696 }
4697 else
4698 return 0;
4699 }
4700
4701 else if (! in0_p && in1_p)
4702 {
4703 /* If they don't overlap, the result is the second range. If the second
4704 is a subset of the first, the result is false. Otherwise,
4705 the range starts just after the first range and ends at the
4706 end of the second. */
4707 if (no_overlap)
4708 in_p = 1, low = low1, high = high1;
4709 else if (subset || highequal)
4710 in_p = 0, low = high = 0;
4711 else
4712 {
4713 low = range_successor (high0);
4714 high = high1;
4715 in_p = 1;
4716 if (low == 0)
4717 {
4718 /* high1 > high0 but high0 has no successor. Punt. */
4719 return 0;
4720 }
4721 }
4722 }
4723
4724 else
4725 {
4726 /* The case where we are excluding both ranges. Here the complex case
4727 is if they don't overlap. In that case, the only time we have a
4728 range is if they are adjacent. If the second is a subset of the
4729 first, the result is the first. Otherwise, the range to exclude
4730 starts at the beginning of the first range and ends at the end of the
4731 second. */
4732 if (no_overlap)
4733 {
4734 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4735 range_successor (high0),
4736 1, low1, 0)))
4737 in_p = 0, low = low0, high = high1;
4738 else
4739 {
4740 /* Canonicalize - [min, x] into - [-, x]. */
4741 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4742 switch (TREE_CODE (TREE_TYPE (low0)))
4743 {
4744 case ENUMERAL_TYPE:
4745 if (TYPE_PRECISION (TREE_TYPE (low0))
4746 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4747 break;
4748 /* FALLTHROUGH */
4749 case INTEGER_TYPE:
4750 if (tree_int_cst_equal (low0,
4751 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4752 low0 = 0;
4753 break;
4754 case POINTER_TYPE:
4755 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4756 && integer_zerop (low0))
4757 low0 = 0;
4758 break;
4759 default:
4760 break;
4761 }
4762
4763 /* Canonicalize - [x, max] into - [x, -]. */
4764 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4765 switch (TREE_CODE (TREE_TYPE (high1)))
4766 {
4767 case ENUMERAL_TYPE:
4768 if (TYPE_PRECISION (TREE_TYPE (high1))
4769 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4770 break;
4771 /* FALLTHROUGH */
4772 case INTEGER_TYPE:
4773 if (tree_int_cst_equal (high1,
4774 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4775 high1 = 0;
4776 break;
4777 case POINTER_TYPE:
4778 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4779 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4780 high1, 1,
4781 integer_one_node, 1)))
4782 high1 = 0;
4783 break;
4784 default:
4785 break;
4786 }
4787
4788 /* The ranges might also be adjacent between the maximum and
4789 minimum values of the given type. For
4790 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4791 return + [x + 1, y - 1]. */
4792 if (low0 == 0 && high1 == 0)
4793 {
4794 low = range_successor (high0);
4795 high = range_predecessor (low1);
4796 if (low == 0 || high == 0)
4797 return 0;
4798
4799 in_p = 1;
4800 }
4801 else
4802 return 0;
4803 }
4804 }
4805 else if (subset)
4806 in_p = 0, low = low0, high = high0;
4807 else
4808 in_p = 0, low = low0, high = high1;
4809 }
4810
4811 *pin_p = in_p, *plow = low, *phigh = high;
4812 return 1;
4813 }
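
/* For example, merging + [2, 9] with + [5, 12] (both "in",
   overlapping, neither a subset) gives + [5, 9], while + [2, 5] with
   + [7, 9] does not overlap and gives the always-false range
   - [-, -].  */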
4814 \f
4815
4816 /* Subroutine of fold, looking inside expressions of the form
4817 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4818 of the COND_EXPR. This function is being used also to optimize
4819 A op B ? C : A, by reversing the comparison first.
4820
4821 Return a folded expression whose code is not a COND_EXPR
4822 anymore, or NULL_TREE if no folding opportunity is found. */
4823
4824 static tree
4825 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4826 {
4827 enum tree_code comp_code = TREE_CODE (arg0);
4828 tree arg00 = TREE_OPERAND (arg0, 0);
4829 tree arg01 = TREE_OPERAND (arg0, 1);
4830 tree arg1_type = TREE_TYPE (arg1);
4831 tree tem;
4832
4833 STRIP_NOPS (arg1);
4834 STRIP_NOPS (arg2);
4835
4836 /* If we have A op 0 ? A : -A, consider applying the following
4837 transformations:
4838
4839 A == 0? A : -A same as -A
4840 A != 0? A : -A same as A
4841 A >= 0? A : -A same as abs (A)
4842 A > 0? A : -A same as abs (A)
4843 A <= 0? A : -A same as -abs (A)
4844 A < 0? A : -A same as -abs (A)
4845
4846 None of these transformations work for modes with signed
4847 zeros. If A is +/-0, the first two transformations will
4848 change the sign of the result (from +0 to -0, or vice
4849 versa). The last four will fix the sign of the result,
4850 even though the original expressions could be positive or
4851 negative, depending on the sign of A.
4852
4853 Note that all these transformations are correct if A is
4854 NaN, since the two alternatives (A and -A) are also NaNs. */
4855 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4856 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4857 ? real_zerop (arg01)
4858 : integer_zerop (arg01))
4859 && ((TREE_CODE (arg2) == NEGATE_EXPR
4860 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4861 /* In the case that A is of the form X-Y, '-A' (arg2) may
4862 have already been folded to Y-X, check for that. */
4863 || (TREE_CODE (arg1) == MINUS_EXPR
4864 && TREE_CODE (arg2) == MINUS_EXPR
4865 && operand_equal_p (TREE_OPERAND (arg1, 0),
4866 TREE_OPERAND (arg2, 1), 0)
4867 && operand_equal_p (TREE_OPERAND (arg1, 1),
4868 TREE_OPERAND (arg2, 0), 0))))
4869 switch (comp_code)
4870 {
4871 case EQ_EXPR:
4872 case UNEQ_EXPR:
4873 tem = fold_convert (arg1_type, arg1);
4874 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4875 case NE_EXPR:
4876 case LTGT_EXPR:
4877 return pedantic_non_lvalue (fold_convert (type, arg1));
4878 case UNGE_EXPR:
4879 case UNGT_EXPR:
4880 if (flag_trapping_math)
4881 break;
4882 /* Fall through. */
4883 case GE_EXPR:
4884 case GT_EXPR:
4885 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4886 arg1 = fold_convert (signed_type_for
4887 (TREE_TYPE (arg1)), arg1);
4888 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4889 return pedantic_non_lvalue (fold_convert (type, tem));
4890 case UNLE_EXPR:
4891 case UNLT_EXPR:
4892 if (flag_trapping_math)
4893 break;
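/* Fall through. */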
4894 case LE_EXPR:
4895 case LT_EXPR:
4896 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4897 arg1 = fold_convert (signed_type_for
4898 (TREE_TYPE (arg1)), arg1);
4899 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4900 return negate_expr (fold_convert (type, tem));
4901 default:
4902 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4903 break;
4904 }
4905
4906 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4907 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4908 both transformations are correct when A is NaN: A != 0
4909 is then true, and A == 0 is false. */
4910
4911 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4912 && integer_zerop (arg01) && integer_zerop (arg2))
4913 {
4914 if (comp_code == NE_EXPR)
4915 return pedantic_non_lvalue (fold_convert (type, arg1));
4916 else if (comp_code == EQ_EXPR)
4917 return build_int_cst (type, 0);
4918 }
4919
4920 /* Try some transformations of A op B ? A : B.
4921
4922 A == B? A : B same as B
4923 A != B? A : B same as A
4924 A >= B? A : B same as max (A, B)
4925 A > B? A : B same as max (B, A)
4926 A <= B? A : B same as min (A, B)
4927 A < B? A : B same as min (B, A)
4928
4929 As above, these transformations don't work in the presence
4930 of signed zeros. For example, if A and B are zeros of
4931 opposite sign, the first two transformations will change
4932 the sign of the result. In the last four, the original
4933 expressions give different results for (A=+0, B=-0) and
4934 (A=-0, B=+0), but the transformed expressions do not.
4935
4936 The first two transformations are correct if either A or B
4937 is a NaN. In the first transformation, the condition will
4938 be false, and B will indeed be chosen. In the case of the
4939 second transformation, the condition A != B will be true,
4940 and A will be chosen.
4941
4942 The conversions to max() and min() are not correct if B is
4943 a number and A is not. The conditions in the original
4944 expressions will be false, so all four give B. The min()
4945 and max() versions would give a NaN instead. */
4946 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4947 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4948 /* Avoid these transformations if the COND_EXPR may be used
4949 as an lvalue in the C++ front-end. PR c++/19199. */
4950 && (in_gimple_form
4951 || (strcmp (lang_hooks.name, "GNU C++") != 0
4952 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4953 || ! maybe_lvalue_p (arg1)
4954 || ! maybe_lvalue_p (arg2)))
4955 {
4956 tree comp_op0 = arg00;
4957 tree comp_op1 = arg01;
4958 tree comp_type = TREE_TYPE (comp_op0);
4959
4960 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4961 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4962 {
4963 comp_type = type;
4964 comp_op0 = arg1;
4965 comp_op1 = arg2;
4966 }
4967
4968 switch (comp_code)
4969 {
4970 case EQ_EXPR:
4971 return pedantic_non_lvalue (fold_convert (type, arg2));
4972 case NE_EXPR:
4973 return pedantic_non_lvalue (fold_convert (type, arg1));
4974 case LE_EXPR:
4975 case LT_EXPR:
4976 case UNLE_EXPR:
4977 case UNLT_EXPR:
4978 /* In C++ a ?: expression can be an lvalue, so put the
4979 operand which will be used if they are equal first
4980 so that we can convert this back to the
4981 corresponding COND_EXPR. */
4982 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4983 {
4984 comp_op0 = fold_convert (comp_type, comp_op0);
4985 comp_op1 = fold_convert (comp_type, comp_op1);
4986 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4987 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4988 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4989 return pedantic_non_lvalue (fold_convert (type, tem));
4990 }
4991 break;
4992 case GE_EXPR:
4993 case GT_EXPR:
4994 case UNGE_EXPR:
4995 case UNGT_EXPR:
4996 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4997 {
4998 comp_op0 = fold_convert (comp_type, comp_op0);
4999 comp_op1 = fold_convert (comp_type, comp_op1);
5000 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5001 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5002 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5003 return pedantic_non_lvalue (fold_convert (type, tem));
5004 }
5005 break;
5006 case UNEQ_EXPR:
5007 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5008 return pedantic_non_lvalue (fold_convert (type, arg2));
5009 break;
5010 case LTGT_EXPR:
5011 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5012 return pedantic_non_lvalue (fold_convert (type, arg1));
5013 break;
5014 default:
5015 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5016 break;
5017 }
5018 }
5019
5020 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5021 we might still be able to simplify this. For example,
5022 if C1 is one less or one more than C2, this might have started
5023 out as a MIN or MAX and been transformed by this function.
5024 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5025
5026 if (INTEGRAL_TYPE_P (type)
5027 && TREE_CODE (arg01) == INTEGER_CST
5028 && TREE_CODE (arg2) == INTEGER_CST)
5029 switch (comp_code)
5030 {
5031 case EQ_EXPR:
5032 /* We can replace A with C1 in this case. */
5033 arg1 = fold_convert (type, arg01);
5034 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5035
5036 case LT_EXPR:
5037 /* If C1 is C2 + 1, this is min(A, C2). */
5038 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5039 OEP_ONLY_CONST)
5040 && operand_equal_p (arg01,
5041 const_binop (PLUS_EXPR, arg2,
5042 build_int_cst (type, 1), 0),
5043 OEP_ONLY_CONST))
5044 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5045 type,
5046 fold_convert (type, arg1),
5047 arg2));
5048 break;
5049
5050 case LE_EXPR:
5051 /* If C1 is C2 - 1, this is min(A, C2). */
5052 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5053 OEP_ONLY_CONST)
5054 && operand_equal_p (arg01,
5055 const_binop (MINUS_EXPR, arg2,
5056 build_int_cst (type, 1), 0),
5057 OEP_ONLY_CONST))
5058 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5059 type,
5060 fold_convert (type, arg1),
5061 arg2));
5062 break;
5063
5064 case GT_EXPR:
5065 /* If C1 is C2 - 1, this is max(A, C2). */
5066 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5067 OEP_ONLY_CONST)
5068 && operand_equal_p (arg01,
5069 const_binop (MINUS_EXPR, arg2,
5070 build_int_cst (type, 1), 0),
5071 OEP_ONLY_CONST))
5072 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5073 type,
5074 fold_convert (type, arg1),
5075 arg2));
5076 break;
5077
5078 case GE_EXPR:
5079 /* If C1 is C2 + 1, this is max(A, C2). */
5080 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5081 OEP_ONLY_CONST)
5082 && operand_equal_p (arg01,
5083 const_binop (PLUS_EXPR, arg2,
5084 build_int_cst (type, 1), 0),
5085 OEP_ONLY_CONST))
5086 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5087 type,
5088 fold_convert (type, arg1),
5089 arg2));
5090 break;
5091 case NE_EXPR:
5092 break;
5093 default:
5094 gcc_unreachable ();
5095 }
5096
5097 return NULL_TREE;
5098 }
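
/* E.g. (a sketch, assuming NaNs and signed zeros need not be
   honored): x < y ? x : y becomes MIN_EXPR <x, y>, x > y ? x : y
   becomes MAX_EXPR <y, x>, and x > 0 ? x : -x becomes ABS_EXPR <x>.  */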
5099
5100
5101 \f
5102 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5103 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5104 #endif
5105
5106 /* EXP is some logical combination of boolean tests. See if we can
5107 merge it into some range test. Return the new tree if so. */
5108
5109 static tree
5110 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5111 {
5112 int or_op = (code == TRUTH_ORIF_EXPR
5113 || code == TRUTH_OR_EXPR);
5114 int in0_p, in1_p, in_p;
5115 tree low0, low1, low, high0, high1, high;
5116 bool strict_overflow_p = false;
5117 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5118 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5119 tree tem;
5120 const char * const warnmsg = G_("assuming signed overflow does not occur "
5121 "when simplifying range test");
5122
5123 /* If this is an OR operation, invert both sides; we will invert
5124 again at the end. */
5125 if (or_op)
5126 in0_p = ! in0_p, in1_p = ! in1_p;
5127
5128 /* If both expressions are the same, if we can merge the ranges, and we
5129 can build the range test, return it or its inversion. If one of the
5130 ranges is always true or always false, consider it to be the same
5131 expression as the other. */
5132 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5133 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5134 in1_p, low1, high1)
5135 && 0 != (tem = (build_range_check (type,
5136 lhs != 0 ? lhs
5137 : rhs != 0 ? rhs : integer_zero_node,
5138 in_p, low, high))))
5139 {
5140 if (strict_overflow_p)
5141 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5142 return or_op ? invert_truthvalue (tem) : tem;
5143 }
5144
5145 /* On machines where the branch cost is expensive, if this is a
5146 short-circuited branch and the underlying object on both sides
5147 is the same, make a non-short-circuit operation. */
5148 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5149 && lhs != 0 && rhs != 0
5150 && (code == TRUTH_ANDIF_EXPR
5151 || code == TRUTH_ORIF_EXPR)
5152 && operand_equal_p (lhs, rhs, 0))
5153 {
5154 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5155 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5156 which cases we can't do this. */
5157 if (simple_operand_p (lhs))
5158 return build2 (code == TRUTH_ANDIF_EXPR
5159 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5160 type, op0, op1);
5161
5162 else if (lang_hooks.decls.global_bindings_p () == 0
5163 && ! CONTAINS_PLACEHOLDER_P (lhs))
5164 {
5165 tree common = save_expr (lhs);
5166
5167 if (0 != (lhs = build_range_check (type, common,
5168 or_op ? ! in0_p : in0_p,
5169 low0, high0))
5170 && (0 != (rhs = build_range_check (type, common,
5171 or_op ? ! in1_p : in1_p,
5172 low1, high1))))
5173 {
5174 if (strict_overflow_p)
5175 fold_overflow_warning (warnmsg,
5176 WARN_STRICT_OVERFLOW_COMPARISON);
5177 return build2 (code == TRUTH_ANDIF_EXPR
5178 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5179 type, lhs, rhs);
5180 }
5181 }
5182 }
5183
5184 return 0;
5185 }
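
/* A sketch: in "x < 0 || x > 9" both sides test X, the two ranges
   merge (after the OR inversion above) into + [0, 9], and the built
   check is inverted back, yielding the single test
   (unsigned) x > 9.  */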
5186 \f
5187 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5188 bit value. Arrange things so the extra bits will be set to zero if and
5189 only if C is sign-extended to its full width. If MASK is nonzero,
5190 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5191
5192 static tree
5193 unextend (tree c, int p, int unsignedp, tree mask)
5194 {
5195 tree type = TREE_TYPE (c);
5196 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5197 tree temp;
5198
5199 if (p == modesize || unsignedp)
5200 return c;
5201
5202 /* We work by getting just the sign bit into the low-order bit, then
5203 into the high-order bit, then sign-extend. We then XOR that value
5204 with C. */
5205 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5206 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5207
5208 /* We must use a signed type in order to get an arithmetic right shift.
5209 However, we must also avoid introducing accidental overflows, so that
5210 a subsequent call to integer_zerop will work. Hence we must
5211 do the type conversion here. At this point, the constant is either
5212 zero or one, and the conversion to a signed type can never overflow.
5213 We could get an overflow if this conversion is done anywhere else. */
5214 if (TYPE_UNSIGNED (type))
5215 temp = fold_convert (signed_type_for (type), temp);
5216
5217 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5218 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5219 if (mask != 0)
5220 temp = const_binop (BIT_AND_EXPR, temp,
5221 fold_convert (TREE_TYPE (c), mask), 0);
5222 /* If necessary, convert the type back to match the type of C. */
5223 if (TYPE_UNSIGNED (type))
5224 temp = fold_convert (type, temp);
5225
5226 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5227 }
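
/* Worked sketch (P = 8, 32-bit C, MASK zero): for C = 0xffffff80 the
   low byte's sign bit is 1, TEMP becomes 0xffffff00, and C ^ TEMP is
   0x00000080 -- the extra bits are zero precisely because C was
   already sign-extended. For C = 0x00000080 the result is 0xffffff80,
   whose extra bits are nonzero.  */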
5228 \f
5229 /* Find ways of folding logical expressions of LHS and RHS:
5230 Try to merge two comparisons to the same innermost item.
5231 Look for range tests like "ch >= '0' && ch <= '9'".
5232 Look for combinations of simple terms on machines with expensive branches
5233 and evaluate the RHS unconditionally.
5234
5235 For example, if we have p->a == 2 && p->b == 4 and we can make an
5236 object large enough to span both A and B, we can do this with a comparison
5237 against the object ANDed with the a mask.
5238
5239 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5240 operations to do this with one comparison.
5241
5242 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5243 function and the one above.
5244
5245 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5246 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5247
5248 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5249 two operands.
5250
5251 We return the simplified tree or 0 if no optimization is possible. */
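
/* For instance (a layout-dependent sketch): if P->A and P->B are
   adjacent 8-bit fields within one aligned 16-bit word, then
   "p->a == 2 && p->b == 4" can become a single 16-bit load of that
   word compared against a combined constant; the exact masks and
   shifts depend on endianness and field placement.  */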
5252
5253 static tree
5254 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5255 {
5256 /* If this is the "or" of two comparisons, we can do something if
5257 the comparisons are NE_EXPR. If this is the "and", we can do something
5258 if the comparisons are EQ_EXPR. I.e.,
5259 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5260
5261 WANTED_CODE is the comparison code each operand must have for the
5262 merge. For single bit fields, we can convert EQ_EXPR to NE_EXPR so
5263 we need not reject the "wrong" comparison for one-bit fields. */
5264
5265 enum tree_code wanted_code;
5266 enum tree_code lcode, rcode;
5267 tree ll_arg, lr_arg, rl_arg, rr_arg;
5268 tree ll_inner, lr_inner, rl_inner, rr_inner;
5269 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5270 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5271 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5272 HOST_WIDE_INT lnbitsize, lnbitpos;
5273 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5274 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5275 enum machine_mode lnmode;
5276 tree ll_mask, lr_mask, rl_mask, rr_mask;
5277 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5278 tree l_const, r_const;
5279 tree lntype, result;
5280 int first_bit, end_bit;
5281 int volatilep;
5282 tree orig_lhs = lhs, orig_rhs = rhs;
5283 enum tree_code orig_code = code;
5284
5285 /* Start by getting the comparison codes. Fail if anything is volatile.
5286 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5287 it were surrounded with a NE_EXPR. */
5288
5289 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5290 return 0;
5291
5292 lcode = TREE_CODE (lhs);
5293 rcode = TREE_CODE (rhs);
5294
5295 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5296 {
5297 lhs = build2 (NE_EXPR, truth_type, lhs,
5298 build_int_cst (TREE_TYPE (lhs), 0));
5299 lcode = NE_EXPR;
5300 }
5301
5302 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5303 {
5304 rhs = build2 (NE_EXPR, truth_type, rhs,
5305 build_int_cst (TREE_TYPE (rhs), 0));
5306 rcode = NE_EXPR;
5307 }
5308
5309 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5310 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5311 return 0;
5312
5313 ll_arg = TREE_OPERAND (lhs, 0);
5314 lr_arg = TREE_OPERAND (lhs, 1);
5315 rl_arg = TREE_OPERAND (rhs, 0);
5316 rr_arg = TREE_OPERAND (rhs, 1);
5317
5318 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5319 if (simple_operand_p (ll_arg)
5320 && simple_operand_p (lr_arg))
5321 {
5322 tree result;
5323 if (operand_equal_p (ll_arg, rl_arg, 0)
5324 && operand_equal_p (lr_arg, rr_arg, 0))
5325 {
5326 result = combine_comparisons (code, lcode, rcode,
5327 truth_type, ll_arg, lr_arg);
5328 if (result)
5329 return result;
5330 }
5331 else if (operand_equal_p (ll_arg, rr_arg, 0)
5332 && operand_equal_p (lr_arg, rl_arg, 0))
5333 {
5334 result = combine_comparisons (code, lcode,
5335 swap_tree_comparison (rcode),
5336 truth_type, ll_arg, lr_arg);
5337 if (result)
5338 return result;
5339 }
5340 }
5341
5342 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5343 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5344
5345 /* If the RHS can be evaluated unconditionally and its operands are
5346 simple, it wins to evaluate the RHS unconditionally on machines
5347 with expensive branches. In this case, this isn't a comparison
5348 that can be merged. Avoid doing this if the RHS is a floating-point
5349 comparison since those can trap. */
5350
5351 if (BRANCH_COST >= 2
5352 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5353 && simple_operand_p (rl_arg)
5354 && simple_operand_p (rr_arg))
5355 {
5356 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5357 if (code == TRUTH_OR_EXPR
5358 && lcode == NE_EXPR && integer_zerop (lr_arg)
5359 && rcode == NE_EXPR && integer_zerop (rr_arg)
5360 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5361 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5362 return build2 (NE_EXPR, truth_type,
5363 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5364 ll_arg, rl_arg),
5365 build_int_cst (TREE_TYPE (ll_arg), 0));
5366
5367 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5368 if (code == TRUTH_AND_EXPR
5369 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5370 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5371 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5372 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5373 return build2 (EQ_EXPR, truth_type,
5374 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5375 ll_arg, rl_arg),
5376 build_int_cst (TREE_TYPE (ll_arg), 0));
5377
5378 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5379 {
5380 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5381 return build2 (code, truth_type, lhs, rhs);
5382 return NULL_TREE;
5383 }
5384 }
5385
5386 /* See if the comparisons can be merged. Then get all the parameters for
5387 each side. */
5388
5389 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5390 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5391 return 0;
5392
5393 volatilep = 0;
5394 ll_inner = decode_field_reference (ll_arg,
5395 &ll_bitsize, &ll_bitpos, &ll_mode,
5396 &ll_unsignedp, &volatilep, &ll_mask,
5397 &ll_and_mask);
5398 lr_inner = decode_field_reference (lr_arg,
5399 &lr_bitsize, &lr_bitpos, &lr_mode,
5400 &lr_unsignedp, &volatilep, &lr_mask,
5401 &lr_and_mask);
5402 rl_inner = decode_field_reference (rl_arg,
5403 &rl_bitsize, &rl_bitpos, &rl_mode,
5404 &rl_unsignedp, &volatilep, &rl_mask,
5405 &rl_and_mask);
5406 rr_inner = decode_field_reference (rr_arg,
5407 &rr_bitsize, &rr_bitpos, &rr_mode,
5408 &rr_unsignedp, &volatilep, &rr_mask,
5409 &rr_and_mask);
5410
5411 /* The inner operation on the lhs of each comparison must be the
5412 same if we are to be able to do anything.  Then see if we have
5413 constants.  If not, the same must be true for the rhs's as
5414 well.  */
5415 if (volatilep || ll_inner == 0 || rl_inner == 0
5416 || ! operand_equal_p (ll_inner, rl_inner, 0))
5417 return 0;
5418
5419 if (TREE_CODE (lr_arg) == INTEGER_CST
5420 && TREE_CODE (rr_arg) == INTEGER_CST)
5421 l_const = lr_arg, r_const = rr_arg;
5422 else if (lr_inner == 0 || rr_inner == 0
5423 || ! operand_equal_p (lr_inner, rr_inner, 0))
5424 return 0;
5425 else
5426 l_const = r_const = 0;
5427
5428 /* If either comparison code is not correct for our logical operation,
5429 fail. However, we can convert a one-bit comparison against zero into
5430 the opposite comparison against that bit being set in the field. */
5431
5432 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5433 if (lcode != wanted_code)
5434 {
5435 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5436 {
5437 /* Make the left operand unsigned, since we are only interested
5438 in the value of one bit. Otherwise we are doing the wrong
5439 thing below. */
5440 ll_unsignedp = 1;
5441 l_const = ll_mask;
5442 }
5443 else
5444 return 0;
5445 }
5446
5447 /* This is analogous to the code for l_const above. */
5448 if (rcode != wanted_code)
5449 {
5450 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5451 {
5452 rl_unsignedp = 1;
5453 r_const = rl_mask;
5454 }
5455 else
5456 return 0;
5457 }
5458
5459 /* See if we can find a mode that contains both fields being compared on
5460 the left. If we can't, fail. Otherwise, update all constants and masks
5461 to be relative to a field of that size. */
5462 first_bit = MIN (ll_bitpos, rl_bitpos);
5463 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5464 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5465 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5466 volatilep);
5467 if (lnmode == VOIDmode)
5468 return 0;
5469
5470 lnbitsize = GET_MODE_BITSIZE (lnmode);
5471 lnbitpos = first_bit & ~ (lnbitsize - 1);
5472 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5473 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5474
5475 if (BYTES_BIG_ENDIAN)
5476 {
5477 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5478 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5479 }
5480
5481 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5482 size_int (xll_bitpos), 0);
5483 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5484 size_int (xrl_bitpos), 0);
5485
5486 if (l_const)
5487 {
5488 l_const = fold_convert (lntype, l_const);
5489 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5490 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5491 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5492 fold_build1 (BIT_NOT_EXPR,
5493 lntype, ll_mask),
5494 0)))
5495 {
5496 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5497
5498 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5499 }
5500 }
5501 if (r_const)
5502 {
5503 r_const = fold_convert (lntype, r_const);
5504 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5505 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5506 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5507 fold_build1 (BIT_NOT_EXPR,
5508 lntype, rl_mask),
5509 0)))
5510 {
5511 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5512
5513 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5514 }
5515 }
5516
5517 /* Handle the case of comparisons with constants. If there is something in
5518 common between the masks, those bits of the constants must be the same.
5519 If not, the condition is always false. Test for this to avoid generating
5520 incorrect code below. */
5521 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5522 if (! integer_zerop (result)
5523 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5524 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5525 {
5526 if (wanted_code == NE_EXPR)
5527 {
5528 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5529 return constant_boolean_node (true, truth_type);
5530 }
5531 else
5532 {
5533 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5534 return constant_boolean_node (false, truth_type);
5535 }
5536 }
5537
5538 return NULL_TREE;
5539 }
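/* A minimal sketch of what the mask bookkeeping above enables: two
   bit-field tests on the same word merged into one masked compare.
   The struct, function name and constants below are hypothetical and
   layout-dependent (little-endian, both fields in one byte).  */
#if 0
struct example_s { unsigned a : 4; unsigned b : 4; };

static int
example_merged_fields (struct example_s *p)
{
  /* p->a == 3 && p->b == 5 can fold to roughly: */
  return (*(unsigned char *) p & 0xff) == 0x53;
}
#endif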
5540 \f
5541 /* Optimize a comparison (CODE) of a MIN_EXPR or MAX_EXPR (OP0) with a
5542 constant (OP1).  TYPE is the type of the result.  */
5543
5544 static tree
5545 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5546 {
5547 tree arg0 = op0;
5548 enum tree_code op_code;
5549 tree comp_const = op1;
5550 tree minmax_const;
5551 int consts_equal, consts_lt;
5552 tree inner;
5553
5554 STRIP_SIGN_NOPS (arg0);
5555
5556 op_code = TREE_CODE (arg0);
5557 minmax_const = TREE_OPERAND (arg0, 1);
5558 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5559 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5560 inner = TREE_OPERAND (arg0, 0);
5561
5562 /* If something does not permit us to optimize, return NULL_TREE.  */
5563 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5564 || TREE_CODE (comp_const) != INTEGER_CST
5565 || TREE_OVERFLOW (comp_const)
5566 || TREE_CODE (minmax_const) != INTEGER_CST
5567 || TREE_OVERFLOW (minmax_const))
5568 return NULL_TREE;
5569
5570 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5571 and GT_EXPR, doing the rest with recursive calls using logical
5572 simplifications. */
5573 switch (code)
5574 {
5575 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5576 {
5577 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5578 type, op0, op1);
5579 if (tem)
5580 return invert_truthvalue (tem);
5581 return NULL_TREE;
5582 }
5583
5584 case GE_EXPR:
5585 return
5586 fold_build2 (TRUTH_ORIF_EXPR, type,
5587 optimize_minmax_comparison
5588 (EQ_EXPR, type, arg0, comp_const),
5589 optimize_minmax_comparison
5590 (GT_EXPR, type, arg0, comp_const));
5591
5592 case EQ_EXPR:
5593 if (op_code == MAX_EXPR && consts_equal)
5594 /* MAX (X, 0) == 0 -> X <= 0 */
5595 return fold_build2 (LE_EXPR, type, inner, comp_const);
5596
5597 else if (op_code == MAX_EXPR && consts_lt)
5598 /* MAX (X, 0) == 5 -> X == 5 */
5599 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5600
5601 else if (op_code == MAX_EXPR)
5602 /* MAX (X, 0) == -1 -> false */
5603 return omit_one_operand (type, integer_zero_node, inner);
5604
5605 else if (consts_equal)
5606 /* MIN (X, 0) == 0 -> X >= 0 */
5607 return fold_build2 (GE_EXPR, type, inner, comp_const);
5608
5609 else if (consts_lt)
5610 /* MIN (X, 0) == 5 -> false */
5611 return omit_one_operand (type, integer_zero_node, inner);
5612
5613 else
5614 /* MIN (X, 0) == -1 -> X == -1 */
5615 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5616
5617 case GT_EXPR:
5618 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5619 /* MAX (X, 0) > 0 -> X > 0
5620 MAX (X, 0) > 5 -> X > 5 */
5621 return fold_build2 (GT_EXPR, type, inner, comp_const);
5622
5623 else if (op_code == MAX_EXPR)
5624 /* MAX (X, 0) > -1 -> true */
5625 return omit_one_operand (type, integer_one_node, inner);
5626
5627 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5628 /* MIN (X, 0) > 0 -> false
5629 MIN (X, 0) > 5 -> false */
5630 return omit_one_operand (type, integer_zero_node, inner);
5631
5632 else
5633 /* MIN (X, 0) > -1 -> X > -1 */
5634 return fold_build2 (GT_EXPR, type, inner, comp_const);
5635
5636 default:
5637 return NULL_TREE;
5638 }
5639 }
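/* A minimal sketch of the EQ_EXPR cases above, assuming the front end
   has already built a MAX_EXPR for the conditional; the function name
   is hypothetical.  */
#if 0
static int
example_minmax_eq (int x)
{
  /* MAX (x, 0) == 0 folds to x <= 0; MAX (x, 0) == -1 folds to 0.  */
  return (x > 0 ? x : 0) == 0;	/* becomes: return x <= 0; */
}
#endif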
5640 \f
5641 /* T is an integer expression being multiplied or divided, or having a
5642 modulus taken of it (CODE says which, and what kind of divide or
5643 modulus), by a constant C.  See if we can fold that operation with
5644 other operations already in T. WIDE_TYPE, if non-null, is a type that
5645 should be used for the computation if wider than our type.
5646
5647 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5648 (X * 2) + (Y * 4). We must, however, be assured that either the original
5649 expression would not overflow or that overflow is undefined for the type
5650 in the language in question.
5651
5652 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5653 the machine has a multiply-accumulate insn or that this is part of an
5654 addressing calculation.
5655
5656 If we return a non-null expression, it is an equivalent form of the
5657 original computation, but need not be in the original type.
5658
5659 We set *STRICT_OVERFLOW_P to true if the return value depends on
5660 signed overflow being undefined. Otherwise we do not change
5661 *STRICT_OVERFLOW_P. */
5662
5663 static tree
5664 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5665 bool *strict_overflow_p)
5666 {
5667 /* To avoid exponential search depth, refuse to allow recursion past
5668 three levels. Beyond that (1) it's highly unlikely that we'll find
5669 something interesting and (2) we've probably processed it before
5670 when we built the inner expression. */
5671
5672 static int depth;
5673 tree ret;
5674
5675 if (depth > 3)
5676 return NULL;
5677
5678 depth++;
5679 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5680 depth--;
5681
5682 return ret;
5683 }
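/* A minimal sketch of the rewrite extract_muldiv enables, matching the
   example in the header comment; the function name is hypothetical.
   The fold relies on signed overflow being undefined (or on the
   expression provably not overflowing).  */
#if 0
static long
example_extract_muldiv (long x, long y)
{
  return (x * 8 + y * 16) / 4;	/* becomes: x * 2 + y * 4 */
}
#endif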
5684
5685 static tree
5686 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5687 bool *strict_overflow_p)
5688 {
5689 tree type = TREE_TYPE (t);
5690 enum tree_code tcode = TREE_CODE (t);
5691 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5692 > GET_MODE_SIZE (TYPE_MODE (type)))
5693 ? wide_type : type);
5694 tree t1, t2;
5695 int same_p = tcode == code;
5696 tree op0 = NULL_TREE, op1 = NULL_TREE;
5697 bool sub_strict_overflow_p;
5698
5699 /* Don't deal with constants of zero here; they confuse the code below. */
5700 if (integer_zerop (c))
5701 return NULL_TREE;
5702
5703 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5704 op0 = TREE_OPERAND (t, 0);
5705
5706 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5707 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5708
5709 /* Note that we need not handle conditional operations here since fold
5710 already handles those cases. So just do arithmetic here. */
5711 switch (tcode)
5712 {
5713 case INTEGER_CST:
5714 /* For a constant, we can always simplify if we are a multiply
5715 or (for divide and modulus) if it is a multiple of our constant. */
5716 if (code == MULT_EXPR
5717 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5718 return const_binop (code, fold_convert (ctype, t),
5719 fold_convert (ctype, c), 0);
5720 break;
5721
5722 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5723 /* If op0 is an expression ... */
5724 if ((COMPARISON_CLASS_P (op0)
5725 || UNARY_CLASS_P (op0)
5726 || BINARY_CLASS_P (op0)
5727 || VL_EXP_CLASS_P (op0)
5728 || EXPRESSION_CLASS_P (op0))
5729 /* ... and is unsigned, and its type is smaller than ctype,
5730 then we cannot pass through as widening. */
5731 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5732 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5733 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5734 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5735 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5736 /* ... or this is a truncation (t is narrower than op0),
5737 then we cannot pass through this narrowing. */
5738 || (GET_MODE_SIZE (TYPE_MODE (type))
5739 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5740 /* ... or signedness changes for division or modulus,
5741 then we cannot pass through this conversion. */
5742 || (code != MULT_EXPR
5743 && (TYPE_UNSIGNED (ctype)
5744 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5745 /* ... or has undefined overflow while the type converted to
5746 does not, in which case we cannot do the operation in the inner type
5747 as that would introduce undefined overflow. */
5748 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5749 && !TYPE_OVERFLOW_UNDEFINED (type))))
5750 break;
5751
5752 /* Pass the constant down and see if we can make a simplification. If
5753 we can, replace this expression with the inner simplification for
5754 possible later conversion to our or some other type. */
5755 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5756 && TREE_CODE (t2) == INTEGER_CST
5757 && !TREE_OVERFLOW (t2)
5758 && (0 != (t1 = extract_muldiv (op0, t2, code,
5759 code == MULT_EXPR
5760 ? ctype : NULL_TREE,
5761 strict_overflow_p))))
5762 return t1;
5763 break;
5764
5765 case ABS_EXPR:
5766 /* If widening the type changes it from signed to unsigned, then we
5767 must avoid building ABS_EXPR itself as unsigned. */
5768 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5769 {
5770 tree cstype = (*signed_type_for) (ctype);
5771 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5772 != 0)
5773 {
5774 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5775 return fold_convert (ctype, t1);
5776 }
5777 break;
5778 }
5779 /* If the constant is negative, we cannot simplify this. */
5780 if (tree_int_cst_sgn (c) == -1)
5781 break;
5782 /* FALLTHROUGH */
5783 case NEGATE_EXPR:
5784 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5785 != 0)
5786 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5787 break;
5788
5789 case MIN_EXPR: case MAX_EXPR:
5790 /* If widening the type changes the signedness, then we can't perform
5791 this optimization as that changes the result. */
5792 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5793 break;
5794
5795 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5796 sub_strict_overflow_p = false;
5797 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5798 &sub_strict_overflow_p)) != 0
5799 && (t2 = extract_muldiv (op1, c, code, wide_type,
5800 &sub_strict_overflow_p)) != 0)
5801 {
5802 if (tree_int_cst_sgn (c) < 0)
5803 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5804 if (sub_strict_overflow_p)
5805 *strict_overflow_p = true;
5806 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5807 fold_convert (ctype, t2));
5808 }
5809 break;
5810
5811 case LSHIFT_EXPR: case RSHIFT_EXPR:
5812 /* If the second operand is constant, this is a multiplication
5813 or floor division by a power of two, so we can treat it that
5814 way unless the multiplier or divisor overflows. Signed
5815 left-shift overflow is implementation-defined rather than
5816 undefined in C90, so do not convert signed left shift into
5817 multiplication. */
5818 if (TREE_CODE (op1) == INTEGER_CST
5819 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5820 /* const_binop may not detect overflow correctly,
5821 so check for it explicitly here. */
5822 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5823 && TREE_INT_CST_HIGH (op1) == 0
5824 && 0 != (t1 = fold_convert (ctype,
5825 const_binop (LSHIFT_EXPR,
5826 size_one_node,
5827 op1, 0)))
5828 && !TREE_OVERFLOW (t1))
5829 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5830 ? MULT_EXPR : FLOOR_DIV_EXPR,
5831 ctype, fold_convert (ctype, op0), t1),
5832 c, code, wide_type, strict_overflow_p);
5833 break;
5834
5835 case PLUS_EXPR: case MINUS_EXPR:
5836 /* See if we can eliminate the operation on both sides. If we can, we
5837 can return a new PLUS or MINUS. If we can't, the only remaining
5838 cases where we can do anything are if the second operand is a
5839 constant. */
5840 sub_strict_overflow_p = false;
5841 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5842 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5843 if (t1 != 0 && t2 != 0
5844 && (code == MULT_EXPR
5845 /* If not multiplication, we can only do this if both operands
5846 are divisible by c. */
5847 || (multiple_of_p (ctype, op0, c)
5848 && multiple_of_p (ctype, op1, c))))
5849 {
5850 if (sub_strict_overflow_p)
5851 *strict_overflow_p = true;
5852 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5853 fold_convert (ctype, t2));
5854 }
5855
5856 /* If this was a subtraction, negate OP1 and set it to be an addition.
5857 This simplifies the logic below. */
5858 if (tcode == MINUS_EXPR)
5859 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5860
5861 if (TREE_CODE (op1) != INTEGER_CST)
5862 break;
5863
5864 /* If either OP1 or C is negative, this optimization is not safe for
5865 some of the division and remainder types while for others we need
5866 to change the code. */
5867 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5868 {
5869 if (code == CEIL_DIV_EXPR)
5870 code = FLOOR_DIV_EXPR;
5871 else if (code == FLOOR_DIV_EXPR)
5872 code = CEIL_DIV_EXPR;
5873 else if (code != MULT_EXPR
5874 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5875 break;
5876 }
5877
5878 /* If it's a multiply or a division/modulus operation of a multiple
5879 of our constant, do the operation and verify it doesn't overflow. */
5880 if (code == MULT_EXPR
5881 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5882 {
5883 op1 = const_binop (code, fold_convert (ctype, op1),
5884 fold_convert (ctype, c), 0);
5885 /* We allow the constant to overflow with wrapping semantics. */
5886 if (op1 == 0
5887 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5888 break;
5889 }
5890 else
5891 break;
5892
5893 /* If we have an unsigned type that is not a sizetype, we cannot widen
5894 the operation since it will change the result if the original
5895 computation overflowed. */
5896 if (TYPE_UNSIGNED (ctype)
5897 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5898 && ctype != type)
5899 break;
5900
5901 /* If we were able to eliminate our operation from the first side,
5902 apply our operation to the second side and reform the PLUS. */
5903 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5904 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5905
5906 /* The last case is if we are a multiply. In that case, we can
5907 apply the distributive law to commute the multiply and addition
5908 if the multiplication of the constants doesn't overflow. */
5909 if (code == MULT_EXPR)
5910 return fold_build2 (tcode, ctype,
5911 fold_build2 (code, ctype,
5912 fold_convert (ctype, op0),
5913 fold_convert (ctype, c)),
5914 op1);
5915
5916 break;
5917
5918 case MULT_EXPR:
5919 /* We have a special case here if we are doing something like
5920 (C * 8) % 4 since we know that's zero. */
5921 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5922 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5923 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5924 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5925 return omit_one_operand (type, integer_zero_node, op0);
5926
5927 /* ... fall through ... */
5928
5929 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5930 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5931 /* If we can extract our operation from the LHS, do so and return a
5932 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5933 do something only if the second operand is a constant. */
5934 if (same_p
5935 && (t1 = extract_muldiv (op0, c, code, wide_type,
5936 strict_overflow_p)) != 0)
5937 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5938 fold_convert (ctype, op1));
5939 else if (tcode == MULT_EXPR && code == MULT_EXPR
5940 && (t1 = extract_muldiv (op1, c, code, wide_type,
5941 strict_overflow_p)) != 0)
5942 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5943 fold_convert (ctype, t1));
5944 else if (TREE_CODE (op1) != INTEGER_CST)
5945 return 0;
5946
5947 /* If these are the same operation types, we can associate them
5948 assuming no overflow. */
5949 if (tcode == code
5950 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5951 fold_convert (ctype, c), 0))
5952 && !TREE_OVERFLOW (t1))
5953 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5954
5955 /* If these operations "cancel" each other, we have the main
5956 optimizations of this pass, which occur when either constant is a
5957 multiple of the other, in which case we replace this with an
5958 operation of either CODE or TCODE.
5959
5960 If we have an unsigned type that is not a sizetype, we cannot do
5961 this since it will change the result if the original computation
5962 overflowed. */
5963 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5964 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5965 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5966 || (tcode == MULT_EXPR
5967 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5968 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5969 && code != MULT_EXPR)))
5970 {
5971 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5972 {
5973 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5974 *strict_overflow_p = true;
5975 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5976 fold_convert (ctype,
5977 const_binop (TRUNC_DIV_EXPR,
5978 op1, c, 0)));
5979 }
5980 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5981 {
5982 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5983 *strict_overflow_p = true;
5984 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5985 fold_convert (ctype,
5986 const_binop (TRUNC_DIV_EXPR,
5987 c, op1, 0)));
5988 }
5989 }
5990 break;
5991
5992 default:
5993 break;
5994 }
5995
5996 return 0;
5997 }
5998 \f
5999 /* Return a node which has the indicated constant VALUE (either 0 or
6000 1), and is of the indicated TYPE. */
6001
6002 tree
6003 constant_boolean_node (int value, tree type)
6004 {
6005 if (type == integer_type_node)
6006 return value ? integer_one_node : integer_zero_node;
6007 else if (type == boolean_type_node)
6008 return value ? boolean_true_node : boolean_false_node;
6009 else
6010 return build_int_cst (type, value);
6011 }
6012
6013
6014 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6015 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6016 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6017 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6018 COND is the first argument to CODE; otherwise (as in the example
6019 given here), it is the second argument. TYPE is the type of the
6020 original expression. Return NULL_TREE if no simplification is
6021 possible. */
6022
6023 static tree
6024 fold_binary_op_with_conditional_arg (enum tree_code code,
6025 tree type, tree op0, tree op1,
6026 tree cond, tree arg, int cond_first_p)
6027 {
6028 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6029 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6030 tree test, true_value, false_value;
6031 tree lhs = NULL_TREE;
6032 tree rhs = NULL_TREE;
6033
6034 /* This transformation is only worthwhile if we don't have to wrap
6035 arg in a SAVE_EXPR, and the operation can be simplified on at least
6036 one of the branches once it's pushed inside the COND_EXPR.  */
6037 if (!TREE_CONSTANT (arg))
6038 return NULL_TREE;
6039
6040 if (TREE_CODE (cond) == COND_EXPR)
6041 {
6042 test = TREE_OPERAND (cond, 0);
6043 true_value = TREE_OPERAND (cond, 1);
6044 false_value = TREE_OPERAND (cond, 2);
6045 /* If this operand throws an exception, then it does not make
6046 sense to try to perform a logical or arithmetic operation
6047 involving it. */
6048 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6049 lhs = true_value;
6050 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6051 rhs = false_value;
6052 }
6053 else
6054 {
6055 tree testtype = TREE_TYPE (cond);
6056 test = cond;
6057 true_value = constant_boolean_node (true, testtype);
6058 false_value = constant_boolean_node (false, testtype);
6059 }
6060
6061 arg = fold_convert (arg_type, arg);
6062 if (lhs == 0)
6063 {
6064 true_value = fold_convert (cond_type, true_value);
6065 if (cond_first_p)
6066 lhs = fold_build2 (code, type, true_value, arg);
6067 else
6068 lhs = fold_build2 (code, type, arg, true_value);
6069 }
6070 if (rhs == 0)
6071 {
6072 false_value = fold_convert (cond_type, false_value);
6073 if (cond_first_p)
6074 rhs = fold_build2 (code, type, false_value, arg);
6075 else
6076 rhs = fold_build2 (code, type, arg, false_value);
6077 }
6078
6079 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6080 return fold_convert (type, test);
6081 }
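/* A minimal sketch of the transformation above with a constant ARG;
   the function name is hypothetical.  Pushing the addition into both
   arms lets each half fold further.  */
#if 0
static int
example_cond_arg (int b, int x, int y)
{
  return 10 + (b ? x : y);	/* becomes: b ? 10 + x : 10 + y */
}
#endif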
6082
6083 \f
6084 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6085
6086 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6087 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6088 ADDEND is the same as X.
6089
6090 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6091 and finite. The problematic cases are when X is zero, and its mode
6092 has signed zeros. In the case of rounding towards -infinity,
6093 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6094 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6095
6096 bool
6097 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6098 {
6099 if (!real_zerop (addend))
6100 return false;
6101
6102 /* Don't allow the fold with -fsignaling-nans. */
6103 if (HONOR_SNANS (TYPE_MODE (type)))
6104 return false;
6105
6106 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6107 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6108 return true;
6109
6110 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6111 if (TREE_CODE (addend) == REAL_CST
6112 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6113 negate = !negate;
6114
6115 /* The mode has signed zeros, and we have to honor their sign.
6116 In this situation, there is only one case we can return true for.
6117 X - 0 is the same as X unless rounding towards -infinity is
6118 supported. */
6119 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6120 }
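/* A minimal sketch of the asymmetry this predicate encodes; the
   function name is hypothetical.  */
#if 0
static double
example_zero_sub (double x)
{
  /* x - 0.0 is x for every x, including x == -0.0, so this folds
     (absent -frounding-math and -fsignaling-nans); x + 0.0 would
     not, since -0.0 + 0.0 is +0.0 in the default rounding mode.  */
  return x - 0.0;	/* becomes: return x; */
}
#endif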
6121
6122 /* Subroutine of fold() that checks comparisons of built-in math
6123 functions against real constants.
6124
6125 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6126 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6127 is the type of the result and ARG0 and ARG1 are the operands of the
6128 comparison. ARG1 must be a TREE_REAL_CST.
6129
6130 The function returns the constant folded tree if a simplification
6131 can be made, and NULL_TREE otherwise. */
6132
6133 static tree
6134 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6135 tree type, tree arg0, tree arg1)
6136 {
6137 REAL_VALUE_TYPE c;
6138
6139 if (BUILTIN_SQRT_P (fcode))
6140 {
6141 tree arg = CALL_EXPR_ARG (arg0, 0);
6142 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6143
6144 c = TREE_REAL_CST (arg1);
6145 if (REAL_VALUE_NEGATIVE (c))
6146 {
6147 /* sqrt(x) = y, < y or <= y is always false, if y is negative. */
6148 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6149 return omit_one_operand (type, integer_zero_node, arg);
6150
6151 /* sqrt(x) > y is always true, if y is negative and we
6152 don't care about NaNs, i.e. negative values of x. */
6153 if (code == NE_EXPR || !HONOR_NANS (mode))
6154 return omit_one_operand (type, integer_one_node, arg);
6155
6156 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6157 return fold_build2 (GE_EXPR, type, arg,
6158 build_real (TREE_TYPE (arg), dconst0));
6159 }
6160 else if (code == GT_EXPR || code == GE_EXPR)
6161 {
6162 REAL_VALUE_TYPE c2;
6163
6164 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6165 real_convert (&c2, mode, &c2);
6166
6167 if (REAL_VALUE_ISINF (c2))
6168 {
6169 /* sqrt(x) > y is x == +Inf, when y is very large. */
6170 if (HONOR_INFINITIES (mode))
6171 return fold_build2 (EQ_EXPR, type, arg,
6172 build_real (TREE_TYPE (arg), c2));
6173
6174 /* sqrt(x) > y is always false, when y is very large
6175 and we don't care about infinities. */
6176 return omit_one_operand (type, integer_zero_node, arg);
6177 }
6178
6179 /* sqrt(x) > c is the same as x > c*c. */
6180 return fold_build2 (code, type, arg,
6181 build_real (TREE_TYPE (arg), c2));
6182 }
6183 else if (code == LT_EXPR || code == LE_EXPR)
6184 {
6185 REAL_VALUE_TYPE c2;
6186
6187 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6188 real_convert (&c2, mode, &c2);
6189
6190 if (REAL_VALUE_ISINF (c2))
6191 {
6192 /* sqrt(x) < y is always true, when y is a very large
6193 value and we don't care about NaNs or Infinities. */
6194 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6195 return omit_one_operand (type, integer_one_node, arg);
6196
6197 /* sqrt(x) < y is x != +Inf when y is very large and we
6198 don't care about NaNs. */
6199 if (! HONOR_NANS (mode))
6200 return fold_build2 (NE_EXPR, type, arg,
6201 build_real (TREE_TYPE (arg), c2));
6202
6203 /* sqrt(x) < y is x >= 0 when y is very large and we
6204 don't care about Infinities. */
6205 if (! HONOR_INFINITIES (mode))
6206 return fold_build2 (GE_EXPR, type, arg,
6207 build_real (TREE_TYPE (arg), dconst0));
6208
6209 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6210 if (lang_hooks.decls.global_bindings_p () != 0
6211 || CONTAINS_PLACEHOLDER_P (arg))
6212 return NULL_TREE;
6213
6214 arg = save_expr (arg);
6215 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6216 fold_build2 (GE_EXPR, type, arg,
6217 build_real (TREE_TYPE (arg),
6218 dconst0)),
6219 fold_build2 (NE_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg),
6221 c2)));
6222 }
6223
6224 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6225 if (! HONOR_NANS (mode))
6226 return fold_build2 (code, type, arg,
6227 build_real (TREE_TYPE (arg), c2));
6228
6229 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6230 if (lang_hooks.decls.global_bindings_p () == 0
6231 && ! CONTAINS_PLACEHOLDER_P (arg))
6232 {
6233 arg = save_expr (arg);
6234 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6235 fold_build2 (GE_EXPR, type, arg,
6236 build_real (TREE_TYPE (arg),
6237 dconst0)),
6238 fold_build2 (code, type, arg,
6239 build_real (TREE_TYPE (arg),
6240 c2)));
6241 }
6242 }
6243 }
6244
6245 return NULL_TREE;
6246 }
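/* A minimal sketch of the GT_EXPR path above, assuming NaNs can be
   ignored (e.g. -ffinite-math-only); the function name is
   hypothetical.  */
#if 0
static int
example_sqrt_gt (double x)
{
  return __builtin_sqrt (x) > 2.0;	/* becomes: return x > 4.0; */
}
#endif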
6247
6248 /* Subroutine of fold() that optimizes comparisons against Infinities,
6249 either +Inf or -Inf.
6250
6251 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6252 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6253 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6254
6255 The function returns the constant folded tree if a simplification
6256 can be made, and NULL_TREE otherwise. */
6257
6258 static tree
6259 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6260 {
6261 enum machine_mode mode;
6262 REAL_VALUE_TYPE max;
6263 tree temp;
6264 bool neg;
6265
6266 mode = TYPE_MODE (TREE_TYPE (arg0));
6267
6268 /* For negative infinity swap the sense of the comparison. */
6269 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6270 if (neg)
6271 code = swap_tree_comparison (code);
6272
6273 switch (code)
6274 {
6275 case GT_EXPR:
6276 /* x > +Inf is always false, if we ignore sNaNs.  */
6277 if (HONOR_SNANS (mode))
6278 return NULL_TREE;
6279 return omit_one_operand (type, integer_zero_node, arg0);
6280
6281 case LE_EXPR:
6282 /* x <= +Inf is always true, if we don't care about NaNs.  */
6283 if (! HONOR_NANS (mode))
6284 return omit_one_operand (type, integer_one_node, arg0);
6285
6286 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6287 if (lang_hooks.decls.global_bindings_p () == 0
6288 && ! CONTAINS_PLACEHOLDER_P (arg0))
6289 {
6290 arg0 = save_expr (arg0);
6291 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6292 }
6293 break;
6294
6295 case EQ_EXPR:
6296 case GE_EXPR:
6297 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6298 real_maxval (&max, neg, mode);
6299 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6300 arg0, build_real (TREE_TYPE (arg0), max));
6301
6302 case LT_EXPR:
6303 /* x < +Inf is always equal to x <= DBL_MAX. */
6304 real_maxval (&max, neg, mode);
6305 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6306 arg0, build_real (TREE_TYPE (arg0), max));
6307
6308 case NE_EXPR:
6309 /* x != +Inf is always equal to !(x > DBL_MAX). */
6310 real_maxval (&max, neg, mode);
6311 if (! HONOR_NANS (mode))
6312 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6313 arg0, build_real (TREE_TYPE (arg0), max));
6314
6315 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6316 arg0, build_real (TREE_TYPE (arg0), max));
6317 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6318
6319 default:
6320 break;
6321 }
6322
6323 return NULL_TREE;
6324 }
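/* A minimal sketch of the LT_EXPR case above, using GCC's predefined
   __DBL_MAX__ macro; the function name is hypothetical.  */
#if 0
static int
example_lt_inf (double x)
{
  /* No finite double exceeds DBL_MAX, so the compare weakens.  */
  return x < __builtin_inf ();	/* becomes: return x <= __DBL_MAX__; */
}
#endif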
6325
6326 /* Subroutine of fold() that optimizes comparisons of a division by
6327 a nonzero integer constant against an integer constant, i.e.
6328 X/C1 op C2.
6329
6330 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6331 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6332 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6333
6334 The function returns the constant folded tree if a simplification
6335 can be made, and NULL_TREE otherwise. */
6336
6337 static tree
6338 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6339 {
6340 tree prod, tmp, hi, lo;
6341 tree arg00 = TREE_OPERAND (arg0, 0);
6342 tree arg01 = TREE_OPERAND (arg0, 1);
6343 unsigned HOST_WIDE_INT lpart;
6344 HOST_WIDE_INT hpart;
6345 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6346 bool neg_overflow;
6347 int overflow;
6348
6349 /* We have to do this the hard way to detect unsigned overflow.
6350 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6351 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6352 TREE_INT_CST_HIGH (arg01),
6353 TREE_INT_CST_LOW (arg1),
6354 TREE_INT_CST_HIGH (arg1),
6355 &lpart, &hpart, unsigned_p);
6356 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6357 -1, overflow);
6358 neg_overflow = false;
6359
6360 if (unsigned_p)
6361 {
6362 tmp = int_const_binop (MINUS_EXPR, arg01,
6363 build_int_cst (TREE_TYPE (arg01), 1), 0);
6364 lo = prod;
6365
6366 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6367 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6368 TREE_INT_CST_HIGH (prod),
6369 TREE_INT_CST_LOW (tmp),
6370 TREE_INT_CST_HIGH (tmp),
6371 &lpart, &hpart, unsigned_p);
6372 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6373 -1, overflow | TREE_OVERFLOW (prod));
6374 }
6375 else if (tree_int_cst_sgn (arg01) >= 0)
6376 {
6377 tmp = int_const_binop (MINUS_EXPR, arg01,
6378 build_int_cst (TREE_TYPE (arg01), 1), 0);
6379 switch (tree_int_cst_sgn (arg1))
6380 {
6381 case -1:
6382 neg_overflow = true;
6383 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6384 hi = prod;
6385 break;
6386
6387 case 0:
6388 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6389 hi = tmp;
6390 break;
6391
6392 case 1:
6393 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6394 lo = prod;
6395 break;
6396
6397 default:
6398 gcc_unreachable ();
6399 }
6400 }
6401 else
6402 {
6403 /* A negative divisor reverses the relational operators. */
6404 code = swap_tree_comparison (code);
6405
6406 tmp = int_const_binop (PLUS_EXPR, arg01,
6407 build_int_cst (TREE_TYPE (arg01), 1), 0);
6408 switch (tree_int_cst_sgn (arg1))
6409 {
6410 case -1:
6411 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6412 lo = prod;
6413 break;
6414
6415 case 0:
6416 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6417 lo = tmp;
6418 break;
6419
6420 case 1:
6421 neg_overflow = true;
6422 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6423 hi = prod;
6424 break;
6425
6426 default:
6427 gcc_unreachable ();
6428 }
6429 }
6430
6431 switch (code)
6432 {
6433 case EQ_EXPR:
6434 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6435 return omit_one_operand (type, integer_zero_node, arg00);
6436 if (TREE_OVERFLOW (hi))
6437 return fold_build2 (GE_EXPR, type, arg00, lo);
6438 if (TREE_OVERFLOW (lo))
6439 return fold_build2 (LE_EXPR, type, arg00, hi);
6440 return build_range_check (type, arg00, 1, lo, hi);
6441
6442 case NE_EXPR:
6443 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6444 return omit_one_operand (type, integer_one_node, arg00);
6445 if (TREE_OVERFLOW (hi))
6446 return fold_build2 (LT_EXPR, type, arg00, lo);
6447 if (TREE_OVERFLOW (lo))
6448 return fold_build2 (GT_EXPR, type, arg00, hi);
6449 return build_range_check (type, arg00, 0, lo, hi);
6450
6451 case LT_EXPR:
6452 if (TREE_OVERFLOW (lo))
6453 {
6454 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6455 return omit_one_operand (type, tmp, arg00);
6456 }
6457 return fold_build2 (LT_EXPR, type, arg00, lo);
6458
6459 case LE_EXPR:
6460 if (TREE_OVERFLOW (hi))
6461 {
6462 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6463 return omit_one_operand (type, tmp, arg00);
6464 }
6465 return fold_build2 (LE_EXPR, type, arg00, hi);
6466
6467 case GT_EXPR:
6468 if (TREE_OVERFLOW (hi))
6469 {
6470 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6471 return omit_one_operand (type, tmp, arg00);
6472 }
6473 return fold_build2 (GT_EXPR, type, arg00, hi);
6474
6475 case GE_EXPR:
6476 if (TREE_OVERFLOW (lo))
6477 {
6478 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6479 return omit_one_operand (type, tmp, arg00);
6480 }
6481 return fold_build2 (GE_EXPR, type, arg00, lo);
6482
6483 default:
6484 break;
6485 }
6486
6487 return NULL_TREE;
6488 }
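/* A minimal sketch of the EQ_EXPR case above: for truncating division
   x / 3 == 2 holds exactly for 6 <= x <= 8, so the compare becomes a
   range check.  The unsigned-subtract form shown is one way
   build_range_check may express it; the function name is
   hypothetical.  */
#if 0
static int
example_div_eq (int x)
{
  return x / 3 == 2;	/* becomes roughly: (unsigned) (x - 6) <= 2 */
}
#endif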
6489
6490
6491 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6492 equality/inequality test, then return a simplified form of the test
6493 using a sign testing. Otherwise return NULL. TYPE is the desired
6494 result type. */
6495
6496 static tree
6497 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6498 tree result_type)
6499 {
6500 /* If this is testing a single bit, we can optimize the test. */
6501 if ((code == NE_EXPR || code == EQ_EXPR)
6502 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6503 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6504 {
6505 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6506 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6507 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6508
6509 if (arg00 != NULL_TREE
6510 /* This is only a win if casting to a signed type is cheap,
6511 i.e. when arg00's type is not a partial mode. */
6512 && TYPE_PRECISION (TREE_TYPE (arg00))
6513 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6514 {
6515 tree stype = signed_type_for (TREE_TYPE (arg00));
6516 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6517 result_type, fold_convert (stype, arg00),
6518 build_int_cst (stype, 0));
6519 }
6520 }
6521
6522 return NULL_TREE;
6523 }
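/* A minimal sketch of the sign-bit case above: when the masked bit is
   the sign bit of a non-partial-mode type, the test collapses to a
   signed comparison with zero.  The function name is hypothetical and
   a 32-bit int is assumed.  */
#if 0
static int
example_sign_bit (int x)
{
  return (x & 0x80000000) != 0;	/* becomes: return x < 0; */
}
#endif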
6524
6525 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6526 equality/inequality test, then return a simplified form of
6527 the test using shifts and logical operations. Otherwise return
6528 NULL.  RESULT_TYPE is the desired result type.  */
6529
6530 tree
6531 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6532 tree result_type)
6533 {
6534 /* If this is testing a single bit, we can optimize the test. */
6535 if ((code == NE_EXPR || code == EQ_EXPR)
6536 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6537 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6538 {
6539 tree inner = TREE_OPERAND (arg0, 0);
6540 tree type = TREE_TYPE (arg0);
6541 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6542 enum machine_mode operand_mode = TYPE_MODE (type);
6543 int ops_unsigned;
6544 tree signed_type, unsigned_type, intermediate_type;
6545 tree tem, one;
6546
6547 /* First, see if we can fold the single bit test into a sign-bit
6548 test. */
6549 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6550 result_type);
6551 if (tem)
6552 return tem;
6553
6554 /* Otherwise we have (A & C) != 0 where C is a single bit,
6555 convert that into ((A >> C2) & 1), where C2 = log2(C).
6556 Similarly for (A & C) == 0. */
6557
6558 /* If INNER is a right shift of a constant and it plus BITNUM does
6559 not overflow, adjust BITNUM and INNER. */
6560 if (TREE_CODE (inner) == RSHIFT_EXPR
6561 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6562 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6563 && bitnum < TYPE_PRECISION (type)
6564 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6565 bitnum - TYPE_PRECISION (type)))
6566 {
6567 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6568 inner = TREE_OPERAND (inner, 0);
6569 }
6570
6571 /* If we are going to be able to omit the AND below, we must do our
6572 operations as unsigned. If we must use the AND, we have a choice.
6573 Normally unsigned is faster, but for some machines signed is. */
6574 #ifdef LOAD_EXTEND_OP
6575 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6576 && !flag_syntax_only) ? 0 : 1;
6577 #else
6578 ops_unsigned = 1;
6579 #endif
6580
6581 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6582 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6583 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6584 inner = fold_convert (intermediate_type, inner);
6585
6586 if (bitnum != 0)
6587 inner = build2 (RSHIFT_EXPR, intermediate_type,
6588 inner, size_int (bitnum));
6589
6590 one = build_int_cst (intermediate_type, 1);
6591
6592 if (code == EQ_EXPR)
6593 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6594
6595 /* Put the AND last so it can combine with more things. */
6596 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6597
6598 /* Make sure to return the proper type. */
6599 inner = fold_convert (result_type, inner);
6600
6601 return inner;
6602 }
6603 return NULL_TREE;
6604 }
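/* A minimal sketch of the shift-and-mask form produced above for a
   single-bit test; the function name is hypothetical.  */
#if 0
static int
example_bit3 (unsigned x)
{
  return (x & 8) != 0;	/* becomes: return (x >> 3) & 1; */
}
#endif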
6605
6606 /* Check whether we are allowed to reorder operands arg0 and arg1,
6607 such that the evaluation of arg1 occurs before arg0. */
6608
6609 static bool
6610 reorder_operands_p (const_tree arg0, const_tree arg1)
6611 {
6612 if (! flag_evaluation_order)
6613 return true;
6614 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6615 return true;
6616 return ! TREE_SIDE_EFFECTS (arg0)
6617 && ! TREE_SIDE_EFFECTS (arg1);
6618 }
6619
6620 /* Test whether it is preferable to swap two operands, ARG0 and
6621 ARG1, for example because ARG0 is an integer constant and ARG1
6622 isn't. If REORDER is true, only recommend swapping if we can
6623 evaluate the operands in reverse order. */
6624
6625 bool
6626 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6627 {
6628 STRIP_SIGN_NOPS (arg0);
6629 STRIP_SIGN_NOPS (arg1);
6630
6631 if (TREE_CODE (arg1) == INTEGER_CST)
6632 return 0;
6633 if (TREE_CODE (arg0) == INTEGER_CST)
6634 return 1;
6635
6636 if (TREE_CODE (arg1) == REAL_CST)
6637 return 0;
6638 if (TREE_CODE (arg0) == REAL_CST)
6639 return 1;
6640
6641 if (TREE_CODE (arg1) == FIXED_CST)
6642 return 0;
6643 if (TREE_CODE (arg0) == FIXED_CST)
6644 return 1;
6645
6646 if (TREE_CODE (arg1) == COMPLEX_CST)
6647 return 0;
6648 if (TREE_CODE (arg0) == COMPLEX_CST)
6649 return 1;
6650
6651 if (TREE_CONSTANT (arg1))
6652 return 0;
6653 if (TREE_CONSTANT (arg0))
6654 return 1;
6655
6656 if (optimize_size)
6657 return 0;
6658
6659 if (reorder && flag_evaluation_order
6660 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6661 return 0;
6662
6663 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6664 for commutative and comparison operators. Ensuring a canonical
6665 form allows the optimizers to find additional redundancies without
6666 having to explicitly check for both orderings. */
6667 if (TREE_CODE (arg0) == SSA_NAME
6668 && TREE_CODE (arg1) == SSA_NAME
6669 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6670 return 1;
6671
6672 /* Put SSA_NAMEs last. */
6673 if (TREE_CODE (arg1) == SSA_NAME)
6674 return 0;
6675 if (TREE_CODE (arg0) == SSA_NAME)
6676 return 1;
6677
6678 /* Put variables last. */
6679 if (DECL_P (arg1))
6680 return 0;
6681 if (DECL_P (arg0))
6682 return 1;
6683
6684 return 0;
6685 }
6686
6687 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6688 ARG0 is extended to a wider type. */
6689
6690 static tree
6691 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6692 {
6693 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6694 tree arg1_unw;
6695 tree shorter_type, outer_type;
6696 tree min, max;
6697 bool above, below;
6698
6699 if (arg0_unw == arg0)
6700 return NULL_TREE;
6701 shorter_type = TREE_TYPE (arg0_unw);
6702
6703 #ifdef HAVE_canonicalize_funcptr_for_compare
6704 /* Disable this optimization if we're casting a function pointer
6705 type on targets that require function pointer canonicalization. */
6706 if (HAVE_canonicalize_funcptr_for_compare
6707 && TREE_CODE (shorter_type) == POINTER_TYPE
6708 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6709 return NULL_TREE;
6710 #endif
6711
6712 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6713 return NULL_TREE;
6714
6715 arg1_unw = get_unwidened (arg1, NULL_TREE);
6716
6717 /* If possible, express the comparison in the shorter mode. */
6718 if ((code == EQ_EXPR || code == NE_EXPR
6719 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6720 && (TREE_TYPE (arg1_unw) == shorter_type
6721 || (TYPE_PRECISION (shorter_type)
6722 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6723 || (TREE_CODE (arg1_unw) == INTEGER_CST
6724 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6725 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6726 && int_fits_type_p (arg1_unw, shorter_type))))
6727 return fold_build2 (code, type, arg0_unw,
6728 fold_convert (shorter_type, arg1_unw));
6729
6730 if (TREE_CODE (arg1_unw) != INTEGER_CST
6731 || TREE_CODE (shorter_type) != INTEGER_TYPE
6732 || !int_fits_type_p (arg1_unw, shorter_type))
6733 return NULL_TREE;
6734
6735 /* If we are comparing with an integer that does not fit into the range
6736 of the shorter type, the result is known. */
6737 outer_type = TREE_TYPE (arg1_unw);
6738 min = lower_bound_in_type (outer_type, shorter_type);
6739 max = upper_bound_in_type (outer_type, shorter_type);
6740
6741 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6742 max, arg1_unw));
6743 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6744 arg1_unw, min));
6745
6746 switch (code)
6747 {
6748 case EQ_EXPR:
6749 if (above || below)
6750 return omit_one_operand (type, integer_zero_node, arg0);
6751 break;
6752
6753 case NE_EXPR:
6754 if (above || below)
6755 return omit_one_operand (type, integer_one_node, arg0);
6756 break;
6757
6758 case LT_EXPR:
6759 case LE_EXPR:
6760 if (above)
6761 return omit_one_operand (type, integer_one_node, arg0);
6762 else if (below)
6763 return omit_one_operand (type, integer_zero_node, arg0);
6764       break;
6765 case GT_EXPR:
6766 case GE_EXPR:
6767 if (above)
6768 return omit_one_operand (type, integer_zero_node, arg0);
6769 else if (below)
6770 return omit_one_operand (type, integer_one_node, arg0);
6771       break;
6772 default:
6773 break;
6774 }
6775
6776 return NULL_TREE;
6777 }
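/* A minimal sketch of the EQ_EXPR case above, assuming a 16-bit
   short so that 70000 lies outside its range and the comparison is
   decided outright; the function name is hypothetical.  */
#if 0
static int
example_widened (short s)
{
  return (int) s == 70000;	/* becomes: return 0; */
}
#endif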
6778
6779 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6780 ARG0 just the signedness is changed. */
6781
6782 static tree
6783 fold_sign_changed_comparison (enum tree_code code, tree type,
6784 tree arg0, tree arg1)
6785 {
6786 tree arg0_inner;
6787 tree inner_type, outer_type;
6788
6789 if (TREE_CODE (arg0) != NOP_EXPR
6790 && TREE_CODE (arg0) != CONVERT_EXPR)
6791 return NULL_TREE;
6792
6793 outer_type = TREE_TYPE (arg0);
6794 arg0_inner = TREE_OPERAND (arg0, 0);
6795 inner_type = TREE_TYPE (arg0_inner);
6796
6797 #ifdef HAVE_canonicalize_funcptr_for_compare
6798 /* Disable this optimization if we're casting a function pointer
6799 type on targets that require function pointer canonicalization. */
6800 if (HAVE_canonicalize_funcptr_for_compare
6801 && TREE_CODE (inner_type) == POINTER_TYPE
6802 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6803 return NULL_TREE;
6804 #endif
6805
6806 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6807 return NULL_TREE;
6808
6809 if (TREE_CODE (arg1) != INTEGER_CST
6810 && !((TREE_CODE (arg1) == NOP_EXPR
6811 || TREE_CODE (arg1) == CONVERT_EXPR)
6812 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6813 return NULL_TREE;
6814
6815 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6816 && code != NE_EXPR
6817 && code != EQ_EXPR)
6818 return NULL_TREE;
6819
6820 if (TREE_CODE (arg1) == INTEGER_CST)
6821 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6822 TREE_INT_CST_HIGH (arg1), 0,
6823 TREE_OVERFLOW (arg1));
6824 else
6825 arg1 = fold_convert (inner_type, arg1);
6826
6827 return fold_build2 (code, type, arg0_inner, arg1);
6828 }
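/* A minimal sketch of the fold above: equality is insensitive to a
   same-precision signedness change, so the cast can be dropped and
   the constant refitted.  The function name is hypothetical.  */
#if 0
static int
example_sign_change (int x)
{
  return (unsigned) x == 5u;	/* becomes: return x == 5; */
}
#endif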
6829
6830 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6831 step of the array. Reconstructs s and delta in the case of s * delta
6832 being an integer constant (and thus already folded).
6833 ADDR is the address.  OP1 is the multiplicative expression.
6834 If the function succeeds, the new address expression is returned. Otherwise
6835 NULL_TREE is returned. */
6836
6837 static tree
6838 try_move_mult_to_index (tree addr, tree op1)
6839 {
6840 tree s, delta, step;
6841 tree ref = TREE_OPERAND (addr, 0), pref;
6842 tree ret, pos;
6843 tree itype;
6844 bool mdim = false;
6845
6846 /* Strip the nops that might be added when converting op1 to sizetype. */
6847 STRIP_NOPS (op1);
6848
6849 /* Canonicalize op1 into a possibly non-constant delta
6850 and an INTEGER_CST s. */
6851 if (TREE_CODE (op1) == MULT_EXPR)
6852 {
6853 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6854
6855 STRIP_NOPS (arg0);
6856 STRIP_NOPS (arg1);
6857
6858 if (TREE_CODE (arg0) == INTEGER_CST)
6859 {
6860 s = arg0;
6861 delta = arg1;
6862 }
6863 else if (TREE_CODE (arg1) == INTEGER_CST)
6864 {
6865 s = arg1;
6866 delta = arg0;
6867 }
6868 else
6869 return NULL_TREE;
6870 }
6871 else if (TREE_CODE (op1) == INTEGER_CST)
6872 {
6873 delta = op1;
6874 s = NULL_TREE;
6875 }
6876 else
6877 {
6878 /* Act as if we had delta * 1.  */
6879 delta = op1;
6880 s = integer_one_node;
6881 }
6882
6883 for (;; ref = TREE_OPERAND (ref, 0))
6884 {
6885 if (TREE_CODE (ref) == ARRAY_REF)
6886 {
6887 /* Remember if this was a multi-dimensional array. */
6888 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6889 mdim = true;
6890
6891 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6892 if (! itype)
6893 continue;
6894
6895 step = array_ref_element_size (ref);
6896 if (TREE_CODE (step) != INTEGER_CST)
6897 continue;
6898
6899 if (s)
6900 {
6901 if (! tree_int_cst_equal (step, s))
6902 continue;
6903 }
6904 else
6905 {
6906 /* See whether delta is a multiple of step.  */
6907 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6908 if (! tmp)
6909 continue;
6910 delta = tmp;
6911 }
6912
6913 /* Only fold here if we can verify we do not overflow one
6914 dimension of a multi-dimensional array. */
6915 if (mdim)
6916 {
6917 tree tmp;
6918
6919 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6920 || !INTEGRAL_TYPE_P (itype)
6921 || !TYPE_MAX_VALUE (itype)
6922 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6923 continue;
6924
6925 tmp = fold_binary (PLUS_EXPR, itype,
6926 fold_convert (itype,
6927 TREE_OPERAND (ref, 1)),
6928 fold_convert (itype, delta));
6929 if (!tmp
6930 || TREE_CODE (tmp) != INTEGER_CST
6931 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6932 continue;
6933 }
6934
6935 break;
6936 }
6937 else
6938 mdim = false;
6939
6940 if (!handled_component_p (ref))
6941 return NULL_TREE;
6942 }
6943
6944 /* We found a suitable array reference.  So copy everything up to it,
6945 and replace the index. */
6946
6947 pref = TREE_OPERAND (addr, 0);
6948 ret = copy_node (pref);
6949 pos = ret;
6950
6951 while (pref != ref)
6952 {
6953 pref = TREE_OPERAND (pref, 0);
6954 TREE_OPERAND (pos, 0) = copy_node (pref);
6955 pos = TREE_OPERAND (pos, 0);
6956 }
6957
6958 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6959 fold_convert (itype,
6960 TREE_OPERAND (pos, 1)),
6961 fold_convert (itype, delta));
6962
6963 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6964 }
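/* A minimal sketch of the fold above at the C level: the offset is
   moved back inside the ARRAY_REF, where later passes can still see
   the index.  example_a and example_move_mult are hypothetical.  */
#if 0
extern int example_a[100];

static int *
example_move_mult (int i, int d)
{
  return &example_a[i] + d;	/* becomes: &example_a[i + d] */
}
#endif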
6965
6966
6967 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6968 means A >= Y && A != MAX, but in this case we know that
6969 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6970
6971 static tree
6972 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6973 {
6974 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6975
6976 if (TREE_CODE (bound) == LT_EXPR)
6977 a = TREE_OPERAND (bound, 0);
6978 else if (TREE_CODE (bound) == GT_EXPR)
6979 a = TREE_OPERAND (bound, 1);
6980 else
6981 return NULL_TREE;
6982
6983 typea = TREE_TYPE (a);
6984 if (!INTEGRAL_TYPE_P (typea)
6985 && !POINTER_TYPE_P (typea))
6986 return NULL_TREE;
6987
6988 if (TREE_CODE (ineq) == LT_EXPR)
6989 {
6990 a1 = TREE_OPERAND (ineq, 1);
6991 y = TREE_OPERAND (ineq, 0);
6992 }
6993 else if (TREE_CODE (ineq) == GT_EXPR)
6994 {
6995 a1 = TREE_OPERAND (ineq, 0);
6996 y = TREE_OPERAND (ineq, 1);
6997 }
6998 else
6999 return NULL_TREE;
7000
7001 if (TREE_TYPE (a1) != typea)
7002 return NULL_TREE;
7003
7004 if (POINTER_TYPE_P (typea))
7005 {
7006 /* Convert the pointers to integers before taking the difference.  */
7007 tree ta = fold_convert (ssizetype, a);
7008 tree ta1 = fold_convert (ssizetype, a1);
7009 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7010 }
7011 else
7012 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7013
7014 if (!diff || !integer_onep (diff))
7015 return NULL_TREE;
7016
7017 return fold_build2 (GE_EXPR, type, a, y);
7018 }
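/* A minimal sketch of the fold above: the bound a < x guarantees a is
   not the type maximum, so a + 1 > y can be rewritten without the
   increment.  The function name is hypothetical.  */
#if 0
static int
example_nonsharp (int a, int x, int y)
{
  return a < x && a + 1 > y;	/* becomes: a < x && a >= y */
}
#endif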
7019
7020 /* Fold a sum or difference of at least one multiplication.
7021 Returns the folded tree or NULL if no simplification could be made. */
7022
7023 static tree
7024 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7025 {
7026 tree arg00, arg01, arg10, arg11;
7027 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7028
7029 /* (A * C) +- (B * C) -> (A+-B) * C.
7030 (A * C) +- A -> A * (C+-1).
7031 We are most concerned about the case where C is a constant,
7032 but other combinations show up during loop reduction. Since
7033 it is not difficult, try all four possibilities. */
7034
7035 if (TREE_CODE (arg0) == MULT_EXPR)
7036 {
7037 arg00 = TREE_OPERAND (arg0, 0);
7038 arg01 = TREE_OPERAND (arg0, 1);
7039 }
7040 else if (TREE_CODE (arg0) == INTEGER_CST)
7041 {
7042 arg00 = build_one_cst (type);
7043 arg01 = arg0;
7044 }
7045 else
7046 {
7047 /* We cannot generate constant 1 for fract. */
7048 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7049 return NULL_TREE;
7050 arg00 = arg0;
7051 arg01 = build_one_cst (type);
7052 }
7053 if (TREE_CODE (arg1) == MULT_EXPR)
7054 {
7055 arg10 = TREE_OPERAND (arg1, 0);
7056 arg11 = TREE_OPERAND (arg1, 1);
7057 }
7058 else if (TREE_CODE (arg1) == INTEGER_CST)
7059 {
7060 arg10 = build_one_cst (type);
7061 arg11 = arg1;
7062 }
7063 else
7064 {
7065 /* We cannot generate constant 1 for fract. */
7066 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7067 return NULL_TREE;
7068 arg10 = arg1;
7069 arg11 = build_one_cst (type);
7070 }
7071 same = NULL_TREE;
7072
7073 if (operand_equal_p (arg01, arg11, 0))
7074 same = arg01, alt0 = arg00, alt1 = arg10;
7075 else if (operand_equal_p (arg00, arg10, 0))
7076 same = arg00, alt0 = arg01, alt1 = arg11;
7077 else if (operand_equal_p (arg00, arg11, 0))
7078 same = arg00, alt0 = arg01, alt1 = arg10;
7079 else if (operand_equal_p (arg01, arg10, 0))
7080 same = arg01, alt0 = arg00, alt1 = arg11;
7081
7082 /* No identical multiplicands; see if we can find a common
7083 power-of-two factor in non-power-of-two multiplies. This
7084 can help in multi-dimensional array access. */
7085 else if (host_integerp (arg01, 0)
7086 && host_integerp (arg11, 0))
7087 {
7088 HOST_WIDE_INT int01, int11, tmp;
7089 bool swap = false;
7090 tree maybe_same;
7091 int01 = TREE_INT_CST_LOW (arg01);
7092 int11 = TREE_INT_CST_LOW (arg11);
7093
7094 /* Move min of absolute values to int11. */
7095 if ((int01 >= 0 ? int01 : -int01)
7096 < (int11 >= 0 ? int11 : -int11))
7097 {
7098 tmp = int01, int01 = int11, int11 = tmp;
7099 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7100 maybe_same = arg01;
7101 swap = true;
7102 }
7103 else
7104 maybe_same = arg11;
7105
7106 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7107 {
7108 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7109 build_int_cst (TREE_TYPE (arg00),
7110 int01 / int11));
7111 alt1 = arg10;
7112 same = maybe_same;
7113 if (swap)
7114 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7115 }
7116 }
7117
7118 if (same)
7119 return fold_build2 (MULT_EXPR, type,
7120 fold_build2 (code, type,
7121 fold_convert (type, alt0),
7122 fold_convert (type, alt1)),
7123 fold_convert (type, same));
7124
7125 return NULL_TREE;
7126 }
7127
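/* Illustrative examples of the transforms above (a sketch, not part of
   the original sources); on trees they behave like

       a*c + b*c   ->  (a + b) * c
       a*c - a     ->  a * (c - 1)
       i*28 + j*4  ->  (i*7 + j) * 4

   where the last case extracts the common power-of-two factor 4 from
   the non-power-of-two multiplier 28 (28 % 4 == 0), as happens for
   multi-dimensional array accesses.  */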
7128 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7129 specified by EXPR into the buffer PTR of length LEN bytes.
7130 Return the number of bytes placed in the buffer, or zero
7131 upon failure. */
7132
7133 static int
7134 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7135 {
7136 tree type = TREE_TYPE (expr);
7137 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7138 int byte, offset, word, words;
7139 unsigned char value;
7140
7141 if (total_bytes > len)
7142 return 0;
7143 words = total_bytes / UNITS_PER_WORD;
7144
7145 for (byte = 0; byte < total_bytes; byte++)
7146 {
7147 int bitpos = byte * BITS_PER_UNIT;
7148 if (bitpos < HOST_BITS_PER_WIDE_INT)
7149 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7150 else
7151 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7152 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7153
7154 if (total_bytes > UNITS_PER_WORD)
7155 {
7156 word = byte / UNITS_PER_WORD;
7157 if (WORDS_BIG_ENDIAN)
7158 word = (words - 1) - word;
7159 offset = word * UNITS_PER_WORD;
7160 if (BYTES_BIG_ENDIAN)
7161 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7162 else
7163 offset += byte % UNITS_PER_WORD;
7164 }
7165 else
7166 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7167 ptr[offset] = value;
7168 }
7169 return total_bytes;
7170 }
7171
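/* A worked example for native_encode_int above (hedged; assumes a
   32-bit int and 8-bit bytes): encoding the INTEGER_CST 0x01020304
   stores the target's memory image of the constant,

       { 0x04, 0x03, 0x02, 0x01 }   on a little-endian target,
       { 0x01, 0x02, 0x03, 0x04 }   on a big-endian target,

   with WORDS_BIG_ENDIAN additionally shuffling the word order when
   the value is wider than UNITS_PER_WORD.  */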
7172
7173 /* Subroutine of native_encode_expr. Encode the REAL_CST
7174 specified by EXPR into the buffer PTR of length LEN bytes.
7175 Return the number of bytes placed in the buffer, or zero
7176 upon failure. */
7177
7178 static int
7179 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7180 {
7181 tree type = TREE_TYPE (expr);
7182 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7183 int byte, offset, word, words, bitpos;
7184 unsigned char value;
7185
7186 /* There are always 32 bits in each long, no matter the size of
7187 the host's long. We handle floating-point representations with
7188 up to 192 bits. */
7189 long tmp[6];
7190
7191 if (total_bytes > len)
7192 return 0;
7193 words = 32 / UNITS_PER_WORD;
7194
7195 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7196
7197 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7198 bitpos += BITS_PER_UNIT)
7199 {
7200 byte = (bitpos / BITS_PER_UNIT) & 3;
7201 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7202
7203 if (UNITS_PER_WORD < 4)
7204 {
7205 word = byte / UNITS_PER_WORD;
7206 if (WORDS_BIG_ENDIAN)
7207 word = (words - 1) - word;
7208 offset = word * UNITS_PER_WORD;
7209 if (BYTES_BIG_ENDIAN)
7210 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7211 else
7212 offset += byte % UNITS_PER_WORD;
7213 }
7214 else
7215 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7216 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7217 }
7218 return total_bytes;
7219 }
7220
7221 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7222 specified by EXPR into the buffer PTR of length LEN bytes.
7223 Return the number of bytes placed in the buffer, or zero
7224 upon failure. */
7225
7226 static int
7227 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7228 {
7229 int rsize, isize;
7230 tree part;
7231
7232 part = TREE_REALPART (expr);
7233 rsize = native_encode_expr (part, ptr, len);
7234 if (rsize == 0)
7235 return 0;
7236 part = TREE_IMAGPART (expr);
7237 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7238 if (isize != rsize)
7239 return 0;
7240 return rsize + isize;
7241 }
7242
7243
7244 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7245 specified by EXPR into the buffer PTR of length LEN bytes.
7246 Return the number of bytes placed in the buffer, or zero
7247 upon failure. */
7248
7249 static int
7250 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7251 {
7252 int i, size, offset, count;
7253 tree itype, elem, elements;
7254
7255 offset = 0;
7256 elements = TREE_VECTOR_CST_ELTS (expr);
7257 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7258 itype = TREE_TYPE (TREE_TYPE (expr));
7259 size = GET_MODE_SIZE (TYPE_MODE (itype));
7260 for (i = 0; i < count; i++)
7261 {
7262 if (elements)
7263 {
7264 elem = TREE_VALUE (elements);
7265 elements = TREE_CHAIN (elements);
7266 }
7267 else
7268 elem = NULL_TREE;
7269
7270 if (elem)
7271 {
7272 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7273 return 0;
7274 }
7275 else
7276 {
7277 if (offset + size > len)
7278 return 0;
7279 memset (ptr+offset, 0, size);
7280 }
7281 offset += size;
7282 }
7283 return offset;
7284 }
7285
7286
7287 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7288 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7289 buffer PTR of length LEN bytes. Return the number of bytes
7290 placed in the buffer, or zero upon failure. */
7291
7292 int
7293 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7294 {
7295 switch (TREE_CODE (expr))
7296 {
7297 case INTEGER_CST:
7298 return native_encode_int (expr, ptr, len);
7299
7300 case REAL_CST:
7301 return native_encode_real (expr, ptr, len);
7302
7303 case COMPLEX_CST:
7304 return native_encode_complex (expr, ptr, len);
7305
7306 case VECTOR_CST:
7307 return native_encode_vector (expr, ptr, len);
7308
7309 default:
7310 return 0;
7311 }
7312 }
7313
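/* A minimal usage sketch for the encode/interpret pair, guarded out of
   compilation; it assumes an IEEE single-precision float and a 4-byte
   int on the target and is illustrative only.  */
#if 0
  unsigned char buf[64];
  tree f = build_real (float_type_node, dconst1);	/* 1.0f */
  int len = native_encode_expr (f, buf, sizeof (buf));
  if (len != 0)
    {
      /* Re-read the same bytes as an int: the compile-time analogue of
	 *(int *)&one, yielding 0x3f800000 under the assumptions above.
	 This is exactly how fold_view_convert_expr below combines the
	 two routines.  */
      tree i = native_interpret_expr (integer_type_node, buf, len);
    }
#endif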
7314
7315 /* Subroutine of native_interpret_expr. Interpret the contents of
7316 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7317 If the buffer cannot be interpreted, return NULL_TREE. */
7318
7319 static tree
7320 native_interpret_int (tree type, const unsigned char *ptr, int len)
7321 {
7322 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7323 int byte, offset, word, words;
7324 unsigned char value;
7325 unsigned HOST_WIDE_INT lo = 0;
7326 HOST_WIDE_INT hi = 0;
7327
7328 if (total_bytes > len)
7329 return NULL_TREE;
7330 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7331 return NULL_TREE;
7332 words = total_bytes / UNITS_PER_WORD;
7333
7334 for (byte = 0; byte < total_bytes; byte++)
7335 {
7336 int bitpos = byte * BITS_PER_UNIT;
7337 if (total_bytes > UNITS_PER_WORD)
7338 {
7339 word = byte / UNITS_PER_WORD;
7340 if (WORDS_BIG_ENDIAN)
7341 word = (words - 1) - word;
7342 offset = word * UNITS_PER_WORD;
7343 if (BYTES_BIG_ENDIAN)
7344 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7345 else
7346 offset += byte % UNITS_PER_WORD;
7347 }
7348 else
7349 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7350 value = ptr[offset];
7351
7352 if (bitpos < HOST_BITS_PER_WIDE_INT)
7353 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7354 else
7355 hi |= (unsigned HOST_WIDE_INT) value
7356 << (bitpos - HOST_BITS_PER_WIDE_INT);
7357 }
7358
7359 return build_int_cst_wide_type (type, lo, hi);
7360 }
7361
7362
7363 /* Subroutine of native_interpret_expr. Interpret the contents of
7364 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7365 If the buffer cannot be interpreted, return NULL_TREE. */
7366
7367 static tree
7368 native_interpret_real (tree type, const unsigned char *ptr, int len)
7369 {
7370 enum machine_mode mode = TYPE_MODE (type);
7371 int total_bytes = GET_MODE_SIZE (mode);
7372 int byte, offset, word, words, bitpos;
7373 unsigned char value;
7374 /* There are always 32 bits in each long, no matter the size of
7375 the host's long. We handle floating-point representations with
7376 up to 192 bits. */
7377 REAL_VALUE_TYPE r;
7378 long tmp[6];
7379
7380 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7381 if (total_bytes > len || total_bytes > 24)
7382 return NULL_TREE;
7383 words = 32 / UNITS_PER_WORD;
7384
7385 memset (tmp, 0, sizeof (tmp));
7386 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7387 bitpos += BITS_PER_UNIT)
7388 {
7389 byte = (bitpos / BITS_PER_UNIT) & 3;
7390 if (UNITS_PER_WORD < 4)
7391 {
7392 word = byte / UNITS_PER_WORD;
7393 if (WORDS_BIG_ENDIAN)
7394 word = (words - 1) - word;
7395 offset = word * UNITS_PER_WORD;
7396 if (BYTES_BIG_ENDIAN)
7397 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7398 else
7399 offset += byte % UNITS_PER_WORD;
7400 }
7401 else
7402 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7403 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7404
7405 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7406 }
7407
7408 real_from_target (&r, tmp, mode);
7409 return build_real (type, r);
7410 }
7411
7412
7413 /* Subroutine of native_interpret_expr. Interpret the contents of
7414 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7415 If the buffer cannot be interpreted, return NULL_TREE. */
7416
7417 static tree
7418 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7419 {
7420 tree etype, rpart, ipart;
7421 int size;
7422
7423 etype = TREE_TYPE (type);
7424 size = GET_MODE_SIZE (TYPE_MODE (etype));
7425 if (size * 2 > len)
7426 return NULL_TREE;
7427 rpart = native_interpret_expr (etype, ptr, size);
7428 if (!rpart)
7429 return NULL_TREE;
7430 ipart = native_interpret_expr (etype, ptr+size, size);
7431 if (!ipart)
7432 return NULL_TREE;
7433 return build_complex (type, rpart, ipart);
7434 }
7435
7436
7437 /* Subroutine of native_interpret_expr. Interpret the contents of
7438 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7439 If the buffer cannot be interpreted, return NULL_TREE. */
7440
7441 static tree
7442 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7443 {
7444 tree etype, elem, elements;
7445 int i, size, count;
7446
7447 etype = TREE_TYPE (type);
7448 size = GET_MODE_SIZE (TYPE_MODE (etype));
7449 count = TYPE_VECTOR_SUBPARTS (type);
7450 if (size * count > len)
7451 return NULL_TREE;
7452
7453 elements = NULL_TREE;
7454 for (i = count - 1; i >= 0; i--)
7455 {
7456 elem = native_interpret_expr (etype, ptr+(i*size), size);
7457 if (!elem)
7458 return NULL_TREE;
7459 elements = tree_cons (NULL_TREE, elem, elements);
7460 }
7461 return build_vector (type, elements);
7462 }
7463
7464
7465 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7466 the buffer PTR of length LEN as a constant of type TYPE. For
7467 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7468 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7469 return NULL_TREE. */
7470
7471 tree
7472 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7473 {
7474 switch (TREE_CODE (type))
7475 {
7476 case INTEGER_TYPE:
7477 case ENUMERAL_TYPE:
7478 case BOOLEAN_TYPE:
7479 return native_interpret_int (type, ptr, len);
7480
7481 case REAL_TYPE:
7482 return native_interpret_real (type, ptr, len);
7483
7484 case COMPLEX_TYPE:
7485 return native_interpret_complex (type, ptr, len);
7486
7487 case VECTOR_TYPE:
7488 return native_interpret_vector (type, ptr, len);
7489
7490 default:
7491 return NULL_TREE;
7492 }
7493 }
7494
7495
7496 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7497 TYPE at compile-time. If we're unable to perform the conversion
7498 return NULL_TREE. */
7499
7500 static tree
7501 fold_view_convert_expr (tree type, tree expr)
7502 {
7503 /* We support up to 512-bit values (for V8DFmode). */
7504 unsigned char buffer[64];
7505 int len;
7506
7507 /* Check that the host and target are sane. */
7508 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7509 return NULL_TREE;
7510
7511 len = native_encode_expr (expr, buffer, sizeof (buffer));
7512 if (len == 0)
7513 return NULL_TREE;
7514
7515 return native_interpret_expr (type, buffer, len);
7516 }
7517
7518 /* Build an expression for the address of T. Folds away INDIRECT_REF
7519 to avoid confusing the gimplify process. When IN_FOLD is true
7520 avoid modifications of T. */
7521
7522 static tree
7523 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7524 {
7525 /* The size of the object is not relevant when talking about its address. */
7526 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7527 t = TREE_OPERAND (t, 0);
7528
7529 /* Note: this doesn't apply to ALIGN_INDIRECT_REF.  */
7530 if (TREE_CODE (t) == INDIRECT_REF
7531 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7532 {
7533 t = TREE_OPERAND (t, 0);
7534
7535 if (TREE_TYPE (t) != ptrtype)
7536 t = build1 (NOP_EXPR, ptrtype, t);
7537 }
7538 else if (!in_fold)
7539 {
7540 tree base = t;
7541
7542 while (handled_component_p (base))
7543 base = TREE_OPERAND (base, 0);
7544
7545 if (DECL_P (base))
7546 TREE_ADDRESSABLE (base) = 1;
7547
7548 t = build1 (ADDR_EXPR, ptrtype, t);
7549 }
7550 else
7551 t = build1 (ADDR_EXPR, ptrtype, t);
7552
7553 return t;
7554 }
7555
7556 /* Build an expression for the address of T with type PTRTYPE. This
7557 function modifies the input parameter 'T' by sometimes setting the
7558 TREE_ADDRESSABLE flag. */
7559
7560 tree
7561 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7562 {
7563 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7564 }
7565
7566 /* Build an expression for the address of T. This function modifies
7567 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7568 flag. When called from fold functions, use fold_addr_expr instead. */
7569
7570 tree
7571 build_fold_addr_expr (tree t)
7572 {
7573 return build_fold_addr_expr_with_type_1 (t,
7574 build_pointer_type (TREE_TYPE (t)),
7575 false);
7576 }
7577
7578 /* Same as build_fold_addr_expr, builds an expression for the address
7579 of T, but avoids touching the input node 't'. Fold functions
7580 should use this version. */
7581
7582 static tree
7583 fold_addr_expr (tree t)
7584 {
7585 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7586
7587 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7588 }
7589
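/* For illustration (hedged, not from the original sources): all three
   entry points above fold the address of an indirection back to the
   underlying pointer,

       &*p  ->  p   (with a NOP_EXPR cast if the types differ)

   instead of building an ADDR_EXPR around an INDIRECT_REF, which keeps
   the gimplifier from seeing a pointless round trip.  */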
7590 /* Fold a unary expression of code CODE and type TYPE with operand
7591 OP0. Return the folded expression if folding is successful.
7592 Otherwise, return NULL_TREE. */
7593
7594 tree
7595 fold_unary (enum tree_code code, tree type, tree op0)
7596 {
7597 tree tem;
7598 tree arg0;
7599 enum tree_code_class kind = TREE_CODE_CLASS (code);
7600
7601 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7602 && TREE_CODE_LENGTH (code) == 1);
7603
7604 arg0 = op0;
7605 if (arg0)
7606 {
7607 if (code == NOP_EXPR || code == CONVERT_EXPR
7608 || code == FLOAT_EXPR || code == ABS_EXPR)
7609 {
7610 /* Don't use STRIP_NOPS, because signedness of argument type
7611 matters. */
7612 STRIP_SIGN_NOPS (arg0);
7613 }
7614 else
7615 {
7616 /* Strip any conversions that don't change the mode. This
7617 is safe for every expression, except for a comparison
7618 expression because its signedness is derived from its
7619 operands.
7620
7621 Note that this is done as an internal manipulation within
7622 the constant folder, in order to find the simplest
7623 representation of the arguments so that their form can be
7624 studied. In any case, the appropriate type conversions
7625 should be put back in the tree that will get out of the
7626 constant folder. */
7627 STRIP_NOPS (arg0);
7628 }
7629 }
7630
7631 if (TREE_CODE_CLASS (code) == tcc_unary)
7632 {
7633 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7634 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7635 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7636 else if (TREE_CODE (arg0) == COND_EXPR)
7637 {
7638 tree arg01 = TREE_OPERAND (arg0, 1);
7639 tree arg02 = TREE_OPERAND (arg0, 2);
7640 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7641 arg01 = fold_build1 (code, type, arg01);
7642 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7643 arg02 = fold_build1 (code, type, arg02);
7644 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7645 arg01, arg02);
7646
7647 /* If this was a conversion, and all we did was to move it
7648 inside the COND_EXPR, bring it back out. But leave it if
7649 it is a conversion from integer to integer and the
7650 result precision is no wider than a word since such a
7651 conversion is cheap and may be optimized away by combine,
7652 while it couldn't if it were outside the COND_EXPR. Then return
7653 so we don't get into an infinite recursion loop taking the
7654 conversion out and then back in. */
7655
7656 if ((code == NOP_EXPR || code == CONVERT_EXPR
7657 || code == NON_LVALUE_EXPR)
7658 && TREE_CODE (tem) == COND_EXPR
7659 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7660 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7661 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7662 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7663 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7664 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7665 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7666 && (INTEGRAL_TYPE_P
7667 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7668 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7669 || flag_syntax_only))
7670 tem = build1 (code, type,
7671 build3 (COND_EXPR,
7672 TREE_TYPE (TREE_OPERAND
7673 (TREE_OPERAND (tem, 1), 0)),
7674 TREE_OPERAND (tem, 0),
7675 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7676 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7677 return tem;
7678 }
7679 else if (COMPARISON_CLASS_P (arg0))
7680 {
7681 if (TREE_CODE (type) == BOOLEAN_TYPE)
7682 {
7683 arg0 = copy_node (arg0);
7684 TREE_TYPE (arg0) = type;
7685 return arg0;
7686 }
7687 else if (TREE_CODE (type) != INTEGER_TYPE)
7688 return fold_build3 (COND_EXPR, type, arg0,
7689 fold_build1 (code, type,
7690 integer_one_node),
7691 fold_build1 (code, type,
7692 integer_zero_node));
7693 }
7694 }
7695
7696 switch (code)
7697 {
7698 case PAREN_EXPR:
7699 /* Re-association barriers around constants and other re-association
7700 barriers can be removed. */
7701 if (CONSTANT_CLASS_P (op0)
7702 || TREE_CODE (op0) == PAREN_EXPR)
7703 return fold_convert (type, op0);
7704 return NULL_TREE;
7705
7706 case NOP_EXPR:
7707 case FLOAT_EXPR:
7708 case CONVERT_EXPR:
7709 case FIX_TRUNC_EXPR:
7710 if (TREE_TYPE (op0) == type)
7711 return op0;
7712
7713 /* If we have (type) (a CMP b) and type is an integral type, return
7714 a new expression involving the new type. */
7715 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7716 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7717 TREE_OPERAND (op0, 1));
7718
7719 /* Handle cases of two conversions in a row. */
7720 if (TREE_CODE (op0) == NOP_EXPR
7721 || TREE_CODE (op0) == CONVERT_EXPR)
7722 {
7723 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7724 tree inter_type = TREE_TYPE (op0);
7725 int inside_int = INTEGRAL_TYPE_P (inside_type);
7726 int inside_ptr = POINTER_TYPE_P (inside_type);
7727 int inside_float = FLOAT_TYPE_P (inside_type);
7728 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7729 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7730 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7731 int inter_int = INTEGRAL_TYPE_P (inter_type);
7732 int inter_ptr = POINTER_TYPE_P (inter_type);
7733 int inter_float = FLOAT_TYPE_P (inter_type);
7734 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7735 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7736 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7737 int final_int = INTEGRAL_TYPE_P (type);
7738 int final_ptr = POINTER_TYPE_P (type);
7739 int final_float = FLOAT_TYPE_P (type);
7740 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7741 unsigned int final_prec = TYPE_PRECISION (type);
7742 int final_unsignedp = TYPE_UNSIGNED (type);
7743
7744 /* In addition to the cases of two conversions in a row
7745 handled below, if we are converting something to its own
7746 type via an object of identical or wider precision, neither
7747 conversion is needed. */
7748 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7749 && (((inter_int || inter_ptr) && final_int)
7750 || (inter_float && final_float))
7751 && inter_prec >= final_prec)
7752 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7753
7754 /* Likewise, if the intermediate and final types are either both
7755 float or both integer, we don't need the middle conversion if
7756 it is wider than the final type and doesn't change the signedness
7757 (for integers). Avoid this if the final type is a pointer
7758 since then we sometimes need the inner conversion. Likewise if
7759 the outer has a precision not equal to the size of its mode. */
7760 if (((inter_int && inside_int)
7761 || (inter_float && inside_float)
7762 || (inter_vec && inside_vec))
7763 && inter_prec >= inside_prec
7764 && (inter_float || inter_vec
7765 || inter_unsignedp == inside_unsignedp)
7766 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7767 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7768 && ! final_ptr
7769 && (! final_vec || inter_prec == inside_prec))
7770 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7771
7772 /* If we have a sign-extension of a zero-extended value, we can
7773 replace that by a single zero-extension. */
7774 if (inside_int && inter_int && final_int
7775 && inside_prec < inter_prec && inter_prec < final_prec
7776 && inside_unsignedp && !inter_unsignedp)
7777 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7778
7779 /* Two conversions in a row are not needed unless:
7780 - some conversion is floating-point (overstrict for now), or
7781 - some conversion is a vector (overstrict for now), or
7782 - the intermediate type is narrower than both initial and
7783 final, or
7784 - the intermediate type and innermost type differ in signedness,
7785 and the outermost type is wider than the intermediate, or
7786 - the initial type is a pointer type and the precisions of the
7787 intermediate and final types differ, or
7788 - the final type is a pointer type and the precisions of the
7789 initial and intermediate types differ. */
7790 if (! inside_float && ! inter_float && ! final_float
7791 && ! inside_vec && ! inter_vec && ! final_vec
7792 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7793 && ! (inside_int && inter_int
7794 && inter_unsignedp != inside_unsignedp
7795 && inter_prec < final_prec)
7796 && ((inter_unsignedp && inter_prec > inside_prec)
7797 == (final_unsignedp && final_prec > inter_prec))
7798 && ! (inside_ptr && inter_prec != final_prec)
7799 && ! (final_ptr && inside_prec != inter_prec)
7800 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7801 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7802 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7803 }
7804
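/* A hedged example of the rules above, assuming a 32-bit int and a
   64-bit long: (int) (long) x for an int X simplifies to X, and
   (long long) (int) u for an unsigned short U becomes the single
   zero-extension (long long) u, since the intermediate widening
   preserved all bits.  */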
7805 /* Handle (T *)&A.B.C for A being of type T and B and C
7806 living at offset zero. This occurs frequently in
7807 C++ upcasting and then accessing the base. */
7808 if (TREE_CODE (op0) == ADDR_EXPR
7809 && POINTER_TYPE_P (type)
7810 && handled_component_p (TREE_OPERAND (op0, 0)))
7811 {
7812 HOST_WIDE_INT bitsize, bitpos;
7813 tree offset;
7814 enum machine_mode mode;
7815 int unsignedp, volatilep;
7816 tree base = TREE_OPERAND (op0, 0);
7817 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7818 &mode, &unsignedp, &volatilep, false);
7819 /* If the reference was to a (constant) zero offset, we can use
7820 the address of the base if it has the same base type
7821 as the result type. */
7822 if (! offset && bitpos == 0
7823 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7824 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7825 return fold_convert (type, fold_addr_expr (base));
7826 }
7827
7828 if ((TREE_CODE (op0) == MODIFY_EXPR
7829 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7830 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7831 /* Detect assigning a bitfield. */
7832 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7833 && DECL_BIT_FIELD
7834 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7835 {
7836 /* Don't leave an assignment inside a conversion
7837 unless assigning a bitfield. */
7838 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7839 /* First do the assignment, then return converted constant. */
7840 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7841 TREE_NO_WARNING (tem) = 1;
7842 TREE_USED (tem) = 1;
7843 return tem;
7844 }
7845
7846 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7847 constant (if x has signed type, the sign bit cannot be set
7848 in c). This folds extension into the BIT_AND_EXPR. */
7849 if (INTEGRAL_TYPE_P (type)
7850 && TREE_CODE (type) != BOOLEAN_TYPE
7851 && TREE_CODE (op0) == BIT_AND_EXPR
7852 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7853 {
7854 tree and = op0;
7855 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7856 int change = 0;
7857
7858 if (TYPE_UNSIGNED (TREE_TYPE (and))
7859 || (TYPE_PRECISION (type)
7860 <= TYPE_PRECISION (TREE_TYPE (and))))
7861 change = 1;
7862 else if (TYPE_PRECISION (TREE_TYPE (and1))
7863 <= HOST_BITS_PER_WIDE_INT
7864 && host_integerp (and1, 1))
7865 {
7866 unsigned HOST_WIDE_INT cst;
7867
7868 cst = tree_low_cst (and1, 1);
7869 cst &= (HOST_WIDE_INT) -1
7870 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7871 change = (cst == 0);
7872 #ifdef LOAD_EXTEND_OP
7873 if (change
7874 && !flag_syntax_only
7875 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7876 == ZERO_EXTEND))
7877 {
7878 tree uns = unsigned_type_for (TREE_TYPE (and0));
7879 and0 = fold_convert (uns, and0);
7880 and1 = fold_convert (uns, and1);
7881 }
7882 #endif
7883 }
7884 if (change)
7885 {
7886 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7887 TREE_INT_CST_HIGH (and1), 0,
7888 TREE_OVERFLOW (and1));
7889 return fold_build2 (BIT_AND_EXPR, type,
7890 fold_convert (type, and0), tem);
7891 }
7892 }
7893
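/* Worked example of the BIT_AND_EXPR case just above (illustrative):
   for an int X, (unsigned short) (x & 0xff) becomes
   (unsigned short) x & 0xff, because the destination type is no wider
   than the type of the BIT_AND_EXPR, so truncation commutes with the
   masking.  */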
7894 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y) when T1 is a pointer
7895 type and one of the new casts will fold away. Conservatively we assume
7896 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7897 if (POINTER_TYPE_P (type)
7898 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7899 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7900 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7901 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7902 {
7903 tree arg00 = TREE_OPERAND (arg0, 0);
7904 tree arg01 = TREE_OPERAND (arg0, 1);
7905
7906 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7907 fold_convert (sizetype, arg01));
7908 }
7909
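/* For example (an illustrative case of the rewrite above):
   (long *) ((char *) p p+ 4) becomes ((long *) (char *) p) p+ 4,
   where fold_convert collapses the stacked pointer casts, leaving
   (long *) p p+ 4.  */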
7910 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7911 of the same precision, and X has an integer type not narrower
7912 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7913 if (INTEGRAL_TYPE_P (type)
7914 && TREE_CODE (op0) == BIT_NOT_EXPR
7915 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7916 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7917 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7918 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7919 {
7920 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7921 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7922 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7923 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7924 }
7925
7926 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7927 type of X and Y (integer types only). */
7928 if (INTEGRAL_TYPE_P (type)
7929 && TREE_CODE (op0) == MULT_EXPR
7930 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7931 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7932 {
7933 /* Be careful not to introduce new overflows. */
7934 tree mult_type;
7935 if (TYPE_OVERFLOW_WRAPS (type))
7936 mult_type = type;
7937 else
7938 mult_type = unsigned_type_for (type);
7939
7940 tem = fold_build2 (MULT_EXPR, mult_type,
7941 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7942 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7943 return fold_convert (type, tem);
7944 }
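/* Sketch of the narrowing above (hedged; assumes 32-bit int and 16-bit
   short): (short) (x * y) with int operands becomes
   (short) ((unsigned short) x * (unsigned short) y); the unsigned
   intermediate type ensures the narrower multiplication cannot
   introduce new signed overflow.  */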
7945
7946 tem = fold_convert_const (code, type, op0);
7947 return tem ? tem : NULL_TREE;
7948
7949 case FIXED_CONVERT_EXPR:
7950 tem = fold_convert_const (code, type, arg0);
7951 return tem ? tem : NULL_TREE;
7952
7953 case VIEW_CONVERT_EXPR:
7954 if (TREE_TYPE (op0) == type)
7955 return op0;
7956 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7957 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7958
7959 /* For integral conversions with the same precision or pointer
7960 conversions use a NOP_EXPR instead. */
7961 if ((INTEGRAL_TYPE_P (type)
7962 || POINTER_TYPE_P (type))
7963 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7964 || POINTER_TYPE_P (TREE_TYPE (op0)))
7965 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7966 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7967 a sub-type to its base type as generated by the Ada FE. */
7968 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7969 && TREE_TYPE (TREE_TYPE (op0))))
7970 return fold_convert (type, op0);
7971
7972 /* Strip inner integral conversions that do not change the precision. */
7973 if ((TREE_CODE (op0) == NOP_EXPR
7974 || TREE_CODE (op0) == CONVERT_EXPR)
7975 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7976 || POINTER_TYPE_P (TREE_TYPE (op0)))
7977 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7978 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7979 && (TYPE_PRECISION (TREE_TYPE (op0))
7980 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7981 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7982
7983 return fold_view_convert_expr (type, op0);
7984
7985 case NEGATE_EXPR:
7986 tem = fold_negate_expr (arg0);
7987 if (tem)
7988 return fold_convert (type, tem);
7989 return NULL_TREE;
7990
7991 case ABS_EXPR:
7992 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7993 return fold_abs_const (arg0, type);
7994 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7995 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7996 /* Convert fabs((double)float) into (double)fabsf(float). */
7997 else if (TREE_CODE (arg0) == NOP_EXPR
7998 && TREE_CODE (type) == REAL_TYPE)
7999 {
8000 tree targ0 = strip_float_extensions (arg0);
8001 if (targ0 != arg0)
8002 return fold_convert (type, fold_build1 (ABS_EXPR,
8003 TREE_TYPE (targ0),
8004 targ0));
8005 }
8006 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8007 else if (TREE_CODE (arg0) == ABS_EXPR)
8008 return arg0;
8009 else if (tree_expr_nonnegative_p (arg0))
8010 return arg0;
8011
8012 /* Strip sign ops from argument. */
8013 if (TREE_CODE (type) == REAL_TYPE)
8014 {
8015 tem = fold_strip_sign_ops (arg0);
8016 if (tem)
8017 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8018 }
8019 return NULL_TREE;
8020
8021 case CONJ_EXPR:
8022 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8023 return fold_convert (type, arg0);
8024 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8025 {
8026 tree itype = TREE_TYPE (type);
8027 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8028 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8029 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8030 }
8031 if (TREE_CODE (arg0) == COMPLEX_CST)
8032 {
8033 tree itype = TREE_TYPE (type);
8034 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8035 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8036 return build_complex (type, rpart, negate_expr (ipart));
8037 }
8038 if (TREE_CODE (arg0) == CONJ_EXPR)
8039 return fold_convert (type, TREE_OPERAND (arg0, 0));
8040 return NULL_TREE;
8041
8042 case BIT_NOT_EXPR:
8043 if (TREE_CODE (arg0) == INTEGER_CST)
8044 return fold_not_const (arg0, type);
8045 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8046 return fold_convert (type, TREE_OPERAND (arg0, 0));
8047 /* Convert ~ (-A) to A - 1. */
8048 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8049 return fold_build2 (MINUS_EXPR, type,
8050 fold_convert (type, TREE_OPERAND (arg0, 0)),
8051 build_int_cst (type, 1));
8052 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8053 else if (INTEGRAL_TYPE_P (type)
8054 && ((TREE_CODE (arg0) == MINUS_EXPR
8055 && integer_onep (TREE_OPERAND (arg0, 1)))
8056 || (TREE_CODE (arg0) == PLUS_EXPR
8057 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8058 return fold_build1 (NEGATE_EXPR, type,
8059 fold_convert (type, TREE_OPERAND (arg0, 0)));
8060 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8061 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8062 && (tem = fold_unary (BIT_NOT_EXPR, type,
8063 fold_convert (type,
8064 TREE_OPERAND (arg0, 0)))))
8065 return fold_build2 (BIT_XOR_EXPR, type, tem,
8066 fold_convert (type, TREE_OPERAND (arg0, 1)));
8067 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8068 && (tem = fold_unary (BIT_NOT_EXPR, type,
8069 fold_convert (type,
8070 TREE_OPERAND (arg0, 1)))))
8071 return fold_build2 (BIT_XOR_EXPR, type,
8072 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8073 /* Perform BIT_NOT_EXPR on each element individually. */
8074 else if (TREE_CODE (arg0) == VECTOR_CST)
8075 {
8076 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8077 int count = TYPE_VECTOR_SUBPARTS (type), i;
8078
8079 for (i = 0; i < count; i++)
8080 {
8081 if (elements)
8082 {
8083 elem = TREE_VALUE (elements);
8084 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8085 if (elem == NULL_TREE)
8086 break;
8087 elements = TREE_CHAIN (elements);
8088 }
8089 else
8090 elem = build_int_cst (TREE_TYPE (type), -1);
8091 list = tree_cons (NULL_TREE, elem, list);
8092 }
8093 if (i == count)
8094 return build_vector (type, nreverse (list));
8095 }
8096
8097 return NULL_TREE;
8098
8099 case TRUTH_NOT_EXPR:
8100 /* The argument to invert_truthvalue must have Boolean type. */
8101 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8102 arg0 = fold_convert (boolean_type_node, arg0);
8103
8104 /* Note that the operand of this must be an int
8105 and its values must be 0 or 1.
8106 ("true" is a fixed value perhaps depending on the language,
8107 but we don't handle values other than 1 correctly yet.) */
8108 tem = fold_truth_not_expr (arg0);
8109 if (!tem)
8110 return NULL_TREE;
8111 return fold_convert (type, tem);
8112
8113 case REALPART_EXPR:
8114 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8115 return fold_convert (type, arg0);
8116 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8117 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8118 TREE_OPERAND (arg0, 1));
8119 if (TREE_CODE (arg0) == COMPLEX_CST)
8120 return fold_convert (type, TREE_REALPART (arg0));
8121 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8122 {
8123 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8124 tem = fold_build2 (TREE_CODE (arg0), itype,
8125 fold_build1 (REALPART_EXPR, itype,
8126 TREE_OPERAND (arg0, 0)),
8127 fold_build1 (REALPART_EXPR, itype,
8128 TREE_OPERAND (arg0, 1)));
8129 return fold_convert (type, tem);
8130 }
8131 if (TREE_CODE (arg0) == CONJ_EXPR)
8132 {
8133 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8134 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8135 return fold_convert (type, tem);
8136 }
8137 if (TREE_CODE (arg0) == CALL_EXPR)
8138 {
8139 tree fn = get_callee_fndecl (arg0);
8140 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8141 switch (DECL_FUNCTION_CODE (fn))
8142 {
8143 CASE_FLT_FN (BUILT_IN_CEXPI):
8144 fn = mathfn_built_in (type, BUILT_IN_COS);
8145 if (fn)
8146 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8147 break;
8148
8149 default:
8150 break;
8151 }
8152 }
8153 return NULL_TREE;
8154
8155 case IMAGPART_EXPR:
8156 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8157 return fold_convert (type, integer_zero_node);
8158 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8159 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8160 TREE_OPERAND (arg0, 0));
8161 if (TREE_CODE (arg0) == COMPLEX_CST)
8162 return fold_convert (type, TREE_IMAGPART (arg0));
8163 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8164 {
8165 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8166 tem = fold_build2 (TREE_CODE (arg0), itype,
8167 fold_build1 (IMAGPART_EXPR, itype,
8168 TREE_OPERAND (arg0, 0)),
8169 fold_build1 (IMAGPART_EXPR, itype,
8170 TREE_OPERAND (arg0, 1)));
8171 return fold_convert (type, tem);
8172 }
8173 if (TREE_CODE (arg0) == CONJ_EXPR)
8174 {
8175 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8176 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8177 return fold_convert (type, negate_expr (tem));
8178 }
8179 if (TREE_CODE (arg0) == CALL_EXPR)
8180 {
8181 tree fn = get_callee_fndecl (arg0);
8182 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8183 switch (DECL_FUNCTION_CODE (fn))
8184 {
8185 CASE_FLT_FN (BUILT_IN_CEXPI):
8186 fn = mathfn_built_in (type, BUILT_IN_SIN);
8187 if (fn)
8188 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8189 break;
8190
8191 default:
8192 break;
8193 }
8194 }
8195 return NULL_TREE;
8196
8197 default:
8198 return NULL_TREE;
8199 } /* switch (code) */
8200 }
8201
8202 /* Fold a binary expression of code CODE and type TYPE with operands
8203 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8204 Return the folded expression if folding is successful. Otherwise,
8205 return NULL_TREE. */
8206
8207 static tree
8208 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8209 {
8210 enum tree_code compl_code;
8211
8212 if (code == MIN_EXPR)
8213 compl_code = MAX_EXPR;
8214 else if (code == MAX_EXPR)
8215 compl_code = MIN_EXPR;
8216 else
8217 gcc_unreachable ();
8218
8219 /* MIN (MAX (a, b), b) == b. */
8220 if (TREE_CODE (op0) == compl_code
8221 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8222 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8223
8224 /* MIN (MAX (b, a), b) == b. */
8225 if (TREE_CODE (op0) == compl_code
8226 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8227 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8228 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8229
8230 /* MIN (a, MAX (a, b)) == a. */
8231 if (TREE_CODE (op1) == compl_code
8232 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8233 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8234 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8235
8236 /* MIN (a, MAX (b, a)) == a. */
8237 if (TREE_CODE (op1) == compl_code
8238 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8239 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8240 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8241
8242 return NULL_TREE;
8243 }
8244
8245 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8246 by changing CODE to reduce the magnitude of constants involved in
8247 ARG0 of the comparison.
8248 Returns a canonicalized comparison tree if a simplification was
8249 possible, otherwise returns NULL_TREE.
8250 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8251 valid if signed overflow is undefined. */
8252
8253 static tree
8254 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8255 tree arg0, tree arg1,
8256 bool *strict_overflow_p)
8257 {
8258 enum tree_code code0 = TREE_CODE (arg0);
8259 tree t, cst0 = NULL_TREE;
8260 int sgn0;
8261 bool swap = false;
8262
8263 /* Match A +- CST code arg1 and CST code arg1. */
8264 if (!(((code0 == MINUS_EXPR
8265 || code0 == PLUS_EXPR)
8266 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8267 || code0 == INTEGER_CST))
8268 return NULL_TREE;
8269
8270 /* Identify the constant in arg0 and its sign. */
8271 if (code0 == INTEGER_CST)
8272 cst0 = arg0;
8273 else
8274 cst0 = TREE_OPERAND (arg0, 1);
8275 sgn0 = tree_int_cst_sgn (cst0);
8276
8277 /* Overflowed constants and zero will cause problems. */
8278 if (integer_zerop (cst0)
8279 || TREE_OVERFLOW (cst0))
8280 return NULL_TREE;
8281
8282 /* See if we can reduce the magnitude of the constant in
8283 arg0 by changing the comparison code. */
8284 if (code0 == INTEGER_CST)
8285 {
8286 /* CST <= arg1 -> CST-1 < arg1. */
8287 if (code == LE_EXPR && sgn0 == 1)
8288 code = LT_EXPR;
8289 /* -CST < arg1 -> -CST-1 <= arg1. */
8290 else if (code == LT_EXPR && sgn0 == -1)
8291 code = LE_EXPR;
8292 /* CST > arg1 -> CST-1 >= arg1. */
8293 else if (code == GT_EXPR && sgn0 == 1)
8294 code = GE_EXPR;
8295 /* -CST >= arg1 -> -CST-1 > arg1. */
8296 else if (code == GE_EXPR && sgn0 == -1)
8297 code = GT_EXPR;
8298 else
8299 return NULL_TREE;
8300 /* arg1 code' CST' might be more canonical. */
8301 swap = true;
8302 }
8303 else
8304 {
8305 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8306 if (code == LT_EXPR
8307 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8308 code = LE_EXPR;
8309 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8310 else if (code == GT_EXPR
8311 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8312 code = GE_EXPR;
8313 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8314 else if (code == LE_EXPR
8315 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8316 code = LT_EXPR;
8317 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8318 else if (code == GE_EXPR
8319 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8320 code = GT_EXPR;
8321 else
8322 return NULL_TREE;
8323 *strict_overflow_p = true;
8324 }
8325
8326 /* Now build the constant reduced in magnitude. */
8327 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8328 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8329 if (code0 != INTEGER_CST)
8330 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8331
8332 /* If swapping might yield a more canonical form, do so. */
8333 if (swap)
8334 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8335 else
8336 return fold_build2 (code, type, t, arg1);
8337 }
8338
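/* Two hedged examples of maybe_canonicalize_comparison_1's rewrites:

       x - 2 <  y  ->  x - 1 <= y   (needs undefined signed overflow)
       3 <= x      ->  2 < x        (exact; then swapped to x > 2 to
                                     put the constant last)  */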
8339 /* Further canonicalize the comparison ARG0 CODE ARG1 of a type TYPE
8340 whose overflow is undefined. Try to decrease the magnitude of constants involved
8341 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8342 and put sole constants at the second argument position.
8343 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8344
8345 static tree
8346 maybe_canonicalize_comparison (enum tree_code code, tree type,
8347 tree arg0, tree arg1)
8348 {
8349 tree t;
8350 bool strict_overflow_p;
8351 const char * const warnmsg = G_("assuming signed overflow does not occur "
8352 "when reducing constant in comparison");
8353
8354 /* In principle pointers also have undefined overflow behavior,
8355 but that causes problems elsewhere. */
8356 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8357 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8358 return NULL_TREE;
8359
8360 /* Try canonicalization by simplifying arg0. */
8361 strict_overflow_p = false;
8362 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8363 &strict_overflow_p);
8364 if (t)
8365 {
8366 if (strict_overflow_p)
8367 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8368 return t;
8369 }
8370
8371 /* Try canonicalization by simplifying arg1 using the swapped
8372 comparison. */
8373 code = swap_tree_comparison (code);
8374 strict_overflow_p = false;
8375 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8376 &strict_overflow_p);
8377 if (t && strict_overflow_p)
8378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8379 return t;
8380 }
8381
8382 /* Subroutine of fold_binary. This routine performs all of the
8383 transformations that are common to the equality/inequality
8384 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8385 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8386 fold_binary itself should use fold_binary instead. Fold a comparison with
8387 tree code CODE and type TYPE with operands OP0 and OP1. Return
8388 the folded comparison or NULL_TREE. */
8389
8390 static tree
8391 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8392 {
8393 tree arg0, arg1, tem;
8394
8395 arg0 = op0;
8396 arg1 = op1;
8397
8398 STRIP_SIGN_NOPS (arg0);
8399 STRIP_SIGN_NOPS (arg1);
8400
8401 tem = fold_relational_const (code, type, arg0, arg1);
8402 if (tem != NULL_TREE)
8403 return tem;
8404
8405 /* If one arg is a real or integer constant, put it last. */
8406 if (tree_swap_operands_p (arg0, arg1, true))
8407 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8408
8409 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8410 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8411 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8412 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8413 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8414 && (TREE_CODE (arg1) == INTEGER_CST
8415 && !TREE_OVERFLOW (arg1)))
8416 {
8417 tree const1 = TREE_OPERAND (arg0, 1);
8418 tree const2 = arg1;
8419 tree variable = TREE_OPERAND (arg0, 0);
8420 tree lhs;
8421 int lhs_add;
8422 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8423
8424 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8425 TREE_TYPE (arg1), const2, const1);
8426
8427 /* If the constant operation overflowed this can be
8428 simplified as a comparison against INT_MAX/INT_MIN. */
8429 if (TREE_CODE (lhs) == INTEGER_CST
8430 && TREE_OVERFLOW (lhs))
8431 {
8432 int const1_sgn = tree_int_cst_sgn (const1);
8433 enum tree_code code2 = code;
8434
8435 /* Get the sign of the constant on the lhs if the
8436 operation were VARIABLE + CONST1. */
8437 if (TREE_CODE (arg0) == MINUS_EXPR)
8438 const1_sgn = -const1_sgn;
8439
8440 /* The sign of the constant determines if we overflowed
8441 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8442 Canonicalize to the INT_MIN overflow by swapping the comparison
8443 if necessary. */
8444 if (const1_sgn == -1)
8445 code2 = swap_tree_comparison (code);
8446
8447 /* We can now look at the canonicalized case
8448 VARIABLE + 1 CODE2 INT_MIN
8449 and decide on the result. */
8450 if (code2 == LT_EXPR
8451 || code2 == LE_EXPR
8452 || code2 == EQ_EXPR)
8453 return omit_one_operand (type, boolean_false_node, variable);
8454 else if (code2 == NE_EXPR
8455 || code2 == GE_EXPR
8456 || code2 == GT_EXPR)
8457 return omit_one_operand (type, boolean_true_node, variable);
8458 }
8459
8460 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8461 && (TREE_CODE (lhs) != INTEGER_CST
8462 || !TREE_OVERFLOW (lhs)))
8463 {
8464 fold_overflow_warning (("assuming signed overflow does not occur "
8465 "when changing X +- C1 cmp C2 to "
8466 "X cmp C1 +- C2"),
8467 WARN_STRICT_OVERFLOW_COMPARISON);
8468 return fold_build2 (code, type, variable, lhs);
8469 }
8470 }
8471
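/* For instance (an illustration of the transform above, assuming a
   32-bit int): x + 1 < 3 becomes x < 2, while for x - 1 < INT_MAX the
   combined constant overflows and the comparison is recognized as
   always true via the INT_MIN canonicalization.  */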
8472 /* A comparison of pointers can be decomposed into a compile-time
8473 comparison of the base objects and of the offsets into the object.
8474 This requires at least one operand being an ADDR_EXPR or a
8475 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8476 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8477 && (TREE_CODE (arg0) == ADDR_EXPR
8478 || TREE_CODE (arg1) == ADDR_EXPR
8479 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8480 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8481 {
8482 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8483 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8484 enum machine_mode mode;
8485 int volatilep, unsignedp;
8486 bool indirect_base0 = false;
8487
8488 /* Get base and offset for the access. Strip ADDR_EXPR for
8489 get_inner_reference, but put it back by stripping INDIRECT_REF
8490 off the base object if possible. */
8491 base0 = arg0;
8492 if (TREE_CODE (arg0) == ADDR_EXPR)
8493 {
8494 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8495 &bitsize, &bitpos0, &offset0, &mode,
8496 &unsignedp, &volatilep, false);
8497 if (TREE_CODE (base0) == INDIRECT_REF)
8498 base0 = TREE_OPERAND (base0, 0);
8499 else
8500 indirect_base0 = true;
8501 }
8502 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8503 {
8504 base0 = TREE_OPERAND (arg0, 0);
8505 offset0 = TREE_OPERAND (arg0, 1);
8506 }
8507
8508 base1 = arg1;
8509 if (TREE_CODE (arg1) == ADDR_EXPR)
8510 {
8511 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8512 &bitsize, &bitpos1, &offset1, &mode,
8513 &unsignedp, &volatilep, false);
8514 /* We have to make sure base1 is indirect or non-indirect,
8515 matching what we did for base0. */
8516 if (TREE_CODE (base1) == INDIRECT_REF
8517 && !indirect_base0)
8518 base1 = TREE_OPERAND (base1, 0);
8519 else if (!indirect_base0)
8520 base1 = NULL_TREE;
8521 }
8522 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8523 {
8524 base1 = TREE_OPERAND (arg1, 0);
8525 offset1 = TREE_OPERAND (arg1, 1);
8526 }
8527 else if (indirect_base0)
8528 base1 = NULL_TREE;
8529
8530 /* If we have equivalent bases we might be able to simplify. */
8531 if (base0 && base1
8532 && operand_equal_p (base0, base1, 0))
8533 {
8534 /* We can fold this expression to a constant if the non-constant
8535 offset parts are equal. */
8536 if (offset0 == offset1
8537 || (offset0 && offset1
8538 && operand_equal_p (offset0, offset1, 0)))
8539 {
8540 switch (code)
8541 {
8542 case EQ_EXPR:
8543 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8544 case NE_EXPR:
8545 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8546 case LT_EXPR:
8547 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8548 case LE_EXPR:
8549 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8550 case GE_EXPR:
8551 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8552 case GT_EXPR:
8553 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8554 default:;
8555 }
8556 }
8557 /* We can simplify the comparison to a comparison of the variable
8558 offset parts if the constant offset parts are equal.
8559 Be careful to use signed size type here because otherwise we
8560 mess with array offsets in the wrong way. This is possible
8561 because pointer arithmetic is restricted to retain within an
8562 object and overflow on pointer differences is undefined as of
8563 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8564 else if (bitpos0 == bitpos1)
8565 {
8566 tree signed_size_type_node;
8567 signed_size_type_node = signed_type_for (size_type_node);
8568
8569 /* By converting to signed size type we cover middle-end pointer
8570 arithmetic which operates on unsigned pointer types of size
8571 type size and ARRAY_REF offsets which are properly sign or
8572 zero extended from their type in case it is narrower than
8573 size type. */
8574 if (offset0 == NULL_TREE)
8575 offset0 = build_int_cst (signed_size_type_node, 0);
8576 else
8577 offset0 = fold_convert (signed_size_type_node, offset0);
8578 if (offset1 == NULL_TREE)
8579 offset1 = build_int_cst (signed_size_type_node, 0);
8580 else
8581 offset1 = fold_convert (signed_size_type_node, offset1);
8582
8583 return fold_build2 (code, type, offset0, offset1);
8584 }
8585 }
8586 }
8587
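/* A hedged example of the pointer decomposition above (assumes 32-bit
   int): for "struct S { int a; int b; } x;", &x.a < &x.b compares
   equal bases with constant bit positions 0 and 32 and folds to 1,
   and for an int array a, &a[i] == &a[j] with equal bases and equal
   bit positions reduces to comparing the variable offsets derived
   from i and j in the signed size type.  */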
8588 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8589 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8590 the resulting offset is smaller in absolute value than the
8591 original one. */
8592 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8593 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8594 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8595 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8596 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8597 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8598 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8599 {
8600 tree const1 = TREE_OPERAND (arg0, 1);
8601 tree const2 = TREE_OPERAND (arg1, 1);
8602 tree variable1 = TREE_OPERAND (arg0, 0);
8603 tree variable2 = TREE_OPERAND (arg1, 0);
8604 tree cst;
8605 const char * const warnmsg = G_("assuming signed overflow does not "
8606 "occur when combining constants around "
8607 "a comparison");
8608
8609 /* Put the constant on the side where it doesn't overflow and is
8610 of lower absolute value than before. */
8611 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8612 ? MINUS_EXPR : PLUS_EXPR,
8613 const2, const1, 0);
8614 if (!TREE_OVERFLOW (cst)
8615 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8616 {
8617 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8618 return fold_build2 (code, type,
8619 variable1,
8620 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8621 variable2, cst));
8622 }
8623
8624 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8625 ? MINUS_EXPR : PLUS_EXPR,
8626 const1, const2, 0);
8627 if (!TREE_OVERFLOW (cst)
8628 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8629 {
8630 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8631 return fold_build2 (code, type,
8632 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8633 variable1, cst),
8634 variable2);
8635 }
8636 }
8637
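/* For example (hedged; requires undefined signed overflow):
   x + 10 < y + 2 is rewritten as x < y + -8, since combining the
   constants as 2 - 10 does not overflow and yields a smaller absolute
   value on one side.  */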
8638 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8639 signed arithmetic case. That form is created by the compiler
8640 often enough for folding it to be of value. One example is in
8641 computing loop trip counts after Operator Strength Reduction. */
8642 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8643 && TREE_CODE (arg0) == MULT_EXPR
8644 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8645 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8646 && integer_zerop (arg1))
8647 {
8648 tree const1 = TREE_OPERAND (arg0, 1);
8649 tree const2 = arg1; /* zero */
8650 tree variable1 = TREE_OPERAND (arg0, 0);
8651 enum tree_code cmp_code = code;
8652
8653 gcc_assert (!integer_zerop (const1));
8654
8655 fold_overflow_warning (("assuming signed overflow does not occur when "
8656 "eliminating multiplication in comparison "
8657 "with zero"),
8658 WARN_STRICT_OVERFLOW_COMPARISON);
8659
8660 /* If const1 is negative we swap the sense of the comparison. */
8661 if (tree_int_cst_sgn (const1) < 0)
8662 cmp_code = swap_tree_comparison (cmp_code);
8663
8664 return fold_build2 (cmp_code, type, variable1, const2);
8665 }
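/* E.g. (illustrative): x * 1000 > 0 becomes x > 0, and x * -5 >= 0
   becomes x <= 0, the comparison sense being swapped for the negative
   multiplier; both rely on signed overflow being undefined.  */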
8666
8667 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8668 if (tem)
8669 return tem;
8670
8671 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8672 {
8673 tree targ0 = strip_float_extensions (arg0);
8674 tree targ1 = strip_float_extensions (arg1);
8675 tree newtype = TREE_TYPE (targ0);
8676
8677 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8678 newtype = TREE_TYPE (targ1);
8679
8680 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8681 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8682 return fold_build2 (code, type, fold_convert (newtype, targ0),
8683 fold_convert (newtype, targ1));
8684
8685 /* (-a) CMP (-b) -> b CMP a */
8686 if (TREE_CODE (arg0) == NEGATE_EXPR
8687 && TREE_CODE (arg1) == NEGATE_EXPR)
8688 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8689 TREE_OPERAND (arg0, 0));
8690
8691 if (TREE_CODE (arg1) == REAL_CST)
8692 {
8693 REAL_VALUE_TYPE cst;
8694 cst = TREE_REAL_CST (arg1);
8695
8696 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8697 if (TREE_CODE (arg0) == NEGATE_EXPR)
8698 return fold_build2 (swap_tree_comparison (code), type,
8699 TREE_OPERAND (arg0, 0),
8700 build_real (TREE_TYPE (arg1),
8701 REAL_VALUE_NEGATE (cst)));
8702
8703 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8704 /* a CMP (-0) -> a CMP 0 */
8705 if (REAL_VALUE_MINUS_ZERO (cst))
8706 return fold_build2 (code, type, arg0,
8707 build_real (TREE_TYPE (arg1), dconst0));
8708
8709 /* x != NaN is always true, other ops are always false. */
8710 if (REAL_VALUE_ISNAN (cst)
8711 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8712 {
8713 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8714 return omit_one_operand (type, tem, arg0);
8715 }
8716
8717 /* Fold comparisons against infinity. */
8718 if (REAL_VALUE_ISINF (cst))
8719 {
8720 tem = fold_inf_compare (code, type, arg0, arg1);
8721 if (tem != NULL_TREE)
8722 return tem;
8723 }
8724 }
8725
8726 /* If this is a comparison of a real constant with a PLUS_EXPR
8727 or a MINUS_EXPR of a real constant, we can convert it into a
8728 comparison with a revised real constant as long as no overflow
8729 occurs when unsafe_math_optimizations are enabled. */
8730 if (flag_unsafe_math_optimizations
8731 && TREE_CODE (arg1) == REAL_CST
8732 && (TREE_CODE (arg0) == PLUS_EXPR
8733 || TREE_CODE (arg0) == MINUS_EXPR)
8734 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8735 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8736 ? MINUS_EXPR : PLUS_EXPR,
8737 arg1, TREE_OPERAND (arg0, 1), 0))
8738 && !TREE_OVERFLOW (tem))
8739 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8740
8741 /* Likewise, we can simplify a comparison of a real constant with
8742 a MINUS_EXPR whose first operand is also a real constant, i.e.
8743 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8744 floating-point types only if -fassociative-math is set. */
8745 if (flag_associative_math
8746 && TREE_CODE (arg1) == REAL_CST
8747 && TREE_CODE (arg0) == MINUS_EXPR
8748 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8749 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8750 arg1, 0))
8751 && !TREE_OVERFLOW (tem))
8752 return fold_build2 (swap_tree_comparison (code), type,
8753 TREE_OPERAND (arg0, 1), tem);
8754
8755 /* Fold comparisons against built-in math functions. */
8756 if (TREE_CODE (arg1) == REAL_CST
8757 && flag_unsafe_math_optimizations
8758 && ! flag_errno_math)
8759 {
8760 enum built_in_function fcode = builtin_mathfn_code (arg0);
8761
8762 if (fcode != END_BUILTINS)
8763 {
8764 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8765 if (tem != NULL_TREE)
8766 return tem;
8767 }
8768 }
8769 }
8770
8771 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8772 && (TREE_CODE (arg0) == NOP_EXPR
8773 || TREE_CODE (arg0) == CONVERT_EXPR))
8774 {
8775 /* If we are widening one operand of an integer comparison,
8776 see if the other operand is similarly being widened. Perhaps we
8777 can do the comparison in the narrower type. */
8778 tem = fold_widened_comparison (code, type, arg0, arg1);
8779 if (tem)
8780 return tem;
8781
8782 /* Or if we are changing signedness. */
8783 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8784 if (tem)
8785 return tem;
8786 }
8787
8788 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8789 constant, we can simplify it. */
8790 if (TREE_CODE (arg1) == INTEGER_CST
8791 && (TREE_CODE (arg0) == MIN_EXPR
8792 || TREE_CODE (arg0) == MAX_EXPR)
8793 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8794 {
8795 tem = optimize_minmax_comparison (code, type, op0, op1);
8796 if (tem)
8797 return tem;
8798 }
8799
8800 /* Simplify comparison of something with itself. (For IEEE
8801 floating-point, we can only do some of these simplifications.) */
8802 if (operand_equal_p (arg0, arg1, 0))
8803 {
8804 switch (code)
8805 {
8806 case EQ_EXPR:
8807 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8808 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8809 return constant_boolean_node (1, type);
8810 break;
8811
8812 case GE_EXPR:
8813 case LE_EXPR:
8814 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8815 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8816 return constant_boolean_node (1, type);
8817 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8818
8819 case NE_EXPR:
8820 /* For NE, we can only do this simplification if integer
8821 or we don't honor IEEE floating point NaNs. */
8822 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8823 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8824 break;
8825 /* ... fall through ... */
8826 case GT_EXPR:
8827 case LT_EXPR:
8828 return constant_boolean_node (0, type);
8829 default:
8830 gcc_unreachable ();
8831 }
8832 }
8833
8834 /* If we are comparing an expression that just has comparisons
8835 of two integer values, arithmetic expressions of those comparisons,
8836 and constants, we can simplify it. There are only three cases
8837 to check: the two values can either be equal, the first can be
8838 greater, or the second can be greater. Fold the expression for
8839 those three values. Since each value must be 0 or 1, we have
8840 eight possibilities, each of which corresponds to the constant 0
8841 or 1 or one of the six possible comparisons.
8842
8843 This handles common cases like (a > b) == 0 but also handles
8844 expressions like ((x > y) - (y > x)) > 0, which supposedly
8845 occur in macroized code. */
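/* A worked example, for illustration only: when folding (a > b) == 0,
   the three evaluations below give high_result = (1 == 0) = 0,
   equal_result = (0 == 0) = 1 and low_result = (0 == 0) = 1, so the
   3-bit mask is 0*4 + 1*2 + 1 == 3 and the whole expression folds
   to a <= b. */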
8846
8847 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8848 {
8849 tree cval1 = 0, cval2 = 0;
8850 int save_p = 0;
8851
8852 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8853 /* Don't handle degenerate cases here; they should already
8854 have been handled anyway. */
8855 && cval1 != 0 && cval2 != 0
8856 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8857 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8858 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8859 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8860 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8861 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8862 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8863 {
8864 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8865 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8866
8867 /* We can't just pass T to eval_subst in case cval1 or cval2
8868 was the same as ARG1. */
8869
8870 tree high_result
8871 = fold_build2 (code, type,
8872 eval_subst (arg0, cval1, maxval,
8873 cval2, minval),
8874 arg1);
8875 tree equal_result
8876 = fold_build2 (code, type,
8877 eval_subst (arg0, cval1, maxval,
8878 cval2, maxval),
8879 arg1);
8880 tree low_result
8881 = fold_build2 (code, type,
8882 eval_subst (arg0, cval1, minval,
8883 cval2, maxval),
8884 arg1);
8885
8886 /* All three of these results should be 0 or 1. Confirm they are.
8887 Then use those values to select the proper code to use. */
8888
8889 if (TREE_CODE (high_result) == INTEGER_CST
8890 && TREE_CODE (equal_result) == INTEGER_CST
8891 && TREE_CODE (low_result) == INTEGER_CST)
8892 {
8893 /* Make a 3-bit mask with the high-order bit being the
8894 value for `>', the next for `=', and the low for `<'. */
8895 switch ((integer_onep (high_result) * 4)
8896 + (integer_onep (equal_result) * 2)
8897 + integer_onep (low_result))
8898 {
8899 case 0:
8900 /* Always false. */
8901 return omit_one_operand (type, integer_zero_node, arg0);
8902 case 1:
8903 code = LT_EXPR;
8904 break;
8905 case 2:
8906 code = EQ_EXPR;
8907 break;
8908 case 3:
8909 code = LE_EXPR;
8910 break;
8911 case 4:
8912 code = GT_EXPR;
8913 break;
8914 case 5:
8915 code = NE_EXPR;
8916 break;
8917 case 6:
8918 code = GE_EXPR;
8919 break;
8920 case 7:
8921 /* Always true. */
8922 return omit_one_operand (type, integer_one_node, arg0);
8923 }
8924
8925 if (save_p)
8926 return save_expr (build2 (code, type, cval1, cval2));
8927 return fold_build2 (code, type, cval1, cval2);
8928 }
8929 }
8930 }
8931
8932 /* Fold a comparison of the address of COMPONENT_REFs with the same
8933 type and component to a comparison of the address of the base
8934 object. In short, fold &x->a OP &y->a to x OP y and
8935 &x->a OP &y.a to x OP &y. */
8936 if (TREE_CODE (arg0) == ADDR_EXPR
8937 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8938 && TREE_CODE (arg1) == ADDR_EXPR
8939 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8940 {
8941 tree cref0 = TREE_OPERAND (arg0, 0);
8942 tree cref1 = TREE_OPERAND (arg1, 0);
8943 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8944 {
8945 tree op0 = TREE_OPERAND (cref0, 0);
8946 tree op1 = TREE_OPERAND (cref1, 0);
8947 return fold_build2 (code, type,
8948 fold_addr_expr (op0),
8949 fold_addr_expr (op1));
8950 }
8951 }
8952
8953 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8954 into a single range test. */
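/* For example, for unsigned X the comparison X/4 == 2 holds exactly
   when 8 <= X && X <= 11, so fold_div_compare can replace the
   division by a range test on X. (Sketch of the unsigned case; the
   signed cases must also account for the truncation direction of
   TRUNC_DIV_EXPR.) */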
8955 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8956 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8957 && TREE_CODE (arg1) == INTEGER_CST
8958 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8959 && !integer_zerop (TREE_OPERAND (arg0, 1))
8960 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8961 && !TREE_OVERFLOW (arg1))
8962 {
8963 tem = fold_div_compare (code, type, arg0, arg1);
8964 if (tem != NULL_TREE)
8965 return tem;
8966 }
8967
8968 /* Fold ~X op ~Y as Y op X. */
8969 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8970 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8971 {
8972 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8973 return fold_build2 (code, type,
8974 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8975 TREE_OPERAND (arg0, 0));
8976 }
8977
8978 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8979 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8980 && TREE_CODE (arg1) == INTEGER_CST)
8981 {
8982 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8983 return fold_build2 (swap_tree_comparison (code), type,
8984 TREE_OPERAND (arg0, 0),
8985 fold_build1 (BIT_NOT_EXPR, cmp_type,
8986 fold_convert (cmp_type, arg1)));
8987 }
8988
8989 return NULL_TREE;
8990 }
8991
8992
8993 /* Subroutine of fold_binary. Optimize complex multiplications of the
8994 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8995 argument EXPR represents the expression "z" of type TYPE. */
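/* For example, for z = 3 + 4i we have z * conj(z)
   = (3 + 4i) * (3 - 4i) = 9 + 16 = 25 + 0i, i.e.
   realpart(z)*realpart(z) + imagpart(z)*imagpart(z) with a zero
   imaginary part, which is the form built below. */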
8996
8997 static tree
8998 fold_mult_zconjz (tree type, tree expr)
8999 {
9000 tree itype = TREE_TYPE (type);
9001 tree rpart, ipart, tem;
9002
9003 if (TREE_CODE (expr) == COMPLEX_EXPR)
9004 {
9005 rpart = TREE_OPERAND (expr, 0);
9006 ipart = TREE_OPERAND (expr, 1);
9007 }
9008 else if (TREE_CODE (expr) == COMPLEX_CST)
9009 {
9010 rpart = TREE_REALPART (expr);
9011 ipart = TREE_IMAGPART (expr);
9012 }
9013 else
9014 {
9015 expr = save_expr (expr);
9016 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9017 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9018 }
9019
9020 rpart = save_expr (rpart);
9021 ipart = save_expr (ipart);
9022 tem = fold_build2 (PLUS_EXPR, itype,
9023 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9024 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9025 return fold_build2 (COMPLEX_EXPR, type, tem,
9026 fold_convert (itype, integer_zero_node));
9027 }
9028
9029
9030 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9031 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9032 guarantees that P and N have the same least significant log2(M) bits.
9033 N is not otherwise constrained. In particular, N is not normalized to
9034 0 <= N < M as is common. In general, the precise value of P is unknown.
9035 M is chosen as large as possible such that constant N can be determined.
9036
9037 Returns M and sets *RESIDUE to N. */
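/* Illustrative example (hypothetical names): for EXPR == &buf[5],
   where buf is a char array whose DECL_ALIGN_UNIT is 8, the result
   is M == 8 with *RESIDUE == 5; every possible value P of EXPR
   satisfies P == 8*k + 5, so P and 5 agree in their low
   log2(8) == 3 bits. */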
9038
9039 static unsigned HOST_WIDE_INT
9040 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9041 {
9042 enum tree_code code;
9043
9044 *residue = 0;
9045
9046 code = TREE_CODE (expr);
9047 if (code == ADDR_EXPR)
9048 {
9049 expr = TREE_OPERAND (expr, 0);
9050 if (handled_component_p (expr))
9051 {
9052 HOST_WIDE_INT bitsize, bitpos;
9053 tree offset;
9054 enum machine_mode mode;
9055 int unsignedp, volatilep;
9056
9057 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9058 &mode, &unsignedp, &volatilep, false);
9059 *residue = bitpos / BITS_PER_UNIT;
9060 if (offset)
9061 {
9062 if (TREE_CODE (offset) == INTEGER_CST)
9063 *residue += TREE_INT_CST_LOW (offset);
9064 else
9065 /* We don't handle more complicated offset expressions. */
9066 return 1;
9067 }
9068 }
9069
9070 if (DECL_P (expr))
9071 return DECL_ALIGN_UNIT (expr);
9072 }
9073 else if (code == POINTER_PLUS_EXPR)
9074 {
9075 tree op0, op1;
9076 unsigned HOST_WIDE_INT modulus;
9077 enum tree_code inner_code;
9078
9079 op0 = TREE_OPERAND (expr, 0);
9080 STRIP_NOPS (op0);
9081 modulus = get_pointer_modulus_and_residue (op0, residue);
9082
9083 op1 = TREE_OPERAND (expr, 1);
9084 STRIP_NOPS (op1);
9085 inner_code = TREE_CODE (op1);
9086 if (inner_code == INTEGER_CST)
9087 {
9088 *residue += TREE_INT_CST_LOW (op1);
9089 return modulus;
9090 }
9091 else if (inner_code == MULT_EXPR)
9092 {
9093 op1 = TREE_OPERAND (op1, 1);
9094 if (TREE_CODE (op1) == INTEGER_CST)
9095 {
9096 unsigned HOST_WIDE_INT align;
9097
9098 /* Compute the greatest power-of-2 divisor of op1. */
9099 align = TREE_INT_CST_LOW (op1);
9100 align &= -align;
9101
9102 /* If align is non-zero and less than *modulus, replace
9103 *modulus with align. If align is 0, then either op1 is 0
9104 or the greatest power-of-2 divisor of op1 doesn't fit in an
9105 unsigned HOST_WIDE_INT. In either case, no additional
9106 constraint is imposed. */
9107 if (align)
9108 modulus = MIN (modulus, align);
9109
9110 return modulus;
9111 }
9112 }
9113 }
9114
9115 /* If we get here, we were unable to determine anything useful about the
9116 expression. */
9117 return 1;
9118 }
9119
9120
9121 /* Fold a binary expression of code CODE and type TYPE with operands
9122 OP0 and OP1. Return the folded expression if folding is
9123 successful. Otherwise, return NULL_TREE. */
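/* A typical caller must cope with a NULL_TREE result, along these
   lines (sketch):

     tree tem = fold_binary (PLUS_EXPR, type, op0, op1);
     if (tem == NULL_TREE)
       tem = build2 (PLUS_EXPR, type, op0, op1);

   which is essentially what the fold_build2 convenience wrapper
   does on the caller's behalf. */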
9124
9125 tree
9126 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9127 {
9128 enum tree_code_class kind = TREE_CODE_CLASS (code);
9129 tree arg0, arg1, tem;
9130 tree t1 = NULL_TREE;
9131 bool strict_overflow_p;
9132
9133 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9134 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9135 && TREE_CODE_LENGTH (code) == 2
9136 && op0 != NULL_TREE
9137 && op1 != NULL_TREE);
9138
9139 arg0 = op0;
9140 arg1 = op1;
9141
9142 /* Strip any conversions that don't change the mode. This is
9143 safe for every expression, except for a comparison expression
9144 because its signedness is derived from its operands. So, in
9145 the latter case, only strip conversions that don't change the
9146 signedness.
9147
9148 Note that this is done as an internal manipulation within the
9149 constant folder, in order to find the simplest representation
9150 of the arguments so that their form can be studied. In any
9151 case, the appropriate type conversions should be put back in
9152 the tree that will get out of the constant folder. */
9153
9154 if (kind == tcc_comparison)
9155 {
9156 STRIP_SIGN_NOPS (arg0);
9157 STRIP_SIGN_NOPS (arg1);
9158 }
9159 else
9160 {
9161 STRIP_NOPS (arg0);
9162 STRIP_NOPS (arg1);
9163 }
9164
9165 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9166 constant but we can't do arithmetic on them. */
9167 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9168 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9169 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9170 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9171 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9172 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9173 {
9174 if (kind == tcc_binary)
9175 {
9176 /* Make sure type and arg0 have the same saturating flag. */
9177 gcc_assert (TYPE_SATURATING (type)
9178 == TYPE_SATURATING (TREE_TYPE (arg0)));
9179 tem = const_binop (code, arg0, arg1, 0);
9180 }
9181 else if (kind == tcc_comparison)
9182 tem = fold_relational_const (code, type, arg0, arg1);
9183 else
9184 tem = NULL_TREE;
9185
9186 if (tem != NULL_TREE)
9187 {
9188 if (TREE_TYPE (tem) != type)
9189 tem = fold_convert (type, tem);
9190 return tem;
9191 }
9192 }
9193
9194 /* If this is a commutative operation, and ARG0 is a constant, move it
9195 to ARG1 to reduce the number of tests below. */
9196 if (commutative_tree_code (code)
9197 && tree_swap_operands_p (arg0, arg1, true))
9198 return fold_build2 (code, type, op1, op0);
9199
9200 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9201
9202 First check for cases where an arithmetic operation is applied to a
9203 compound, conditional, or comparison operation. Push the arithmetic
9204 operation inside the compound or conditional to see if any folding
9205 can then be done. Convert comparison to conditional for this purpose.
9206 This also optimizes non-constant cases that used to be done in
9207 expand_expr.
9208
9209 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9210 where one operand is a truth value and the other is a truth value
9211 or a BIT_AND_EXPR with the constant 1. In that case, the
9212 code below would make the expression more complex. Change it to a
9213 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9214 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
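/* For example, (a < b) & (c <= d) becomes
   TRUTH_AND_EXPR <(a < b), (c <= d)>, while (a < b) == (c <= d)
   becomes the inversion of TRUTH_XOR_EXPR <(a < b), (c <= d)>. */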
9215
9216 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9217 || code == EQ_EXPR || code == NE_EXPR)
9218 && ((truth_value_p (TREE_CODE (arg0))
9219 && (truth_value_p (TREE_CODE (arg1))
9220 || (TREE_CODE (arg1) == BIT_AND_EXPR
9221 && integer_onep (TREE_OPERAND (arg1, 1)))))
9222 || (truth_value_p (TREE_CODE (arg1))
9223 && (truth_value_p (TREE_CODE (arg0))
9224 || (TREE_CODE (arg0) == BIT_AND_EXPR
9225 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9226 {
9227 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9228 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9229 : TRUTH_XOR_EXPR,
9230 boolean_type_node,
9231 fold_convert (boolean_type_node, arg0),
9232 fold_convert (boolean_type_node, arg1));
9233
9234 if (code == EQ_EXPR)
9235 tem = invert_truthvalue (tem);
9236
9237 return fold_convert (type, tem);
9238 }
9239
9240 if (TREE_CODE_CLASS (code) == tcc_binary
9241 || TREE_CODE_CLASS (code) == tcc_comparison)
9242 {
9243 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9244 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9245 fold_build2 (code, type,
9246 fold_convert (TREE_TYPE (op0),
9247 TREE_OPERAND (arg0, 1)),
9248 op1));
9249 if (TREE_CODE (arg1) == COMPOUND_EXPR
9250 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9251 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9252 fold_build2 (code, type, op0,
9253 fold_convert (TREE_TYPE (op1),
9254 TREE_OPERAND (arg1, 1))));
9255
9256 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9257 {
9258 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9259 arg0, arg1,
9260 /*cond_first_p=*/1);
9261 if (tem != NULL_TREE)
9262 return tem;
9263 }
9264
9265 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9266 {
9267 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9268 arg1, arg0,
9269 /*cond_first_p=*/0);
9270 if (tem != NULL_TREE)
9271 return tem;
9272 }
9273 }
9274
9275 switch (code)
9276 {
9277 case POINTER_PLUS_EXPR:
9278 /* 0 +p index -> (type)index */
9279 if (integer_zerop (arg0))
9280 return non_lvalue (fold_convert (type, arg1));
9281
9282 /* PTR +p 0 -> PTR */
9283 if (integer_zerop (arg1))
9284 return non_lvalue (fold_convert (type, arg0));
9285
9286 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9287 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9288 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9289 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9290 fold_convert (sizetype, arg1),
9291 fold_convert (sizetype, arg0)));
9292
9293 /* index +p PTR -> PTR +p index */
9294 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9295 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9296 return fold_build2 (POINTER_PLUS_EXPR, type,
9297 fold_convert (type, arg1),
9298 fold_convert (sizetype, arg0));
9299
9300 /* (PTR +p B) +p A -> PTR +p (B + A) */
9301 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9302 {
9303 tree inner;
9304 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9305 tree arg00 = TREE_OPERAND (arg0, 0);
9306 inner = fold_build2 (PLUS_EXPR, sizetype,
9307 arg01, fold_convert (sizetype, arg1));
9308 return fold_convert (type,
9309 fold_build2 (POINTER_PLUS_EXPR,
9310 TREE_TYPE (arg00), arg00, inner));
9311 }
9312
9313 /* PTR_CST +p CST -> CST1 */
9314 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9315 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9316
9317 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9318 of the array. The loop optimizer sometimes produces this type of
9319 expression. */
9320 if (TREE_CODE (arg0) == ADDR_EXPR)
9321 {
9322 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9323 if (tem)
9324 return fold_convert (type, tem);
9325 }
9326
9327 return NULL_TREE;
9328
9329 case PLUS_EXPR:
9330 /* PTR + INT -> (INT)(PTR p+ INT) */
9331 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9332 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9333 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9334 TREE_TYPE (arg0),
9335 arg0,
9336 fold_convert (sizetype, arg1)));
9337 /* INT + PTR -> (INT)(PTR p+ INT) */
9338 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9339 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9340 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9341 TREE_TYPE (arg1),
9342 arg1,
9343 fold_convert (sizetype, arg0)));
9344 /* A + (-B) -> A - B */
9345 if (TREE_CODE (arg1) == NEGATE_EXPR)
9346 return fold_build2 (MINUS_EXPR, type,
9347 fold_convert (type, arg0),
9348 fold_convert (type, TREE_OPERAND (arg1, 0)));
9349 /* (-A) + B -> B - A */
9350 if (TREE_CODE (arg0) == NEGATE_EXPR
9351 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9352 return fold_build2 (MINUS_EXPR, type,
9353 fold_convert (type, arg1),
9354 fold_convert (type, TREE_OPERAND (arg0, 0)));
9355
9356 if (INTEGRAL_TYPE_P (type))
9357 {
9358 /* Convert ~A + 1 to -A. */
9359 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9360 && integer_onep (arg1))
9361 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9362
9363 /* ~X + X is -1. */
9364 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9365 && !TYPE_OVERFLOW_TRAPS (type))
9366 {
9367 tree tem = TREE_OPERAND (arg0, 0);
9368
9369 STRIP_NOPS (tem);
9370 if (operand_equal_p (tem, arg1, 0))
9371 {
9372 t1 = build_int_cst_type (type, -1);
9373 return omit_one_operand (type, t1, arg1);
9374 }
9375 }
9376
9377 /* X + ~X is -1. */
9378 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9379 && !TYPE_OVERFLOW_TRAPS (type))
9380 {
9381 tree tem = TREE_OPERAND (arg1, 0);
9382
9383 STRIP_NOPS (tem);
9384 if (operand_equal_p (arg0, tem, 0))
9385 {
9386 t1 = build_int_cst_type (type, -1);
9387 return omit_one_operand (type, t1, arg0);
9388 }
9389 }
9390
9391 /* X + (X / CST) * -CST is X % CST. */
9392 if (TREE_CODE (arg1) == MULT_EXPR
9393 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9394 && operand_equal_p (arg0,
9395 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9396 {
9397 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9398 tree cst1 = TREE_OPERAND (arg1, 1);
9399 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9400 if (sum && integer_zerop (sum))
9401 return fold_convert (type,
9402 fold_build2 (TRUNC_MOD_EXPR,
9403 TREE_TYPE (arg0), arg0, cst0));
9404 }
9405 }
9406
9407 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9408 same or one. Make sure type is not saturating.
9409 fold_plusminus_mult_expr will re-associate. */
9410 if ((TREE_CODE (arg0) == MULT_EXPR
9411 || TREE_CODE (arg1) == MULT_EXPR)
9412 && !TYPE_SATURATING (type)
9413 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9414 {
9415 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9416 if (tem)
9417 return tem;
9418 }
9419
9420 if (! FLOAT_TYPE_P (type))
9421 {
9422 if (integer_zerop (arg1))
9423 return non_lvalue (fold_convert (type, arg0));
9424
9425 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9426 with a constant, and the two constants have no bits in common,
9427 we should treat this as a BIT_IOR_EXPR since this may produce more
9428 simplifications. */
9429 if (TREE_CODE (arg0) == BIT_AND_EXPR
9430 && TREE_CODE (arg1) == BIT_AND_EXPR
9431 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9432 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9433 && integer_zerop (const_binop (BIT_AND_EXPR,
9434 TREE_OPERAND (arg0, 1),
9435 TREE_OPERAND (arg1, 1), 0)))
9436 {
9437 code = BIT_IOR_EXPR;
9438 goto bit_ior;
9439 }
9440
9441 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9442 (plus (plus (mult) (mult)) (foo)) so that we can
9443 take advantage of the factoring cases below. */
9444 if (((TREE_CODE (arg0) == PLUS_EXPR
9445 || TREE_CODE (arg0) == MINUS_EXPR)
9446 && TREE_CODE (arg1) == MULT_EXPR)
9447 || ((TREE_CODE (arg1) == PLUS_EXPR
9448 || TREE_CODE (arg1) == MINUS_EXPR)
9449 && TREE_CODE (arg0) == MULT_EXPR))
9450 {
9451 tree parg0, parg1, parg, marg;
9452 enum tree_code pcode;
9453
9454 if (TREE_CODE (arg1) == MULT_EXPR)
9455 parg = arg0, marg = arg1;
9456 else
9457 parg = arg1, marg = arg0;
9458 pcode = TREE_CODE (parg);
9459 parg0 = TREE_OPERAND (parg, 0);
9460 parg1 = TREE_OPERAND (parg, 1);
9461 STRIP_NOPS (parg0);
9462 STRIP_NOPS (parg1);
9463
9464 if (TREE_CODE (parg0) == MULT_EXPR
9465 && TREE_CODE (parg1) != MULT_EXPR)
9466 return fold_build2 (pcode, type,
9467 fold_build2 (PLUS_EXPR, type,
9468 fold_convert (type, parg0),
9469 fold_convert (type, marg)),
9470 fold_convert (type, parg1));
9471 if (TREE_CODE (parg0) != MULT_EXPR
9472 && TREE_CODE (parg1) == MULT_EXPR)
9473 return fold_build2 (PLUS_EXPR, type,
9474 fold_convert (type, parg0),
9475 fold_build2 (pcode, type,
9476 fold_convert (type, marg),
9477 fold_convert (type,
9478 parg1)));
9479 }
9480 }
9481 else
9482 {
9483 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9484 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9485 return non_lvalue (fold_convert (type, arg0));
9486
9487 /* Likewise if the operands are reversed. */
9488 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9489 return non_lvalue (fold_convert (type, arg1));
9490
9491 /* Convert X + -C into X - C. */
9492 if (TREE_CODE (arg1) == REAL_CST
9493 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9494 {
9495 tem = fold_negate_const (arg1, type);
9496 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9497 return fold_build2 (MINUS_EXPR, type,
9498 fold_convert (type, arg0),
9499 fold_convert (type, tem));
9500 }
9501
9502 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9503 to __complex__ ( x, y ). This is not the same for SNaNs or
9504 if signed zeros are involved. */
9505 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9506 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9507 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9508 {
9509 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9510 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9511 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9512 bool arg0rz = false, arg0iz = false;
9513 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9514 || (arg0i && (arg0iz = real_zerop (arg0i))))
9515 {
9516 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9517 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9518 if (arg0rz && arg1i && real_zerop (arg1i))
9519 {
9520 tree rp = arg1r ? arg1r
9521 : build1 (REALPART_EXPR, rtype, arg1);
9522 tree ip = arg0i ? arg0i
9523 : build1 (IMAGPART_EXPR, rtype, arg0);
9524 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9525 }
9526 else if (arg0iz && arg1r && real_zerop (arg1r))
9527 {
9528 tree rp = arg0r ? arg0r
9529 : build1 (REALPART_EXPR, rtype, arg0);
9530 tree ip = arg1i ? arg1i
9531 : build1 (IMAGPART_EXPR, rtype, arg1);
9532 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9533 }
9534 }
9535 }
9536
9537 if (flag_unsafe_math_optimizations
9538 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9539 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9540 && (tem = distribute_real_division (code, type, arg0, arg1)))
9541 return tem;
9542
9543 /* Convert x+x into x*2.0. */
9544 if (operand_equal_p (arg0, arg1, 0)
9545 && SCALAR_FLOAT_TYPE_P (type))
9546 return fold_build2 (MULT_EXPR, type, arg0,
9547 build_real (type, dconst2));
9548
9549 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9550 We associate floats only if the user has specified
9551 -fassociative-math. */
9552 if (flag_associative_math
9553 && TREE_CODE (arg1) == PLUS_EXPR
9554 && TREE_CODE (arg0) != MULT_EXPR)
9555 {
9556 tree tree10 = TREE_OPERAND (arg1, 0);
9557 tree tree11 = TREE_OPERAND (arg1, 1);
9558 if (TREE_CODE (tree11) == MULT_EXPR
9559 && TREE_CODE (tree10) == MULT_EXPR)
9560 {
9561 tree tree0;
9562 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9563 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9564 }
9565 }
9566 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9567 We associate floats only if the user has specified
9568 -fassociative-math. */
9569 if (flag_associative_math
9570 && TREE_CODE (arg0) == PLUS_EXPR
9571 && TREE_CODE (arg1) != MULT_EXPR)
9572 {
9573 tree tree00 = TREE_OPERAND (arg0, 0);
9574 tree tree01 = TREE_OPERAND (arg0, 1);
9575 if (TREE_CODE (tree01) == MULT_EXPR
9576 && TREE_CODE (tree00) == MULT_EXPR)
9577 {
9578 tree tree0;
9579 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9580 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9581 }
9582 }
9583 }
9584
9585 bit_rotate:
9586 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9587 is a rotate of A by C1 bits. */
9588 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9589 is a rotate of A by B bits. */
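/* Concretely, for a 32-bit unsigned A both (A << 8) + (A >> 24)
   and (A << B) + (A >> (32 - B)) match below and become
   LROTATE_EXPR <A, 8> and LROTATE_EXPR <A, B> respectively. */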
9590 {
9591 enum tree_code code0, code1;
9592 tree rtype;
9593 code0 = TREE_CODE (arg0);
9594 code1 = TREE_CODE (arg1);
9595 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9596 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9597 && operand_equal_p (TREE_OPERAND (arg0, 0),
9598 TREE_OPERAND (arg1, 0), 0)
9599 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9600 TYPE_UNSIGNED (rtype))
9601 /* Only create rotates in complete modes. Other cases are not
9602 expanded properly. */
9603 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9604 {
9605 tree tree01, tree11;
9606 enum tree_code code01, code11;
9607
9608 tree01 = TREE_OPERAND (arg0, 1);
9609 tree11 = TREE_OPERAND (arg1, 1);
9610 STRIP_NOPS (tree01);
9611 STRIP_NOPS (tree11);
9612 code01 = TREE_CODE (tree01);
9613 code11 = TREE_CODE (tree11);
9614 if (code01 == INTEGER_CST
9615 && code11 == INTEGER_CST
9616 && TREE_INT_CST_HIGH (tree01) == 0
9617 && TREE_INT_CST_HIGH (tree11) == 0
9618 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9619 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9620 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9621 code0 == LSHIFT_EXPR ? tree01 : tree11);
9622 else if (code11 == MINUS_EXPR)
9623 {
9624 tree tree110, tree111;
9625 tree110 = TREE_OPERAND (tree11, 0);
9626 tree111 = TREE_OPERAND (tree11, 1);
9627 STRIP_NOPS (tree110);
9628 STRIP_NOPS (tree111);
9629 if (TREE_CODE (tree110) == INTEGER_CST
9630 && 0 == compare_tree_int (tree110,
9631 TYPE_PRECISION
9632 (TREE_TYPE (TREE_OPERAND
9633 (arg0, 0))))
9634 && operand_equal_p (tree01, tree111, 0))
9635 return build2 ((code0 == LSHIFT_EXPR
9636 ? LROTATE_EXPR
9637 : RROTATE_EXPR),
9638 type, TREE_OPERAND (arg0, 0), tree01);
9639 }
9640 else if (code01 == MINUS_EXPR)
9641 {
9642 tree tree010, tree011;
9643 tree010 = TREE_OPERAND (tree01, 0);
9644 tree011 = TREE_OPERAND (tree01, 1);
9645 STRIP_NOPS (tree010);
9646 STRIP_NOPS (tree011);
9647 if (TREE_CODE (tree010) == INTEGER_CST
9648 && 0 == compare_tree_int (tree010,
9649 TYPE_PRECISION
9650 (TREE_TYPE (TREE_OPERAND
9651 (arg0, 0))))
9652 && operand_equal_p (tree11, tree011, 0))
9653 return build2 ((code0 != LSHIFT_EXPR
9654 ? LROTATE_EXPR
9655 : RROTATE_EXPR),
9656 type, TREE_OPERAND (arg0, 0), tree11);
9657 }
9658 }
9659 }
9660
9661 associate:
9662 /* In most languages, we can't associate operations on floats through
9663 parentheses. Rather than remember where the parentheses were, we
9664 don't associate floats at all, unless the user has specified
9665 -fassociative-math.
9666 And, we need to make sure type is not saturating. */
9667
9668 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9669 && !TYPE_SATURATING (type))
9670 {
9671 tree var0, con0, lit0, minus_lit0;
9672 tree var1, con1, lit1, minus_lit1;
9673 bool ok = true;
9674
9675 /* Split both trees into variables, constants, and literals. Then
9676 associate each group together, the constants with literals,
9677 then the result with variables. This increases the chances of
9678 literals being recombined later and of generating relocatable
9679 expressions for the sum of a constant and literal. */
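/* For instance, with unsigned x and y, (x + 1) + (y + 2) splits
   into var0 = x, lit0 = 1, var1 = y and lit1 = 2; associating the
   groups yields (x + y) + 3. */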
9680 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9681 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9682 code == MINUS_EXPR);
9683
9684 /* With undefined overflow we can only associate constants
9685 with one variable. */
9686 if ((POINTER_TYPE_P (type)
9687 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9688 && var0 && var1)
9689 {
9690 tree tmp0 = var0;
9691 tree tmp1 = var1;
9692
9693 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9694 tmp0 = TREE_OPERAND (tmp0, 0);
9695 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9696 tmp1 = TREE_OPERAND (tmp1, 0);
9697 /* The only case we can still associate with two variables
9698 is if they are the same, modulo negation. */
9699 if (!operand_equal_p (tmp0, tmp1, 0))
9700 ok = false;
9701 }
9702
9703 /* Only do something if we found more than two objects. Otherwise,
9704 nothing has changed and we risk infinite recursion. */
9705 if (ok
9706 && (2 < ((var0 != 0) + (var1 != 0)
9707 + (con0 != 0) + (con1 != 0)
9708 + (lit0 != 0) + (lit1 != 0)
9709 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9710 {
9711 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9712 if (code == MINUS_EXPR)
9713 code = PLUS_EXPR;
9714
9715 var0 = associate_trees (var0, var1, code, type);
9716 con0 = associate_trees (con0, con1, code, type);
9717 lit0 = associate_trees (lit0, lit1, code, type);
9718 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9719
9720 /* Preserve the MINUS_EXPR if the negative part of the literal is
9721 greater than the positive part. Otherwise, the multiplicative
9722 folding code (i.e. extract_muldiv) may be fooled when
9723 unsigned constants are subtracted, like in the following
9724 example: ((X*2 + 4) - 8U)/2. */
9725 if (minus_lit0 && lit0)
9726 {
9727 if (TREE_CODE (lit0) == INTEGER_CST
9728 && TREE_CODE (minus_lit0) == INTEGER_CST
9729 && tree_int_cst_lt (lit0, minus_lit0))
9730 {
9731 minus_lit0 = associate_trees (minus_lit0, lit0,
9732 MINUS_EXPR, type);
9733 lit0 = 0;
9734 }
9735 else
9736 {
9737 lit0 = associate_trees (lit0, minus_lit0,
9738 MINUS_EXPR, type);
9739 minus_lit0 = 0;
9740 }
9741 }
9742 if (minus_lit0)
9743 {
9744 if (con0 == 0)
9745 return fold_convert (type,
9746 associate_trees (var0, minus_lit0,
9747 MINUS_EXPR, type));
9748 else
9749 {
9750 con0 = associate_trees (con0, minus_lit0,
9751 MINUS_EXPR, type);
9752 return fold_convert (type,
9753 associate_trees (var0, con0,
9754 PLUS_EXPR, type));
9755 }
9756 }
9757
9758 con0 = associate_trees (con0, lit0, code, type);
9759 return fold_convert (type, associate_trees (var0, con0,
9760 code, type));
9761 }
9762 }
9763
9764 return NULL_TREE;
9765
9766 case MINUS_EXPR:
9767 /* Pointer simplifications for subtraction, simple reassociations. */
9768 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9769 {
9770 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9771 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9772 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9773 {
9774 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9775 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9776 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9777 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9778 return fold_build2 (PLUS_EXPR, type,
9779 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9780 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9781 }
9782 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9783 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9784 {
9785 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9786 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9787 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9788 if (tmp)
9789 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9790 }
9791 }
9792 /* A - (-B) -> A + B */
9793 if (TREE_CODE (arg1) == NEGATE_EXPR)
9794 return fold_build2 (PLUS_EXPR, type, op0,
9795 fold_convert (type, TREE_OPERAND (arg1, 0)));
9796 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9797 if (TREE_CODE (arg0) == NEGATE_EXPR
9798 && (FLOAT_TYPE_P (type)
9799 || INTEGRAL_TYPE_P (type))
9800 && negate_expr_p (arg1)
9801 && reorder_operands_p (arg0, arg1))
9802 return fold_build2 (MINUS_EXPR, type,
9803 fold_convert (type, negate_expr (arg1)),
9804 fold_convert (type, TREE_OPERAND (arg0, 0)));
9805 /* Convert -A - 1 to ~A. */
9806 if (INTEGRAL_TYPE_P (type)
9807 && TREE_CODE (arg0) == NEGATE_EXPR
9808 && integer_onep (arg1)
9809 && !TYPE_OVERFLOW_TRAPS (type))
9810 return fold_build1 (BIT_NOT_EXPR, type,
9811 fold_convert (type, TREE_OPERAND (arg0, 0)));
9812
9813 /* Convert -1 - A to ~A. */
9814 if (INTEGRAL_TYPE_P (type)
9815 && integer_all_onesp (arg0))
9816 return fold_build1 (BIT_NOT_EXPR, type, op1);
9817
9818
9819 /* X - (X / CST) * CST is X % CST. */
9820 if (INTEGRAL_TYPE_P (type)
9821 && TREE_CODE (arg1) == MULT_EXPR
9822 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9823 && operand_equal_p (arg0,
9824 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9825 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9826 TREE_OPERAND (arg1, 1), 0))
9827 return fold_convert (type,
9828 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9829 arg0, TREE_OPERAND (arg1, 1)));
9830
9831 if (! FLOAT_TYPE_P (type))
9832 {
9833 if (integer_zerop (arg0))
9834 return negate_expr (fold_convert (type, arg1));
9835 if (integer_zerop (arg1))
9836 return non_lvalue (fold_convert (type, arg0));
9837
9838 /* Fold A - (A & B) into ~B & A. */
9839 if (!TREE_SIDE_EFFECTS (arg0)
9840 && TREE_CODE (arg1) == BIT_AND_EXPR)
9841 {
9842 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9843 {
9844 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9845 return fold_build2 (BIT_AND_EXPR, type,
9846 fold_build1 (BIT_NOT_EXPR, type, arg10),
9847 fold_convert (type, arg0));
9848 }
9849 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9850 {
9851 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9852 return fold_build2 (BIT_AND_EXPR, type,
9853 fold_build1 (BIT_NOT_EXPR, type, arg11),
9854 fold_convert (type, arg0));
9855 }
9856 }
9857
9858 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9859 any power of 2 minus 1. */
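/* E.g. with B == 15, (A & ~15) - (A & 15) becomes (A ^ 15) - 15;
   for A == 23 both forms yield 16 - 7 == 24 - 15 == 9. */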
9860 if (TREE_CODE (arg0) == BIT_AND_EXPR
9861 && TREE_CODE (arg1) == BIT_AND_EXPR
9862 && operand_equal_p (TREE_OPERAND (arg0, 0),
9863 TREE_OPERAND (arg1, 0), 0))
9864 {
9865 tree mask0 = TREE_OPERAND (arg0, 1);
9866 tree mask1 = TREE_OPERAND (arg1, 1);
9867 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9868
9869 if (operand_equal_p (tem, mask1, 0))
9870 {
9871 tem = fold_build2 (BIT_XOR_EXPR, type,
9872 TREE_OPERAND (arg0, 0), mask1);
9873 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9874 }
9875 }
9876 }
9877
9878 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9879 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9880 return non_lvalue (fold_convert (type, arg0));
9881
9882 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9883 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9884 (-ARG1 + ARG0) reduces to -ARG1. */
9885 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9886 return negate_expr (fold_convert (type, arg1));
9887
9888 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9889 __complex__ ( x, -y ). This is not the same for SNaNs or if
9890 signed zeros are involved. */
9891 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9892 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9893 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9894 {
9895 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9896 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9897 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9898 bool arg0rz = false, arg0iz = false;
9899 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9900 || (arg0i && (arg0iz = real_zerop (arg0i))))
9901 {
9902 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9903 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9904 if (arg0rz && arg1i && real_zerop (arg1i))
9905 {
9906 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9907 arg1r ? arg1r
9908 : build1 (REALPART_EXPR, rtype, arg1));
9909 tree ip = arg0i ? arg0i
9910 : build1 (IMAGPART_EXPR, rtype, arg0);
9911 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9912 }
9913 else if (arg0iz && arg1r && real_zerop (arg1r))
9914 {
9915 tree rp = arg0r ? arg0r
9916 : build1 (REALPART_EXPR, rtype, arg0);
9917 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9918 arg1i ? arg1i
9919 : build1 (IMAGPART_EXPR, rtype, arg1));
9920 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9921 }
9922 }
9923 }
9924
9925 /* Fold &x - &x. This can happen from &x.foo - &x.
9926 This is unsafe for certain floats even in non-IEEE formats.
9927 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9928 Also note that operand_equal_p is always false if an operand
9929 is volatile. */
9930
9931 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
9932 && operand_equal_p (arg0, arg1, 0))
9933 return fold_convert (type, integer_zero_node);
9934
9935 /* A - B -> A + (-B) if B is easily negatable. */
9936 if (negate_expr_p (arg1)
9937 && ((FLOAT_TYPE_P (type)
9938 /* Avoid this transformation if B is a positive REAL_CST. */
9939 && (TREE_CODE (arg1) != REAL_CST
9940 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9941 || INTEGRAL_TYPE_P (type)))
9942 return fold_build2 (PLUS_EXPR, type,
9943 fold_convert (type, arg0),
9944 fold_convert (type, negate_expr (arg1)));
9945
9946 /* Try folding difference of addresses. */
9947 {
9948 HOST_WIDE_INT diff;
9949
9950 if ((TREE_CODE (arg0) == ADDR_EXPR
9951 || TREE_CODE (arg1) == ADDR_EXPR)
9952 && ptr_difference_const (arg0, arg1, &diff))
9953 return build_int_cst_type (type, diff);
9954 }
9955
9956 /* Fold &a[i] - &a[j] to i-j. */
9957 if (TREE_CODE (arg0) == ADDR_EXPR
9958 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9959 && TREE_CODE (arg1) == ADDR_EXPR
9960 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9961 {
9962 tree aref0 = TREE_OPERAND (arg0, 0);
9963 tree aref1 = TREE_OPERAND (arg1, 0);
9964 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9965 TREE_OPERAND (aref1, 0), 0))
9966 {
9967 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9968 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9969 tree esz = array_ref_element_size (aref0);
9970 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9971 return fold_build2 (MULT_EXPR, type, diff,
9972 fold_convert (type, esz));
9973
9974 }
9975 }
9976
9977 if (flag_unsafe_math_optimizations
9978 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9979 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9980 && (tem = distribute_real_division (code, type, arg0, arg1)))
9981 return tem;
9982
9983 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9984 same or one. Make sure type is not saturating.
9985 fold_plusminus_mult_expr will re-associate. */
9986 if ((TREE_CODE (arg0) == MULT_EXPR
9987 || TREE_CODE (arg1) == MULT_EXPR)
9988 && !TYPE_SATURATING (type)
9989 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9990 {
9991 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9992 if (tem)
9993 return tem;
9994 }
9995
9996 goto associate;
9997
9998 case MULT_EXPR:
9999 /* (-A) * (-B) -> A * B */
10000 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10001 return fold_build2 (MULT_EXPR, type,
10002 fold_convert (type, TREE_OPERAND (arg0, 0)),
10003 fold_convert (type, negate_expr (arg1)));
10004 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10005 return fold_build2 (MULT_EXPR, type,
10006 fold_convert (type, negate_expr (arg0)),
10007 fold_convert (type, TREE_OPERAND (arg1, 0)));
10008
10009 if (! FLOAT_TYPE_P (type))
10010 {
10011 if (integer_zerop (arg1))
10012 return omit_one_operand (type, arg1, arg0);
10013 if (integer_onep (arg1))
10014 return non_lvalue (fold_convert (type, arg0));
10015 /* Transform x * -1 into -x. Make sure to do the negation
10016 on the original operand with conversions not stripped
10017 because we can only strip non-sign-changing conversions. */
10018 if (integer_all_onesp (arg1))
10019 return fold_convert (type, negate_expr (op0));
10020 /* Transform x * -C into -x * C if x is easily negatable. */
10021 if (TREE_CODE (arg1) == INTEGER_CST
10022 && tree_int_cst_sgn (arg1) == -1
10023 && negate_expr_p (arg0)
10024 && (tem = negate_expr (arg1)) != arg1
10025 && !TREE_OVERFLOW (tem))
10026 return fold_build2 (MULT_EXPR, type,
10027 fold_convert (type, negate_expr (arg0)), tem);
10028
10029 /* (a * (1 << b)) is (a << b) */
10030 if (TREE_CODE (arg1) == LSHIFT_EXPR
10031 && integer_onep (TREE_OPERAND (arg1, 0)))
10032 return fold_build2 (LSHIFT_EXPR, type, op0,
10033 TREE_OPERAND (arg1, 1));
10034 if (TREE_CODE (arg0) == LSHIFT_EXPR
10035 && integer_onep (TREE_OPERAND (arg0, 0)))
10036 return fold_build2 (LSHIFT_EXPR, type, op1,
10037 TREE_OPERAND (arg0, 1));
10038
10039 strict_overflow_p = false;
10040 if (TREE_CODE (arg1) == INTEGER_CST
10041 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10042 &strict_overflow_p)))
10043 {
10044 if (strict_overflow_p)
10045 fold_overflow_warning (("assuming signed overflow does not "
10046 "occur when simplifying "
10047 "multiplication"),
10048 WARN_STRICT_OVERFLOW_MISC);
10049 return fold_convert (type, tem);
10050 }
10051
10052 /* Optimize z * conj(z) for integer complex numbers. */
10053 if (TREE_CODE (arg0) == CONJ_EXPR
10054 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10055 return fold_mult_zconjz (type, arg1);
10056 if (TREE_CODE (arg1) == CONJ_EXPR
10057 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10058 return fold_mult_zconjz (type, arg0);
10059 }
10060 else
10061 {
10062 /* Maybe fold x * 0 to 0. The expressions aren't the same
10063 when x is NaN, since x * 0 is also NaN. Nor are they the
10064 same in modes with signed zeros, since multiplying a
10065 negative value by 0 gives -0, not +0. */
10066 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10067 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10068 && real_zerop (arg1))
10069 return omit_one_operand (type, arg1, arg0);
10070 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10071 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10072 && real_onep (arg1))
10073 return non_lvalue (fold_convert (type, arg0));
10074
10075 /* Transform x * -1.0 into -x. */
10076 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10077 && real_minus_onep (arg1))
10078 return fold_convert (type, negate_expr (arg0));
10079
10080 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10081 the result for floating point types due to rounding, so it is applied
10082 only if -fassociative-math was specified. */
10083 if (flag_associative_math
10084 && TREE_CODE (arg0) == RDIV_EXPR
10085 && TREE_CODE (arg1) == REAL_CST
10086 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10087 {
10088 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10089 arg1, 0);
10090 if (tem)
10091 return fold_build2 (RDIV_EXPR, type, tem,
10092 TREE_OPERAND (arg0, 1));
10093 }
10094
10095 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10096 if (operand_equal_p (arg0, arg1, 0))
10097 {
10098 tree tem = fold_strip_sign_ops (arg0);
10099 if (tem != NULL_TREE)
10100 {
10101 tem = fold_convert (type, tem);
10102 return fold_build2 (MULT_EXPR, type, tem, tem);
10103 }
10104 }
10105
10106 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10107 This is not the same for NaNs or if signed zeros are
10108 involved. */
10109 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10110 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10111 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10112 && TREE_CODE (arg1) == COMPLEX_CST
10113 && real_zerop (TREE_REALPART (arg1)))
10114 {
10115 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10116 if (real_onep (TREE_IMAGPART (arg1)))
10117 return fold_build2 (COMPLEX_EXPR, type,
10118 negate_expr (fold_build1 (IMAGPART_EXPR,
10119 rtype, arg0)),
10120 fold_build1 (REALPART_EXPR, rtype, arg0));
10121 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10122 return fold_build2 (COMPLEX_EXPR, type,
10123 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10124 negate_expr (fold_build1 (REALPART_EXPR,
10125 rtype, arg0)));
10126 }
10127
10128 /* Optimize z * conj(z) for floating point complex numbers.
10129 Guarded by flag_unsafe_math_optimizations as non-finite
10130 imaginary components don't produce scalar results. */
10131 if (flag_unsafe_math_optimizations
10132 && TREE_CODE (arg0) == CONJ_EXPR
10133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10134 return fold_mult_zconjz (type, arg1);
10135 if (flag_unsafe_math_optimizations
10136 && TREE_CODE (arg1) == CONJ_EXPR
10137 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10138 return fold_mult_zconjz (type, arg0);
10139
10140 if (flag_unsafe_math_optimizations)
10141 {
10142 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10143 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10144
10145 /* Optimizations of root(...)*root(...). */
10146 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10147 {
10148 tree rootfn, arg;
10149 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10150 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10151
10152 /* Optimize sqrt(x)*sqrt(x) as x. */
10153 if (BUILTIN_SQRT_P (fcode0)
10154 && operand_equal_p (arg00, arg10, 0)
10155 && ! HONOR_SNANS (TYPE_MODE (type)))
10156 return arg00;
10157
10158 /* Optimize root(x)*root(y) as root(x*y). */
10159 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10160 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10161 return build_call_expr (rootfn, 1, arg);
10162 }
10163
10164 /* Optimize expN(x)*expN(y) as expN(x+y). */
10165 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10166 {
10167 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10168 tree arg = fold_build2 (PLUS_EXPR, type,
10169 CALL_EXPR_ARG (arg0, 0),
10170 CALL_EXPR_ARG (arg1, 0));
10171 return build_call_expr (expfn, 1, arg);
10172 }
10173
10174 /* Optimizations of pow(...)*pow(...). */
10175 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10176 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10177 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10178 {
10179 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10180 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10181 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10182 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10183
10184 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10185 if (operand_equal_p (arg01, arg11, 0))
10186 {
10187 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10188 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10189 return build_call_expr (powfn, 2, arg, arg01);
10190 }
10191
10192 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10193 if (operand_equal_p (arg00, arg10, 0))
10194 {
10195 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10196 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10197 return build_call_expr (powfn, 2, arg00, arg);
10198 }
10199 }
10200
10201 /* Optimize tan(x)*cos(x) as sin(x). */
10202 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10203 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10204 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10205 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10206 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10207 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10208 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10209 CALL_EXPR_ARG (arg1, 0), 0))
10210 {
10211 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10212
10213 if (sinfn != NULL_TREE)
10214 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10215 }
10216
10217 /* Optimize x*pow(x,c) as pow(x,c+1). */
10218 if (fcode1 == BUILT_IN_POW
10219 || fcode1 == BUILT_IN_POWF
10220 || fcode1 == BUILT_IN_POWL)
10221 {
10222 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10223 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10224 if (TREE_CODE (arg11) == REAL_CST
10225 && !TREE_OVERFLOW (arg11)
10226 && operand_equal_p (arg0, arg10, 0))
10227 {
10228 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10229 REAL_VALUE_TYPE c;
10230 tree arg;
10231
10232 c = TREE_REAL_CST (arg11);
10233 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10234 arg = build_real (type, c);
10235 return build_call_expr (powfn, 2, arg0, arg);
10236 }
10237 }
10238
10239 /* Optimize pow(x,c)*x as pow(x,c+1). */
10240 if (fcode0 == BUILT_IN_POW
10241 || fcode0 == BUILT_IN_POWF
10242 || fcode0 == BUILT_IN_POWL)
10243 {
10244 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10245 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10246 if (TREE_CODE (arg01) == REAL_CST
10247 && !TREE_OVERFLOW (arg01)
10248 && operand_equal_p (arg1, arg00, 0))
10249 {
10250 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10251 REAL_VALUE_TYPE c;
10252 tree arg;
10253
10254 c = TREE_REAL_CST (arg01);
10255 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10256 arg = build_real (type, c);
10257 return build_call_expr (powfn, 2, arg1, arg);
10258 }
10259 }
10260
10261 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10262 if (! optimize_size
10263 && operand_equal_p (arg0, arg1, 0))
10264 {
10265 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10266
10267 if (powfn)
10268 {
10269 tree arg = build_real (type, dconst2);
10270 return build_call_expr (powfn, 2, arg0, arg);
10271 }
10272 }
10273 }
10274 }
10275 goto associate;
10276
10277 case BIT_IOR_EXPR:
10278 bit_ior:
10279 if (integer_all_onesp (arg1))
10280 return omit_one_operand (type, arg1, arg0);
10281 if (integer_zerop (arg1))
10282 return non_lvalue (fold_convert (type, arg0));
10283 if (operand_equal_p (arg0, arg1, 0))
10284 return non_lvalue (fold_convert (type, arg0));
10285
10286 /* ~X | X is -1. */
10287 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10288 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10289 {
10290 t1 = fold_convert (type, integer_zero_node);
10291 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10292 return omit_one_operand (type, t1, arg1);
10293 }
10294
10295 /* X | ~X is -1. */
10296 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10297 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10298 {
10299 t1 = fold_convert (type, integer_zero_node);
10300 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10301 return omit_one_operand (type, t1, arg0);
10302 }
10303
10304 /* Canonicalize (X & C1) | C2. */
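/* Sketch of the cases handled below: if (C1 & C2) == C1, as in
   (X & 0x03) | 0x0f, the result is simply C2; if (C1 | C2) == ~0,
   as in (X & 0xf0) | 0x0f for an 8-bit type, it becomes X | C2;
   otherwise C1 is trimmed to C1 & ~C2, so (X & 0x33) | 0x0f
   becomes (X & 0x30) | 0x0f. */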
10305 if (TREE_CODE (arg0) == BIT_AND_EXPR
10306 && TREE_CODE (arg1) == INTEGER_CST
10307 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10308 {
10309 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10310 int width = TYPE_PRECISION (type), w;
10311 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10312 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10313 hi2 = TREE_INT_CST_HIGH (arg1);
10314 lo2 = TREE_INT_CST_LOW (arg1);
10315
10316 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10317 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10318 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10319
10320 if (width > HOST_BITS_PER_WIDE_INT)
10321 {
10322 mhi = (unsigned HOST_WIDE_INT) -1
10323 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10324 mlo = -1;
10325 }
10326 else
10327 {
10328 mhi = 0;
10329 mlo = (unsigned HOST_WIDE_INT) -1
10330 >> (HOST_BITS_PER_WIDE_INT - width);
10331 }
10332
10333 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10334 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10335 return fold_build2 (BIT_IOR_EXPR, type,
10336 TREE_OPERAND (arg0, 0), arg1);
10337
10338 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10339 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10340 mode which allows further optimizations. */
10341 hi1 &= mhi;
10342 lo1 &= mlo;
10343 hi2 &= mhi;
10344 lo2 &= mlo;
10345 hi3 = hi1 & ~hi2;
10346 lo3 = lo1 & ~lo2;
10347 for (w = BITS_PER_UNIT;
10348 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10349 w <<= 1)
10350 {
10351 unsigned HOST_WIDE_INT mask
10352 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10353 if (((lo1 | lo2) & mask) == mask
10354 && (lo1 & ~mask) == 0 && hi1 == 0)
10355 {
10356 hi3 = 0;
10357 lo3 = mask;
10358 break;
10359 }
10360 }
10361 if (hi3 != hi1 || lo3 != lo1)
10362 return fold_build2 (BIT_IOR_EXPR, type,
10363 fold_build2 (BIT_AND_EXPR, type,
10364 TREE_OPERAND (arg0, 0),
10365 build_int_cst_wide (type,
10366 lo3, hi3)),
10367 arg1);
10368 }
10369
10370 /* (X & Y) | Y is (X, Y). */
10371 if (TREE_CODE (arg0) == BIT_AND_EXPR
10372 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10373 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10374 /* (X & Y) | X is (Y, X). */
10375 if (TREE_CODE (arg0) == BIT_AND_EXPR
10376 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10377 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10378 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10379 /* X | (X & Y) is (Y, X). */
10380 if (TREE_CODE (arg1) == BIT_AND_EXPR
10381 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10382 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10383 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10384 /* X | (Y & X) is (Y, X). */
10385 if (TREE_CODE (arg1) == BIT_AND_EXPR
10386 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10387 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10388 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10389
10390 t1 = distribute_bit_expr (code, type, arg0, arg1);
10391 if (t1 != NULL_TREE)
10392 return t1;
10393
10394 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10395
10396 This results in more efficient code for machines without a NAND
10397 instruction. Combine will canonicalize to the first form
10398 which will allow use of NAND instructions provided by the
10399 backend if they exist. */
10400 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10401 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10402 {
10403 return fold_build1 (BIT_NOT_EXPR, type,
10404 build2 (BIT_AND_EXPR, type,
10405 TREE_OPERAND (arg0, 0),
10406 TREE_OPERAND (arg1, 0)));
10407 }
10408
10409 /* See if this can be simplified into a rotate first. If that
10410 is unsuccessful continue in the association code. */
10411 goto bit_rotate;
10412
10413 case BIT_XOR_EXPR:
10414 if (integer_zerop (arg1))
10415 return non_lvalue (fold_convert (type, arg0));
10416 if (integer_all_onesp (arg1))
10417 return fold_build1 (BIT_NOT_EXPR, type, op0);
10418 if (operand_equal_p (arg0, arg1, 0))
10419 return omit_one_operand (type, integer_zero_node, arg0);
10420
10421 /* ~X ^ X is -1. */
10422 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10423 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10424 {
10425 t1 = fold_convert (type, integer_zero_node);
10426 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10427 return omit_one_operand (type, t1, arg1);
10428 }
10429
10430 /* X ^ ~X is -1. */
10431 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10432 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10433 {
10434 t1 = fold_convert (type, integer_zero_node);
10435 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10436 return omit_one_operand (type, t1, arg0);
10437 }
10438
10439 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10440 with a constant, and the two constants have no bits in common,
10441 we should treat this as a BIT_IOR_EXPR since this may produce more
10442 simplifications. */
10443 if (TREE_CODE (arg0) == BIT_AND_EXPR
10444 && TREE_CODE (arg1) == BIT_AND_EXPR
10445 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10446 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10447 && integer_zerop (const_binop (BIT_AND_EXPR,
10448 TREE_OPERAND (arg0, 1),
10449 TREE_OPERAND (arg1, 1), 0)))
10450 {
10451 code = BIT_IOR_EXPR;
10452 goto bit_ior;
10453 }
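/* For example, (X & 0x0f) ^ (Y & 0xf0) has disjoint constant masks
   (0x0f & 0xf0 == 0), so it is handled as (X & 0x0f) | (Y & 0xf0)
   by the BIT_IOR_EXPR code above.  */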
10454
10455 /* (X | Y) ^ X -> Y & ~X.  */
10456 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10457 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10458 {
10459 tree t2 = TREE_OPERAND (arg0, 1);
10460 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10461 arg1);
10462 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10463 fold_convert (type, t1));
10464 return t1;
10465 }
10466
10467 /* (Y | X) ^ X -> Y & ~X.  */
10468 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10469 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10470 {
10471 tree t2 = TREE_OPERAND (arg0, 0);
10472 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10473 arg1);
10474 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10475 fold_convert (type, t1));
10476 return t1;
10477 }
10478
10479 /* X ^ (X | Y) -> Y & ~X.  */
10480 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10481 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10482 {
10483 tree t2 = TREE_OPERAND (arg1, 1);
10484 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10485 arg0);
10486 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10487 fold_convert (type, t1));
10488 return t1;
10489 }
10490
10491 /* X ^ (Y | X) -> Y & ~X.  */
10492 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10493 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10494 {
10495 tree t2 = TREE_OPERAND (arg1, 0);
10496 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10497 arg0);
10498 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10499 fold_convert (type, t1));
10500 return t1;
10501 }
10502
10503 /* Convert ~X ^ ~Y to X ^ Y. */
10504 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10505 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10506 return fold_build2 (code, type,
10507 fold_convert (type, TREE_OPERAND (arg0, 0)),
10508 fold_convert (type, TREE_OPERAND (arg1, 0)));
10509
10510 /* Convert ~X ^ C to X ^ ~C. */
10511 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10512 && TREE_CODE (arg1) == INTEGER_CST)
10513 return fold_build2 (code, type,
10514 fold_convert (type, TREE_OPERAND (arg0, 0)),
10515 fold_build1 (BIT_NOT_EXPR, type, arg1));
10516
10517 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10518 if (TREE_CODE (arg0) == BIT_AND_EXPR
10519 && integer_onep (TREE_OPERAND (arg0, 1))
10520 && integer_onep (arg1))
10521 return fold_build2 (EQ_EXPR, type, arg0,
10522 build_int_cst (TREE_TYPE (arg0), 0));
10523
10524 /* Fold (X & Y) ^ Y as ~X & Y. */
10525 if (TREE_CODE (arg0) == BIT_AND_EXPR
10526 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10527 {
10528 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10529 return fold_build2 (BIT_AND_EXPR, type,
10530 fold_build1 (BIT_NOT_EXPR, type, tem),
10531 fold_convert (type, arg1));
10532 }
10533 /* Fold (X & Y) ^ X as ~Y & X. */
10534 if (TREE_CODE (arg0) == BIT_AND_EXPR
10535 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10536 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10537 {
10538 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10539 return fold_build2 (BIT_AND_EXPR, type,
10540 fold_build1 (BIT_NOT_EXPR, type, tem),
10541 fold_convert (type, arg1));
10542 }
10543 /* Fold X ^ (X & Y) as X & ~Y. */
10544 if (TREE_CODE (arg1) == BIT_AND_EXPR
10545 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10546 {
10547 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10548 return fold_build2 (BIT_AND_EXPR, type,
10549 fold_convert (type, arg0),
10550 fold_build1 (BIT_NOT_EXPR, type, tem));
10551 }
10552 /* Fold X ^ (Y & X) as ~Y & X. */
10553 if (TREE_CODE (arg1) == BIT_AND_EXPR
10554 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10555 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10556 {
10557 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10558 return fold_build2 (BIT_AND_EXPR, type,
10559 fold_build1 (BIT_NOT_EXPR, type, tem),
10560 fold_convert (type, arg0));
10561 }
10562
10563 /* See if this can be simplified into a rotate first. If that
10564 is unsuccessful continue in the association code. */
10565 goto bit_rotate;
10566
10567 case BIT_AND_EXPR:
10568 if (integer_all_onesp (arg1))
10569 return non_lvalue (fold_convert (type, arg0));
10570 if (integer_zerop (arg1))
10571 return omit_one_operand (type, arg1, arg0);
10572 if (operand_equal_p (arg0, arg1, 0))
10573 return non_lvalue (fold_convert (type, arg0));
10574
10575 /* ~X & X is always zero. */
10576 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10577 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10578 return omit_one_operand (type, integer_zero_node, arg1);
10579
10580 /* X & ~X is always zero. */
10581 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10582 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10583 return omit_one_operand (type, integer_zero_node, arg0);
10584
10585 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10586 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10587 && TREE_CODE (arg1) == INTEGER_CST
10588 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10589 {
10590 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10591 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10592 TREE_OPERAND (arg0, 0), tmp1);
10593 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10594 TREE_OPERAND (arg0, 1), tmp1);
10595 return fold_convert (type,
10596 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10597 tmp2, tmp3));
10598 }
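/* For example, (X | 0x0c) & 0x0a becomes (X & 0x0a) | (0x0c & 0x0a),
   i.e. (X & 0x0a) | 0x08.  */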
10599
10600 /* (X | Y) & Y is (X, Y). */
10601 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10602 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10603 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10604 /* (X | Y) & X is (Y, X). */
10605 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10606 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10607 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10608 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10609 /* X & (X | Y) is (Y, X). */
10610 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10612 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10613 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10614 /* X & (Y | X) is (Y, X). */
10615 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10616 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10617 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10618 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10619
10620 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10621 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10622 && integer_onep (TREE_OPERAND (arg0, 1))
10623 && integer_onep (arg1))
10624 {
10625 tem = TREE_OPERAND (arg0, 0);
10626 return fold_build2 (EQ_EXPR, type,
10627 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10628 build_int_cst (TREE_TYPE (tem), 1)),
10629 build_int_cst (TREE_TYPE (tem), 0));
10630 }
10631 /* Fold ~X & 1 as (X & 1) == 0. */
10632 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10633 && integer_onep (arg1))
10634 {
10635 tem = TREE_OPERAND (arg0, 0);
10636 return fold_build2 (EQ_EXPR, type,
10637 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10638 build_int_cst (TREE_TYPE (tem), 1)),
10639 build_int_cst (TREE_TYPE (tem), 0));
10640 }
10641
10642 /* Fold (X ^ Y) & Y as ~X & Y. */
10643 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10644 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10645 {
10646 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10647 return fold_build2 (BIT_AND_EXPR, type,
10648 fold_build1 (BIT_NOT_EXPR, type, tem),
10649 fold_convert (type, arg1));
10650 }
10651 /* Fold (X ^ Y) & X as ~Y & X. */
10652 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10653 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10654 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10655 {
10656 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10657 return fold_build2 (BIT_AND_EXPR, type,
10658 fold_build1 (BIT_NOT_EXPR, type, tem),
10659 fold_convert (type, arg1));
10660 }
10661 /* Fold X & (X ^ Y) as X & ~Y. */
10662 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10663 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10664 {
10665 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10666 return fold_build2 (BIT_AND_EXPR, type,
10667 fold_convert (type, arg0),
10668 fold_build1 (BIT_NOT_EXPR, type, tem));
10669 }
10670 /* Fold X & (Y ^ X) as ~Y & X. */
10671 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10672 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10673 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10674 {
10675 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10676 return fold_build2 (BIT_AND_EXPR, type,
10677 fold_build1 (BIT_NOT_EXPR, type, tem),
10678 fold_convert (type, arg0));
10679 }
10680
10681 t1 = distribute_bit_expr (code, type, arg0, arg1);
10682 if (t1 != NULL_TREE)
10683 return t1;
10684 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10685 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10686 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10687 {
10688 unsigned int prec
10689 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10690
10691 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10692 && (~TREE_INT_CST_LOW (arg1)
10693 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10694 return fold_convert (type, TREE_OPERAND (arg0, 0));
10695 }
10696
10697 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10698
10699 This results in more efficient code for machines without a NOR
10700 instruction. Combine will canonicalize to the first form
10701 which will allow use of NOR instructions provided by the
10702 backend if they exist. */
10703 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10704 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10705 {
10706 return fold_build1 (BIT_NOT_EXPR, type,
10707 build2 (BIT_IOR_EXPR, type,
10708 fold_convert (type,
10709 TREE_OPERAND (arg0, 0)),
10710 fold_convert (type,
10711 TREE_OPERAND (arg1, 0))));
10712 }
10713
10714 /* If arg0 is derived from the address of an object or function, we may
10715 be able to fold this expression using the object or function's
10716 alignment. */
10717 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10718 {
10719 unsigned HOST_WIDE_INT modulus, residue;
10720 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10721
10722 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10723
10724 /* This works because modulus is a power of 2. If this weren't the
10725 case, we'd have to replace it by its greatest power-of-2
10726 divisor: modulus & -modulus. */
10727 if (low < modulus)
10728 return build_int_cst (type, residue & low);
10729 }
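/* For example, if P is the address of an object known to be 8-byte
   aligned, MODULUS is 8 and RESIDUE is 0, so P & 7 folds to the
   constant 0.  This is only a sketch: whether the fold fires depends
   on what get_pointer_modulus_and_residue can prove about P.  */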
10730
10731 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10732 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10733 if the new mask might be further optimized. */
10734 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10735 || TREE_CODE (arg0) == RSHIFT_EXPR)
10736 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10737 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10738 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10739 < TYPE_PRECISION (TREE_TYPE (arg0))
10740 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10741 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10742 {
10743 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10744 unsigned HOST_WIDE_INT mask
10745 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10746 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10747 tree shift_type = TREE_TYPE (arg0);
10748
10749 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10750 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10751 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10752 && TYPE_PRECISION (TREE_TYPE (arg0))
10753 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10754 {
10755 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10756 tree arg00 = TREE_OPERAND (arg0, 0);
10757 /* See if more bits can be proven as zero because of
10758 zero extension. */
10759 if (TREE_CODE (arg00) == NOP_EXPR
10760 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10761 {
10762 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10763 if (TYPE_PRECISION (inner_type)
10764 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10765 && TYPE_PRECISION (inner_type) < prec)
10766 {
10767 prec = TYPE_PRECISION (inner_type);
10768 /* See if we can shorten the right shift. */
10769 if (shiftc < prec)
10770 shift_type = inner_type;
10771 }
10772 }
10773 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10774 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10775 zerobits <<= prec - shiftc;
10776 /* For an arithmetic shift, if the sign bit could be set, zerobits
10777 can actually contain sign bits, so no transformation is possible
10778 unless MASK masks them all away.  In that case the shift needs
10779 to be converted into a logical shift.  */
10780 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10781 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10782 {
10783 if ((mask & zerobits) == 0)
10784 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10785 else
10786 zerobits = 0;
10787 }
10788 }
10789
10790 /* ((X << 16) & 0xff00) is (X, 0). */
10791 if ((mask & zerobits) == mask)
10792 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10793
10794 newmask = mask | zerobits;
10795 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10796 {
10797 unsigned int prec;
10798
10799 /* Only do the transformation if NEWMASK is some integer
10800 mode's mask. */
10801 for (prec = BITS_PER_UNIT;
10802 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10803 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10804 break;
10805 if (prec < HOST_BITS_PER_WIDE_INT
10806 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10807 {
10808 if (shift_type != TREE_TYPE (arg0))
10809 {
10810 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10811 fold_convert (shift_type,
10812 TREE_OPERAND (arg0, 0)),
10813 TREE_OPERAND (arg0, 1));
10814 tem = fold_convert (type, tem);
10815 }
10816 else
10817 tem = op0;
10818 return fold_build2 (BIT_AND_EXPR, type, tem,
10819 build_int_cst_type (TREE_TYPE (op1),
10820 newmask));
10821 }
10822 }
10823 }
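/* For example, in a 16-bit unsigned type (X << 4) & 0xf7 has
   ZEROBITS == 0x0f, so NEWMASK == 0xff, an 8-bit mode's mask, and
   the expression is rewritten as (X << 4) & 0xff.  */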
10824
10825 goto associate;
10826
10827 case RDIV_EXPR:
10828 /* Don't touch a floating-point divide by zero unless the mode
10829 of the constant can represent infinity. */
10830 if (TREE_CODE (arg1) == REAL_CST
10831 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10832 && real_zerop (arg1))
10833 return NULL_TREE;
10834
10835 /* Optimize A / A to 1.0 if we don't care about
10836 NaNs or Infinities. Skip the transformation
10837 for non-real operands. */
10838 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10839 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10840 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10841 && operand_equal_p (arg0, arg1, 0))
10842 {
10843 tree r = build_real (TREE_TYPE (arg0), dconst1);
10844
10845 return omit_two_operands (type, r, arg0, arg1);
10846 }
10847
10848 /* The complex version of the above A / A optimization. */
10849 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10850 && operand_equal_p (arg0, arg1, 0))
10851 {
10852 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10853 if (! HONOR_NANS (TYPE_MODE (elem_type))
10854 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10855 {
10856 tree r = build_real (elem_type, dconst1);
10857 /* omit_two_operands will call fold_convert for us. */
10858 return omit_two_operands (type, r, arg0, arg1);
10859 }
10860 }
10861
10862 /* (-A) / (-B) -> A / B */
10863 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10864 return fold_build2 (RDIV_EXPR, type,
10865 TREE_OPERAND (arg0, 0),
10866 negate_expr (arg1));
10867 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10868 return fold_build2 (RDIV_EXPR, type,
10869 negate_expr (arg0),
10870 TREE_OPERAND (arg1, 0));
10871
10872 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10873 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10874 && real_onep (arg1))
10875 return non_lvalue (fold_convert (type, arg0));
10876
10877 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10878 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10879 && real_minus_onep (arg1))
10880 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10881
10882 /* If ARG1 is a constant, we can convert this to a multiply by the
10883 reciprocal. This does not have the same rounding properties,
10884 so only do this if -freciprocal-math. We can actually
10885 always safely do it if ARG1 is a power of two, but it's hard to
10886 tell if it is or not in a portable manner. */
10887 if (TREE_CODE (arg1) == REAL_CST)
10888 {
10889 if (flag_reciprocal_math
10890 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10891 arg1, 0)))
10892 return fold_build2 (MULT_EXPR, type, arg0, tem);
10893 /* Find the reciprocal if optimizing and the result is exact. */
10894 if (optimize)
10895 {
10896 REAL_VALUE_TYPE r;
10897 r = TREE_REAL_CST (arg1);
10898 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10899 {
10900 tem = build_real (type, r);
10901 return fold_build2 (MULT_EXPR, type,
10902 fold_convert (type, arg0), tem);
10903 }
10904 }
10905 }
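/* For example, x / 4.0 becomes x * 0.25 whenever optimizing, since
   the reciprocal of a power of two is exact; x / 3.0 becomes
   x * (1.0/3.0) only under -freciprocal-math, because 1.0/3.0 is
   not exactly representable.  */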
10906 /* Convert A/B/C to A/(B*C). */
10907 if (flag_reciprocal_math
10908 && TREE_CODE (arg0) == RDIV_EXPR)
10909 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10910 fold_build2 (MULT_EXPR, type,
10911 TREE_OPERAND (arg0, 1), arg1));
10912
10913 /* Convert A/(B/C) to (A/B)*C. */
10914 if (flag_reciprocal_math
10915 && TREE_CODE (arg1) == RDIV_EXPR)
10916 return fold_build2 (MULT_EXPR, type,
10917 fold_build2 (RDIV_EXPR, type, arg0,
10918 TREE_OPERAND (arg1, 0)),
10919 TREE_OPERAND (arg1, 1));
10920
10921 /* Convert C1/(X*C2) into (C1/C2)/X. */
10922 if (flag_reciprocal_math
10923 && TREE_CODE (arg1) == MULT_EXPR
10924 && TREE_CODE (arg0) == REAL_CST
10925 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10926 {
10927 tree tem = const_binop (RDIV_EXPR, arg0,
10928 TREE_OPERAND (arg1, 1), 0);
10929 if (tem)
10930 return fold_build2 (RDIV_EXPR, type, tem,
10931 TREE_OPERAND (arg1, 0));
10932 }
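/* For example, with -freciprocal-math, 6.0 / (x * 3.0) becomes
   (6.0 / 3.0) / x, i.e. 2.0 / x.  */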
10933
10934 if (flag_unsafe_math_optimizations)
10935 {
10936 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10937 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10938
10939 /* Optimize sin(x)/cos(x) as tan(x). */
10940 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10941 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10942 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10943 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10944 CALL_EXPR_ARG (arg1, 0), 0))
10945 {
10946 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10947
10948 if (tanfn != NULL_TREE)
10949 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10950 }
10951
10952 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10953 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10954 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10955 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10956 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10957 CALL_EXPR_ARG (arg1, 0), 0))
10958 {
10959 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10960
10961 if (tanfn != NULL_TREE)
10962 {
10963 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10964 return fold_build2 (RDIV_EXPR, type,
10965 build_real (type, dconst1), tmp);
10966 }
10967 }
10968
10969 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10970 NaNs or Infinities. */
10971 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10972 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10973 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10974 {
10975 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10976 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10977
10978 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10979 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10980 && operand_equal_p (arg00, arg01, 0))
10981 {
10982 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10983
10984 if (cosfn != NULL_TREE)
10985 return build_call_expr (cosfn, 1, arg00);
10986 }
10987 }
10988
10989 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10990 NaNs or Infinities. */
10991 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10992 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10993 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10994 {
10995 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10996 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10997
10998 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10999 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11000 && operand_equal_p (arg00, arg01, 0))
11001 {
11002 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11003
11004 if (cosfn != NULL_TREE)
11005 {
11006 tree tmp = build_call_expr (cosfn, 1, arg00);
11007 return fold_build2 (RDIV_EXPR, type,
11008 build_real (type, dconst1),
11009 tmp);
11010 }
11011 }
11012 }
11013
11014 /* Optimize pow(x,c)/x as pow(x,c-1). */
11015 if (fcode0 == BUILT_IN_POW
11016 || fcode0 == BUILT_IN_POWF
11017 || fcode0 == BUILT_IN_POWL)
11018 {
11019 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11020 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11021 if (TREE_CODE (arg01) == REAL_CST
11022 && !TREE_OVERFLOW (arg01)
11023 && operand_equal_p (arg1, arg00, 0))
11024 {
11025 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11026 REAL_VALUE_TYPE c;
11027 tree arg;
11028
11029 c = TREE_REAL_CST (arg01);
11030 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11031 arg = build_real (type, c);
11032 return build_call_expr (powfn, 2, arg1, arg);
11033 }
11034 }
11035
11036 /* Optimize a/root(b/c) into a*root(c/b). */
11037 if (BUILTIN_ROOT_P (fcode1))
11038 {
11039 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11040
11041 if (TREE_CODE (rootarg) == RDIV_EXPR)
11042 {
11043 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11044 tree b = TREE_OPERAND (rootarg, 0);
11045 tree c = TREE_OPERAND (rootarg, 1);
11046
11047 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11048
11049 tmp = build_call_expr (rootfn, 1, tmp);
11050 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11051 }
11052 }
11053
11054 /* Optimize x/expN(y) into x*expN(-y). */
11055 if (BUILTIN_EXPONENT_P (fcode1))
11056 {
11057 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11058 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11059 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11060 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11061 }
11062
11063 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11064 if (fcode1 == BUILT_IN_POW
11065 || fcode1 == BUILT_IN_POWF
11066 || fcode1 == BUILT_IN_POWL)
11067 {
11068 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11069 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11070 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11071 tree neg11 = fold_convert (type, negate_expr (arg11));
11072 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11073 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11074 }
11075 }
11076 return NULL_TREE;
11077
11078 case TRUNC_DIV_EXPR:
11079 case FLOOR_DIV_EXPR:
11080 /* Simplify A / (B << N) where A and B are positive and B is
11081 a power of 2, to A >> (N + log2(B)). */
11082 strict_overflow_p = false;
11083 if (TREE_CODE (arg1) == LSHIFT_EXPR
11084 && (TYPE_UNSIGNED (type)
11085 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11086 {
11087 tree sval = TREE_OPERAND (arg1, 0);
11088 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11089 {
11090 tree sh_cnt = TREE_OPERAND (arg1, 1);
11091 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11092
11093 if (strict_overflow_p)
11094 fold_overflow_warning (("assuming signed overflow does not "
11095 "occur when simplifying A / (B << N)"),
11096 WARN_STRICT_OVERFLOW_MISC);
11097
11098 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11099 sh_cnt, build_int_cst (NULL_TREE, pow2));
11100 return fold_build2 (RSHIFT_EXPR, type,
11101 fold_convert (type, arg0), sh_cnt);
11102 }
11103 }
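/* For example, for unsigned A, A / (2 << N) becomes A >> (N + 1),
   since log2 (2) == 1.  */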
11104
11105 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11106 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11107 if (INTEGRAL_TYPE_P (type)
11108 && TYPE_UNSIGNED (type)
11109 && code == FLOOR_DIV_EXPR)
11110 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11111
11112 /* Fall thru */
11113
11114 case ROUND_DIV_EXPR:
11115 case CEIL_DIV_EXPR:
11116 case EXACT_DIV_EXPR:
11117 if (integer_onep (arg1))
11118 return non_lvalue (fold_convert (type, arg0));
11119 if (integer_zerop (arg1))
11120 return NULL_TREE;
11121 /* X / -1 is -X. */
11122 if (!TYPE_UNSIGNED (type)
11123 && TREE_CODE (arg1) == INTEGER_CST
11124 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11125 && TREE_INT_CST_HIGH (arg1) == -1)
11126 return fold_convert (type, negate_expr (arg0));
11127
11128 /* Convert -A / -B to A / B when the type is signed and overflow is
11129 undefined. */
11130 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11131 && TREE_CODE (arg0) == NEGATE_EXPR
11132 && negate_expr_p (arg1))
11133 {
11134 if (INTEGRAL_TYPE_P (type))
11135 fold_overflow_warning (("assuming signed overflow does not occur "
11136 "when distributing negation across "
11137 "division"),
11138 WARN_STRICT_OVERFLOW_MISC);
11139 return fold_build2 (code, type,
11140 fold_convert (type, TREE_OPERAND (arg0, 0)),
11141 negate_expr (arg1));
11142 }
11143 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11144 && TREE_CODE (arg1) == NEGATE_EXPR
11145 && negate_expr_p (arg0))
11146 {
11147 if (INTEGRAL_TYPE_P (type))
11148 fold_overflow_warning (("assuming signed overflow does not occur "
11149 "when distributing negation across "
11150 "division"),
11151 WARN_STRICT_OVERFLOW_MISC);
11152 return fold_build2 (code, type, negate_expr (arg0),
11153 TREE_OPERAND (arg1, 0));
11154 }
11155
11156 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11157 operation, EXACT_DIV_EXPR.
11158
11159 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11160 At one time others generated faster code; it's not clear whether they
11161 still do after the last round of changes to the DIV code in expmed.c.  */
11162 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11163 && multiple_of_p (type, arg0, arg1))
11164 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11165
11166 strict_overflow_p = false;
11167 if (TREE_CODE (arg1) == INTEGER_CST
11168 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11169 &strict_overflow_p)))
11170 {
11171 if (strict_overflow_p)
11172 fold_overflow_warning (("assuming signed overflow does not occur "
11173 "when simplifying division"),
11174 WARN_STRICT_OVERFLOW_MISC);
11175 return fold_convert (type, tem);
11176 }
11177
11178 return NULL_TREE;
11179
11180 case CEIL_MOD_EXPR:
11181 case FLOOR_MOD_EXPR:
11182 case ROUND_MOD_EXPR:
11183 case TRUNC_MOD_EXPR:
11184 /* X % 1 is always zero, but be sure to preserve any side
11185 effects in X. */
11186 if (integer_onep (arg1))
11187 return omit_one_operand (type, integer_zero_node, arg0);
11188
11189 /* For X % 0, return X % 0 unchanged so that we get the
11190 proper warnings and errors.  */
11191 if (integer_zerop (arg1))
11192 return NULL_TREE;
11193
11194 /* 0 % X is always zero, but be sure to preserve any side
11195 effects in X. Place this after checking for X == 0. */
11196 if (integer_zerop (arg0))
11197 return omit_one_operand (type, integer_zero_node, arg1);
11198
11199 /* X % -1 is zero. */
11200 if (!TYPE_UNSIGNED (type)
11201 && TREE_CODE (arg1) == INTEGER_CST
11202 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11203 && TREE_INT_CST_HIGH (arg1) == -1)
11204 return omit_one_operand (type, integer_zero_node, arg0);
11205
11206 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11207 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11208 strict_overflow_p = false;
11209 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11210 && (TYPE_UNSIGNED (type)
11211 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11212 {
11213 tree c = arg1;
11214 /* Also optimize A % (C << N) where C is a power of 2,
11215 to A & ((C << N) - 1). */
11216 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11217 c = TREE_OPERAND (arg1, 0);
11218
11219 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11220 {
11221 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11222 build_int_cst (TREE_TYPE (arg1), 1));
11223 if (strict_overflow_p)
11224 fold_overflow_warning (("assuming signed overflow does not "
11225 "occur when simplifying "
11226 "X % (power of two)"),
11227 WARN_STRICT_OVERFLOW_MISC);
11228 return fold_build2 (BIT_AND_EXPR, type,
11229 fold_convert (type, arg0),
11230 fold_convert (type, mask));
11231 }
11232 }
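/* For example, for unsigned X, X % 16 becomes X & 15, and
   X % (4 << N) becomes X & ((4 << N) - 1).  */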
11233
11234 /* X % -C is the same as X % C. */
11235 if (code == TRUNC_MOD_EXPR
11236 && !TYPE_UNSIGNED (type)
11237 && TREE_CODE (arg1) == INTEGER_CST
11238 && !TREE_OVERFLOW (arg1)
11239 && TREE_INT_CST_HIGH (arg1) < 0
11240 && !TYPE_OVERFLOW_TRAPS (type)
11241 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11242 && !sign_bit_p (arg1, arg1))
11243 return fold_build2 (code, type, fold_convert (type, arg0),
11244 fold_convert (type, negate_expr (arg1)));
11245
11246 /* X % -Y is the same as X % Y. */
11247 if (code == TRUNC_MOD_EXPR
11248 && !TYPE_UNSIGNED (type)
11249 && TREE_CODE (arg1) == NEGATE_EXPR
11250 && !TYPE_OVERFLOW_TRAPS (type))
11251 return fold_build2 (code, type, fold_convert (type, arg0),
11252 fold_convert (type, TREE_OPERAND (arg1, 0)));
11253
11254 if (TREE_CODE (arg1) == INTEGER_CST
11255 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11256 &strict_overflow_p)))
11257 {
11258 if (strict_overflow_p)
11259 fold_overflow_warning (("assuming signed overflow does not occur "
11260 "when simplifying modulos"),
11261 WARN_STRICT_OVERFLOW_MISC);
11262 return fold_convert (type, tem);
11263 }
11264
11265 return NULL_TREE;
11266
11267 case LROTATE_EXPR:
11268 case RROTATE_EXPR:
11269 if (integer_all_onesp (arg0))
11270 return omit_one_operand (type, arg0, arg1);
11271 goto shift;
11272
11273 case RSHIFT_EXPR:
11274 /* Optimize -1 >> x for arithmetic right shifts. */
11275 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11276 return omit_one_operand (type, arg0, arg1);
11277 /* ... fall through ... */
11278
11279 case LSHIFT_EXPR:
11280 shift:
11281 if (integer_zerop (arg1))
11282 return non_lvalue (fold_convert (type, arg0));
11283 if (integer_zerop (arg0))
11284 return omit_one_operand (type, arg0, arg1);
11285
11286 /* Since a negative shift count is not well-defined,
11287 don't try to compute it in the compiler.  */
11288 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11289 return NULL_TREE;
11290
11291 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11292 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11293 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11294 && host_integerp (TREE_OPERAND (arg0, 1), false)
11295 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11296 {
11297 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11298 + TREE_INT_CST_LOW (arg1));
11299
11300 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11301 being well defined. */
11302 if (low >= TYPE_PRECISION (type))
11303 {
11304 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11305 low = low % TYPE_PRECISION (type);
11306 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11307 return build_int_cst (type, 0);
11308 else
11309 low = TYPE_PRECISION (type) - 1;
11310 }
11311
11312 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11313 build_int_cst (type, low));
11314 }
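/* For example, (X >> 3) >> 2 becomes X >> 5 when 5 is within the
   precision; for a 32-bit rotate the counts are reduced modulo 32,
   and an overlong signed right shift is clamped to 31.  */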
11315
11316 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11317 into x & ((unsigned)-1 >> c) for unsigned types. */
11318 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11319 || (TYPE_UNSIGNED (type)
11320 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11321 && host_integerp (arg1, false)
11322 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11323 && host_integerp (TREE_OPERAND (arg0, 1), false)
11324 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11325 {
11326 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11327 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11328 tree lshift;
11329 tree arg00;
11330
11331 if (low0 == low1)
11332 {
11333 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11334
11335 lshift = build_int_cst (type, -1);
11336 lshift = int_const_binop (code, lshift, arg1, 0);
11337
11338 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11339 }
11340 }
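/* For example, with 32-bit unsigned X, (X << 8) >> 8 becomes
   X & 0x00ffffff and (X >> 8) << 8 becomes X & 0xffffff00.  */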
11341
11342 /* Rewrite an LROTATE_EXPR by a constant into an
11343 RROTATE_EXPR by a new constant. */
11344 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11345 {
11346 tree tem = build_int_cst (TREE_TYPE (arg1),
11347 TYPE_PRECISION (type));
11348 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11349 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11350 }
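/* For example, a 32-bit rotate-left by 8 becomes a rotate-right
   by 24.  */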
11351
11352 /* If we have a rotate of a bit operation with the rotate count and
11353 the second operand of the bit operation both constant,
11354 permute the two operations. */
11355 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11356 && (TREE_CODE (arg0) == BIT_AND_EXPR
11357 || TREE_CODE (arg0) == BIT_IOR_EXPR
11358 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11360 return fold_build2 (TREE_CODE (arg0), type,
11361 fold_build2 (code, type,
11362 TREE_OPERAND (arg0, 0), arg1),
11363 fold_build2 (code, type,
11364 TREE_OPERAND (arg0, 1), arg1));
11365
11366 /* Two consecutive rotates adding up to the precision of the
11367 type can be ignored. */
11368 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11369 && TREE_CODE (arg0) == RROTATE_EXPR
11370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11371 && TREE_INT_CST_HIGH (arg1) == 0
11372 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11373 && ((TREE_INT_CST_LOW (arg1)
11374 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11375 == (unsigned int) TYPE_PRECISION (type)))
11376 return TREE_OPERAND (arg0, 0);
11377
11378 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11379 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11380 if the latter can be further optimized. */
11381 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11382 && TREE_CODE (arg0) == BIT_AND_EXPR
11383 && TREE_CODE (arg1) == INTEGER_CST
11384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11385 {
11386 tree mask = fold_build2 (code, type,
11387 fold_convert (type, TREE_OPERAND (arg0, 1)),
11388 arg1);
11389 tree shift = fold_build2 (code, type,
11390 fold_convert (type, TREE_OPERAND (arg0, 0)),
11391 arg1);
11392 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11393 if (tem)
11394 return tem;
11395 }
11396
11397 return NULL_TREE;
11398
11399 case MIN_EXPR:
11400 if (operand_equal_p (arg0, arg1, 0))
11401 return omit_one_operand (type, arg0, arg1);
11402 if (INTEGRAL_TYPE_P (type)
11403 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11404 return omit_one_operand (type, arg1, arg0);
11405 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11406 if (tem)
11407 return tem;
11408 goto associate;
11409
11410 case MAX_EXPR:
11411 if (operand_equal_p (arg0, arg1, 0))
11412 return omit_one_operand (type, arg0, arg1);
11413 if (INTEGRAL_TYPE_P (type)
11414 && TYPE_MAX_VALUE (type)
11415 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11416 return omit_one_operand (type, arg1, arg0);
11417 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11418 if (tem)
11419 return tem;
11420 goto associate;
11421
11422 case TRUTH_ANDIF_EXPR:
11423 /* Note that the operands of this must be ints
11424 and their values must be 0 or 1.
11425 ("true" is a fixed value perhaps depending on the language.) */
11426 /* If first arg is constant zero, return it. */
11427 if (integer_zerop (arg0))
11428 return fold_convert (type, arg0);
11429 case TRUTH_AND_EXPR:
11430 /* If either arg is constant true, drop it. */
11431 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11432 return non_lvalue (fold_convert (type, arg1));
11433 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11434 /* Preserve sequence points. */
11435 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11436 return non_lvalue (fold_convert (type, arg0));
11437 /* If second arg is constant zero, result is zero, but first arg
11438 must be evaluated. */
11439 if (integer_zerop (arg1))
11440 return omit_one_operand (type, arg1, arg0);
11441 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11442 case will be handled here. */
11443 if (integer_zerop (arg0))
11444 return omit_one_operand (type, arg0, arg1);
11445
11446 /* !X && X is always false. */
11447 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11449 return omit_one_operand (type, integer_zero_node, arg1);
11450 /* X && !X is always false. */
11451 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11452 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11453 return omit_one_operand (type, integer_zero_node, arg0);
11454
11455 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11456 means A >= Y && A != MAX, but in this case we know that
11457 A < X <= MAX. */
11458
11459 if (!TREE_SIDE_EFFECTS (arg0)
11460 && !TREE_SIDE_EFFECTS (arg1))
11461 {
11462 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11463 if (tem && !operand_equal_p (tem, arg0, 0))
11464 return fold_build2 (code, type, tem, arg1);
11465
11466 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11467 if (tem && !operand_equal_p (tem, arg1, 0))
11468 return fold_build2 (code, type, arg0, tem);
11469 }
11470
11471 truth_andor:
11472 /* We only do these simplifications if we are optimizing. */
11473 if (!optimize)
11474 return NULL_TREE;
11475
11476 /* Check for things like (A || B) && (A || C). We can convert this
11477 to A || (B && C). Note that either operator can be any of the four
11478 truth and/or operations and the transformation will still be
11479 valid. Also note that we only care about order for the
11480 ANDIF and ORIF operators. If B contains side effects, this
11481 might change the truth-value of A. */
11482 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11483 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11484 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11485 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11486 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11487 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11488 {
11489 tree a00 = TREE_OPERAND (arg0, 0);
11490 tree a01 = TREE_OPERAND (arg0, 1);
11491 tree a10 = TREE_OPERAND (arg1, 0);
11492 tree a11 = TREE_OPERAND (arg1, 1);
11493 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11494 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11495 && (code == TRUTH_AND_EXPR
11496 || code == TRUTH_OR_EXPR));
11497
11498 if (operand_equal_p (a00, a10, 0))
11499 return fold_build2 (TREE_CODE (arg0), type, a00,
11500 fold_build2 (code, type, a01, a11));
11501 else if (commutative && operand_equal_p (a00, a11, 0))
11502 return fold_build2 (TREE_CODE (arg0), type, a00,
11503 fold_build2 (code, type, a01, a10));
11504 else if (commutative && operand_equal_p (a01, a10, 0))
11505 return fold_build2 (TREE_CODE (arg0), type, a01,
11506 fold_build2 (code, type, a00, a11));
11507
11508 /* This case is tricky because we must either have commutative
11509 operators or else A10 must not have side-effects.  */
11510
11511 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11512 && operand_equal_p (a01, a11, 0))
11513 return fold_build2 (TREE_CODE (arg0), type,
11514 fold_build2 (code, type, a00, a10),
11515 a01);
11516 }
11517
11518 /* See if we can build a range comparison. */
11519 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11520 return tem;
11521
11522 /* Check for the possibility of merging component references. If our
11523 lhs is another similar operation, try to merge its rhs with our
11524 rhs. Then try to merge our lhs and rhs. */
11525 if (TREE_CODE (arg0) == code
11526 && 0 != (tem = fold_truthop (code, type,
11527 TREE_OPERAND (arg0, 1), arg1)))
11528 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11529
11530 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11531 return tem;
11532
11533 return NULL_TREE;
11534
11535 case TRUTH_ORIF_EXPR:
11536 /* Note that the operands of this must be ints
11537 and their values must be 0 or true.
11538 ("true" is a fixed value perhaps depending on the language.) */
11539 /* If first arg is constant true, return it. */
11540 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11541 return fold_convert (type, arg0);
11542 case TRUTH_OR_EXPR:
11543 /* If either arg is constant zero, drop it. */
11544 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11545 return non_lvalue (fold_convert (type, arg1));
11546 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11547 /* Preserve sequence points. */
11548 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11549 return non_lvalue (fold_convert (type, arg0));
11550 /* If second arg is constant true, result is true, but we must
11551 evaluate first arg. */
11552 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11553 return omit_one_operand (type, arg1, arg0);
11554 /* Likewise for first arg, but note this only occurs here for
11555 TRUTH_OR_EXPR. */
11556 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11557 return omit_one_operand (type, arg0, arg1);
11558
11559 /* !X || X is always true. */
11560 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11561 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11562 return omit_one_operand (type, integer_one_node, arg1);
11563 /* X || !X is always true. */
11564 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11565 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11566 return omit_one_operand (type, integer_one_node, arg0);
11567
11568 goto truth_andor;
11569
11570 case TRUTH_XOR_EXPR:
11571 /* If the second arg is constant zero, drop it. */
11572 if (integer_zerop (arg1))
11573 return non_lvalue (fold_convert (type, arg0));
11574 /* If the second arg is constant true, this is a logical inversion. */
11575 if (integer_onep (arg1))
11576 {
11577 /* Only call invert_truthvalue if operand is a truth value. */
11578 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11579 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11580 else
11581 tem = invert_truthvalue (arg0);
11582 return non_lvalue (fold_convert (type, tem));
11583 }
11584 /* Identical arguments cancel to zero. */
11585 if (operand_equal_p (arg0, arg1, 0))
11586 return omit_one_operand (type, integer_zero_node, arg0);
11587
11588 /* !X ^ X is always true. */
11589 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11590 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11591 return omit_one_operand (type, integer_one_node, arg1);
11592
11593 /* X ^ !X is always true. */
11594 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11595 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11596 return omit_one_operand (type, integer_one_node, arg0);
11597
11598 return NULL_TREE;
11599
11600 case EQ_EXPR:
11601 case NE_EXPR:
11602 tem = fold_comparison (code, type, op0, op1);
11603 if (tem != NULL_TREE)
11604 return tem;
11605
11606 /* bool_var != 0 becomes bool_var. */
11607 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11608 && code == NE_EXPR)
11609 return non_lvalue (fold_convert (type, arg0));
11610
11611 /* bool_var == 1 becomes bool_var. */
11612 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11613 && code == EQ_EXPR)
11614 return non_lvalue (fold_convert (type, arg0));
11615
11616 /* bool_var != 1 becomes !bool_var. */
11617 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11618 && code == NE_EXPR)
11619 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11620
11621 /* bool_var == 0 becomes !bool_var. */
11622 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11623 && code == EQ_EXPR)
11624 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11625
11626 /* If this is an equality comparison of the address of two non-weak,
11627 unaliased symbols neither of which is extern (since we do not
11628 have access to attributes for externs), then we know the result.  */
11629 if (TREE_CODE (arg0) == ADDR_EXPR
11630 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11631 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11632 && ! lookup_attribute ("alias",
11633 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11634 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11635 && TREE_CODE (arg1) == ADDR_EXPR
11636 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11637 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11638 && ! lookup_attribute ("alias",
11639 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11640 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11641 {
11642 /* We know that we're looking at the address of two
11643 non-weak, unaliased, static _DECL nodes.
11644
11645 It is both wasteful and incorrect to call operand_equal_p
11646 to compare the two ADDR_EXPR nodes. It is wasteful in that
11647 all we need to do is test pointer equality for the arguments
11648 to the two ADDR_EXPR nodes. It is incorrect to use
11649 operand_equal_p as that function is NOT equivalent to a
11650 C equality test. It can in fact return false for two
11651 objects which would test as equal using the C equality
11652 operator. */
11653 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11654 return constant_boolean_node (equal
11655 ? code == EQ_EXPR : code != EQ_EXPR,
11656 type);
11657 }
11658
11659 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11660 a MINUS_EXPR of a constant, we can convert it into a comparison with
11661 a revised constant as long as no overflow occurs. */
11662 if (TREE_CODE (arg1) == INTEGER_CST
11663 && (TREE_CODE (arg0) == PLUS_EXPR
11664 || TREE_CODE (arg0) == MINUS_EXPR)
11665 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11666 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11667 ? MINUS_EXPR : PLUS_EXPR,
11668 fold_convert (TREE_TYPE (arg0), arg1),
11669 TREE_OPERAND (arg0, 1), 0))
11670 && !TREE_OVERFLOW (tem))
11671 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11672
11673 /* Similarly for a NEGATE_EXPR. */
11674 if (TREE_CODE (arg0) == NEGATE_EXPR
11675 && TREE_CODE (arg1) == INTEGER_CST
11676 && 0 != (tem = negate_expr (arg1))
11677 && TREE_CODE (tem) == INTEGER_CST
11678 && !TREE_OVERFLOW (tem))
11679 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11680
11681 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11682 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11683 && TREE_CODE (arg1) == INTEGER_CST
11684 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11685 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11686 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11687 fold_convert (TREE_TYPE (arg0), arg1),
11688 TREE_OPERAND (arg0, 1)));
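/* For example, (X ^ 0x0f) == 0xff becomes X == 0xf0.  */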
11689
11690 /* Transform comparisons of the form X +- C CMP X. */
11691 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11692 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11693 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11694 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11695 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11696 {
11697 tree cst = TREE_OPERAND (arg0, 1);
11698
11699 if (code == EQ_EXPR
11700 && !integer_zerop (cst))
11701 return omit_two_operands (type, boolean_false_node,
11702 TREE_OPERAND (arg0, 0), arg1);
11703 else
11704 return omit_two_operands (type, boolean_true_node,
11705 TREE_OPERAND (arg0, 0), arg1);
11706 }
11707
11708 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11709 for !=. Don't do this for ordered comparisons due to overflow. */
11710 if (TREE_CODE (arg0) == MINUS_EXPR
11711 && integer_zerop (arg1))
11712 return fold_build2 (code, type,
11713 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11714
11715 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11716 if (TREE_CODE (arg0) == ABS_EXPR
11717 && (integer_zerop (arg1) || real_zerop (arg1)))
11718 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11719
11720 /* If this is an EQ or NE comparison with zero and ARG0 is
11721 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11722 two operations, but the latter can be done in one less insn
11723 on machines that have only two-operand insns or on which a
11724 constant cannot be the first operand. */
11725 if (TREE_CODE (arg0) == BIT_AND_EXPR
11726 && integer_zerop (arg1))
11727 {
11728 tree arg00 = TREE_OPERAND (arg0, 0);
11729 tree arg01 = TREE_OPERAND (arg0, 1);
11730 if (TREE_CODE (arg00) == LSHIFT_EXPR
11731 && integer_onep (TREE_OPERAND (arg00, 0)))
11732 {
11733 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11734 arg01, TREE_OPERAND (arg00, 1));
11735 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11736 build_int_cst (TREE_TYPE (arg0), 1));
11737 return fold_build2 (code, type,
11738 fold_convert (TREE_TYPE (arg1), tem), arg1);
11739 }
11740 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11741 && integer_onep (TREE_OPERAND (arg01, 0)))
11742 {
11743 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11744 arg00, TREE_OPERAND (arg01, 1));
11745 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11746 build_int_cst (TREE_TYPE (arg0), 1));
11747 return fold_build2 (code, type,
11748 fold_convert (TREE_TYPE (arg1), tem), arg1);
11749 }
11750 }
11751
11752 /* If this is an NE or EQ comparison of zero against the result of a
11753 signed MOD operation whose second operand is a power of 2, make
11754 the MOD operation unsigned since it is simpler and equivalent. */
11755 if (integer_zerop (arg1)
11756 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11757 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11758 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11759 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11760 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11761 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11762 {
11763 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11764 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11765 fold_convert (newtype,
11766 TREE_OPERAND (arg0, 0)),
11767 fold_convert (newtype,
11768 TREE_OPERAND (arg0, 1)));
11769
11770 return fold_build2 (code, type, newmod,
11771 fold_convert (newtype, arg1));
11772 }
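/* For example, for signed X, X % 4 == 0 becomes
   (unsigned) X % 4 == 0, which the TRUNC_MOD_EXPR power-of-two fold
   then reduces to ((unsigned) X & 3) == 0.  */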
11773
11774 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11775 C1 is a valid shift constant, and C2 is a power of two, i.e.
11776 a single bit. */
11777 if (TREE_CODE (arg0) == BIT_AND_EXPR
11778 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11779 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11780 == INTEGER_CST
11781 && integer_pow2p (TREE_OPERAND (arg0, 1))
11782 && integer_zerop (arg1))
11783 {
11784 tree itype = TREE_TYPE (arg0);
11785 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11786 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11787
11788 /* Check for a valid shift count. */
11789 if (TREE_INT_CST_HIGH (arg001) == 0
11790 && TREE_INT_CST_LOW (arg001) < prec)
11791 {
11792 tree arg01 = TREE_OPERAND (arg0, 1);
11793 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11794 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11795 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11796 can be rewritten as (X & (C2 << C1)) != 0. */
11797 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11798 {
11799 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11800 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11801 return fold_build2 (code, type, tem, arg1);
11802 }
11803 /* Otherwise, for signed (arithmetic) shifts,
11804 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11805 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11806 else if (!TYPE_UNSIGNED (itype))
11807 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11808 arg000, build_int_cst (itype, 0));
11809 /* Otherwise, for unsigned (logical) shifts,
11810 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11811 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11812 else
11813 return omit_one_operand (type,
11814 code == EQ_EXPR ? integer_one_node
11815 : integer_zero_node,
11816 arg000);
11817 }
11818 }
11819
11820 /* If this is an NE comparison of zero with an AND of one, remove the
11821 comparison since the AND will give the correct value. */
11822 if (code == NE_EXPR
11823 && integer_zerop (arg1)
11824 && TREE_CODE (arg0) == BIT_AND_EXPR
11825 && integer_onep (TREE_OPERAND (arg0, 1)))
11826 return fold_convert (type, arg0);
11827
11828 /* If we have (A & C) == C where C is a power of 2, convert this into
11829 (A & C) != 0. Similarly for NE_EXPR. */
11830 if (TREE_CODE (arg0) == BIT_AND_EXPR
11831 && integer_pow2p (TREE_OPERAND (arg0, 1))
11832 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11833 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11834 arg0, fold_convert (TREE_TYPE (arg0),
11835 integer_zero_node));
11836
11837 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11838 bit, then fold the expression into A < 0 or A >= 0. */
11839 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11840 if (tem)
11841 return tem;
11842
11843 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11844 Similarly for NE_EXPR. */
11845 if (TREE_CODE (arg0) == BIT_AND_EXPR
11846 && TREE_CODE (arg1) == INTEGER_CST
11847 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11848 {
11849 tree notc = fold_build1 (BIT_NOT_EXPR,
11850 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11851 TREE_OPERAND (arg0, 1));
11852 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11853 arg1, notc);
11854 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11855 if (integer_nonzerop (dandnotc))
11856 return omit_one_operand (type, rslt, arg0);
11857 }
11858
11859 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11860 Similarly for NE_EXPR. */
11861 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11862 && TREE_CODE (arg1) == INTEGER_CST
11863 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11864 {
11865 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11866 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11867 TREE_OPERAND (arg0, 1), notd);
11868 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11869 if (integer_nonzerop (candnotd))
11870 return omit_one_operand (type, rslt, arg0);
11871 }
11872
11873 /* Optimize comparisons of strlen vs zero to a compare of the
11874 first character of the string vs zero. To wit,
11875 strlen(ptr) == 0 => *ptr == 0
11876 strlen(ptr) != 0 => *ptr != 0
11877 Other cases should reduce to one of these two (or a constant)
11878 due to the return value of strlen being unsigned. */
11879 if (TREE_CODE (arg0) == CALL_EXPR
11880 && integer_zerop (arg1))
11881 {
11882 tree fndecl = get_callee_fndecl (arg0);
11883
11884 if (fndecl
11885 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11886 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11887 && call_expr_nargs (arg0) == 1
11888 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11889 {
11890 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11891 return fold_build2 (code, type, iref,
11892 build_int_cst (TREE_TYPE (iref), 0));
11893 }
11894 }
11895
11896 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11897 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11898 if (TREE_CODE (arg0) == RSHIFT_EXPR
11899 && integer_zerop (arg1)
11900 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11901 {
11902 tree arg00 = TREE_OPERAND (arg0, 0);
11903 tree arg01 = TREE_OPERAND (arg0, 1);
11904 tree itype = TREE_TYPE (arg00);
11905 if (TREE_INT_CST_HIGH (arg01) == 0
11906 && TREE_INT_CST_LOW (arg01)
11907 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11908 {
11909 if (TYPE_UNSIGNED (itype))
11910 {
11911 itype = signed_type_for (itype);
11912 arg00 = fold_convert (itype, arg00);
11913 }
11914 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11915 type, arg00, build_int_cst (itype, 0));
11916 }
11917 }
11918
11919 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11920 if (integer_zerop (arg1)
11921 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11922 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11923 TREE_OPERAND (arg0, 1));
11924
11925 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11926 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11927 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11928 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11929 build_int_cst (TREE_TYPE (arg1), 0));
11930 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11931 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11932 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11933 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11934 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11935 build_int_cst (TREE_TYPE (arg1), 0));
11936
11937 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11938 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11939 && TREE_CODE (arg1) == INTEGER_CST
11940 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11941 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11942 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11943 TREE_OPERAND (arg0, 1), arg1));
11944
11945 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11946 (X & C) == 0 when C is a single bit. */
11947 if (TREE_CODE (arg0) == BIT_AND_EXPR
11948 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11949 && integer_zerop (arg1)
11950 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11951 {
11952 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11953 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11954 TREE_OPERAND (arg0, 1));
11955 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11956 type, tem, arg1);
11957 }
11958
11959 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11960 constant C is a power of two, i.e. a single bit. */
11961 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11962 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11963 && integer_zerop (arg1)
11964 && integer_pow2p (TREE_OPERAND (arg0, 1))
11965 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11966 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11967 {
11968 tree arg00 = TREE_OPERAND (arg0, 0);
11969 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11970 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11971 }
11972
11973 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11974 when C is a power of two, i.e. a single bit. */
11975 if (TREE_CODE (arg0) == BIT_AND_EXPR
11976 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11977 && integer_zerop (arg1)
11978 && integer_pow2p (TREE_OPERAND (arg0, 1))
11979 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11980 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11981 {
11982 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11983 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11984 arg000, TREE_OPERAND (arg0, 1));
11985 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11986 tem, build_int_cst (TREE_TYPE (tem), 0));
11987 }
11988
11989 if (integer_zerop (arg1)
11990 && tree_expr_nonzero_p (arg0))
11991 {
11992 tree res = constant_boolean_node (code == NE_EXPR, type);
11993 return omit_one_operand (type, res, arg0);
11994 }
11995
11996 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11997 if (TREE_CODE (arg0) == NEGATE_EXPR
11998 && TREE_CODE (arg1) == NEGATE_EXPR)
11999 return fold_build2 (code, type,
12000 TREE_OPERAND (arg0, 0),
12001 TREE_OPERAND (arg1, 0));
12002
12003 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12004 if (TREE_CODE (arg0) == BIT_AND_EXPR
12005 && TREE_CODE (arg1) == BIT_AND_EXPR)
12006 {
12007 tree arg00 = TREE_OPERAND (arg0, 0);
12008 tree arg01 = TREE_OPERAND (arg0, 1);
12009 tree arg10 = TREE_OPERAND (arg1, 0);
12010 tree arg11 = TREE_OPERAND (arg1, 1);
12011 tree itype = TREE_TYPE (arg0);
12012
12013 if (operand_equal_p (arg01, arg11, 0))
12014 return fold_build2 (code, type,
12015 fold_build2 (BIT_AND_EXPR, itype,
12016 fold_build2 (BIT_XOR_EXPR, itype,
12017 arg00, arg10),
12018 arg01),
12019 build_int_cst (itype, 0));
12020
12021 if (operand_equal_p (arg01, arg10, 0))
12022 return fold_build2 (code, type,
12023 fold_build2 (BIT_AND_EXPR, itype,
12024 fold_build2 (BIT_XOR_EXPR, itype,
12025 arg00, arg11),
12026 arg01),
12027 build_int_cst (itype, 0));
12028
12029 if (operand_equal_p (arg00, arg11, 0))
12030 return fold_build2 (code, type,
12031 fold_build2 (BIT_AND_EXPR, itype,
12032 fold_build2 (BIT_XOR_EXPR, itype,
12033 arg01, arg10),
12034 arg00),
12035 build_int_cst (itype, 0));
12036
12037 if (operand_equal_p (arg00, arg10, 0))
12038 return fold_build2 (code, type,
12039 fold_build2 (BIT_AND_EXPR, itype,
12040 fold_build2 (BIT_XOR_EXPR, itype,
12041 arg01, arg11),
12042 arg00),
12043 build_int_cst (itype, 0));
12044 }
12045
12046 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12047 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12048 {
12049 tree arg00 = TREE_OPERAND (arg0, 0);
12050 tree arg01 = TREE_OPERAND (arg0, 1);
12051 tree arg10 = TREE_OPERAND (arg1, 0);
12052 tree arg11 = TREE_OPERAND (arg1, 1);
12053 tree itype = TREE_TYPE (arg0);
12054
12055 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12056 operand_equal_p guarantees no side-effects so we don't need
12057 to use omit_one_operand on Z. */
12058 if (operand_equal_p (arg01, arg11, 0))
12059 return fold_build2 (code, type, arg00, arg10);
12060 if (operand_equal_p (arg01, arg10, 0))
12061 return fold_build2 (code, type, arg00, arg11);
12062 if (operand_equal_p (arg00, arg11, 0))
12063 return fold_build2 (code, type, arg01, arg10);
12064 if (operand_equal_p (arg00, arg10, 0))
12065 return fold_build2 (code, type, arg01, arg11);
12066
12067 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12068 if (TREE_CODE (arg01) == INTEGER_CST
12069 && TREE_CODE (arg11) == INTEGER_CST)
12070 return fold_build2 (code, type,
12071 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12072 fold_build2 (BIT_XOR_EXPR, itype,
12073 arg01, arg11)),
12074 arg10);
12075 }
12076
12077 /* Attempt to simplify equality/inequality comparisons of complex
12078 values. Only lower the comparison if the result is known or
12079 can be simplified to a single scalar comparison. */
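/* For instance (an illustrative case): COMPLEX_EXPR <x, 0> ==
COMPLEX_EXPR <y, 0> lowers to the single scalar comparison x == y,
because the constant imaginary parts fold to a known equal result
below. */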
12080 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12081 || TREE_CODE (arg0) == COMPLEX_CST)
12082 && (TREE_CODE (arg1) == COMPLEX_EXPR
12083 || TREE_CODE (arg1) == COMPLEX_CST))
12084 {
12085 tree real0, imag0, real1, imag1;
12086 tree rcond, icond;
12087
12088 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12089 {
12090 real0 = TREE_OPERAND (arg0, 0);
12091 imag0 = TREE_OPERAND (arg0, 1);
12092 }
12093 else
12094 {
12095 real0 = TREE_REALPART (arg0);
12096 imag0 = TREE_IMAGPART (arg0);
12097 }
12098
12099 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12100 {
12101 real1 = TREE_OPERAND (arg1, 0);
12102 imag1 = TREE_OPERAND (arg1, 1);
12103 }
12104 else
12105 {
12106 real1 = TREE_REALPART (arg1);
12107 imag1 = TREE_IMAGPART (arg1);
12108 }
12109
12110 rcond = fold_binary (code, type, real0, real1);
12111 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12112 {
12113 if (integer_zerop (rcond))
12114 {
12115 if (code == EQ_EXPR)
12116 return omit_two_operands (type, boolean_false_node,
12117 imag0, imag1);
12118 return fold_build2 (NE_EXPR, type, imag0, imag1);
12119 }
12120 else
12121 {
12122 if (code == NE_EXPR)
12123 return omit_two_operands (type, boolean_true_node,
12124 imag0, imag1);
12125 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12126 }
12127 }
12128
12129 icond = fold_binary (code, type, imag0, imag1);
12130 if (icond && TREE_CODE (icond) == INTEGER_CST)
12131 {
12132 if (integer_zerop (icond))
12133 {
12134 if (code == EQ_EXPR)
12135 return omit_two_operands (type, boolean_false_node,
12136 real0, real1);
12137 return fold_build2 (NE_EXPR, type, real0, real1);
12138 }
12139 else
12140 {
12141 if (code == NE_EXPR)
12142 return omit_two_operands (type, boolean_true_node,
12143 real0, real1);
12144 return fold_build2 (EQ_EXPR, type, real0, real1);
12145 }
12146 }
12147 }
12148
12149 return NULL_TREE;
12150
12151 case LT_EXPR:
12152 case GT_EXPR:
12153 case LE_EXPR:
12154 case GE_EXPR:
12155 tem = fold_comparison (code, type, op0, op1);
12156 if (tem != NULL_TREE)
12157 return tem;
12158
12159 /* Transform comparisons of the form X +- C CMP X. */
12160 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12161 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12162 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12163 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12164 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12165 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12166 {
12167 tree arg01 = TREE_OPERAND (arg0, 1);
12168 enum tree_code code0 = TREE_CODE (arg0);
12169 int is_positive;
12170
12171 if (TREE_CODE (arg01) == REAL_CST)
12172 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12173 else
12174 is_positive = tree_int_cst_sgn (arg01);
12175
12176 /* (X - c) > X becomes false. */
12177 if (code == GT_EXPR
12178 && ((code0 == MINUS_EXPR && is_positive >= 0)
12179 || (code0 == PLUS_EXPR && is_positive <= 0)))
12180 {
12181 if (TREE_CODE (arg01) == INTEGER_CST
12182 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12183 fold_overflow_warning (("assuming signed overflow does not "
12184 "occur when assuming that (X - c) > X "
12185 "is always false"),
12186 WARN_STRICT_OVERFLOW_ALL);
12187 return constant_boolean_node (0, type);
12188 }
12189
12190 /* Likewise (X + c) < X becomes false. */
12191 if (code == LT_EXPR
12192 && ((code0 == PLUS_EXPR && is_positive >= 0)
12193 || (code0 == MINUS_EXPR && is_positive <= 0)))
12194 {
12195 if (TREE_CODE (arg01) == INTEGER_CST
12196 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12197 fold_overflow_warning (("assuming signed overflow does not "
12198 "occur when assuming that "
12199 "(X + c) < X is always false"),
12200 WARN_STRICT_OVERFLOW_ALL);
12201 return constant_boolean_node (0, type);
12202 }
12203
12204 /* Convert (X - c) <= X to true. */
12205 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12206 && code == LE_EXPR
12207 && ((code0 == MINUS_EXPR && is_positive >= 0)
12208 || (code0 == PLUS_EXPR && is_positive <= 0)))
12209 {
12210 if (TREE_CODE (arg01) == INTEGER_CST
12211 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12212 fold_overflow_warning (("assuming signed overflow does not "
12213 "occur when assuming that "
12214 "(X - c) <= X is always true"),
12215 WARN_STRICT_OVERFLOW_ALL);
12216 return constant_boolean_node (1, type);
12217 }
12218
12219 /* Convert (X + c) >= X to true. */
12220 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12221 && code == GE_EXPR
12222 && ((code0 == PLUS_EXPR && is_positive >= 0)
12223 || (code0 == MINUS_EXPR && is_positive <= 0)))
12224 {
12225 if (TREE_CODE (arg01) == INTEGER_CST
12226 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12227 fold_overflow_warning (("assuming signed overflow does not "
12228 "occur when assuming that "
12229 "(X + c) >= X is always true"),
12230 WARN_STRICT_OVERFLOW_ALL);
12231 return constant_boolean_node (1, type);
12232 }
12233
12234 if (TREE_CODE (arg01) == INTEGER_CST)
12235 {
12236 /* Convert X + c > X and X - c < X to true for integers. */
12237 if (code == GT_EXPR
12238 && ((code0 == PLUS_EXPR && is_positive > 0)
12239 || (code0 == MINUS_EXPR && is_positive < 0)))
12240 {
12241 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12242 fold_overflow_warning (("assuming signed overflow does "
12243 "not occur when assuming that "
12244 "(X + c) > X is always true"),
12245 WARN_STRICT_OVERFLOW_ALL);
12246 return constant_boolean_node (1, type);
12247 }
12248
12249 if (code == LT_EXPR
12250 && ((code0 == MINUS_EXPR && is_positive > 0)
12251 || (code0 == PLUS_EXPR && is_positive < 0)))
12252 {
12253 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12254 fold_overflow_warning (("assuming signed overflow does "
12255 "not occur when assuming that "
12256 "(X - c) < X is always true"),
12257 WARN_STRICT_OVERFLOW_ALL);
12258 return constant_boolean_node (1, type);
12259 }
12260
12261 /* Convert X + c <= X and X - c >= X to false for integers. */
12262 if (code == LE_EXPR
12263 && ((code0 == PLUS_EXPR && is_positive > 0)
12264 || (code0 == MINUS_EXPR && is_positive < 0)))
12265 {
12266 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12267 fold_overflow_warning (("assuming signed overflow does "
12268 "not occur when assuming that "
12269 "(X + c) <= X is always false"),
12270 WARN_STRICT_OVERFLOW_ALL);
12271 return constant_boolean_node (0, type);
12272 }
12273
12274 if (code == GE_EXPR
12275 && ((code0 == MINUS_EXPR && is_positive > 0)
12276 || (code0 == PLUS_EXPR && is_positive < 0)))
12277 {
12278 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12279 fold_overflow_warning (("assuming signed overflow does "
12280 "not occur when assuming that "
12281 "(X - c) >= X is always false"),
12282 WARN_STRICT_OVERFLOW_ALL);
12283 return constant_boolean_node (0, type);
12284 }
12285 }
12286 }
12287
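#if 0
/* Hypothetical source-level illustration of the transforms above (a
sketch, not part of this file): with signed overflow treated as
undefined, both functions fold to "return 1;". For unsigned X,
wrap-around is well defined and no folding occurs. */
static int sample_plus_gt (int x) { return x + 1 > x; }
static int sample_minus_le (int x) { return x - 1 <= x; }
#endif
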
12288 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12289 This transformation affects the cases which are handled in later
12290 optimizations involving comparisons with non-negative constants. */
12291 if (TREE_CODE (arg1) == INTEGER_CST
12292 && TREE_CODE (arg0) != INTEGER_CST
12293 && tree_int_cst_sgn (arg1) > 0)
12294 {
12295 if (code == GE_EXPR)
12296 {
12297 arg1 = const_binop (MINUS_EXPR, arg1,
12298 build_int_cst (TREE_TYPE (arg1), 1), 0);
12299 return fold_build2 (GT_EXPR, type, arg0,
12300 fold_convert (TREE_TYPE (arg0), arg1));
12301 }
12302 if (code == LT_EXPR)
12303 {
12304 arg1 = const_binop (MINUS_EXPR, arg1,
12305 build_int_cst (TREE_TYPE (arg1), 1), 0);
12306 return fold_build2 (LE_EXPR, type, arg0,
12307 fold_convert (TREE_TYPE (arg0), arg1));
12308 }
12309 }
12310
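/* For example, for integral X the comparison X >= 1 is canonicalized
to X > 0, so the transformations below only need to recognize one
canonical form of each comparison against a non-negative constant. */
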
12311 /* Comparisons with the highest or lowest possible integer of
12312 the specified precision will have known values. */
12313 {
12314 tree arg1_type = TREE_TYPE (arg1);
12315 unsigned int width = TYPE_PRECISION (arg1_type);
12316
12317 if (TREE_CODE (arg1) == INTEGER_CST
12318 && !TREE_OVERFLOW (arg1)
12319 && width <= 2 * HOST_BITS_PER_WIDE_INT
12320 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12321 {
12322 HOST_WIDE_INT signed_max_hi;
12323 unsigned HOST_WIDE_INT signed_max_lo;
12324 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12325
12326 if (width <= HOST_BITS_PER_WIDE_INT)
12327 {
12328 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12329 - 1;
12330 signed_max_hi = 0;
12331 max_hi = 0;
12332
12333 if (TYPE_UNSIGNED (arg1_type))
12334 {
12335 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12336 min_lo = 0;
12337 min_hi = 0;
12338 }
12339 else
12340 {
12341 max_lo = signed_max_lo;
12342 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12343 min_hi = -1;
12344 }
12345 }
12346 else
12347 {
12348 width -= HOST_BITS_PER_WIDE_INT;
12349 signed_max_lo = -1;
12350 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12351 - 1;
12352 max_lo = -1;
12353 min_lo = 0;
12354
12355 if (TYPE_UNSIGNED (arg1_type))
12356 {
12357 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12358 min_hi = 0;
12359 }
12360 else
12361 {
12362 max_hi = signed_max_hi;
12363 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12364 }
12365 }
12366
12367 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12368 && TREE_INT_CST_LOW (arg1) == max_lo)
12369 switch (code)
12370 {
12371 case GT_EXPR:
12372 return omit_one_operand (type, integer_zero_node, arg0);
12373
12374 case GE_EXPR:
12375 return fold_build2 (EQ_EXPR, type, op0, op1);
12376
12377 case LE_EXPR:
12378 return omit_one_operand (type, integer_one_node, arg0);
12379
12380 case LT_EXPR:
12381 return fold_build2 (NE_EXPR, type, op0, op1);
12382
12383 /* The GE_EXPR and LT_EXPR cases above are not normally
12384 reached because of previous transformations. */
12385
12386 default:
12387 break;
12388 }
12389 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12390 == max_hi
12391 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12392 switch (code)
12393 {
12394 case GT_EXPR:
12395 arg1 = const_binop (PLUS_EXPR, arg1,
12396 build_int_cst (TREE_TYPE (arg1), 1), 0);
12397 return fold_build2 (EQ_EXPR, type,
12398 fold_convert (TREE_TYPE (arg1), arg0),
12399 arg1);
12400 case LE_EXPR:
12401 arg1 = const_binop (PLUS_EXPR, arg1,
12402 build_int_cst (TREE_TYPE (arg1), 1), 0);
12403 return fold_build2 (NE_EXPR, type,
12404 fold_convert (TREE_TYPE (arg1), arg0),
12405 arg1);
12406 default:
12407 break;
12408 }
12409 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12410 == min_hi
12411 && TREE_INT_CST_LOW (arg1) == min_lo)
12412 switch (code)
12413 {
12414 case LT_EXPR:
12415 return omit_one_operand (type, integer_zero_node, arg0);
12416
12417 case LE_EXPR:
12418 return fold_build2 (EQ_EXPR, type, op0, op1);
12419
12420 case GE_EXPR:
12421 return omit_one_operand (type, integer_one_node, arg0);
12422
12423 case GT_EXPR:
12424 return fold_build2 (NE_EXPR, type, op0, op1);
12425
12426 default:
12427 break;
12428 }
12429 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12430 == min_hi
12431 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12432 switch (code)
12433 {
12434 case GE_EXPR:
12435 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12436 return fold_build2 (NE_EXPR, type,
12437 fold_convert (TREE_TYPE (arg1), arg0),
12438 arg1);
12439 case LT_EXPR:
12440 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12441 return fold_build2 (EQ_EXPR, type,
12442 fold_convert (TREE_TYPE (arg1), arg0),
12443 arg1);
12444 default:
12445 break;
12446 }
12447
12448 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12449 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12450 && TYPE_UNSIGNED (arg1_type)
12451 /* We will flip the signedness of the comparison operator
12452 associated with the mode of arg1, so the sign bit is
12453 specified by this mode. Check that arg1 is the signed
12454 max associated with this sign bit. */
12455 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12456 /* signed_type does not work on pointer types. */
12457 && INTEGRAL_TYPE_P (arg1_type))
12458 {
12459 /* The following case also applies to X < signed_max+1
12460 and X >= signed_max+1 because of previous transformations. */
12461 if (code == LE_EXPR || code == GT_EXPR)
12462 {
12463 tree st;
12464 st = signed_type_for (TREE_TYPE (arg1));
12465 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12466 type, fold_convert (st, arg0),
12467 build_int_cst (st, 0));
12468 }
12469 }
12470 }
12471 }
12472
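/* Illustration of the bound comparisons above for a hypothetical 8-bit
unsigned type: X > 255 folds to 0, X <= 255 folds to 1, X > 254
becomes X == 255, and X <= 254 becomes X != 255. */
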
12473 /* If we are comparing an ABS_EXPR with a constant, we can
12474 convert all the cases into explicit comparisons, but they may
12475 well not be faster than doing the ABS and one comparison.
12476 But ABS (X) <= C is a range comparison, which becomes a subtraction
12477 and a comparison, and is probably faster. */
12478 if (code == LE_EXPR
12479 && TREE_CODE (arg1) == INTEGER_CST
12480 && TREE_CODE (arg0) == ABS_EXPR
12481 && ! TREE_SIDE_EFFECTS (arg0)
12482 && (0 != (tem = negate_expr (arg1)))
12483 && TREE_CODE (tem) == INTEGER_CST
12484 && !TREE_OVERFLOW (tem))
12485 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12486 build2 (GE_EXPR, type,
12487 TREE_OPERAND (arg0, 0), tem),
12488 build2 (LE_EXPR, type,
12489 TREE_OPERAND (arg0, 0), arg1));
12490
12491 /* Convert ABS_EXPR<x> >= 0 to true. */
12492 strict_overflow_p = false;
12493 if (code == GE_EXPR
12494 && (integer_zerop (arg1)
12495 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12496 && real_zerop (arg1)))
12497 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12498 {
12499 if (strict_overflow_p)
12500 fold_overflow_warning (("assuming signed overflow does not occur "
12501 "when simplifying comparison of "
12502 "absolute value and zero"),
12503 WARN_STRICT_OVERFLOW_CONDITIONAL);
12504 return omit_one_operand (type, integer_one_node, arg0);
12505 }
12506
12507 /* Convert ABS_EXPR<x> < 0 to false. */
12508 strict_overflow_p = false;
12509 if (code == LT_EXPR
12510 && (integer_zerop (arg1) || real_zerop (arg1))
12511 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12512 {
12513 if (strict_overflow_p)
12514 fold_overflow_warning (("assuming signed overflow does not occur "
12515 "when simplifying comparison of "
12516 "absolute value and zero"),
12517 WARN_STRICT_OVERFLOW_CONDITIONAL);
12518 return omit_one_operand (type, integer_zero_node, arg0);
12519 }
12520
12521 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12522 and similarly for >= into !=. */
12523 if ((code == LT_EXPR || code == GE_EXPR)
12524 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12525 && TREE_CODE (arg1) == LSHIFT_EXPR
12526 && integer_onep (TREE_OPERAND (arg1, 0)))
12527 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12528 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12529 TREE_OPERAND (arg1, 1)),
12530 build_int_cst (TREE_TYPE (arg0), 0));
12531
12532 if ((code == LT_EXPR || code == GE_EXPR)
12533 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12534 && (TREE_CODE (arg1) == NOP_EXPR
12535 || TREE_CODE (arg1) == CONVERT_EXPR)
12536 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12537 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12538 return
12539 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12540 fold_convert (TREE_TYPE (arg0),
12541 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12542 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12543 1))),
12544 build_int_cst (TREE_TYPE (arg0), 0));
12545
12546 return NULL_TREE;
12547
12548 case UNORDERED_EXPR:
12549 case ORDERED_EXPR:
12550 case UNLT_EXPR:
12551 case UNLE_EXPR:
12552 case UNGT_EXPR:
12553 case UNGE_EXPR:
12554 case UNEQ_EXPR:
12555 case LTGT_EXPR:
12556 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12557 {
12558 t1 = fold_relational_const (code, type, arg0, arg1);
12559 if (t1 != NULL_TREE)
12560 return t1;
12561 }
12562
12563 /* If the first operand is NaN, the result is constant. */
12564 if (TREE_CODE (arg0) == REAL_CST
12565 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12566 && (code != LTGT_EXPR || ! flag_trapping_math))
12567 {
12568 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12569 ? integer_zero_node
12570 : integer_one_node;
12571 return omit_one_operand (type, t1, arg1);
12572 }
12573
12574 /* If the second operand is NaN, the result is constant. */
12575 if (TREE_CODE (arg1) == REAL_CST
12576 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12577 && (code != LTGT_EXPR || ! flag_trapping_math))
12578 {
12579 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12580 ? integer_zero_node
12581 : integer_one_node;
12582 return omit_one_operand (type, t1, arg0);
12583 }
12584
12585 /* Simplify unordered comparison of something with itself. */
12586 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12587 && operand_equal_p (arg0, arg1, 0))
12588 return constant_boolean_node (1, type);
12589
12590 if (code == LTGT_EXPR
12591 && !flag_trapping_math
12592 && operand_equal_p (arg0, arg1, 0))
12593 return constant_boolean_node (0, type);
12594
12595 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12596 {
12597 tree targ0 = strip_float_extensions (arg0);
12598 tree targ1 = strip_float_extensions (arg1);
12599 tree newtype = TREE_TYPE (targ0);
12600
12601 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12602 newtype = TREE_TYPE (targ1);
12603
12604 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12605 return fold_build2 (code, type, fold_convert (newtype, targ0),
12606 fold_convert (newtype, targ1));
12607 }
12608
12609 return NULL_TREE;
12610
12611 case COMPOUND_EXPR:
12612 /* When pedantic, a compound expression can be neither an lvalue
12613 nor an integer constant expression. */
12614 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12615 return NULL_TREE;
12616 /* Don't let (0, 0) be a null pointer constant. */
12617 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12618 : fold_convert (type, arg1);
12619 return pedantic_non_lvalue (tem);
12620
12621 case COMPLEX_EXPR:
12622 if ((TREE_CODE (arg0) == REAL_CST
12623 && TREE_CODE (arg1) == REAL_CST)
12624 || (TREE_CODE (arg0) == INTEGER_CST
12625 && TREE_CODE (arg1) == INTEGER_CST))
12626 return build_complex (type, arg0, arg1);
12627 return NULL_TREE;
12628
12629 case ASSERT_EXPR:
12630 /* An ASSERT_EXPR should never be passed to fold_binary. */
12631 gcc_unreachable ();
12632
12633 default:
12634 return NULL_TREE;
12635 } /* switch (code) */
12636 }
12637
12638 /* Callback for walk_tree, looking for a LABEL_EXPR.
12639 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
12640 Does not walk into the sub-tree of a GOTO_EXPR. */
12641
12642 static tree
12643 contains_label_1 (tree *tp,
12644 int *walk_subtrees,
12645 void *data ATTRIBUTE_UNUSED)
12646 {
12647 switch (TREE_CODE (*tp))
12648 {
12649 case LABEL_EXPR:
12650 return *tp;
12651 case GOTO_EXPR:
12652 *walk_subtrees = 0;
12653 /* no break */
12654 default:
12655 return NULL_TREE;
12656 }
12657 }
12658
12659 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
12660 accessible from outside the sub-tree. Returns true if such a label
12661 is found, false otherwise. */
12662
12663 static bool
12664 contains_label_p (tree st)
12665 {
12666 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12667 }
12668
12669 /* Fold a ternary expression of code CODE and type TYPE with operands
12670 OP0, OP1, and OP2. Return the folded expression if folding is
12671 successful. Otherwise, return NULL_TREE. */
12672
12673 tree
12674 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12675 {
12676 tree tem;
12677 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12678 enum tree_code_class kind = TREE_CODE_CLASS (code);
12679
12680 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12681 && TREE_CODE_LENGTH (code) == 3);
12682
12683 /* Strip any conversions that don't change the mode. This is safe
12684 for every expression, except for a comparison expression because
12685 its signedness is derived from its operands. So, in the latter
12686 case, only strip conversions that don't change the signedness.
12687
12688 Note that this is done as an internal manipulation within the
12689 constant folder, in order to find the simplest representation of
12690 the arguments so that their form can be studied. In any case,
12691 the appropriate type conversions should be put back in the tree
12692 that will get out of the constant folder. */
12693 if (op0)
12694 {
12695 arg0 = op0;
12696 STRIP_NOPS (arg0);
12697 }
12698
12699 if (op1)
12700 {
12701 arg1 = op1;
12702 STRIP_NOPS (arg1);
12703 }
12704
12705 switch (code)
12706 {
12707 case COMPONENT_REF:
12708 if (TREE_CODE (arg0) == CONSTRUCTOR
12709 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12710 {
12711 unsigned HOST_WIDE_INT idx;
12712 tree field, value;
12713 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12714 if (field == arg1)
12715 return value;
12716 }
12717 return NULL_TREE;
12718
12719 case COND_EXPR:
12720 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12721 so all simple results must be passed through pedantic_non_lvalue. */
12722 if (TREE_CODE (arg0) == INTEGER_CST)
12723 {
12724 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12725 tem = integer_zerop (arg0) ? op2 : op1;
12726 /* Only optimize constant conditions when the selected branch
12727 has the same type as the COND_EXPR. This avoids optimizing
12728 away "c ? x : throw", where the throw has a void type.
12729 Also avoid discarding an unused operand that contains a label. */
12730 if ((!TREE_SIDE_EFFECTS (unused_op)
12731 || !contains_label_p (unused_op))
12732 && (! VOID_TYPE_P (TREE_TYPE (tem))
12733 || VOID_TYPE_P (type)))
12734 return pedantic_non_lvalue (tem);
12735 return NULL_TREE;
12736 }
12737 if (operand_equal_p (arg1, op2, 0))
12738 return pedantic_omit_one_operand (type, arg1, arg0);
12739
12740 /* If we have A op B ? A : C, we may be able to convert this to a
12741 simpler expression, depending on the operation and the values
12742 of B and C. Signed zeros prevent all of these transformations,
12743 for reasons given above each one.
12744
12745 Also try swapping the arguments and inverting the conditional. */
12746 if (COMPARISON_CLASS_P (arg0)
12747 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12748 arg1, TREE_OPERAND (arg0, 1))
12749 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12750 {
12751 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12752 if (tem)
12753 return tem;
12754 }
12755
12756 if (COMPARISON_CLASS_P (arg0)
12757 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12758 op2,
12759 TREE_OPERAND (arg0, 1))
12760 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12761 {
12762 tem = fold_truth_not_expr (arg0);
12763 if (tem && COMPARISON_CLASS_P (tem))
12764 {
12765 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12766 if (tem)
12767 return tem;
12768 }
12769 }
12770
12771 /* If the second operand is simpler than the third, swap them
12772 since that produces better jump optimization results. */
12773 if (truth_value_p (TREE_CODE (arg0))
12774 && tree_swap_operands_p (op1, op2, false))
12775 {
12776 /* See if this can be inverted. If it can't, possibly because
12777 it was a floating-point inequality comparison, don't do
12778 anything. */
12779 tem = fold_truth_not_expr (arg0);
12780 if (tem)
12781 return fold_build3 (code, type, tem, op2, op1);
12782 }
12783
12784 /* Convert A ? 1 : 0 to simply A. */
12785 if (integer_onep (op1)
12786 && integer_zerop (op2)
12787 /* If we try to convert OP0 to our type, the
12788 call to fold will try to move the conversion inside
12789 a COND, which will recurse. In that case, the COND_EXPR
12790 is probably the best choice, so leave it alone. */
12791 && type == TREE_TYPE (arg0))
12792 return pedantic_non_lvalue (arg0);
12793
12794 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12795 over COND_EXPR in cases such as floating point comparisons. */
12796 if (integer_zerop (op1)
12797 && integer_onep (op2)
12798 && truth_value_p (TREE_CODE (arg0)))
12799 return pedantic_non_lvalue (fold_convert (type,
12800 invert_truthvalue (arg0)));
12801
12802 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12803 if (TREE_CODE (arg0) == LT_EXPR
12804 && integer_zerop (TREE_OPERAND (arg0, 1))
12805 && integer_zerop (op2)
12806 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12807 {
12808 /* sign_bit_p only checks ARG1 bits within A's precision.
12809 If <sign bit of A> has a wider type than A, the bits outside
12810 of A's precision in <sign bit of A> need to be checked.
12811 If they are all 0, this optimization needs to be done
12812 in unsigned A's type; if they are all 1, in signed A's type;
12813 otherwise it can't be done. */
12814 if (TYPE_PRECISION (TREE_TYPE (tem))
12815 < TYPE_PRECISION (TREE_TYPE (arg1))
12816 && TYPE_PRECISION (TREE_TYPE (tem))
12817 < TYPE_PRECISION (type))
12818 {
12819 unsigned HOST_WIDE_INT mask_lo;
12820 HOST_WIDE_INT mask_hi;
12821 int inner_width, outer_width;
12822 tree tem_type;
12823
12824 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12825 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12826 if (outer_width > TYPE_PRECISION (type))
12827 outer_width = TYPE_PRECISION (type);
12828
12829 if (outer_width > HOST_BITS_PER_WIDE_INT)
12830 {
12831 mask_hi = ((unsigned HOST_WIDE_INT) -1
12832 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12833 mask_lo = -1;
12834 }
12835 else
12836 {
12837 mask_hi = 0;
12838 mask_lo = ((unsigned HOST_WIDE_INT) -1
12839 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12840 }
12841 if (inner_width > HOST_BITS_PER_WIDE_INT)
12842 {
12843 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12844 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12845 mask_lo = 0;
12846 }
12847 else
12848 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12849 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12850
12851 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12852 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12853 {
12854 tem_type = signed_type_for (TREE_TYPE (tem));
12855 tem = fold_convert (tem_type, tem);
12856 }
12857 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12858 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12859 {
12860 tem_type = unsigned_type_for (TREE_TYPE (tem));
12861 tem = fold_convert (tem_type, tem);
12862 }
12863 else
12864 tem = NULL;
12865 }
12866
12867 if (tem)
12868 return fold_convert (type,
12869 fold_build2 (BIT_AND_EXPR,
12870 TREE_TYPE (tem), tem,
12871 fold_convert (TREE_TYPE (tem),
12872 arg1)));
12873 }
12874
12875 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12876 already handled above. */
12877 if (TREE_CODE (arg0) == BIT_AND_EXPR
12878 && integer_onep (TREE_OPERAND (arg0, 1))
12879 && integer_zerop (op2)
12880 && integer_pow2p (arg1))
12881 {
12882 tree tem = TREE_OPERAND (arg0, 0);
12883 STRIP_NOPS (tem);
12884 if (TREE_CODE (tem) == RSHIFT_EXPR
12885 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12886 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12887 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12888 return fold_build2 (BIT_AND_EXPR, type,
12889 TREE_OPERAND (tem, 0), arg1);
12890 }
12891
12892 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12893 is probably obsolete because the first operand should be a
12894 truth value (that's why we have the two cases above), but let's
12895 leave it in until we can confirm this for all front-ends. */
12896 if (integer_zerop (op2)
12897 && TREE_CODE (arg0) == NE_EXPR
12898 && integer_zerop (TREE_OPERAND (arg0, 1))
12899 && integer_pow2p (arg1)
12900 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12901 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12902 arg1, OEP_ONLY_CONST))
12903 return pedantic_non_lvalue (fold_convert (type,
12904 TREE_OPERAND (arg0, 0)));
12905
12906 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12907 if (integer_zerop (op2)
12908 && truth_value_p (TREE_CODE (arg0))
12909 && truth_value_p (TREE_CODE (arg1)))
12910 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12911 fold_convert (type, arg0),
12912 arg1);
12913
12914 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12915 if (integer_onep (op2)
12916 && truth_value_p (TREE_CODE (arg0))
12917 && truth_value_p (TREE_CODE (arg1)))
12918 {
12919 /* Only perform transformation if ARG0 is easily inverted. */
12920 tem = fold_truth_not_expr (arg0);
12921 if (tem)
12922 return fold_build2 (TRUTH_ORIF_EXPR, type,
12923 fold_convert (type, tem),
12924 arg1);
12925 }
12926
12927 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12928 if (integer_zerop (arg1)
12929 && truth_value_p (TREE_CODE (arg0))
12930 && truth_value_p (TREE_CODE (op2)))
12931 {
12932 /* Only perform transformation if ARG0 is easily inverted. */
12933 tem = fold_truth_not_expr (arg0);
12934 if (tem)
12935 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12936 fold_convert (type, tem),
12937 op2);
12938 }
12939
12940 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12941 if (integer_onep (arg1)
12942 && truth_value_p (TREE_CODE (arg0))
12943 && truth_value_p (TREE_CODE (op2)))
12944 return fold_build2 (TRUTH_ORIF_EXPR, type,
12945 fold_convert (type, arg0),
12946 op2);
12947
12948 return NULL_TREE;
12949
12950 case CALL_EXPR:
12951 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12952 of fold_ternary on them. */
12953 gcc_unreachable ();
12954
12955 case BIT_FIELD_REF:
12956 if ((TREE_CODE (arg0) == VECTOR_CST
12957 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12958 && type == TREE_TYPE (TREE_TYPE (arg0)))
12959 {
12960 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12961 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12962
12963 if (width != 0
12964 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12965 && (idx % width) == 0
12966 && (idx = idx / width)
12967 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12968 {
12969 tree elements = NULL_TREE;
12970
12971 if (TREE_CODE (arg0) == VECTOR_CST)
12972 elements = TREE_VECTOR_CST_ELTS (arg0);
12973 else
12974 {
12975 unsigned HOST_WIDE_INT idx;
12976 tree value;
12977
12978 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12979 elements = tree_cons (NULL_TREE, value, elements);
12980 }
12981 while (idx-- > 0 && elements)
12982 elements = TREE_CHAIN (elements);
12983 if (elements)
12984 return TREE_VALUE (elements);
12985 else
12986 return fold_convert (type, integer_zero_node);
12987 }
12988 }
12989 return NULL_TREE;
12990
12991 default:
12992 return NULL_TREE;
12993 } /* switch (code) */
12994 }
12995
12996 /* Perform constant folding and related simplification of EXPR.
12997 The related simplifications include x*1 => x, x*0 => 0, etc.,
12998 and application of the associative law.
12999 NOP_EXPR conversions may be removed freely (as long as we
13000 are careful not to change the type of the overall expression).
13001 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13002 but we can constant-fold them if they have constant operands. */
13003
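#if 0
/* Illustrative sketch (hypothetical helper, not part of this file):
a caller builds a tree and asks fold to simplify it; here the
PLUS_EXPR of two INTEGER_CSTs folds to the INTEGER_CST 10. In new
code the fold_buildN entry points below are normally preferred. */
static tree
sample_fold_usage (void)
{
tree five = build_int_cst (integer_type_node, 5);
tree sum = build2 (PLUS_EXPR, integer_type_node, five, five);
return fold (sum);
}
#endif
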
13004 #ifdef ENABLE_FOLD_CHECKING
13005 # define fold(x) fold_1 (x)
13006 static tree fold_1 (tree);
13007 static
13008 #endif
13009 tree
13010 fold (tree expr)
13011 {
13012 const tree t = expr;
13013 enum tree_code code = TREE_CODE (t);
13014 enum tree_code_class kind = TREE_CODE_CLASS (code);
13015 tree tem;
13016
13017 /* Return right away if a constant. */
13018 if (kind == tcc_constant)
13019 return t;
13020
13021 /* CALL_EXPR-like objects with variable numbers of operands are
13022 treated specially. */
13023 if (kind == tcc_vl_exp)
13024 {
13025 if (code == CALL_EXPR)
13026 {
13027 tem = fold_call_expr (expr, false);
13028 return tem ? tem : expr;
13029 }
13030 return expr;
13031 }
13032
13033 if (IS_EXPR_CODE_CLASS (kind)
13034 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13035 {
13036 tree type = TREE_TYPE (t);
13037 tree op0, op1, op2;
13038
13039 switch (TREE_CODE_LENGTH (code))
13040 {
13041 case 1:
13042 op0 = TREE_OPERAND (t, 0);
13043 tem = fold_unary (code, type, op0);
13044 return tem ? tem : expr;
13045 case 2:
13046 op0 = TREE_OPERAND (t, 0);
13047 op1 = TREE_OPERAND (t, 1);
13048 tem = fold_binary (code, type, op0, op1);
13049 return tem ? tem : expr;
13050 case 3:
13051 op0 = TREE_OPERAND (t, 0);
13052 op1 = TREE_OPERAND (t, 1);
13053 op2 = TREE_OPERAND (t, 2);
13054 tem = fold_ternary (code, type, op0, op1, op2);
13055 return tem ? tem : expr;
13056 default:
13057 break;
13058 }
13059 }
13060
13061 switch (code)
13062 {
13063 case CONST_DECL:
13064 return fold (DECL_INITIAL (t));
13065
13066 default:
13067 return t;
13068 } /* switch (code) */
13069 }
13070
13071 #ifdef ENABLE_FOLD_CHECKING
13072 #undef fold
13073
13074 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13075 static void fold_check_failed (const_tree, const_tree);
13076 void print_fold_checksum (const_tree);
13077
13078 /* When --enable-checking=fold, compute a digest of EXPR before
13079 and after the actual fold call, to verify that fold did not
13080 accidentally change the original expr. */
13081
13082 tree
13083 fold (tree expr)
13084 {
13085 tree ret;
13086 struct md5_ctx ctx;
13087 unsigned char checksum_before[16], checksum_after[16];
13088 htab_t ht;
13089
13090 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13091 md5_init_ctx (&ctx);
13092 fold_checksum_tree (expr, &ctx, ht);
13093 md5_finish_ctx (&ctx, checksum_before);
13094 htab_empty (ht);
13095
13096 ret = fold_1 (expr);
13097
13098 md5_init_ctx (&ctx);
13099 fold_checksum_tree (expr, &ctx, ht);
13100 md5_finish_ctx (&ctx, checksum_after);
13101 htab_delete (ht);
13102
13103 if (memcmp (checksum_before, checksum_after, 16))
13104 fold_check_failed (expr, ret);
13105
13106 return ret;
13107 }
13108
13109 void
13110 print_fold_checksum (const_tree expr)
13111 {
13112 struct md5_ctx ctx;
13113 unsigned char checksum[16], cnt;
13114 htab_t ht;
13115
13116 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13117 md5_init_ctx (&ctx);
13118 fold_checksum_tree (expr, &ctx, ht);
13119 md5_finish_ctx (&ctx, checksum);
13120 htab_delete (ht);
13121 for (cnt = 0; cnt < 16; ++cnt)
13122 fprintf (stderr, "%02x", checksum[cnt]);
13123 putc ('\n', stderr);
13124 }
13125
13126 static void
13127 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13128 {
13129 internal_error ("fold check: original tree changed by fold");
13130 }
13131
13132 static void
13133 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13134 {
13135 const void **slot;
13136 enum tree_code code;
13137 struct tree_function_decl buf;
13138 int i, len;
13139
13140 recursive_label:
13141
13142 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13143 <= sizeof (struct tree_function_decl))
13144 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13145 if (expr == NULL)
13146 return;
13147 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13148 if (*slot != NULL)
13149 return;
13150 *slot = expr;
13151 code = TREE_CODE (expr);
13152 if (TREE_CODE_CLASS (code) == tcc_declaration
13153 && DECL_ASSEMBLER_NAME_SET_P (expr))
13154 {
13155 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13156 memcpy ((char *) &buf, expr, tree_size (expr));
13157 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13158 expr = (tree) &buf;
13159 }
13160 else if (TREE_CODE_CLASS (code) == tcc_type
13161 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13162 || TYPE_CACHED_VALUES_P (expr)
13163 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13164 {
13165 /* Allow these fields to be modified. */
13166 tree tmp;
13167 memcpy ((char *) &buf, expr, tree_size (expr));
13168 expr = tmp = (tree) &buf;
13169 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13170 TYPE_POINTER_TO (tmp) = NULL;
13171 TYPE_REFERENCE_TO (tmp) = NULL;
13172 if (TYPE_CACHED_VALUES_P (tmp))
13173 {
13174 TYPE_CACHED_VALUES_P (tmp) = 0;
13175 TYPE_CACHED_VALUES (tmp) = NULL;
13176 }
13177 }
13178 md5_process_bytes (expr, tree_size (expr), ctx);
13179 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13180 if (TREE_CODE_CLASS (code) != tcc_type
13181 && TREE_CODE_CLASS (code) != tcc_declaration
13182 && code != TREE_LIST
13183 && code != SSA_NAME)
13184 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13185 switch (TREE_CODE_CLASS (code))
13186 {
13187 case tcc_constant:
13188 switch (code)
13189 {
13190 case STRING_CST:
13191 md5_process_bytes (TREE_STRING_POINTER (expr),
13192 TREE_STRING_LENGTH (expr), ctx);
13193 break;
13194 case COMPLEX_CST:
13195 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13196 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13197 break;
13198 case VECTOR_CST:
13199 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13200 break;
13201 default:
13202 break;
13203 }
13204 break;
13205 case tcc_exceptional:
13206 switch (code)
13207 {
13208 case TREE_LIST:
13209 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13210 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13211 expr = TREE_CHAIN (expr);
13212 goto recursive_label;
13213 break;
13214 case TREE_VEC:
13215 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13216 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13217 break;
13218 default:
13219 break;
13220 }
13221 break;
13222 case tcc_expression:
13223 case tcc_reference:
13224 case tcc_comparison:
13225 case tcc_unary:
13226 case tcc_binary:
13227 case tcc_statement:
13228 case tcc_vl_exp:
13229 len = TREE_OPERAND_LENGTH (expr);
13230 for (i = 0; i < len; ++i)
13231 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13232 break;
13233 case tcc_declaration:
13234 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13235 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13236 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13237 {
13238 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13239 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13240 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13241 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13242 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13243 }
13244 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13245 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13246
13247 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13248 {
13249 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13250 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13251 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13252 }
13253 break;
13254 case tcc_type:
13255 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13256 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13257 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13258 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13259 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13260 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13261 if (INTEGRAL_TYPE_P (expr)
13262 || SCALAR_FLOAT_TYPE_P (expr))
13263 {
13264 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13265 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13266 }
13267 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13268 if (TREE_CODE (expr) == RECORD_TYPE
13269 || TREE_CODE (expr) == UNION_TYPE
13270 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13271 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13272 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13273 break;
13274 default:
13275 break;
13276 }
13277 }
13278
13279 /* Helper function for outputting the checksum of a tree T. When
13280 debugging with gdb, you can define a command "mynext" as "next"
13281 followed by "call debug_fold_checksum (op0)", then just step
13282 until the outputs differ. */
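/* For example (an illustrative gdb session, not verbatim output):

(gdb) define mynext
> next
> call debug_fold_checksum (op0)
> end
*/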
13283
13284 void
13285 debug_fold_checksum (const_tree t)
13286 {
13287 int i;
13288 unsigned char checksum[16];
13289 struct md5_ctx ctx;
13290 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13291
13292 md5_init_ctx (&ctx);
13293 fold_checksum_tree (t, &ctx, ht);
13294 md5_finish_ctx (&ctx, checksum);
13295 htab_empty (ht);
13296
13297 for (i = 0; i < 16; i++)
13298 fprintf (stderr, "%d ", checksum[i]);
13299
13300 fprintf (stderr, "\n");
13301 }
13302
13303 #endif
13304
13305 /* Fold a unary tree expression with code CODE of type TYPE with an
13306 operand OP0. Return a folded expression if successful. Otherwise,
13307 return a tree expression with code CODE of type TYPE with an
13308 operand OP0. */
13309
13310 tree
13311 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13312 {
13313 tree tem;
13314 #ifdef ENABLE_FOLD_CHECKING
13315 unsigned char checksum_before[16], checksum_after[16];
13316 struct md5_ctx ctx;
13317 htab_t ht;
13318
13319 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13320 md5_init_ctx (&ctx);
13321 fold_checksum_tree (op0, &ctx, ht);
13322 md5_finish_ctx (&ctx, checksum_before);
13323 htab_empty (ht);
13324 #endif
13325
13326 tem = fold_unary (code, type, op0);
13327 if (!tem)
13328 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13329
13330 #ifdef ENABLE_FOLD_CHECKING
13331 md5_init_ctx (&ctx);
13332 fold_checksum_tree (op0, &ctx, ht);
13333 md5_finish_ctx (&ctx, checksum_after);
13334 htab_delete (ht);
13335
13336 if (memcmp (checksum_before, checksum_after, 16))
13337 fold_check_failed (op0, tem);
13338 #endif
13339 return tem;
13340 }
13341
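#if 0
/* Hypothetical usage sketch: callers normally go through the
fold_build1 macro wrapper rather than calling the _stat variant
directly. Double negation of an integral value typically folds
back to the original operand. */
static tree
sample_double_negate (tree x)
{
tree neg = fold_build1 (NEGATE_EXPR, TREE_TYPE (x), x);
return fold_build1 (NEGATE_EXPR, TREE_TYPE (x), neg);
}
#endif
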
13342 /* Fold a binary tree expression with code CODE of type TYPE with
13343 operands OP0 and OP1. Return a folded expression if successful.
13344 Otherwise, return a tree expression with code CODE of type TYPE
13345 with operands OP0 and OP1. */
13346
13347 tree
13348 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13349 MEM_STAT_DECL)
13350 {
13351 tree tem;
13352 #ifdef ENABLE_FOLD_CHECKING
13353 unsigned char checksum_before_op0[16],
13354 checksum_before_op1[16],
13355 checksum_after_op0[16],
13356 checksum_after_op1[16];
13357 struct md5_ctx ctx;
13358 htab_t ht;
13359
13360 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13361 md5_init_ctx (&ctx);
13362 fold_checksum_tree (op0, &ctx, ht);
13363 md5_finish_ctx (&ctx, checksum_before_op0);
13364 htab_empty (ht);
13365
13366 md5_init_ctx (&ctx);
13367 fold_checksum_tree (op1, &ctx, ht);
13368 md5_finish_ctx (&ctx, checksum_before_op1);
13369 htab_empty (ht);
13370 #endif
13371
13372 tem = fold_binary (code, type, op0, op1);
13373 if (!tem)
13374 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13375
13376 #ifdef ENABLE_FOLD_CHECKING
13377 md5_init_ctx (&ctx);
13378 fold_checksum_tree (op0, &ctx, ht);
13379 md5_finish_ctx (&ctx, checksum_after_op0);
13380 htab_empty (ht);
13381
13382 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13383 fold_check_failed (op0, tem);
13384
13385 md5_init_ctx (&ctx);
13386 fold_checksum_tree (op1, &ctx, ht);
13387 md5_finish_ctx (&ctx, checksum_after_op1);
13388 htab_delete (ht);
13389
13390 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13391 fold_check_failed (op1, tem);
13392 #endif
13393 return tem;
13394 }
13395
13396 /* Fold a ternary tree expression with code CODE of type TYPE with
13397 operands OP0, OP1, and OP2. Return a folded expression if
13398 successful. Otherwise, return a tree expression with code CODE of
13399 type TYPE with operands OP0, OP1, and OP2. */
13400
13401 tree
13402 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13403 MEM_STAT_DECL)
13404 {
13405 tree tem;
13406 #ifdef ENABLE_FOLD_CHECKING
13407 unsigned char checksum_before_op0[16],
13408 checksum_before_op1[16],
13409 checksum_before_op2[16],
13410 checksum_after_op0[16],
13411 checksum_after_op1[16],
13412 checksum_after_op2[16];
13413 struct md5_ctx ctx;
13414 htab_t ht;
13415
13416 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13417 md5_init_ctx (&ctx);
13418 fold_checksum_tree (op0, &ctx, ht);
13419 md5_finish_ctx (&ctx, checksum_before_op0);
13420 htab_empty (ht);
13421
13422 md5_init_ctx (&ctx);
13423 fold_checksum_tree (op1, &ctx, ht);
13424 md5_finish_ctx (&ctx, checksum_before_op1);
13425 htab_empty (ht);
13426
13427 md5_init_ctx (&ctx);
13428 fold_checksum_tree (op2, &ctx, ht);
13429 md5_finish_ctx (&ctx, checksum_before_op2);
13430 htab_empty (ht);
13431 #endif
13432
13433 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13434 tem = fold_ternary (code, type, op0, op1, op2);
13435 if (!tem)
13436 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13437
13438 #ifdef ENABLE_FOLD_CHECKING
13439 md5_init_ctx (&ctx);
13440 fold_checksum_tree (op0, &ctx, ht);
13441 md5_finish_ctx (&ctx, checksum_after_op0);
13442 htab_empty (ht);
13443
13444 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13445 fold_check_failed (op0, tem);
13446
13447 md5_init_ctx (&ctx);
13448 fold_checksum_tree (op1, &ctx, ht);
13449 md5_finish_ctx (&ctx, checksum_after_op1);
13450 htab_empty (ht);
13451
13452 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13453 fold_check_failed (op1, tem);
13454
13455 md5_init_ctx (&ctx);
13456 fold_checksum_tree (op2, &ctx, ht);
13457 md5_finish_ctx (&ctx, checksum_after_op2);
13458 htab_delete (ht);
13459
13460 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13461 fold_check_failed (op2, tem);
13462 #endif
13463 return tem;
13464 }
13465
13466 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
13467 arguments in ARGARRAY, and a null static chain.
13468 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13469 of type TYPE from the given operands as constructed by build_call_array. */
13470
13471 tree
13472 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13473 {
13474 tree tem;
13475 #ifdef ENABLE_FOLD_CHECKING
13476 unsigned char checksum_before_fn[16],
13477 checksum_before_arglist[16],
13478 checksum_after_fn[16],
13479 checksum_after_arglist[16];
13480 struct md5_ctx ctx;
13481 htab_t ht;
13482 int i;
13483
13484 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13485 md5_init_ctx (&ctx);
13486 fold_checksum_tree (fn, &ctx, ht);
13487 md5_finish_ctx (&ctx, checksum_before_fn);
13488 htab_empty (ht);
13489
13490 md5_init_ctx (&ctx);
13491 for (i = 0; i < nargs; i++)
13492 fold_checksum_tree (argarray[i], &ctx, ht);
13493 md5_finish_ctx (&ctx, checksum_before_arglist);
13494 htab_empty (ht);
13495 #endif
13496
13497 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13498
13499 #ifdef ENABLE_FOLD_CHECKING
13500 md5_init_ctx (&ctx);
13501 fold_checksum_tree (fn, &ctx, ht);
13502 md5_finish_ctx (&ctx, checksum_after_fn);
13503 htab_empty (ht);
13504
13505 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13506 fold_check_failed (fn, tem);
13507
13508 md5_init_ctx (&ctx);
13509 for (i = 0; i < nargs; i++)
13510 fold_checksum_tree (argarray[i], &ctx, ht);
13511 md5_finish_ctx (&ctx, checksum_after_arglist);
13512 htab_delete (ht);
13513
13514 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13515 fold_check_failed (NULL_TREE, tem);
13516 #endif
13517 return tem;
13518 }
13519
13520 /* Perform constant folding and related simplification when building
13521 initializer expressions. These behave identically to "fold_buildN" but
13522 ignore potential run-time traps and exceptions that fold must preserve. */
13523
13524 #define START_FOLD_INIT \
13525 int saved_signaling_nans = flag_signaling_nans;\
13526 int saved_trapping_math = flag_trapping_math;\
13527 int saved_rounding_math = flag_rounding_math;\
13528 int saved_trapv = flag_trapv;\
13529 int saved_folding_initializer = folding_initializer;\
13530 flag_signaling_nans = 0;\
13531 flag_trapping_math = 0;\
13532 flag_rounding_math = 0;\
13533 flag_trapv = 0;\
13534 folding_initializer = 1;
13535
13536 #define END_FOLD_INIT \
13537 flag_signaling_nans = saved_signaling_nans;\
13538 flag_trapping_math = saved_trapping_math;\
13539 flag_rounding_math = saved_rounding_math;\
13540 flag_trapv = saved_trapv;\
13541 folding_initializer = saved_folding_initializer;
13542
13543 tree
13544 fold_build1_initializer (enum tree_code code, tree type, tree op)
13545 {
13546 tree result;
13547 START_FOLD_INIT;
13548
13549 result = fold_build1 (code, type, op);
13550
13551 END_FOLD_INIT;
13552 return result;
13553 }
13554
13555 tree
13556 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13557 {
13558 tree result;
13559 START_FOLD_INIT;
13560
13561 result = fold_build2 (code, type, op0, op1);
13562
13563 END_FOLD_INIT;
13564 return result;
13565 }
13566
13567 tree
13568 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13569 tree op2)
13570 {
13571 tree result;
13572 START_FOLD_INIT;
13573
13574 result = fold_build3 (code, type, op0, op1, op2);
13575
13576 END_FOLD_INIT;
13577 return result;
13578 }
13579
13580 tree
13581 fold_build_call_array_initializer (tree type, tree fn,
13582 int nargs, tree *argarray)
13583 {
13584 tree result;
13585 START_FOLD_INIT;
13586
13587 result = fold_build_call_array (type, fn, nargs, argarray);
13588
13589 END_FOLD_INIT;
13590 return result;
13591 }
13592
13593 #undef START_FOLD_INIT
13594 #undef END_FOLD_INIT
13595
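#if 0
/* Hypothetical sketch: when folding a static initializer such as
"const double d = x / y;", the initializer variants above let the
division be evaluated at compile time even when flags like
-ftrapping-math would otherwise make fold preserve it. */
static tree
sample_fold_initializer (tree x, tree y)
{
return fold_build2_initializer (RDIV_EXPR, double_type_node, x, y);
}
#endif
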
13596 /* Determine if the first argument is a multiple of the second argument.
13597 Return 0 if it is not, or if we cannot easily determine it to be.
13598
13599 An example of the sort of thing we care about (at this point; this routine
13600 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13601 fold cases do now) is discovering that
13602
13603 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13604
13605 is a multiple of
13606
13607 SAVE_EXPR (J * 8)
13608
13609 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13610
13611 This code also handles discovering that
13612
13613 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13614
13615 is a multiple of 8 so we don't have to worry about dealing with a
13616 possible remainder.
13617
13618 Note that we *look* inside a SAVE_EXPR only to determine how it was
13619 calculated; it is not safe for fold to do much of anything else with the
13620 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13621 at run time. For example, the latter example above *cannot* be implemented
13622 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13623 evaluation time of the original SAVE_EXPR is not necessarily the same at
13624 the time the new expression is evaluated. The only optimization of this
13625 sort that would be valid is changing
13626
13627 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13628
13629 divided by 8 to
13630
13631 SAVE_EXPR (I) * SAVE_EXPR (J)
13632
13633 (where the same SAVE_EXPR (J) is used in the original and the
13634 transformed version). */
13635
13636 int
13637 multiple_of_p (tree type, const_tree top, const_tree bottom)
13638 {
13639 if (operand_equal_p (top, bottom, 0))
13640 return 1;
13641
13642 if (TREE_CODE (type) != INTEGER_TYPE)
13643 return 0;
13644
13645 switch (TREE_CODE (top))
13646 {
13647 case BIT_AND_EXPR:
13648 /* Bitwise and provides a power of two multiple. If the mask is
13649 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13650 if (!integer_pow2p (bottom))
13651 return 0;
13652 /* FALLTHRU */
13653
13654 case MULT_EXPR:
13655 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13656 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13657
13658 case PLUS_EXPR:
13659 case MINUS_EXPR:
13660 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13661 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13662
13663 case LSHIFT_EXPR:
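      /* X << C equals X * (1 << C), so TOP is a multiple of BOTTOM
	 whenever 1 << C is; e.g. I << 3 is always a multiple of 8.  */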
13664 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13665 {
13666 tree op1, t1;
13667
13668 op1 = TREE_OPERAND (top, 1);
13669 /* const_binop may not detect overflow correctly,
13670 so check for it explicitly here. */
13671 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13672 > TREE_INT_CST_LOW (op1)
13673 && TREE_INT_CST_HIGH (op1) == 0
13674 && 0 != (t1 = fold_convert (type,
13675 const_binop (LSHIFT_EXPR,
13676 size_one_node,
13677 op1, 0)))
13678 && !TREE_OVERFLOW (t1))
13679 return multiple_of_p (type, t1, bottom);
13680 }
13681 return 0;
13682
13683 case NOP_EXPR:
13684 /* Can't handle conversions from non-integral or wider integral type. */
13685 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13686 || (TYPE_PRECISION (type)
13687 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13688 return 0;
13689
      /* ... fall through ...  */
13691
13692 case SAVE_EXPR:
13693 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13694
13695 case INTEGER_CST:
13696 if (TREE_CODE (bottom) != INTEGER_CST
13697 || integer_zerop (bottom)
13698 || (TYPE_UNSIGNED (type)
13699 && (tree_int_cst_sgn (top) < 0
13700 || tree_int_cst_sgn (bottom) < 0)))
13701 return 0;
13702 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13703 top, bottom, 0));
13704
13705 default:
13706 return 0;
13707 }
13708 }
13709
/* Return true if an expression with code CODE and type TYPE is known
   to be non-negative.  */
13711
13712 static bool
13713 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13714 {
13715 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13716 && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the values are -1 and 0).  */
13719 return true;
13720 return false;
13721 }
13722
13723 /* Return true if (CODE OP0) is known to be non-negative. If the return
13724 value is based on the assumption that signed overflow is undefined,
13725 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13726 *STRICT_OVERFLOW_P. */
13727
13728 static bool
13729 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13730 bool *strict_overflow_p)
13731 {
13732 if (TYPE_UNSIGNED (type))
13733 return true;
13734
13735 switch (code)
13736 {
13737 case ABS_EXPR:
      /* We can't return true when signed overflow wraps (e.g. with
	 -fwrapv) because then ABS_EXPR<INT_MIN> = INT_MIN.  */
13740 if (!INTEGRAL_TYPE_P (type))
13741 return true;
13742 if (TYPE_OVERFLOW_UNDEFINED (type))
13743 {
13744 *strict_overflow_p = true;
13745 return true;
13746 }
13747 break;
13748
13749 case NON_LVALUE_EXPR:
13750 case FLOAT_EXPR:
13751 case FIX_TRUNC_EXPR:
13752 return tree_expr_nonnegative_warnv_p (op0,
13753 strict_overflow_p);
13754
13755 case NOP_EXPR:
13756 {
13757 tree inner_type = TREE_TYPE (op0);
13758 tree outer_type = type;
13759
13760 if (TREE_CODE (outer_type) == REAL_TYPE)
13761 {
13762 if (TREE_CODE (inner_type) == REAL_TYPE)
13763 return tree_expr_nonnegative_warnv_p (op0,
13764 strict_overflow_p);
13765 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13766 {
13767 if (TYPE_UNSIGNED (inner_type))
13768 return true;
13769 return tree_expr_nonnegative_warnv_p (op0,
13770 strict_overflow_p);
13771 }
13772 }
13773 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13774 {
13775 if (TREE_CODE (inner_type) == REAL_TYPE)
13776 return tree_expr_nonnegative_warnv_p (op0,
13777 strict_overflow_p);
13778 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13779 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13780 && TYPE_UNSIGNED (inner_type);
13781 }
13782 }
13783 break;
13784
13785 default:
13786 return tree_simple_nonnegative_warnv_p (code, type);
13787 }
13788
  /* We don't know the sign, so be conservative and return false.  */
13790 return false;
13791 }
13792
13793 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13794 value is based on the assumption that signed overflow is undefined,
13795 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13796 *STRICT_OVERFLOW_P. */
13797
13798 static bool
13799 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13800 tree op1, bool *strict_overflow_p)
13801 {
13802 if (TYPE_UNSIGNED (type))
13803 return true;
13804
13805 switch (code)
13806 {
13807 case POINTER_PLUS_EXPR:
13808 case PLUS_EXPR:
13809 if (FLOAT_TYPE_P (type))
13810 return (tree_expr_nonnegative_warnv_p (op0,
13811 strict_overflow_p)
13812 && tree_expr_nonnegative_warnv_p (op1,
13813 strict_overflow_p));
13814
13815 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13816 both unsigned and at least 2 bits shorter than the result. */
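      /* For example, with 32-bit int, (int) (unsigned short) x
	 + (int) (unsigned short) y: each addend is below 2^16, so the
	 sum is below 2^17 and can never reach the sign bit.  */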
13817 if (TREE_CODE (type) == INTEGER_TYPE
13818 && TREE_CODE (op0) == NOP_EXPR
13819 && TREE_CODE (op1) == NOP_EXPR)
13820 {
13821 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13822 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13823 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13824 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13825 {
13826 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13827 TYPE_PRECISION (inner2)) + 1;
13828 return prec < TYPE_PRECISION (type);
13829 }
13830 }
13831 break;
13832
13833 case MULT_EXPR:
13834 if (FLOAT_TYPE_P (type))
13835 {
13836 /* x * x for floating point x is always non-negative. */
13837 if (operand_equal_p (op0, op1, 0))
13838 return true;
13839 return (tree_expr_nonnegative_warnv_p (op0,
13840 strict_overflow_p)
13841 && tree_expr_nonnegative_warnv_p (op1,
13842 strict_overflow_p));
13843 }
13844
      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and the sum of their precisions is smaller than
	 the precision of the result.  */
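      /* For example, with 32-bit int, (int) (unsigned char) x
	 * (int) (unsigned char) y is at most 255 * 255 = 65025 < 2^16,
	 well below the sign bit.  */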
13847 if (TREE_CODE (type) == INTEGER_TYPE
13848 && TREE_CODE (op0) == NOP_EXPR
13849 && TREE_CODE (op1) == NOP_EXPR)
13850 {
13851 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13852 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13853 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13854 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13855 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13856 < TYPE_PRECISION (type);
13857 }
13858 return false;
13859
13860 case BIT_AND_EXPR:
13861 case MAX_EXPR:
13862 return (tree_expr_nonnegative_warnv_p (op0,
13863 strict_overflow_p)
13864 || tree_expr_nonnegative_warnv_p (op1,
13865 strict_overflow_p));
13866
13867 case BIT_IOR_EXPR:
13868 case BIT_XOR_EXPR:
13869 case MIN_EXPR:
13870 case RDIV_EXPR:
13871 case TRUNC_DIV_EXPR:
13872 case CEIL_DIV_EXPR:
13873 case FLOOR_DIV_EXPR:
13874 case ROUND_DIV_EXPR:
13875 return (tree_expr_nonnegative_warnv_p (op0,
13876 strict_overflow_p)
13877 && tree_expr_nonnegative_warnv_p (op1,
13878 strict_overflow_p));
13879
13880 case TRUNC_MOD_EXPR:
13881 case CEIL_MOD_EXPR:
13882 case FLOOR_MOD_EXPR:
13883 case ROUND_MOD_EXPR:
13884 return tree_expr_nonnegative_warnv_p (op0,
13885 strict_overflow_p);
13886 default:
13887 return tree_simple_nonnegative_warnv_p (code, type);
13888 }
13889
  /* We don't know the sign, so be conservative and return false.  */
13891 return false;
13892 }
13893
13894 /* Return true if T is known to be non-negative. If the return
13895 value is based on the assumption that signed overflow is undefined,
13896 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13897 *STRICT_OVERFLOW_P. */
13898
13899 static bool
13900 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13901 {
13902 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13903 return true;
13904
13905 switch (TREE_CODE (t))
13906 {
13907 case SSA_NAME:
13908 /* Query VRP to see if it has recorded any information about
13909 the range of this object. */
13910 return ssa_name_nonnegative_p (t);
13911
13912 case INTEGER_CST:
13913 return tree_int_cst_sgn (t) >= 0;
13914
13915 case REAL_CST:
13916 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13917
13918 case FIXED_CST:
13919 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13920
13921 case COND_EXPR:
13922 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13923 strict_overflow_p)
13924 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13925 strict_overflow_p));
13926 default:
13927 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13928 TREE_TYPE (t));
13929 }
  /* We don't know the sign of `t', so be conservative and return false.  */
13931 return false;
13932 }
13933
13934 /* Return true if T is known to be non-negative. If the return
13935 value is based on the assumption that signed overflow is undefined,
13936 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13937 *STRICT_OVERFLOW_P. */
13938
13939 static bool
13940 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13941 {
13942 enum tree_code code = TREE_CODE (t);
13943 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13944 return true;
13945
13946 switch (code)
13947 {
13948 case TARGET_EXPR:
13949 {
13950 tree temp = TARGET_EXPR_SLOT (t);
13951 t = TARGET_EXPR_INITIAL (t);
13952
13953 /* If the initializer is non-void, then it's a normal expression
13954 that will be assigned to the slot. */
13955 if (!VOID_TYPE_P (t))
13956 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13957
13958 /* Otherwise, the initializer sets the slot in some way. One common
13959 way is an assignment statement at the end of the initializer. */
13960 while (1)
13961 {
13962 if (TREE_CODE (t) == BIND_EXPR)
13963 t = expr_last (BIND_EXPR_BODY (t));
13964 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13965 || TREE_CODE (t) == TRY_CATCH_EXPR)
13966 t = expr_last (TREE_OPERAND (t, 0));
13967 else if (TREE_CODE (t) == STATEMENT_LIST)
13968 t = expr_last (t);
13969 else
13970 break;
13971 }
13972 if ((TREE_CODE (t) == MODIFY_EXPR
13973 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13974 && GENERIC_TREE_OPERAND (t, 0) == temp)
13975 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13976 strict_overflow_p);
13977
13978 return false;
13979 }
13980
13981 case CALL_EXPR:
13982 {
13983 tree fndecl = get_callee_fndecl (t);
13984 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13985 switch (DECL_FUNCTION_CODE (fndecl))
13986 {
13987 CASE_FLT_FN (BUILT_IN_ACOS):
13988 CASE_FLT_FN (BUILT_IN_ACOSH):
13989 CASE_FLT_FN (BUILT_IN_CABS):
13990 CASE_FLT_FN (BUILT_IN_COSH):
13991 CASE_FLT_FN (BUILT_IN_ERFC):
13992 CASE_FLT_FN (BUILT_IN_EXP):
13993 CASE_FLT_FN (BUILT_IN_EXP10):
13994 CASE_FLT_FN (BUILT_IN_EXP2):
13995 CASE_FLT_FN (BUILT_IN_FABS):
13996 CASE_FLT_FN (BUILT_IN_FDIM):
13997 CASE_FLT_FN (BUILT_IN_HYPOT):
13998 CASE_FLT_FN (BUILT_IN_POW10):
13999 CASE_INT_FN (BUILT_IN_FFS):
14000 CASE_INT_FN (BUILT_IN_PARITY):
14001 CASE_INT_FN (BUILT_IN_POPCOUNT):
14002 case BUILT_IN_BSWAP32:
14003 case BUILT_IN_BSWAP64:
14004 /* Always true. */
14005 return true;
14006
14007 CASE_FLT_FN (BUILT_IN_SQRT):
14008 /* sqrt(-0.0) is -0.0. */
14009 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
14010 return true;
14011 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14012 strict_overflow_p);
14013
14014 CASE_FLT_FN (BUILT_IN_ASINH):
14015 CASE_FLT_FN (BUILT_IN_ATAN):
14016 CASE_FLT_FN (BUILT_IN_ATANH):
14017 CASE_FLT_FN (BUILT_IN_CBRT):
14018 CASE_FLT_FN (BUILT_IN_CEIL):
14019 CASE_FLT_FN (BUILT_IN_ERF):
14020 CASE_FLT_FN (BUILT_IN_EXPM1):
14021 CASE_FLT_FN (BUILT_IN_FLOOR):
14022 CASE_FLT_FN (BUILT_IN_FMOD):
14023 CASE_FLT_FN (BUILT_IN_FREXP):
14024 CASE_FLT_FN (BUILT_IN_LCEIL):
14025 CASE_FLT_FN (BUILT_IN_LDEXP):
14026 CASE_FLT_FN (BUILT_IN_LFLOOR):
14027 CASE_FLT_FN (BUILT_IN_LLCEIL):
14028 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14029 CASE_FLT_FN (BUILT_IN_LLRINT):
14030 CASE_FLT_FN (BUILT_IN_LLROUND):
14031 CASE_FLT_FN (BUILT_IN_LRINT):
14032 CASE_FLT_FN (BUILT_IN_LROUND):
14033 CASE_FLT_FN (BUILT_IN_MODF):
14034 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14035 CASE_FLT_FN (BUILT_IN_RINT):
14036 CASE_FLT_FN (BUILT_IN_ROUND):
14037 CASE_FLT_FN (BUILT_IN_SCALB):
14038 CASE_FLT_FN (BUILT_IN_SCALBLN):
14039 CASE_FLT_FN (BUILT_IN_SCALBN):
14040 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14041 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14042 CASE_FLT_FN (BUILT_IN_SINH):
14043 CASE_FLT_FN (BUILT_IN_TANH):
14044 CASE_FLT_FN (BUILT_IN_TRUNC):
14045 /* True if the 1st argument is nonnegative. */
14046 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14047 strict_overflow_p);
14048
14049 CASE_FLT_FN (BUILT_IN_FMAX):
	    /* True if either the 1st or the 2nd argument is nonnegative.  */
14051 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14052 strict_overflow_p)
14053 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14054 strict_overflow_p)));
14055
14056 CASE_FLT_FN (BUILT_IN_FMIN):
14057 /* True if the 1st AND 2nd arguments are nonnegative. */
14058 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14059 strict_overflow_p)
14060 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14061 strict_overflow_p)));
14062
14063 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14064 /* True if the 2nd argument is nonnegative. */
14065 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14066 strict_overflow_p);
14067
14068 CASE_FLT_FN (BUILT_IN_POWI):
14069 /* True if the 1st argument is nonnegative or the second
14070 argument is an even integer. */
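	    /* E.g. powi (x, 2) is x * x, which is never negative.  */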
14071 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
14072 {
14073 tree arg1 = CALL_EXPR_ARG (t, 1);
14074 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
14075 return true;
14076 }
14077 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14078 strict_overflow_p);
14079
14080 CASE_FLT_FN (BUILT_IN_POW):
14081 /* True if the 1st argument is nonnegative or the second
14082 argument is an even integer valued real. */
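	    /* E.g. pow (x, 2.0) is x * x and thus never negative,
	       while pow (x, 2.5) is NaN for negative x.  */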
14083 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
14084 {
14085 REAL_VALUE_TYPE c;
14086 HOST_WIDE_INT n;
14087
14088 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
14089 n = real_to_integer (&c);
14090 if ((n & 1) == 0)
14091 {
14092 REAL_VALUE_TYPE cint;
14093 real_from_integer (&cint, VOIDmode, n,
14094 n < 0 ? -1 : 0, 0);
14095 if (real_identical (&c, &cint))
14096 return true;
14097 }
14098 }
14099 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14100 strict_overflow_p);
14101
14102 default:
14103 break;
14104 }
14105 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14106 TREE_TYPE (t));
14107 }
14108 break;
14109
14110 case COMPOUND_EXPR:
14111 case MODIFY_EXPR:
14112 case GIMPLE_MODIFY_STMT:
14113 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14114 strict_overflow_p);
14115 case BIND_EXPR:
14116 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14117 strict_overflow_p);
14118 case SAVE_EXPR:
14119 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14120 strict_overflow_p);
14121
14122 default:
14123 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14124 TREE_TYPE (t));
14125 }
14126
  /* We don't know the sign of `t', so be conservative and return false.  */
14128 return false;
14129 }
14130
14131 /* Return true if T is known to be non-negative. If the return
14132 value is based on the assumption that signed overflow is undefined,
14133 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14134 *STRICT_OVERFLOW_P. */
14135
14136 bool
14137 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14138 {
14139 enum tree_code code;
14140 if (t == error_mark_node)
14141 return false;
14142
14143 code = TREE_CODE (t);
14144 switch (TREE_CODE_CLASS (code))
14145 {
14146 case tcc_binary:
14147 case tcc_comparison:
14148 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14149 TREE_TYPE (t),
14150 TREE_OPERAND (t, 0),
14151 TREE_OPERAND (t, 1),
14152 strict_overflow_p);
14153
14154 case tcc_unary:
14155 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14156 TREE_TYPE (t),
14157 TREE_OPERAND (t, 0),
14158 strict_overflow_p);
14159
14160 case tcc_constant:
14161 case tcc_declaration:
14162 case tcc_reference:
14163 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14164
14165 default:
14166 break;
14167 }
14168
14169 switch (code)
14170 {
14171 case TRUTH_AND_EXPR:
14172 case TRUTH_OR_EXPR:
14173 case TRUTH_XOR_EXPR:
14174 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14175 TREE_TYPE (t),
14176 TREE_OPERAND (t, 0),
14177 TREE_OPERAND (t, 1),
14178 strict_overflow_p);
14179 case TRUTH_NOT_EXPR:
14180 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14181 TREE_TYPE (t),
14182 TREE_OPERAND (t, 0),
14183 strict_overflow_p);
14184
14185 case COND_EXPR:
14186 case CONSTRUCTOR:
14187 case OBJ_TYPE_REF:
14188 case ASSERT_EXPR:
14189 case ADDR_EXPR:
14190 case WITH_SIZE_EXPR:
14191 case EXC_PTR_EXPR:
14192 case SSA_NAME:
14193 case FILTER_EXPR:
14194 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14195
14196 default:
14197 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14198 }
14199 }
14200
14201 /* Return true if `t' is known to be non-negative. Handle warnings
14202 about undefined signed overflow. */
14203
14204 bool
14205 tree_expr_nonnegative_p (tree t)
14206 {
14207 bool ret, strict_overflow_p;
14208
14209 strict_overflow_p = false;
14210 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14211 if (strict_overflow_p)
14212 fold_overflow_warning (("assuming signed overflow does not occur when "
14213 "determining that expression is always "
14214 "non-negative"),
14215 WARN_STRICT_OVERFLOW_MISC);
14216 return ret;
14217 }
14218
14219
/* Return true when (CODE OP0) is known to be nonzero.  Similar logic
   is present in nonzero_address_p in rtlanal.c.
14223
14224 If the return value is based on the assumption that signed overflow
14225 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14226 change *STRICT_OVERFLOW_P. */
14227
14228 static bool
14229 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14230 bool *strict_overflow_p)
14231 {
14232 switch (code)
14233 {
14234 case ABS_EXPR:
14235 return tree_expr_nonzero_warnv_p (op0,
14236 strict_overflow_p);
14237
14238 case NOP_EXPR:
14239 {
14240 tree inner_type = TREE_TYPE (op0);
14241 tree outer_type = type;
14242
14243 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14244 && tree_expr_nonzero_warnv_p (op0,
14245 strict_overflow_p));
14246 }
14247 break;
14248
14249 case NON_LVALUE_EXPR:
14250 return tree_expr_nonzero_warnv_p (op0,
14251 strict_overflow_p);
14252
14253 default:
14254 break;
14255 }
14256
14257 return false;
14258 }
14259
/* Return true when (CODE OP0 OP1) is known to be nonzero.  Similar
   logic is present in nonzero_address_p in rtlanal.c.
14263
14264 If the return value is based on the assumption that signed overflow
14265 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14266 change *STRICT_OVERFLOW_P. */
14267
14268 static bool
14269 tree_binary_nonzero_warnv_p (enum tree_code code,
14270 tree type,
14271 tree op0,
14272 tree op1, bool *strict_overflow_p)
14273 {
14274 bool sub_strict_overflow_p;
14275 switch (code)
14276 {
14277 case POINTER_PLUS_EXPR:
14278 case PLUS_EXPR:
14279 if (TYPE_OVERFLOW_UNDEFINED (type))
14280 {
	  /* In the presence of negative values it is hard
	     to say anything definite.  */
14283 sub_strict_overflow_p = false;
14284 if (!tree_expr_nonnegative_warnv_p (op0,
14285 &sub_strict_overflow_p)
14286 || !tree_expr_nonnegative_warnv_p (op1,
14287 &sub_strict_overflow_p))
14288 return false;
	  /* One of the operands must be positive and the other non-negative.  */
14290 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14291 overflows, on a twos-complement machine the sum of two
14292 nonnegative numbers can never be zero. */
14293 return (tree_expr_nonzero_warnv_p (op0,
14294 strict_overflow_p)
14295 || tree_expr_nonzero_warnv_p (op1,
14296 strict_overflow_p));
14297 }
14298 break;
14299
14300 case MULT_EXPR:
14301 if (TYPE_OVERFLOW_UNDEFINED (type))
14302 {
14303 if (tree_expr_nonzero_warnv_p (op0,
14304 strict_overflow_p)
14305 && tree_expr_nonzero_warnv_p (op1,
14306 strict_overflow_p))
14307 {
14308 *strict_overflow_p = true;
14309 return true;
14310 }
14311 }
14312 break;
14313
    case MIN_EXPR:
      /* MIN of two nonzero values is one of them and hence nonzero.  */
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;
14325
14326 case MAX_EXPR:
14327 sub_strict_overflow_p = false;
14328 if (tree_expr_nonzero_warnv_p (op0,
14329 &sub_strict_overflow_p))
14330 {
14331 if (sub_strict_overflow_p)
14332 *strict_overflow_p = true;
14333
14334 /* When both operands are nonzero, then MAX must be too. */
14335 if (tree_expr_nonzero_warnv_p (op1,
14336 strict_overflow_p))
14337 return true;
14338
14339 /* MAX where operand 0 is positive is positive. */
14340 return tree_expr_nonnegative_warnv_p (op0,
14341 strict_overflow_p);
14342 }
14343 /* MAX where operand 1 is positive is positive. */
14344 else if (tree_expr_nonzero_warnv_p (op1,
14345 &sub_strict_overflow_p)
14346 && tree_expr_nonnegative_warnv_p (op1,
14347 &sub_strict_overflow_p))
14348 {
14349 if (sub_strict_overflow_p)
14350 *strict_overflow_p = true;
14351 return true;
14352 }
14353 break;
14354
14355 case BIT_IOR_EXPR:
14356 return (tree_expr_nonzero_warnv_p (op1,
14357 strict_overflow_p)
14358 || tree_expr_nonzero_warnv_p (op0,
14359 strict_overflow_p));
14360
14361 default:
14362 break;
14363 }
14364
14365 return false;
14366 }
14367
/* Return true when T is known to be nonzero.  For an address we check
   that the target cannot be weak (and thus possibly NULL).  Similar
   logic is present in nonzero_address_p in rtlanal.c.
14371
14372 If the return value is based on the assumption that signed overflow
14373 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14374 change *STRICT_OVERFLOW_P. */
14375
14376 static bool
14377 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14378 {
14379 bool sub_strict_overflow_p;
14380 switch (TREE_CODE (t))
14381 {
14382 case SSA_NAME:
14383 /* Query VRP to see if it has recorded any information about
14384 the range of this object. */
14385 return ssa_name_nonzero_p (t);
14386
14387 case INTEGER_CST:
14388 return !integer_zerop (t);
14389
14390 case ADDR_EXPR:
14391 {
14392 tree base = get_base_address (TREE_OPERAND (t, 0));
14393
14394 if (!base)
14395 return false;
14396
14397 /* Weak declarations may link to NULL. */
14398 if (VAR_OR_FUNCTION_DECL_P (base))
14399 return !DECL_WEAK (base);
14400
14401 /* Constants are never weak. */
14402 if (CONSTANT_CLASS_P (base))
14403 return true;
14404
14405 return false;
14406 }
14407
14408 case COND_EXPR:
14409 sub_strict_overflow_p = false;
14410 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14411 &sub_strict_overflow_p)
14412 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14413 &sub_strict_overflow_p))
14414 {
14415 if (sub_strict_overflow_p)
14416 *strict_overflow_p = true;
14417 return true;
14418 }
14419 break;
14420
14421 default:
14422 break;
14423 }
14424 return false;
14425 }
14426
/* Return true when T is known to be nonzero.  Floating point types
   are not handled.  Similar logic is present in nonzero_address_p in
   rtlanal.c.
14430
14431 If the return value is based on the assumption that signed overflow
14432 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14433 change *STRICT_OVERFLOW_P. */
14434
14435 bool
14436 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14437 {
14438 tree type = TREE_TYPE (t);
14439 enum tree_code code;
14440
14441 /* Doing something useful for floating point would need more work. */
14442 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14443 return false;
14444
14445 code = TREE_CODE (t);
14446 switch (TREE_CODE_CLASS (code))
14447 {
14448 case tcc_unary:
14449 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14450 strict_overflow_p);
14451 case tcc_binary:
14452 case tcc_comparison:
14453 return tree_binary_nonzero_warnv_p (code, type,
14454 TREE_OPERAND (t, 0),
14455 TREE_OPERAND (t, 1),
14456 strict_overflow_p);
14457 case tcc_constant:
14458 case tcc_declaration:
14459 case tcc_reference:
14460 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14461
14462 default:
14463 break;
14464 }
14465
14466 switch (code)
14467 {
14468 case TRUTH_NOT_EXPR:
14469 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14470 strict_overflow_p);
14471
14472 case TRUTH_AND_EXPR:
14473 case TRUTH_OR_EXPR:
14474 case TRUTH_XOR_EXPR:
14475 return tree_binary_nonzero_warnv_p (code, type,
14476 TREE_OPERAND (t, 0),
14477 TREE_OPERAND (t, 1),
14478 strict_overflow_p);
14479
14480 case COND_EXPR:
14481 case CONSTRUCTOR:
14482 case OBJ_TYPE_REF:
14483 case ASSERT_EXPR:
14484 case ADDR_EXPR:
14485 case WITH_SIZE_EXPR:
14486 case EXC_PTR_EXPR:
14487 case SSA_NAME:
14488 case FILTER_EXPR:
14489 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14490
14491 case COMPOUND_EXPR:
14492 case MODIFY_EXPR:
14493 case GIMPLE_MODIFY_STMT:
14494 case BIND_EXPR:
14495 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14496 strict_overflow_p);
14497
14498 case SAVE_EXPR:
14499 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14500 strict_overflow_p);
14501
14502 case CALL_EXPR:
14503 return alloca_call_p (t);
14504
14505 default:
14506 break;
14507 }
14508 return false;
14509 }
14510
/* Return true when T is known to be nonzero.  Handle warnings about
   undefined signed overflow.  */
14513
14514 bool
14515 tree_expr_nonzero_p (tree t)
14516 {
14517 bool ret, strict_overflow_p;
14518
14519 strict_overflow_p = false;
14520 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14521 if (strict_overflow_p)
14522 fold_overflow_warning (("assuming signed overflow does not occur when "
14523 "determining that expression is always "
14524 "non-zero"),
14525 WARN_STRICT_OVERFLOW_MISC);
14526 return ret;
14527 }
14528
14529 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14530 attempt to fold the expression to a constant without modifying TYPE,
14531 OP0 or OP1.
14532
14533 If the expression could be simplified to a constant, then return
14534 the constant. If the expression would not be simplified to a
14535 constant, then return NULL_TREE. */
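/* For example, PLUS_EXPR on the constants 1 and 2 yields 3, whereas
   PLUS_EXPR on 1 and a variable yields NULL_TREE, since the result is
   not constant.  */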
14536
14537 tree
14538 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14539 {
14540 tree tem = fold_binary (code, type, op0, op1);
14541 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14542 }
14543
14544 /* Given the components of a unary expression CODE, TYPE and OP0,
14545 attempt to fold the expression to a constant without modifying
14546 TYPE or OP0.
14547
14548 If the expression could be simplified to a constant, then return
14549 the constant. If the expression would not be simplified to a
14550 constant, then return NULL_TREE. */
14551
14552 tree
14553 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14554 {
14555 tree tem = fold_unary (code, type, op0);
14556 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14557 }
14558
14559 /* If EXP represents referencing an element in a constant string
14560 (either via pointer arithmetic or array indexing), return the
14561 tree representing the value accessed, otherwise return NULL. */
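/* For example, "abc"[1] and *("abc" + 1) both fold to the character
   constant 'b'.  */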
14562
14563 tree
14564 fold_read_from_constant_string (tree exp)
14565 {
14566 if ((TREE_CODE (exp) == INDIRECT_REF
14567 || TREE_CODE (exp) == ARRAY_REF)
14568 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14569 {
14570 tree exp1 = TREE_OPERAND (exp, 0);
14571 tree index;
14572 tree string;
14573
14574 if (TREE_CODE (exp) == INDIRECT_REF)
14575 string = string_constant (exp1, &index);
14576 else
14577 {
14578 tree low_bound = array_ref_low_bound (exp);
14579 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14580
14581 /* Optimize the special-case of a zero lower bound.
14582
14583 We convert the low_bound to sizetype to avoid some problems
14584 with constant folding. (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
14588 if (! integer_zerop (low_bound))
14589 index = size_diffop (index, fold_convert (sizetype, low_bound));
14590
14591 string = exp1;
14592 }
14593
14594 if (string
14595 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14596 && TREE_CODE (string) == STRING_CST
14597 && TREE_CODE (index) == INTEGER_CST
14598 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14599 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14600 == MODE_INT)
14601 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14602 return build_int_cst_type (TREE_TYPE (exp),
14603 (TREE_STRING_POINTER (string)
14604 [TREE_INT_CST_LOW (index)]));
14605 }
14606 return NULL;
14607 }
14608
14609 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14610 an integer constant, real, or fixed-point constant.
14611
14612 TYPE is the type of the result. */
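/* For example, negating the most negative value of a signed type
   wraps back to itself; the result is built with TREE_OVERFLOW set so
   callers can tell that the negation overflowed.  */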
14613
14614 static tree
14615 fold_negate_const (tree arg0, tree type)
14616 {
14617 tree t = NULL_TREE;
14618
14619 switch (TREE_CODE (arg0))
14620 {
14621 case INTEGER_CST:
14622 {
14623 unsigned HOST_WIDE_INT low;
14624 HOST_WIDE_INT high;
14625 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14626 TREE_INT_CST_HIGH (arg0),
14627 &low, &high);
14628 t = force_fit_type_double (type, low, high, 1,
14629 (overflow | TREE_OVERFLOW (arg0))
14630 && !TYPE_UNSIGNED (type));
14631 break;
14632 }
14633
14634 case REAL_CST:
14635 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14636 break;
14637
14638 case FIXED_CST:
14639 {
14640 FIXED_VALUE_TYPE f;
14641 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14642 &(TREE_FIXED_CST (arg0)), NULL,
14643 TYPE_SATURATING (type));
14644 t = build_fixed (type, f);
14645 /* Propagate overflow flags. */
14646 if (overflow_p | TREE_OVERFLOW (arg0))
14647 {
14648 TREE_OVERFLOW (t) = 1;
14649 TREE_CONSTANT_OVERFLOW (t) = 1;
14650 }
14651 else if (TREE_CONSTANT_OVERFLOW (arg0))
14652 TREE_CONSTANT_OVERFLOW (t) = 1;
14653 break;
14654 }
14655
14656 default:
14657 gcc_unreachable ();
14658 }
14659
14660 return t;
14661 }
14662
14663 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14664 an integer constant or real constant.
14665
14666 TYPE is the type of the result. */
14667
14668 tree
14669 fold_abs_const (tree arg0, tree type)
14670 {
14671 tree t = NULL_TREE;
14672
14673 switch (TREE_CODE (arg0))
14674 {
14675 case INTEGER_CST:
14676 /* If the value is unsigned, then the absolute value is
14677 the same as the ordinary value. */
14678 if (TYPE_UNSIGNED (type))
14679 t = arg0;
14680 /* Similarly, if the value is non-negative. */
14681 else if (INT_CST_LT (integer_minus_one_node, arg0))
14682 t = arg0;
14683 /* If the value is negative, then the absolute value is
14684 its negation. */
14685 else
14686 {
14687 unsigned HOST_WIDE_INT low;
14688 HOST_WIDE_INT high;
14689 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14690 TREE_INT_CST_HIGH (arg0),
14691 &low, &high);
14692 t = force_fit_type_double (type, low, high, -1,
14693 overflow | TREE_OVERFLOW (arg0));
14694 }
14695 break;
14696
14697 case REAL_CST:
14698 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14699 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14700 else
14701 t = arg0;
14702 break;
14703
14704 default:
14705 gcc_unreachable ();
14706 }
14707
14708 return t;
14709 }
14710
14711 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14712 constant. TYPE is the type of the result. */
14713
14714 static tree
14715 fold_not_const (tree arg0, tree type)
14716 {
14717 tree t = NULL_TREE;
14718
14719 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14720
14721 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14722 ~TREE_INT_CST_HIGH (arg0), 0,
14723 TREE_OVERFLOW (arg0));
14724
14725 return t;
14726 }
14727
14728 /* Given CODE, a relational operator, the target type, TYPE and two
14729 constant operands OP0 and OP1, return the result of the
14730 relational operation. If the result is not a compile time
14731 constant, then return NULL_TREE. */
14732
14733 static tree
14734 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14735 {
14736 int result, invert;
14737
14738 /* From here on, the only cases we handle are when the result is
14739 known to be a constant. */
14740
14741 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14742 {
14743 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14744 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14745
14746 /* Handle the cases where either operand is a NaN. */
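      /* Under IEEE semantics, == and != compare quietly against a NaN,
	 while the ordered comparisons <, <=, > and >= raise an invalid
	 operation exception; that is why the latter are left unfolded
	 below when flag_trapping_math is set.  */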
14747 if (real_isnan (c0) || real_isnan (c1))
14748 {
14749 switch (code)
14750 {
14751 case EQ_EXPR:
14752 case ORDERED_EXPR:
14753 result = 0;
14754 break;
14755
14756 case NE_EXPR:
14757 case UNORDERED_EXPR:
14758 case UNLT_EXPR:
14759 case UNLE_EXPR:
14760 case UNGT_EXPR:
14761 case UNGE_EXPR:
14762 case UNEQ_EXPR:
14763 result = 1;
14764 break;
14765
14766 case LT_EXPR:
14767 case LE_EXPR:
14768 case GT_EXPR:
14769 case GE_EXPR:
14770 case LTGT_EXPR:
14771 if (flag_trapping_math)
14772 return NULL_TREE;
14773 result = 0;
14774 break;
14775
14776 default:
14777 gcc_unreachable ();
14778 }
14779
14780 return constant_boolean_node (result, type);
14781 }
14782
14783 return constant_boolean_node (real_compare (code, c0, c1), type);
14784 }
14785
14786 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14787 {
14788 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14789 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14790 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14791 }
14792
14793 /* Handle equality/inequality of complex constants. */
14794 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14795 {
14796 tree rcond = fold_relational_const (code, type,
14797 TREE_REALPART (op0),
14798 TREE_REALPART (op1));
14799 tree icond = fold_relational_const (code, type,
14800 TREE_IMAGPART (op0),
14801 TREE_IMAGPART (op1));
14802 if (code == EQ_EXPR)
14803 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14804 else if (code == NE_EXPR)
14805 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14806 else
14807 return NULL_TREE;
14808 }
14809
14810 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14811
14812 To compute GT, swap the arguments and do LT.
14813 To compute GE, do LT and invert the result.
14814 To compute LE, swap the arguments, do LT and invert the result.
14815 To compute NE, do EQ and invert the result.
14816
14817 Therefore, the code below must handle only EQ and LT. */
14818
14819 if (code == LE_EXPR || code == GT_EXPR)
14820 {
14821 tree tem = op0;
14822 op0 = op1;
14823 op1 = tem;
14824 code = swap_tree_comparison (code);
14825 }
14826
  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */
14829
14830 invert = 0;
14831 if (code == NE_EXPR || code == GE_EXPR)
14832 {
14833 invert = 1;
14834 code = invert_tree_comparison (code, false);
14835 }
14836
  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
14839 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14840 {
14841 if (code == EQ_EXPR)
14842 result = tree_int_cst_equal (op0, op1);
14843 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14844 result = INT_CST_LT_UNSIGNED (op0, op1);
14845 else
14846 result = INT_CST_LT (op0, op1);
14847 }
14848 else
14849 return NULL_TREE;
14850
14851 if (invert)
14852 result ^= 1;
14853 return constant_boolean_node (result, type);
14854 }
14855
14856 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14857 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14858 itself. */
14859
14860 tree
14861 fold_build_cleanup_point_expr (tree type, tree expr)
14862 {
14863 /* If the expression does not have side effects then we don't have to wrap
14864 it with a cleanup point expression. */
14865 if (!TREE_SIDE_EFFECTS (expr))
14866 return expr;
14867
  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the modify expression inside
     the return, is free of side effects.  If either has no side effects
     we don't need to wrap the expression in a cleanup point expression.
     Note we don't check the left-hand side of the modify because it
     should always be a return decl.  */
14873 if (TREE_CODE (expr) == RETURN_EXPR)
14874 {
14875 tree op = TREE_OPERAND (expr, 0);
14876 if (!op || !TREE_SIDE_EFFECTS (op))
14877 return expr;
14878 op = TREE_OPERAND (op, 1);
14879 if (!TREE_SIDE_EFFECTS (op))
14880 return expr;
14881 }
14882
14883 return build1 (CLEANUP_POINT_EXPR, type, expr);
14884 }
14885
14886 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14887 of an indirection through OP0, or NULL_TREE if no simplification is
14888 possible. */
14889
14890 tree
14891 fold_indirect_ref_1 (tree type, tree op0)
14892 {
14893 tree sub = op0;
14894 tree subtype;
14895
14896 STRIP_NOPS (sub);
14897 subtype = TREE_TYPE (sub);
14898 if (!POINTER_TYPE_P (subtype))
14899 return NULL_TREE;
14900
14901 if (TREE_CODE (sub) == ADDR_EXPR)
14902 {
14903 tree op = TREE_OPERAND (sub, 0);
14904 tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
14906 if (TREE_CODE (op) == CONST_DECL)
14907 return DECL_INITIAL (op);
14908 /* *&p => p; make sure to handle *&"str"[cst] here. */
14909 if (type == optype)
14910 {
14911 tree fop = fold_read_from_constant_string (op);
14912 if (fop)
14913 return fop;
14914 else
14915 return op;
14916 }
14917 /* *(foo *)&fooarray => fooarray[0] */
14918 else if (TREE_CODE (optype) == ARRAY_TYPE
14919 && type == TREE_TYPE (optype))
14920 {
14921 tree type_domain = TYPE_DOMAIN (optype);
14922 tree min_val = size_zero_node;
14923 if (type_domain && TYPE_MIN_VALUE (type_domain))
14924 min_val = TYPE_MIN_VALUE (type_domain);
14925 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14926 }
14927 /* *(foo *)&complexfoo => __real__ complexfoo */
14928 else if (TREE_CODE (optype) == COMPLEX_TYPE
14929 && type == TREE_TYPE (optype))
14930 return fold_build1 (REALPART_EXPR, type, op);
14931 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14932 else if (TREE_CODE (optype) == VECTOR_TYPE
14933 && type == TREE_TYPE (optype))
14934 {
14935 tree part_width = TYPE_SIZE (type);
14936 tree index = bitsize_int (0);
14937 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14938 }
14939 }
14940
14941 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14942 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14943 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14944 {
14945 tree op00 = TREE_OPERAND (sub, 0);
14946 tree op01 = TREE_OPERAND (sub, 1);
14947 tree op00type;
14948
14949 STRIP_NOPS (op00);
14950 op00type = TREE_TYPE (op00);
14951 if (TREE_CODE (op00) == ADDR_EXPR
14952 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14953 && type == TREE_TYPE (TREE_TYPE (op00type)))
14954 {
14955 tree size = TYPE_SIZE_UNIT (type);
14956 if (tree_int_cst_equal (size, op01))
14957 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14958 }
14959 }
14960
14961 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14962 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14963 && type == TREE_TYPE (TREE_TYPE (subtype)))
14964 {
14965 tree type_domain;
14966 tree min_val = size_zero_node;
14967 sub = build_fold_indirect_ref (sub);
14968 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14969 if (type_domain && TYPE_MIN_VALUE (type_domain))
14970 min_val = TYPE_MIN_VALUE (type_domain);
14971 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14972 }
14973
14974 return NULL_TREE;
14975 }
14976
14977 /* Builds an expression for an indirection through T, simplifying some
14978 cases. */
14979
14980 tree
14981 build_fold_indirect_ref (tree t)
14982 {
14983 tree type = TREE_TYPE (TREE_TYPE (t));
14984 tree sub = fold_indirect_ref_1 (type, t);
14985
14986 if (sub)
14987 return sub;
14988 else
14989 return build1 (INDIRECT_REF, type, t);
14990 }
14991
14992 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14993
14994 tree
14995 fold_indirect_ref (tree t)
14996 {
14997 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14998
14999 if (sub)
15000 return sub;
15001 else
15002 return t;
15003 }
15004
15005 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15006 whose result is ignored. The type of the returned tree need not be
15007 the same as the original expression. */
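/* For example, given (x + (y = 5)) with the result unused, the
   side-effect-free addition is stripped and y = 5 is returned.  */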
15008
15009 tree
15010 fold_ignored_result (tree t)
15011 {
15012 if (!TREE_SIDE_EFFECTS (t))
15013 return integer_zero_node;
15014
15015 for (;;)
15016 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15017 {
15018 case tcc_unary:
15019 t = TREE_OPERAND (t, 0);
15020 break;
15021
15022 case tcc_binary:
15023 case tcc_comparison:
15024 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15025 t = TREE_OPERAND (t, 0);
15026 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15027 t = TREE_OPERAND (t, 1);
15028 else
15029 return t;
15030 break;
15031
15032 case tcc_expression:
15033 switch (TREE_CODE (t))
15034 {
15035 case COMPOUND_EXPR:
15036 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15037 return t;
15038 t = TREE_OPERAND (t, 0);
15039 break;
15040
15041 case COND_EXPR:
15042 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15043 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15044 return t;
15045 t = TREE_OPERAND (t, 0);
15046 break;
15047
15048 default:
15049 return t;
15050 }
15051 break;
15052
15053 default:
15054 return t;
15055 }
15056 }
15057
15058 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15059 This can only be applied to objects of a sizetype. */
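/* For example, round_up (size_int (37), 8) yields 40; for a
   power-of-two divisor this amounts to
   (VALUE + DIVISOR - 1) & -DIVISOR.  */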
15060
15061 tree
15062 round_up (tree value, int divisor)
15063 {
15064 tree div = NULL_TREE;
15065
15066 gcc_assert (divisor > 0);
15067 if (divisor == 1)
15068 return value;
15069
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case this check is more expensive than just
     doing the rounding.  */
15074 if (TREE_CODE (value) != INTEGER_CST)
15075 {
15076 div = build_int_cst (TREE_TYPE (value), divisor);
15077
15078 if (multiple_of_p (TREE_TYPE (value), value, div))
15079 return value;
15080 }
15081
15082 /* If divisor is a power of two, simplify this to bit manipulation. */
15083 if (divisor == (divisor & -divisor))
15084 {
15085 if (TREE_CODE (value) == INTEGER_CST)
15086 {
15087 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15088 unsigned HOST_WIDE_INT high;
15089 bool overflow_p;
15090
15091 if ((low & (divisor - 1)) == 0)
15092 return value;
15093
15094 overflow_p = TREE_OVERFLOW (value);
15095 high = TREE_INT_CST_HIGH (value);
15096 low &= ~(divisor - 1);
15097 low += divisor;
15098 if (low == 0)
15099 {
15100 high++;
15101 if (high == 0)
15102 overflow_p = true;
15103 }
15104
15105 return force_fit_type_double (TREE_TYPE (value), low, high,
15106 -1, overflow_p);
15107 }
15108 else
15109 {
15110 tree t;
15111
15112 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15113 value = size_binop (PLUS_EXPR, value, t);
15114 t = build_int_cst (TREE_TYPE (value), -divisor);
15115 value = size_binop (BIT_AND_EXPR, value, t);
15116 }
15117 }
15118 else
15119 {
15120 if (!div)
15121 div = build_int_cst (TREE_TYPE (value), divisor);
15122 value = size_binop (CEIL_DIV_EXPR, value, div);
15123 value = size_binop (MULT_EXPR, value, div);
15124 }
15125
15126 return value;
15127 }
15128
15129 /* Likewise, but round down. */
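/* For example, round_down (size_int (37), 8) yields 32, computed for
   a power-of-two divisor as VALUE & -DIVISOR.  */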
15130
15131 tree
15132 round_down (tree value, int divisor)
15133 {
15134 tree div = NULL_TREE;
15135
15136 gcc_assert (divisor > 0);
15137 if (divisor == 1)
15138 return value;
15139
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case this check is more expensive than just
     doing the rounding.  */
15144 if (TREE_CODE (value) != INTEGER_CST)
15145 {
15146 div = build_int_cst (TREE_TYPE (value), divisor);
15147
15148 if (multiple_of_p (TREE_TYPE (value), value, div))
15149 return value;
15150 }
15151
15152 /* If divisor is a power of two, simplify this to bit manipulation. */
15153 if (divisor == (divisor & -divisor))
15154 {
15155 tree t;
15156
15157 t = build_int_cst (TREE_TYPE (value), -divisor);
15158 value = size_binop (BIT_AND_EXPR, value, t);
15159 }
15160 else
15161 {
15162 if (!div)
15163 div = build_int_cst (TREE_TYPE (value), divisor);
15164 value = size_binop (FLOOR_DIV_EXPR, value, div);
15165 value = size_binop (MULT_EXPR, value, div);
15166 }
15167
15168 return value;
15169 }
15170
/* Return a pointer to the base of the object addressed by EXP and
   extract the offset of the access, storing its constant part (in
   bits) in *PBITPOS and the variable part in *POFFSET.  */
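/* For example, for &s.f, where field f lies at byte offset 4 within
   s, the core is &s, *PBITPOS is 32 and *POFFSET is NULL_TREE.  */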
15174
15175 static tree
15176 split_address_to_core_and_offset (tree exp,
15177 HOST_WIDE_INT *pbitpos, tree *poffset)
15178 {
15179 tree core;
15180 enum machine_mode mode;
15181 int unsignedp, volatilep;
15182 HOST_WIDE_INT bitsize;
15183
15184 if (TREE_CODE (exp) == ADDR_EXPR)
15185 {
15186 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15187 poffset, &mode, &unsignedp, &volatilep,
15188 false);
15189 core = fold_addr_expr (core);
15190 }
15191 else
15192 {
15193 core = exp;
15194 *pbitpos = 0;
15195 *poffset = NULL_TREE;
15196 }
15197
15198 return core;
15199 }
15200
15201 /* Returns true if addresses of E1 and E2 differ by a constant, false
15202 otherwise. If they do, E1 - E2 is stored in *DIFF. */
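/* For example, for &a[3] and &a[1], where a is an array of 4-byte
   ints, *DIFF is set to 8.  */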
15203
15204 bool
15205 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15206 {
15207 tree core1, core2;
15208 HOST_WIDE_INT bitpos1, bitpos2;
15209 tree toffset1, toffset2, tdiff, type;
15210
15211 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15212 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15213
15214 if (bitpos1 % BITS_PER_UNIT != 0
15215 || bitpos2 % BITS_PER_UNIT != 0
15216 || !operand_equal_p (core1, core2, 0))
15217 return false;
15218
15219 if (toffset1 && toffset2)
15220 {
15221 type = TREE_TYPE (toffset1);
15222 if (type != TREE_TYPE (toffset2))
15223 toffset2 = fold_convert (type, toffset2);
15224
15225 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15226 if (!cst_and_fits_in_hwi (tdiff))
15227 return false;
15228
15229 *diff = int_cst_value (tdiff);
15230 }
15231 else if (toffset1 || toffset2)
15232 {
15233 /* If only one of the offsets is non-constant, the difference cannot
15234 be a constant. */
15235 return false;
15236 }
15237 else
15238 *diff = 0;
15239
15240 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15241 return true;
15242 }
15243
15244 /* Simplify the floating point expression EXP when the sign of the
15245 result is not significant. Return NULL_TREE if no simplification
15246 is possible. */
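/* For example, -y * z is rewritten to y * z; this is valid for a
   caller such as the folder for cos, whose result does not depend on
   the sign of its argument.  */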
15247
15248 tree
15249 fold_strip_sign_ops (tree exp)
15250 {
15251 tree arg0, arg1;
15252
15253 switch (TREE_CODE (exp))
15254 {
15255 case ABS_EXPR:
15256 case NEGATE_EXPR:
15257 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15258 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15259
15260 case MULT_EXPR:
15261 case RDIV_EXPR:
15262 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15263 return NULL_TREE;
15264 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15265 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15266 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15267 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15268 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15269 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15270 break;
15271
15272 case COMPOUND_EXPR:
15273 arg0 = TREE_OPERAND (exp, 0);
15274 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15275 if (arg1)
15276 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15277 break;
15278
15279 case COND_EXPR:
15280 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15281 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15282 if (arg0 || arg1)
15283 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15284 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15285 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15286 break;
15287
15288 case CALL_EXPR:
15289 {
15290 const enum built_in_function fcode = builtin_mathfn_code (exp);
15291 switch (fcode)
15292 {
15293 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15294 /* Strip copysign function call, return the 1st argument. */
15295 arg0 = CALL_EXPR_ARG (exp, 0);
15296 arg1 = CALL_EXPR_ARG (exp, 1);
15297 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15298
15299 default:
15300 /* Strip sign ops from the argument of "odd" math functions. */
15301 if (negate_mathfn_p (fcode))
15302 {
15303 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15304 if (arg0)
15305 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15306 }
15307 break;
15308 }
15309 }
15310 break;
15311
15312 default:
15313 break;
15314 }
15315 return NULL_TREE;
15316 }