gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
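
/* Worked example, for illustration only, using 8-bit two's complement
   values rather than HOST_WIDE_INT: a = 0x7F (127), b = 0x01 (1),
   sum = 0x80 (-128).  Here ~(a ^ b) = ~0x7E = 0x81 and a ^ sum = 0xFF,
   so the AND is 0x81, which is negative: overflow is detected.  With
   b = 0xFF (-1) instead, sum = 0x7E, the signs of a and b differ, the
   macro yields a nonnegative value, and no overflow is reported.  */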
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
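
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32 (so BASE ==
   0x10000): encode (words, 0x12345678, 0x7ABCDEF0) yields
   words = { 0x5678, 0x1234, 0xDEF0, 0x7ABC }, and decode on that
   array recovers *low == 0x12345678 and *hi == 0x7ABCDEF0.  */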
\f
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
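
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32 and a signed
   8-bit TYPE (prec == 8): the input l1 = 0x100, h1 = 0 has all bits
   above bit 7 cleared, leaving 0, and since argument and result differ
   the function returns nonzero.  The input l1 = 0x80, h1 = 0 truncates
   to 0x80 and then sign extends to l1 = 0xFFFFFF80, h1 = -1 (the value
   -128), which also differs from the argument, so it too overflows.  */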

/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
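
/* For illustration: the test (l < l1) above detects a carry out of the
   low word, since an unsigned sum wraps exactly when it is smaller
   than an operand.  With 8-bit words, l1 = 0xFF and l2 = 0x01 give
   l = 0x00 < l1, so 1 is carried into the high-word sum.  */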

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
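
/* For illustration: two's complement negation is ~X + 1.  When the low
   word is zero the + 1 carries into the high word, giving *hv = -h1;
   otherwise the carry is absorbed by the low word and the high word is
   merely complemented.  The check (*hv & h1) < 0 catches the single
   signed overflow case, negating the most negative value, where h1 and
   -h1 share a set sign bit.  */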
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] <= 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
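
/* For illustration: the digit loops above compute the full 4x4-digit
   unsigned product.  For signed inputs, a negative factor H:L stands
   for its unsigned encoding minus 2**(2*HOST_BITS_PER_WIDE_INT), so
   the top half of the unsigned product is off by the other factor
   times that power of two; the neg_double/add_double fixups subtract
   it back before the top half is checked against the low half's
   sign.  */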
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
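
/* For illustration: a left rotate by COUNT within PREC bits is the OR
   of a left shift by COUNT and a logical right shift by PREC - COUNT.
   Assuming prec == 8 and l1 = 0x81, a rotate by 1 combines
   (0x81 << 1) & 0xFF == 0x02 with 0x81 >> 7 == 0x01, giving 0x03.  */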

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra (5th) element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
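
/* For illustration, dividing -7 by 2 under the different codes:
   TRUNC_DIV_EXPR gives quo = -3, rem = -1 (round toward zero);
   FLOOR_DIV_EXPR gives quo = -4, rem = 1 (toward negative infinity);
   CEIL_DIV_EXPR gives quo = -3, rem = -1 (toward positive infinity);
   ROUND_DIV_EXPR gives quo = -4, rem = 1, since 2 * |rem| >= |den|
   after truncation moves the quotient away from zero to the nearer
   integer.  */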

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
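
/* For illustration: with INTEGER_CST arguments 12 and 4,
   div_if_zero_remainder folds an EXACT_DIV_EXPR to 3, while 13 and 4
   leave remainder 1 and return NULL_TREE, so callers fold only the
   divisions that are exact.  */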
\f
/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
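
/* For illustration: in a signed 8-bit type the only value whose
   negation overflows is -128, whose low bits are exactly
   1 << (prec - 1) == 0x80; the comparison above returns false
   precisely for that bit pattern and true for every other value.  */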

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
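
/* For illustration: splitting IN = A - 5 with CODE == PLUS_EXPR stores
   the subtracted literal 5 in *MINUS_LITP, leaves *LITP and *CONP
   null, and returns A as the variable part; associate_trees below can
   then recombine the pieces.  */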

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
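
/* For illustration: int_const_binop (PLUS_EXPR, ...) on INTEGER_CSTs
   127 and 1 of a signed 8-bit type computes 128 in double-word
   arithmetic; with NOTRUNC == 0, force_fit_type_double then truncates
   that to -128 and returns a node with TREE_OVERFLOW set.  */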
1781
1782 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1783 constant. We assume ARG1 and ARG2 have the same data type, or at least
1784 are the same kind of constant and the same machine mode. Return zero if
1785 combining the constants is not allowed in the current operating mode.
1786
1787 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1788
1789 static tree
1790 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1791 {
1792 /* Sanity check for the recursive cases. */
1793 if (!arg1 || !arg2)
1794 return NULL_TREE;
1795
1796 STRIP_NOPS (arg1);
1797 STRIP_NOPS (arg2);
1798
1799 if (TREE_CODE (arg1) == INTEGER_CST)
1800 return int_const_binop (code, arg1, arg2, notrunc);
1801
1802 if (TREE_CODE (arg1) == REAL_CST)
1803 {
1804 enum machine_mode mode;
1805 REAL_VALUE_TYPE d1;
1806 REAL_VALUE_TYPE d2;
1807 REAL_VALUE_TYPE value;
1808 REAL_VALUE_TYPE result;
1809 bool inexact;
1810 tree t, type;
1811
1812 /* The following codes are handled by real_arithmetic. */
1813 switch (code)
1814 {
1815 case PLUS_EXPR:
1816 case MINUS_EXPR:
1817 case MULT_EXPR:
1818 case RDIV_EXPR:
1819 case MIN_EXPR:
1820 case MAX_EXPR:
1821 break;
1822
1823 default:
1824 return NULL_TREE;
1825 }
1826
1827 d1 = TREE_REAL_CST (arg1);
1828 d2 = TREE_REAL_CST (arg2);
1829
1830 type = TREE_TYPE (arg1);
1831 mode = TYPE_MODE (type);
1832
1833 /* Don't perform operation if we honor signaling NaNs and
1834 either operand is a NaN. */
1835 if (HONOR_SNANS (mode)
1836 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1837 return NULL_TREE;
1838
1839 /* Don't perform operation if it would raise a division
1840 by zero exception. */
1841 if (code == RDIV_EXPR
1842 && REAL_VALUES_EQUAL (d2, dconst0)
1843 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1844 return NULL_TREE;
1845
1846 /* If either operand is a NaN, just return it. Otherwise, set up
1847 	 for a floating-point trap; we return an overflow.  */
1848 if (REAL_VALUE_ISNAN (d1))
1849 return arg1;
1850 else if (REAL_VALUE_ISNAN (d2))
1851 return arg2;
1852
1853 inexact = real_arithmetic (&value, code, &d1, &d2);
1854 real_convert (&result, mode, &value);
1855
1856 /* Don't constant fold this floating point operation if
1857 	 the result has overflowed and flag_trapping_math is set.  */
1858 if (flag_trapping_math
1859 && MODE_HAS_INFINITIES (mode)
1860 && REAL_VALUE_ISINF (result)
1861 && !REAL_VALUE_ISINF (d1)
1862 && !REAL_VALUE_ISINF (d2))
1863 return NULL_TREE;
1864
1865 /* Don't constant fold this floating point operation if the
1866 	 result may depend upon the run-time rounding mode and
1867 flag_rounding_math is set, or if GCC's software emulation
1868 is unable to accurately represent the result. */
1869 if ((flag_rounding_math
1870 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1871 && !flag_unsafe_math_optimizations))
1872 && (inexact || !real_identical (&result, &value)))
1873 return NULL_TREE;
1874
1875 t = build_real (type, result);
1876
1877 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1878 return t;
1879 }
1880
1881 if (TREE_CODE (arg1) == COMPLEX_CST)
1882 {
1883 tree type = TREE_TYPE (arg1);
1884 tree r1 = TREE_REALPART (arg1);
1885 tree i1 = TREE_IMAGPART (arg1);
1886 tree r2 = TREE_REALPART (arg2);
1887 tree i2 = TREE_IMAGPART (arg2);
1888 tree real, imag;
1889
1890 switch (code)
1891 {
1892 case PLUS_EXPR:
1893 case MINUS_EXPR:
1894 real = const_binop (code, r1, r2, notrunc);
1895 imag = const_binop (code, i1, i2, notrunc);
1896 break;
1897
1898 case MULT_EXPR:
1899 real = const_binop (MINUS_EXPR,
1900 const_binop (MULT_EXPR, r1, r2, notrunc),
1901 const_binop (MULT_EXPR, i1, i2, notrunc),
1902 notrunc);
1903 imag = const_binop (PLUS_EXPR,
1904 const_binop (MULT_EXPR, r1, i2, notrunc),
1905 const_binop (MULT_EXPR, i1, r2, notrunc),
1906 notrunc);
1907 break;
1908
1909 case RDIV_EXPR:
1910 {
1911 tree magsquared
1912 = const_binop (PLUS_EXPR,
1913 const_binop (MULT_EXPR, r2, r2, notrunc),
1914 const_binop (MULT_EXPR, i2, i2, notrunc),
1915 notrunc);
1916 tree t1
1917 = const_binop (PLUS_EXPR,
1918 const_binop (MULT_EXPR, r1, r2, notrunc),
1919 const_binop (MULT_EXPR, i1, i2, notrunc),
1920 notrunc);
1921 tree t2
1922 = const_binop (MINUS_EXPR,
1923 const_binop (MULT_EXPR, i1, r2, notrunc),
1924 const_binop (MULT_EXPR, r1, i2, notrunc),
1925 notrunc);
1926
1927 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1928 code = TRUNC_DIV_EXPR;
1929
1930 real = const_binop (code, t1, magsquared, notrunc);
1931 imag = const_binop (code, t2, magsquared, notrunc);
1932 }
1933 break;
1934
1935 default:
1936 return NULL_TREE;
1937 }
1938
1939 if (real && imag)
1940 return build_complex (type, real, imag);
1941 }
1942
1943 return NULL_TREE;
1944 }
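
/* Worked example (illustrative sketch): for COMPLEX_CST operands under
   MULT_EXPR, the code above applies (a + bi)(c + di)
   = (ac - bd) + (ad + bc)i, so (1 + 2i) * (3 + 4i) folds to
   (1*3 - 2*4) + (1*4 + 2*3)i = -5 + 10i. */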
1945
1946 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1947 indicates which particular sizetype to create. */
1948
1949 tree
1950 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1951 {
1952 return build_int_cst (sizetype_tab[(int) kind], number);
1953 }
1954 \f
1955 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1956 is a tree code. The type of the result is taken from the operands.
1957    Both must be equivalent integer types, a la int_binop_types_match_p.
1958 If the operands are constant, so is the result. */
1959
1960 tree
1961 size_binop (enum tree_code code, tree arg0, tree arg1)
1962 {
1963 tree type = TREE_TYPE (arg0);
1964
1965 if (arg0 == error_mark_node || arg1 == error_mark_node)
1966 return error_mark_node;
1967
1968 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1969 TREE_TYPE (arg1)));
1970
1971 /* Handle the special case of two integer constants faster. */
1972 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1973 {
1974 /* And some specific cases even faster than that. */
1975 if (code == PLUS_EXPR)
1976 {
1977 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1978 return arg1;
1979 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1980 return arg0;
1981 }
1982 else if (code == MINUS_EXPR)
1983 {
1984 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1985 return arg0;
1986 }
1987 else if (code == MULT_EXPR)
1988 {
1989 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1990 return arg1;
1991 }
1992
1993 /* Handle general case of two integer constants. */
1994 return int_const_binop (code, arg0, arg1, 0);
1995 }
1996
1997 return fold_build2 (code, type, arg0, arg1);
1998 }
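
/* Usage sketch (illustrative, not a real caller; both operands have
   type sizetype, as size_binop requires):

     tree n = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   Both operands are INTEGER_CSTs, so the fast path above hands them to
   int_const_binop and N is the sizetype INTEGER_CST 12; no PLUS_EXPR
   node is ever built. */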
1999
2000 /* Given two values, either both of sizetype or both of bitsizetype,
2001 compute the difference between the two values. Return the value
2002    in the signed type corresponding to the type of the operands.  */
2003
2004 tree
2005 size_diffop (tree arg0, tree arg1)
2006 {
2007 tree type = TREE_TYPE (arg0);
2008 tree ctype;
2009
2010 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2011 TREE_TYPE (arg1)));
2012
2013 /* If the type is already signed, just do the simple thing. */
2014 if (!TYPE_UNSIGNED (type))
2015 return size_binop (MINUS_EXPR, arg0, arg1);
2016
2017 if (type == sizetype)
2018 ctype = ssizetype;
2019 else if (type == bitsizetype)
2020 ctype = sbitsizetype;
2021 else
2022 ctype = lang_hooks.types.signed_type (type);
2023
2024 /* If either operand is not a constant, do the conversions to the signed
2025 type and subtract. The hardware will do the right thing with any
2026 overflow in the subtraction. */
2027 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2028 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2029 fold_convert (ctype, arg1));
2030
2031 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2032 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2033 overflow) and negate (which can't either). Special-case a result
2034 of zero while we're here. */
2035 if (tree_int_cst_equal (arg0, arg1))
2036 return build_int_cst (ctype, 0);
2037 else if (tree_int_cst_lt (arg1, arg0))
2038 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2039 else
2040 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2041 fold_convert (ctype, size_binop (MINUS_EXPR,
2042 arg1, arg0)));
2043 }
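
/* Worked example (illustrative sketch): size_diffop (size_int (2),
   size_int (5)) sees unsigned sizetype operands with ARG1 > ARG0, so it
   subtracts the other way (5 - 2 = 3), converts that to ssizetype, and
   negates, returning the ssizetype INTEGER_CST -3. */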
2044 \f
2045 /* A subroutine of fold_convert_const handling conversions of an
2046 INTEGER_CST to another integer type. */
2047
2048 static tree
2049 fold_convert_const_int_from_int (tree type, tree arg1)
2050 {
2051 tree t;
2052
2053   /* Given an integer constant, make a new constant with the new type,
2054 appropriately sign-extended or truncated. */
2055 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2056 TREE_INT_CST_HIGH (arg1),
2057 /* Don't set the overflow when
2058 				converting a pointer.  */
2059 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2060 (TREE_INT_CST_HIGH (arg1) < 0
2061 && (TYPE_UNSIGNED (type)
2062 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2063 | TREE_OVERFLOW (arg1));
2064
2065 return t;
2066 }
2067
2068 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2069 to an integer type. */
2070
2071 static tree
2072 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2073 {
2074 int overflow = 0;
2075 tree t;
2076
2077 /* The following code implements the floating point to integer
2078 conversion rules required by the Java Language Specification,
2079 that IEEE NaNs are mapped to zero and values that overflow
2080 the target precision saturate, i.e. values greater than
2081 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2082 are mapped to INT_MIN. These semantics are allowed by the
2083 C and C++ standards that simply state that the behavior of
2084 FP-to-integer conversion is unspecified upon overflow. */
2085
2086 HOST_WIDE_INT high, low;
2087 REAL_VALUE_TYPE r;
2088 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2089
2090 switch (code)
2091 {
2092 case FIX_TRUNC_EXPR:
2093 real_trunc (&r, VOIDmode, &x);
2094 break;
2095
2096 default:
2097 gcc_unreachable ();
2098 }
2099
2100 /* If R is NaN, return zero and show we have an overflow. */
2101 if (REAL_VALUE_ISNAN (r))
2102 {
2103 overflow = 1;
2104 high = 0;
2105 low = 0;
2106 }
2107
2108 /* See if R is less than the lower bound or greater than the
2109 upper bound. */
2110
2111 if (! overflow)
2112 {
2113 tree lt = TYPE_MIN_VALUE (type);
2114 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2115 if (REAL_VALUES_LESS (r, l))
2116 {
2117 overflow = 1;
2118 high = TREE_INT_CST_HIGH (lt);
2119 low = TREE_INT_CST_LOW (lt);
2120 }
2121 }
2122
2123 if (! overflow)
2124 {
2125 tree ut = TYPE_MAX_VALUE (type);
2126 if (ut)
2127 {
2128 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2129 if (REAL_VALUES_LESS (u, r))
2130 {
2131 overflow = 1;
2132 high = TREE_INT_CST_HIGH (ut);
2133 low = TREE_INT_CST_LOW (ut);
2134 }
2135 }
2136 }
2137
2138 if (! overflow)
2139 REAL_VALUE_TO_INT (&low, &high, r);
2140
2141 t = force_fit_type_double (type, low, high, -1,
2142 overflow | TREE_OVERFLOW (arg1));
2143 return t;
2144 }
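
/* Behavior sketch (illustrative): truncating the REAL_CST 3.9 to a
   32-bit signed integer type via FIX_TRUNC_EXPR yields 3; a NaN yields
   0 and a value above TYPE_MAX_VALUE saturates to that maximum, in both
   cases with TREE_OVERFLOW set on the result. */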
2145
2146 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2147 to another floating point type. */
2148
2149 static tree
2150 fold_convert_const_real_from_real (tree type, tree arg1)
2151 {
2152 REAL_VALUE_TYPE value;
2153 tree t;
2154
2155 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2156 t = build_real (type, value);
2157
2158 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2159 return t;
2160 }
2161
2162 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2163 type TYPE. If no simplification can be done return NULL_TREE. */
2164
2165 static tree
2166 fold_convert_const (enum tree_code code, tree type, tree arg1)
2167 {
2168 if (TREE_TYPE (arg1) == type)
2169 return arg1;
2170
2171 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2172 {
2173 if (TREE_CODE (arg1) == INTEGER_CST)
2174 return fold_convert_const_int_from_int (type, arg1);
2175 else if (TREE_CODE (arg1) == REAL_CST)
2176 return fold_convert_const_int_from_real (code, type, arg1);
2177 }
2178 else if (TREE_CODE (type) == REAL_TYPE)
2179 {
2180 if (TREE_CODE (arg1) == INTEGER_CST)
2181 return build_real_from_int_cst (type, arg1);
2182 if (TREE_CODE (arg1) == REAL_CST)
2183 return fold_convert_const_real_from_real (type, arg1);
2184 }
2185 return NULL_TREE;
2186 }
2187
2188 /* Construct a vector of zero elements of vector type TYPE. */
2189
2190 static tree
2191 build_zero_vector (tree type)
2192 {
2193 tree elem, list;
2194 int i, units;
2195
2196 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2197 units = TYPE_VECTOR_SUBPARTS (type);
2198
2199 list = NULL_TREE;
2200 for (i = 0; i < units; i++)
2201 list = tree_cons (NULL_TREE, elem, list);
2202 return build_vector (type, list);
2203 }
2204
2205 /* Convert expression ARG to type TYPE. Used by the middle-end for
2206 simple conversions in preference to calling the front-end's convert. */
2207
2208 tree
2209 fold_convert (tree type, tree arg)
2210 {
2211 tree orig = TREE_TYPE (arg);
2212 tree tem;
2213
2214 if (type == orig)
2215 return arg;
2216
2217 if (TREE_CODE (arg) == ERROR_MARK
2218 || TREE_CODE (type) == ERROR_MARK
2219 || TREE_CODE (orig) == ERROR_MARK)
2220 return error_mark_node;
2221
2222 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2223 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2224 TYPE_MAIN_VARIANT (orig)))
2225 return fold_build1 (NOP_EXPR, type, arg);
2226
2227 switch (TREE_CODE (type))
2228 {
2229 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2230 case POINTER_TYPE: case REFERENCE_TYPE:
2231 case OFFSET_TYPE:
2232 if (TREE_CODE (arg) == INTEGER_CST)
2233 {
2234 tem = fold_convert_const (NOP_EXPR, type, arg);
2235 if (tem != NULL_TREE)
2236 return tem;
2237 }
2238 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2239 || TREE_CODE (orig) == OFFSET_TYPE)
2240 return fold_build1 (NOP_EXPR, type, arg);
2241 if (TREE_CODE (orig) == COMPLEX_TYPE)
2242 {
2243 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2244 return fold_convert (type, tem);
2245 }
2246 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2247 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2248 return fold_build1 (NOP_EXPR, type, arg);
2249
2250 case REAL_TYPE:
2251 if (TREE_CODE (arg) == INTEGER_CST)
2252 {
2253 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2256 }
2257 else if (TREE_CODE (arg) == REAL_CST)
2258 {
2259 tem = fold_convert_const (NOP_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2261 return tem;
2262 }
2263
2264 switch (TREE_CODE (orig))
2265 {
2266 case INTEGER_TYPE:
2267 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2268 case POINTER_TYPE: case REFERENCE_TYPE:
2269 return fold_build1 (FLOAT_EXPR, type, arg);
2270
2271 case REAL_TYPE:
2272 return fold_build1 (NOP_EXPR, type, arg);
2273
2274 case COMPLEX_TYPE:
2275 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2276 return fold_convert (type, tem);
2277
2278 default:
2279 gcc_unreachable ();
2280 }
2281
2282 case COMPLEX_TYPE:
2283 switch (TREE_CODE (orig))
2284 {
2285 case INTEGER_TYPE:
2286 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2287 case POINTER_TYPE: case REFERENCE_TYPE:
2288 case REAL_TYPE:
2289 return build2 (COMPLEX_EXPR, type,
2290 fold_convert (TREE_TYPE (type), arg),
2291 fold_convert (TREE_TYPE (type), integer_zero_node));
2292 case COMPLEX_TYPE:
2293 {
2294 tree rpart, ipart;
2295
2296 if (TREE_CODE (arg) == COMPLEX_EXPR)
2297 {
2298 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2299 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2300 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2301 }
2302
2303 arg = save_expr (arg);
2304 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2305 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2306 rpart = fold_convert (TREE_TYPE (type), rpart);
2307 ipart = fold_convert (TREE_TYPE (type), ipart);
2308 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2309 }
2310
2311 default:
2312 gcc_unreachable ();
2313 }
2314
2315 case VECTOR_TYPE:
2316 if (integer_zerop (arg))
2317 return build_zero_vector (type);
2318 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2319 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2320 || TREE_CODE (orig) == VECTOR_TYPE);
2321 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2322
2323 case VOID_TYPE:
2324 tem = fold_ignored_result (arg);
2325 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2326 return tem;
2327 return fold_build1 (NOP_EXPR, type, tem);
2328
2329 default:
2330 gcc_unreachable ();
2331 }
2332 }
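
/* Usage sketch (illustrative; X is a hypothetical expression of type
   float):

     tree c = fold_convert (complex_double_type_node, x);

   goes through the REAL_TYPE arm of the COMPLEX_TYPE case above and
   builds COMPLEX_EXPR <(double) x, 0.0>. */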
2333 \f
2334 /* Return false if expr can be assumed not to be an lvalue, true
2335 otherwise. */
2336
2337 static bool
2338 maybe_lvalue_p (tree x)
2339 {
2340 /* We only need to wrap lvalue tree codes. */
2341 switch (TREE_CODE (x))
2342 {
2343 case VAR_DECL:
2344 case PARM_DECL:
2345 case RESULT_DECL:
2346 case LABEL_DECL:
2347 case FUNCTION_DECL:
2348 case SSA_NAME:
2349
2350 case COMPONENT_REF:
2351 case INDIRECT_REF:
2352 case ALIGN_INDIRECT_REF:
2353 case MISALIGNED_INDIRECT_REF:
2354 case ARRAY_REF:
2355 case ARRAY_RANGE_REF:
2356 case BIT_FIELD_REF:
2357 case OBJ_TYPE_REF:
2358
2359 case REALPART_EXPR:
2360 case IMAGPART_EXPR:
2361 case PREINCREMENT_EXPR:
2362 case PREDECREMENT_EXPR:
2363 case SAVE_EXPR:
2364 case TRY_CATCH_EXPR:
2365 case WITH_CLEANUP_EXPR:
2366 case COMPOUND_EXPR:
2367 case MODIFY_EXPR:
2368 case GIMPLE_MODIFY_STMT:
2369 case TARGET_EXPR:
2370 case COND_EXPR:
2371 case BIND_EXPR:
2372 case MIN_EXPR:
2373 case MAX_EXPR:
2374 break;
2375
2376 default:
2377 /* Assume the worst for front-end tree codes. */
2378 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2379 break;
2380 return false;
2381 }
2382
2383 return true;
2384 }
2385
2386 /* Return an expr equal to X but certainly not valid as an lvalue. */
2387
2388 tree
2389 non_lvalue (tree x)
2390 {
2391 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2392 us. */
2393 if (in_gimple_form)
2394 return x;
2395
2396 if (! maybe_lvalue_p (x))
2397 return x;
2398 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2399 }
2400
2401 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2402 Zero means allow extended lvalues. */
2403
2404 int pedantic_lvalues;
2405
2406 /* When pedantic, return an expr equal to X but certainly not valid as a
2407 pedantic lvalue. Otherwise, return X. */
2408
2409 static tree
2410 pedantic_non_lvalue (tree x)
2411 {
2412 if (pedantic_lvalues)
2413 return non_lvalue (x);
2414 else
2415 return x;
2416 }
2417 \f
2418 /* Given a tree comparison code, return the code that is the logical inverse
2419 of the given code. It is not safe to do this for floating-point
2420    comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS
2421    flag as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2422
2423 enum tree_code
2424 invert_tree_comparison (enum tree_code code, bool honor_nans)
2425 {
2426 if (honor_nans && flag_trapping_math)
2427 return ERROR_MARK;
2428
2429 switch (code)
2430 {
2431 case EQ_EXPR:
2432 return NE_EXPR;
2433 case NE_EXPR:
2434 return EQ_EXPR;
2435 case GT_EXPR:
2436 return honor_nans ? UNLE_EXPR : LE_EXPR;
2437 case GE_EXPR:
2438 return honor_nans ? UNLT_EXPR : LT_EXPR;
2439 case LT_EXPR:
2440 return honor_nans ? UNGE_EXPR : GE_EXPR;
2441 case LE_EXPR:
2442 return honor_nans ? UNGT_EXPR : GT_EXPR;
2443 case LTGT_EXPR:
2444 return UNEQ_EXPR;
2445 case UNEQ_EXPR:
2446 return LTGT_EXPR;
2447 case UNGT_EXPR:
2448 return LE_EXPR;
2449 case UNGE_EXPR:
2450 return LT_EXPR;
2451 case UNLT_EXPR:
2452 return GE_EXPR;
2453 case UNLE_EXPR:
2454 return GT_EXPR;
2455 case ORDERED_EXPR:
2456 return UNORDERED_EXPR;
2457 case UNORDERED_EXPR:
2458 return ORDERED_EXPR;
2459 default:
2460 gcc_unreachable ();
2461 }
2462 }
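
/* Examples (illustrative): invert_tree_comparison (LT_EXPR, false)
   returns GE_EXPR, but with HONOR_NANS set it returns UNGE_EXPR, since
   !(a < b) must stay true when either operand is a NaN; and when both
   honor_nans and flag_trapping_math are set it returns ERROR_MARK
   rather than risk changing which inputs trap. */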
2463
2464 /* Similar, but return the comparison that results if the operands are
2465 swapped. This is safe for floating-point. */
2466
2467 enum tree_code
2468 swap_tree_comparison (enum tree_code code)
2469 {
2470 switch (code)
2471 {
2472 case EQ_EXPR:
2473 case NE_EXPR:
2474 case ORDERED_EXPR:
2475 case UNORDERED_EXPR:
2476 case LTGT_EXPR:
2477 case UNEQ_EXPR:
2478 return code;
2479 case GT_EXPR:
2480 return LT_EXPR;
2481 case GE_EXPR:
2482 return LE_EXPR;
2483 case LT_EXPR:
2484 return GT_EXPR;
2485 case LE_EXPR:
2486 return GE_EXPR;
2487 case UNGT_EXPR:
2488 return UNLT_EXPR;
2489 case UNGE_EXPR:
2490 return UNLE_EXPR;
2491 case UNLT_EXPR:
2492 return UNGT_EXPR;
2493 case UNLE_EXPR:
2494 return UNGE_EXPR;
2495 default:
2496 gcc_unreachable ();
2497 }
2498 }
2499
2500
2501 /* Convert a comparison tree code from an enum tree_code representation
2502 into a compcode bit-based encoding. This function is the inverse of
2503 compcode_to_comparison. */
2504
2505 static enum comparison_code
2506 comparison_to_compcode (enum tree_code code)
2507 {
2508 switch (code)
2509 {
2510 case LT_EXPR:
2511 return COMPCODE_LT;
2512 case EQ_EXPR:
2513 return COMPCODE_EQ;
2514 case LE_EXPR:
2515 return COMPCODE_LE;
2516 case GT_EXPR:
2517 return COMPCODE_GT;
2518 case NE_EXPR:
2519 return COMPCODE_NE;
2520 case GE_EXPR:
2521 return COMPCODE_GE;
2522 case ORDERED_EXPR:
2523 return COMPCODE_ORD;
2524 case UNORDERED_EXPR:
2525 return COMPCODE_UNORD;
2526 case UNLT_EXPR:
2527 return COMPCODE_UNLT;
2528 case UNEQ_EXPR:
2529 return COMPCODE_UNEQ;
2530 case UNLE_EXPR:
2531 return COMPCODE_UNLE;
2532 case UNGT_EXPR:
2533 return COMPCODE_UNGT;
2534 case LTGT_EXPR:
2535 return COMPCODE_LTGT;
2536 case UNGE_EXPR:
2537 return COMPCODE_UNGE;
2538 default:
2539 gcc_unreachable ();
2540 }
2541 }
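
/* Note that the encoding is bitwise: the LT, EQ, GT and UNORD outcomes
   each own one bit, so for instance COMPCODE_LE == COMPCODE_LT
   | COMPCODE_EQ and COMPCODE_NE == COMPCODE_LT | COMPCODE_GT
   | COMPCODE_UNORD. This is what lets combine_comparisons below merge
   two comparisons with plain & and |. */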
2542
2543 /* Convert a compcode bit-based encoding of a comparison operator back
2544 to GCC's enum tree_code representation. This function is the
2545 inverse of comparison_to_compcode. */
2546
2547 static enum tree_code
2548 compcode_to_comparison (enum comparison_code code)
2549 {
2550 switch (code)
2551 {
2552 case COMPCODE_LT:
2553 return LT_EXPR;
2554 case COMPCODE_EQ:
2555 return EQ_EXPR;
2556 case COMPCODE_LE:
2557 return LE_EXPR;
2558 case COMPCODE_GT:
2559 return GT_EXPR;
2560 case COMPCODE_NE:
2561 return NE_EXPR;
2562 case COMPCODE_GE:
2563 return GE_EXPR;
2564 case COMPCODE_ORD:
2565 return ORDERED_EXPR;
2566 case COMPCODE_UNORD:
2567 return UNORDERED_EXPR;
2568 case COMPCODE_UNLT:
2569 return UNLT_EXPR;
2570 case COMPCODE_UNEQ:
2571 return UNEQ_EXPR;
2572 case COMPCODE_UNLE:
2573 return UNLE_EXPR;
2574 case COMPCODE_UNGT:
2575 return UNGT_EXPR;
2576 case COMPCODE_LTGT:
2577 return LTGT_EXPR;
2578 case COMPCODE_UNGE:
2579 return UNGE_EXPR;
2580 default:
2581 gcc_unreachable ();
2582 }
2583 }
2584
2585 /* Return a tree for the comparison which is the combination of
2586 doing the AND or OR (depending on CODE) of the two operations LCODE
2587 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2588 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2589 if this makes the transformation invalid. */
2590
2591 tree
2592 combine_comparisons (enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2595 {
2596 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 enum comparison_code compcode;
2600
2601 switch (code)
2602 {
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2605 break;
2606
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2609 break;
2610
2611 default:
2612 return NULL_TREE;
2613 }
2614
2615 if (!honor_nans)
2616 {
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2624 }
2625 else if (flag_trapping_math)
2626 {
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2638
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2647 rtrap = false;
2648
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2651 if (rtrap && !ltrap
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2653 return NULL_TREE;
2654
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
2657 return NULL_TREE;
2658 }
2659
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2664 else
2665 return fold_build2 (compcode_to_comparison (compcode),
2666 truth_type, ll_arg, lr_arg);
2667 }
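
/* Worked example (illustrative sketch; integer operands, so honor_nans
   is false): for "a < b || a == b" the caller passes code
   = TRUTH_ORIF_EXPR, lcode = LT_EXPR and rcode = EQ_EXPR; then compcode
   = COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE and the whole expression
   folds to the single comparison a <= b. */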
2668
2669 /* Return nonzero if CODE is a tree code that represents a truth value. */
2670
2671 static int
2672 truth_value_p (enum tree_code code)
2673 {
2674 return (TREE_CODE_CLASS (code) == tcc_comparison
2675 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2676 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2677 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2678 }
2679 \f
2680 /* Return nonzero if two operands (typically of the same tree node)
2681 are necessarily equal. If either argument has side-effects this
2682 function returns zero. FLAGS modifies behavior as follows:
2683
2684 If OEP_ONLY_CONST is set, only return nonzero for constants.
2685 This function tests whether the operands are indistinguishable;
2686 it does not test whether they are equal using C's == operation.
2687 The distinction is important for IEEE floating point, because
2688 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2689 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2690
2691 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2692 even though it may hold multiple values during a function.
2693 This is because a GCC tree node guarantees that nothing else is
2694 executed between the evaluation of its "operands" (which may often
2695 be evaluated in arbitrary order). Hence if the operands themselves
2696    have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2697 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2698 unset means assuming isochronic (or instantaneous) tree equivalence.
2699 Unless comparing arbitrary expression trees, such as from different
2700 statements, this flag can usually be left unset.
2701
2702 If OEP_PURE_SAME is set, then pure functions with identical arguments
2703 are considered the same. It is used when the caller has other ways
2704 to ensure that global memory is unchanged in between. */
2705
2706 int
2707 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2708 {
2709 /* If either is ERROR_MARK, they aren't equal. */
2710 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2711 return 0;
2712
2713 /* If both types don't have the same signedness, then we can't consider
2714 them equal. We must check this before the STRIP_NOPS calls
2715 because they may change the signedness of the arguments. */
2716 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2717 return 0;
2718
2719 /* If both types don't have the same precision, then it is not safe
2720 to strip NOPs. */
2721 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2722 return 0;
2723
2724 STRIP_NOPS (arg0);
2725 STRIP_NOPS (arg1);
2726
2727 /* In case both args are comparisons but with different comparison
2728 code, try to swap the comparison operands of one arg to produce
2729 a match and compare that variant. */
2730 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2731 && COMPARISON_CLASS_P (arg0)
2732 && COMPARISON_CLASS_P (arg1))
2733 {
2734 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2735
2736 if (TREE_CODE (arg0) == swap_code)
2737 return operand_equal_p (TREE_OPERAND (arg0, 0),
2738 TREE_OPERAND (arg1, 1), flags)
2739 && operand_equal_p (TREE_OPERAND (arg0, 1),
2740 TREE_OPERAND (arg1, 0), flags);
2741 }
2742
2743 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2744 /* This is needed for conversions and for COMPONENT_REF.
2745 Might as well play it safe and always test this. */
2746 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2747 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2748 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2749 return 0;
2750
2751 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2752 We don't care about side effects in that case because the SAVE_EXPR
2753 takes care of that for us. In all other cases, two expressions are
2754 equal if they have no side effects. If we have two identical
2755 expressions with side effects that should be treated the same due
2756 to the only side effects being identical SAVE_EXPR's, that will
2757 be detected in the recursive calls below. */
2758 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2759 && (TREE_CODE (arg0) == SAVE_EXPR
2760 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2761 return 1;
2762
2763 /* Next handle constant cases, those for which we can return 1 even
2764 if ONLY_CONST is set. */
2765 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2766 switch (TREE_CODE (arg0))
2767 {
2768 case INTEGER_CST:
2769 return tree_int_cst_equal (arg0, arg1);
2770
2771 case REAL_CST:
2772 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2773 TREE_REAL_CST (arg1)))
2774 return 1;
2775
2776
2777 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2778 {
2779 /* If we do not distinguish between signed and unsigned zero,
2780 consider them equal. */
2781 if (real_zerop (arg0) && real_zerop (arg1))
2782 return 1;
2783 }
2784 return 0;
2785
2786 case VECTOR_CST:
2787 {
2788 tree v1, v2;
2789
2790 v1 = TREE_VECTOR_CST_ELTS (arg0);
2791 v2 = TREE_VECTOR_CST_ELTS (arg1);
2792 while (v1 && v2)
2793 {
2794 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2795 flags))
2796 return 0;
2797 v1 = TREE_CHAIN (v1);
2798 v2 = TREE_CHAIN (v2);
2799 }
2800
2801 return v1 == v2;
2802 }
2803
2804 case COMPLEX_CST:
2805 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2806 flags)
2807 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2808 flags));
2809
2810 case STRING_CST:
2811 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2812 && ! memcmp (TREE_STRING_POINTER (arg0),
2813 TREE_STRING_POINTER (arg1),
2814 TREE_STRING_LENGTH (arg0)));
2815
2816 case ADDR_EXPR:
2817 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2818 0);
2819 default:
2820 break;
2821 }
2822
2823 if (flags & OEP_ONLY_CONST)
2824 return 0;
2825
2826 /* Define macros to test an operand from arg0 and arg1 for equality and a
2827 variant that allows null and views null as being different from any
2828    non-null value.  In the latter case, if either is null, then both
2829 must be; otherwise, do the normal comparison. */
2830 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2831 TREE_OPERAND (arg1, N), flags)
2832
2833 #define OP_SAME_WITH_NULL(N) \
2834 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2835 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2836
2837 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2838 {
2839 case tcc_unary:
2840 /* Two conversions are equal only if signedness and modes match. */
2841 switch (TREE_CODE (arg0))
2842 {
2843 case NOP_EXPR:
2844 case CONVERT_EXPR:
2845 case FIX_TRUNC_EXPR:
2846 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2847 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2848 return 0;
2849 break;
2850 default:
2851 break;
2852 }
2853
2854 return OP_SAME (0);
2855
2856
2857 case tcc_comparison:
2858 case tcc_binary:
2859 if (OP_SAME (0) && OP_SAME (1))
2860 return 1;
2861
2862 /* For commutative ops, allow the other order. */
2863 return (commutative_tree_code (TREE_CODE (arg0))
2864 && operand_equal_p (TREE_OPERAND (arg0, 0),
2865 TREE_OPERAND (arg1, 1), flags)
2866 && operand_equal_p (TREE_OPERAND (arg0, 1),
2867 TREE_OPERAND (arg1, 0), flags));
2868
2869 case tcc_reference:
2870 /* If either of the pointer (or reference) expressions we are
2871 	 dereferencing contains a side effect, these cannot be equal.  */
2872 if (TREE_SIDE_EFFECTS (arg0)
2873 || TREE_SIDE_EFFECTS (arg1))
2874 return 0;
2875
2876 switch (TREE_CODE (arg0))
2877 {
2878 case INDIRECT_REF:
2879 case ALIGN_INDIRECT_REF:
2880 case MISALIGNED_INDIRECT_REF:
2881 case REALPART_EXPR:
2882 case IMAGPART_EXPR:
2883 return OP_SAME (0);
2884
2885 case ARRAY_REF:
2886 case ARRAY_RANGE_REF:
2887 /* Operands 2 and 3 may be null. */
2888 return (OP_SAME (0)
2889 && OP_SAME (1)
2890 && OP_SAME_WITH_NULL (2)
2891 && OP_SAME_WITH_NULL (3));
2892
2893 case COMPONENT_REF:
2894 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2895 may be NULL when we're called to compare MEM_EXPRs. */
2896 return OP_SAME_WITH_NULL (0)
2897 && OP_SAME (1)
2898 && OP_SAME_WITH_NULL (2);
2899
2900 case BIT_FIELD_REF:
2901 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2902
2903 default:
2904 return 0;
2905 }
2906
2907 case tcc_expression:
2908 switch (TREE_CODE (arg0))
2909 {
2910 case ADDR_EXPR:
2911 case TRUTH_NOT_EXPR:
2912 return OP_SAME (0);
2913
2914 case TRUTH_ANDIF_EXPR:
2915 case TRUTH_ORIF_EXPR:
2916 return OP_SAME (0) && OP_SAME (1);
2917
2918 case TRUTH_AND_EXPR:
2919 case TRUTH_OR_EXPR:
2920 case TRUTH_XOR_EXPR:
2921 if (OP_SAME (0) && OP_SAME (1))
2922 return 1;
2923
2924 /* Otherwise take into account this is a commutative operation. */
2925 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2926 TREE_OPERAND (arg1, 1), flags)
2927 && operand_equal_p (TREE_OPERAND (arg0, 1),
2928 TREE_OPERAND (arg1, 0), flags));
2929
2930 default:
2931 return 0;
2932 }
2933
2934 case tcc_vl_exp:
2935 switch (TREE_CODE (arg0))
2936 {
2937 case CALL_EXPR:
2938 /* If the CALL_EXPRs call different functions, then they
2939 	     clearly cannot be equal.  */
2940 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2941 flags))
2942 return 0;
2943
2944 {
2945 unsigned int cef = call_expr_flags (arg0);
2946 if (flags & OEP_PURE_SAME)
2947 cef &= ECF_CONST | ECF_PURE;
2948 else
2949 cef &= ECF_CONST;
2950 if (!cef)
2951 return 0;
2952 }
2953
2954 /* Now see if all the arguments are the same. */
2955 {
2956 call_expr_arg_iterator iter0, iter1;
2957 tree a0, a1;
2958 for (a0 = first_call_expr_arg (arg0, &iter0),
2959 a1 = first_call_expr_arg (arg1, &iter1);
2960 a0 && a1;
2961 a0 = next_call_expr_arg (&iter0),
2962 a1 = next_call_expr_arg (&iter1))
2963 if (! operand_equal_p (a0, a1, flags))
2964 return 0;
2965
2966 /* If we get here and both argument lists are exhausted
2967 then the CALL_EXPRs are equal. */
2968 return ! (a0 || a1);
2969 }
2970 default:
2971 return 0;
2972 }
2973
2974 case tcc_declaration:
2975 /* Consider __builtin_sqrt equal to sqrt. */
2976 return (TREE_CODE (arg0) == FUNCTION_DECL
2977 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2978 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2979 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2980
2981 default:
2982 return 0;
2983 }
2984
2985 #undef OP_SAME
2986 #undef OP_SAME_WITH_NULL
2987 }
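
/* Examples (illustrative): operand_equal_p treats "x + y" and "y + x"
   as equal through the commutative tcc_binary path above; it treats the
   REAL_CSTs 0.0 and -0.0 as equal only when the mode does not honor
   signed zeros; and with OEP_ONLY_CONST set a VAR_DECL compares unequal
   to everything, including itself. */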
2988 \f
2989 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2990 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2991
2992 When in doubt, return 0. */
2993
2994 static int
2995 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2996 {
2997 int unsignedp1, unsignedpo;
2998 tree primarg0, primarg1, primother;
2999 unsigned int correct_width;
3000
3001 if (operand_equal_p (arg0, arg1, 0))
3002 return 1;
3003
3004 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3005 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3006 return 0;
3007
3008 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3009 and see if the inner values are the same. This removes any
3010 signedness comparison, which doesn't matter here. */
3011 primarg0 = arg0, primarg1 = arg1;
3012 STRIP_NOPS (primarg0);
3013 STRIP_NOPS (primarg1);
3014 if (operand_equal_p (primarg0, primarg1, 0))
3015 return 1;
3016
3017 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3018 actual comparison operand, ARG0.
3019
3020 First throw away any conversions to wider types
3021 already present in the operands. */
3022
3023 primarg1 = get_narrower (arg1, &unsignedp1);
3024 primother = get_narrower (other, &unsignedpo);
3025
3026 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3027 if (unsignedp1 == unsignedpo
3028 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3029 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3030 {
3031 tree type = TREE_TYPE (arg0);
3032
3033 /* Make sure shorter operand is extended the right way
3034 to match the longer operand. */
3035 primarg1 = fold_convert (get_signed_or_unsigned_type
3036 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3037
3038 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3039 return 1;
3040 }
3041
3042 return 0;
3043 }
3044 \f
3045 /* See if ARG is an expression that is either a comparison or is performing
3046 arithmetic on comparisons. The comparisons must only be comparing
3047 two different values, which will be stored in *CVAL1 and *CVAL2; if
3048 they are nonzero it means that some operands have already been found.
3049 No variables may be used anywhere else in the expression except in the
3050 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3051 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3052
3053 If this is true, return 1. Otherwise, return zero. */
3054
3055 static int
3056 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3057 {
3058 enum tree_code code = TREE_CODE (arg);
3059 enum tree_code_class class = TREE_CODE_CLASS (code);
3060
3061 /* We can handle some of the tcc_expression cases here. */
3062 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3063 class = tcc_unary;
3064 else if (class == tcc_expression
3065 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3066 || code == COMPOUND_EXPR))
3067 class = tcc_binary;
3068
3069 else if (class == tcc_expression && code == SAVE_EXPR
3070 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3071 {
3072 /* If we've already found a CVAL1 or CVAL2, this expression is
3073 	 too complex to handle.  */
3074 if (*cval1 || *cval2)
3075 return 0;
3076
3077 class = tcc_unary;
3078 *save_p = 1;
3079 }
3080
3081 switch (class)
3082 {
3083 case tcc_unary:
3084 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3085
3086 case tcc_binary:
3087 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3088 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3089 cval1, cval2, save_p));
3090
3091 case tcc_constant:
3092 return 1;
3093
3094 case tcc_expression:
3095 if (code == COND_EXPR)
3096 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3097 cval1, cval2, save_p)
3098 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3099 cval1, cval2, save_p)
3100 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3101 cval1, cval2, save_p));
3102 return 0;
3103
3104 case tcc_comparison:
3105 /* First see if we can handle the first operand, then the second. For
3106 	 the second operand, we know *CVAL1 can't be zero.  Each side of
3107 	 the comparison must be one of the two values; test for the
3108 case where this isn't true by failing if the two operands
3109 are the same. */
3110
3111 if (operand_equal_p (TREE_OPERAND (arg, 0),
3112 TREE_OPERAND (arg, 1), 0))
3113 return 0;
3114
3115 if (*cval1 == 0)
3116 *cval1 = TREE_OPERAND (arg, 0);
3117 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3118 ;
3119 else if (*cval2 == 0)
3120 *cval2 = TREE_OPERAND (arg, 0);
3121 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3122 ;
3123 else
3124 return 0;
3125
3126 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3127 ;
3128 else if (*cval2 == 0)
3129 *cval2 = TREE_OPERAND (arg, 1);
3130 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3131 ;
3132 else
3133 return 0;
3134
3135 return 1;
3136
3137 default:
3138 return 0;
3139 }
3140 }
3141 \f
3142 /* ARG is a tree that is known to contain just arithmetic operations and
3143 comparisons. Evaluate the operations in the tree substituting NEW0 for
3144 any occurrence of OLD0 as an operand of a comparison and likewise for
3145 NEW1 and OLD1. */
3146
3147 static tree
3148 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3149 {
3150 tree type = TREE_TYPE (arg);
3151 enum tree_code code = TREE_CODE (arg);
3152 enum tree_code_class class = TREE_CODE_CLASS (code);
3153
3154 /* We can handle some of the tcc_expression cases here. */
3155 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3156 class = tcc_unary;
3157 else if (class == tcc_expression
3158 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3159 class = tcc_binary;
3160
3161 switch (class)
3162 {
3163 case tcc_unary:
3164 return fold_build1 (code, type,
3165 eval_subst (TREE_OPERAND (arg, 0),
3166 old0, new0, old1, new1));
3167
3168 case tcc_binary:
3169 return fold_build2 (code, type,
3170 eval_subst (TREE_OPERAND (arg, 0),
3171 old0, new0, old1, new1),
3172 eval_subst (TREE_OPERAND (arg, 1),
3173 old0, new0, old1, new1));
3174
3175 case tcc_expression:
3176 switch (code)
3177 {
3178 case SAVE_EXPR:
3179 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3180
3181 case COMPOUND_EXPR:
3182 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3183
3184 case COND_EXPR:
3185 return fold_build3 (code, type,
3186 eval_subst (TREE_OPERAND (arg, 0),
3187 old0, new0, old1, new1),
3188 eval_subst (TREE_OPERAND (arg, 1),
3189 old0, new0, old1, new1),
3190 eval_subst (TREE_OPERAND (arg, 2),
3191 old0, new0, old1, new1));
3192 default:
3193 break;
3194 }
3195 /* Fall through - ??? */
3196
3197 case tcc_comparison:
3198 {
3199 tree arg0 = TREE_OPERAND (arg, 0);
3200 tree arg1 = TREE_OPERAND (arg, 1);
3201
3202 /* We need to check both for exact equality and tree equality. The
3203 former will be true if the operand has a side-effect. In that
3204 case, we know the operand occurred exactly once. */
3205
3206 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3207 arg0 = new0;
3208 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3209 arg0 = new1;
3210
3211 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3212 arg1 = new0;
3213 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3214 arg1 = new1;
3215
3216 return fold_build2 (code, type, arg0, arg1);
3217 }
3218
3219 default:
3220 return arg;
3221 }
3222 }
3223 \f
3224 /* Return a tree for the case when the result of an expression is RESULT
3225 converted to TYPE and OMITTED was previously an operand of the expression
3226 but is now not needed (e.g., we folded OMITTED * 0).
3227
3228 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3229 the conversion of RESULT to TYPE. */
3230
3231 tree
3232 omit_one_operand (tree type, tree result, tree omitted)
3233 {
3234 tree t = fold_convert (type, result);
3235
3236 if (TREE_SIDE_EFFECTS (omitted))
3237 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3238
3239 return non_lvalue (t);
3240 }
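
/* Usage sketch (illustrative; CALL is a hypothetical CALL_EXPR with
   side effects): when folding "call * 0" a caller can use

     omit_one_operand (type, integer_zero_node, call)

   which keeps the call for its side effects by returning
   COMPOUND_EXPR <call, 0> rather than plain 0. */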
3241
3242 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3243
3244 static tree
3245 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3246 {
3247 tree t = fold_convert (type, result);
3248
3249 if (TREE_SIDE_EFFECTS (omitted))
3250 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3251
3252 return pedantic_non_lvalue (t);
3253 }
3254
3255 /* Return a tree for the case when the result of an expression is RESULT
3256 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3257 of the expression but are now not needed.
3258
3259 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3260 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3261 evaluated before OMITTED2. Otherwise, if neither has side effects,
3262 just do the conversion of RESULT to TYPE. */
3263
3264 tree
3265 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3266 {
3267 tree t = fold_convert (type, result);
3268
3269 if (TREE_SIDE_EFFECTS (omitted2))
3270 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3271 if (TREE_SIDE_EFFECTS (omitted1))
3272 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3273
3274 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3275 }
3276
3277 \f
3278 /* Return a simplified tree node for the truth-negation of ARG. This
3279 never alters ARG itself. We assume that ARG is an operation that
3280 returns a truth value (0 or 1).
3281
3282 FIXME: one would think we would fold the result, but it causes
3283 problems with the dominator optimizer. */
3284
3285 tree
3286 fold_truth_not_expr (tree arg)
3287 {
3288 tree type = TREE_TYPE (arg);
3289 enum tree_code code = TREE_CODE (arg);
3290
3291 /* If this is a comparison, we can simply invert it, except for
3292 floating-point non-equality comparisons, in which case we just
3293 enclose a TRUTH_NOT_EXPR around what we have. */
3294
3295 if (TREE_CODE_CLASS (code) == tcc_comparison)
3296 {
3297 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3298 if (FLOAT_TYPE_P (op_type)
3299 && flag_trapping_math
3300 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3301 && code != NE_EXPR && code != EQ_EXPR)
3302 return NULL_TREE;
3303 else
3304 {
3305 code = invert_tree_comparison (code,
3306 HONOR_NANS (TYPE_MODE (op_type)));
3307 if (code == ERROR_MARK)
3308 return NULL_TREE;
3309 else
3310 return build2 (code, type,
3311 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3312 }
3313 }
3314
3315 switch (code)
3316 {
3317 case INTEGER_CST:
3318 return constant_boolean_node (integer_zerop (arg), type);
3319
3320 case TRUTH_AND_EXPR:
3321 return build2 (TRUTH_OR_EXPR, type,
3322 invert_truthvalue (TREE_OPERAND (arg, 0)),
3323 invert_truthvalue (TREE_OPERAND (arg, 1)));
3324
3325 case TRUTH_OR_EXPR:
3326 return build2 (TRUTH_AND_EXPR, type,
3327 invert_truthvalue (TREE_OPERAND (arg, 0)),
3328 invert_truthvalue (TREE_OPERAND (arg, 1)));
3329
3330 case TRUTH_XOR_EXPR:
3331 /* Here we can invert either operand. We invert the first operand
3332 unless the second operand is a TRUTH_NOT_EXPR in which case our
3333 result is the XOR of the first operand with the inside of the
3334 negation of the second operand. */
3335
3336 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3337 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3338 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3339 else
3340 return build2 (TRUTH_XOR_EXPR, type,
3341 invert_truthvalue (TREE_OPERAND (arg, 0)),
3342 TREE_OPERAND (arg, 1));
3343
3344 case TRUTH_ANDIF_EXPR:
3345 return build2 (TRUTH_ORIF_EXPR, type,
3346 invert_truthvalue (TREE_OPERAND (arg, 0)),
3347 invert_truthvalue (TREE_OPERAND (arg, 1)));
3348
3349 case TRUTH_ORIF_EXPR:
3350 return build2 (TRUTH_ANDIF_EXPR, type,
3351 invert_truthvalue (TREE_OPERAND (arg, 0)),
3352 invert_truthvalue (TREE_OPERAND (arg, 1)));
3353
3354 case TRUTH_NOT_EXPR:
3355 return TREE_OPERAND (arg, 0);
3356
3357 case COND_EXPR:
3358 {
3359 tree arg1 = TREE_OPERAND (arg, 1);
3360 tree arg2 = TREE_OPERAND (arg, 2);
3361 /* A COND_EXPR may have a throw as one operand, which
3362 then has void type. Just leave void operands
3363 as they are. */
3364 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3365 VOID_TYPE_P (TREE_TYPE (arg1))
3366 ? arg1 : invert_truthvalue (arg1),
3367 VOID_TYPE_P (TREE_TYPE (arg2))
3368 ? arg2 : invert_truthvalue (arg2));
3369 }
3370
3371 case COMPOUND_EXPR:
3372 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3373 invert_truthvalue (TREE_OPERAND (arg, 1)));
3374
3375 case NON_LVALUE_EXPR:
3376 return invert_truthvalue (TREE_OPERAND (arg, 0));
3377
3378 case NOP_EXPR:
3379 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3380 return build1 (TRUTH_NOT_EXPR, type, arg);
3381
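      /* ... fall through ... */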
3382 case CONVERT_EXPR:
3383 case FLOAT_EXPR:
3384 return build1 (TREE_CODE (arg), type,
3385 invert_truthvalue (TREE_OPERAND (arg, 0)));
3386
3387 case BIT_AND_EXPR:
3388 if (!integer_onep (TREE_OPERAND (arg, 1)))
3389 break;
3390 return build2 (EQ_EXPR, type, arg,
3391 build_int_cst (type, 0));
3392
3393 case SAVE_EXPR:
3394 return build1 (TRUTH_NOT_EXPR, type, arg);
3395
3396 case CLEANUP_POINT_EXPR:
3397 return build1 (CLEANUP_POINT_EXPR, type,
3398 invert_truthvalue (TREE_OPERAND (arg, 0)));
3399
3400 default:
3401 break;
3402 }
3403
3404 return NULL_TREE;
3405 }
3406
3407 /* Return a simplified tree node for the truth-negation of ARG. This
3408 never alters ARG itself. We assume that ARG is an operation that
3409 returns a truth value (0 or 1).
3410
3411 FIXME: one would think we would fold the result, but it causes
3412 problems with the dominator optimizer. */
3413
3414 tree
3415 invert_truthvalue (tree arg)
3416 {
3417 tree tem;
3418
3419 if (TREE_CODE (arg) == ERROR_MARK)
3420 return arg;
3421
3422 tem = fold_truth_not_expr (arg);
3423 if (!tem)
3424 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3425
3426 return tem;
3427 }
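
/* Example (illustrative): inverting "a && b" (TRUTH_ANDIF_EXPR) goes
   through fold_truth_not_expr above and yields "!a || !b"
   (TRUTH_ORIF_EXPR of the inverted operands), the usual De Morgan
   form. */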
3428
3429 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3430 operands are another bit-wise operation with a common input. If so,
3431 distribute the bit operations to save an operation and possibly two if
3432 constants are involved. For example, convert
3433 (A | B) & (A | C) into A | (B & C)
3434 Further simplification will occur if B and C are constants.
3435
3436 If this optimization cannot be done, 0 will be returned. */
3437
3438 static tree
3439 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3440 {
3441 tree common;
3442 tree left, right;
3443
3444 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3445 || TREE_CODE (arg0) == code
3446 || (TREE_CODE (arg0) != BIT_AND_EXPR
3447 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3448 return 0;
3449
3450 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3451 {
3452 common = TREE_OPERAND (arg0, 0);
3453 left = TREE_OPERAND (arg0, 1);
3454 right = TREE_OPERAND (arg1, 1);
3455 }
3456 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3457 {
3458 common = TREE_OPERAND (arg0, 0);
3459 left = TREE_OPERAND (arg0, 1);
3460 right = TREE_OPERAND (arg1, 0);
3461 }
3462 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3463 {
3464 common = TREE_OPERAND (arg0, 1);
3465 left = TREE_OPERAND (arg0, 0);
3466 right = TREE_OPERAND (arg1, 1);
3467 }
3468 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3469 {
3470 common = TREE_OPERAND (arg0, 1);
3471 left = TREE_OPERAND (arg0, 0);
3472 right = TREE_OPERAND (arg1, 0);
3473 }
3474 else
3475 return 0;
3476
3477 return fold_build2 (TREE_CODE (arg0), type, common,
3478 fold_build2 (code, type, left, right));
3479 }
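
/* Worked example (illustrative sketch): for code == BIT_AND_EXPR with
   arg0 = (x | 3) and arg1 = (x | 5), the common operand is x and the
   result is x | (3 & 5), which folds further to x | 1 -- one AND saved
   and the constant part folded away entirely. */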
3480
3481 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3482    simplify a binary operation with code CODE.  This optimization is unsafe.  */
3483 static tree
3484 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3485 {
3486 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3487 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3488
3489 /* (A / C) +- (B / C) -> (A +- B) / C. */
3490 if (mul0 == mul1
3491 && operand_equal_p (TREE_OPERAND (arg0, 1),
3492 TREE_OPERAND (arg1, 1), 0))
3493 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3494 fold_build2 (code, type,
3495 TREE_OPERAND (arg0, 0),
3496 TREE_OPERAND (arg1, 0)),
3497 TREE_OPERAND (arg0, 1));
3498
3499 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3500 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3501 TREE_OPERAND (arg1, 0), 0)
3502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3503 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3504 {
3505 REAL_VALUE_TYPE r0, r1;
3506 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3507 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3508 if (!mul0)
3509 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3510 if (!mul1)
3511 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3512 real_arithmetic (&r0, code, &r0, &r1);
3513 return fold_build2 (MULT_EXPR, type,
3514 TREE_OPERAND (arg0, 0),
3515 build_real (type, r0));
3516 }
3517
3518 return NULL_TREE;
3519 }
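
/* Worked example (illustrative sketch): for a/2.0 + a/4.0 the second
   pattern computes 1/2.0 + 1/4.0 = 0.75 at compile time and returns
   a * 0.75. The reciprocals here happen to be exact, but for a divisor
   such as 3.0 they are not, which is why this transformation is
   unsafe. */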
3520 \f
3521 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3522 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3523
3524 static tree
3525 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3526 int unsignedp)
3527 {
3528 tree result;
3529
3530 if (bitpos == 0)
3531 {
3532 tree size = TYPE_SIZE (TREE_TYPE (inner));
3533 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3534 || POINTER_TYPE_P (TREE_TYPE (inner)))
3535 && host_integerp (size, 0)
3536 && tree_low_cst (size, 0) == bitsize)
3537 return fold_convert (type, inner);
3538 }
3539
3540 result = build3 (BIT_FIELD_REF, type, inner,
3541 size_int (bitsize), bitsize_int (bitpos));
3542
3543 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3544
3545 return result;
3546 }
3547
3548 /* Optimize a bit-field compare.
3549
3550    There are two cases: the first is a compare against a constant and the
3551 second is a comparison of two items where the fields are at the same
3552 bit position relative to the start of a chunk (byte, halfword, word)
3553 large enough to contain it. In these cases we can avoid the shift
3554 implicit in bitfield extractions.
3555
3556 For constants, we emit a compare of the shifted constant with the
3557 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3558 compared. For two fields at the same position, we do the ANDs with the
3559 similar mask and compare the result of the ANDs.
3560
3561 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3562 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3563 are the left and right operands of the comparison, respectively.
3564
3565 If the optimization described above can be done, we return the resulting
3566 tree. Otherwise we return zero. */
3567
3568 static tree
3569 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3570 tree lhs, tree rhs)
3571 {
3572 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3573 tree type = TREE_TYPE (lhs);
3574 tree signed_type, unsigned_type;
3575 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3576 enum machine_mode lmode, rmode, nmode;
3577 int lunsignedp, runsignedp;
3578 int lvolatilep = 0, rvolatilep = 0;
3579 tree linner, rinner = NULL_TREE;
3580 tree mask;
3581 tree offset;
3582
3583 /* Get all the information about the extractions being done. If the bit size
3584      is the same as the size of the underlying object, we aren't doing an
3585 extraction at all and so can do nothing. We also don't want to
3586 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3587 then will no longer be able to replace it. */
3588 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3589 &lunsignedp, &lvolatilep, false);
3590 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3591 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3592 return 0;
3593
3594 if (!const_p)
3595 {
3596 /* If this is not a constant, we can only do something if bit positions,
3597 sizes, and signedness are the same. */
3598 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3599 &runsignedp, &rvolatilep, false);
3600
3601 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3602 || lunsignedp != runsignedp || offset != 0
3603 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3604 return 0;
3605 }
3606
3607 /* See if we can find a mode to refer to this field. We should be able to,
3608 but fail if we can't. */
3609 nmode = get_best_mode (lbitsize, lbitpos,
3610 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3611 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3612 TYPE_ALIGN (TREE_TYPE (rinner))),
3613 word_mode, lvolatilep || rvolatilep);
3614 if (nmode == VOIDmode)
3615 return 0;
3616
3617 /* Set signed and unsigned types of the precision of this mode for the
3618 shifts below. */
3619 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3620 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3621
3622 /* Compute the bit position and size for the new reference and our offset
3623 within it. If the new reference is the same size as the original, we
3624 won't optimize anything, so return zero. */
3625 nbitsize = GET_MODE_BITSIZE (nmode);
3626 nbitpos = lbitpos & ~ (nbitsize - 1);
3627 lbitpos -= nbitpos;
3628 if (nbitsize == lbitsize)
3629 return 0;
3630
3631 if (BYTES_BIG_ENDIAN)
3632 lbitpos = nbitsize - lbitsize - lbitpos;
3633
3634 /* Make the mask to be used against the extracted field. */
3635 mask = build_int_cst_type (unsigned_type, -1);
3636 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3637 mask = const_binop (RSHIFT_EXPR, mask,
3638 size_int (nbitsize - lbitsize - lbitpos), 0);
3639
3640 if (! const_p)
3641 /* If not comparing with constant, just rework the comparison
3642 and return. */
3643 return fold_build2 (code, compare_type,
3644 fold_build2 (BIT_AND_EXPR, unsigned_type,
3645 make_bit_field_ref (linner,
3646 unsigned_type,
3647 nbitsize, nbitpos,
3648 1),
3649 mask),
3650 fold_build2 (BIT_AND_EXPR, unsigned_type,
3651 make_bit_field_ref (rinner,
3652 unsigned_type,
3653 nbitsize, nbitpos,
3654 1),
3655 mask));
3656
3657 /* Otherwise, we are handling the constant case. See if the constant is too
3658 big for the field. Warn and return a tree for 0 (false) if so. We do
3659 this not only for its own sake, but to avoid having to test for this
3660 error case below. If we didn't, we might generate wrong code.
3661
3662 For unsigned fields, the constant shifted right by the field length should
3663 be all zero. For signed fields, the high-order bits should agree with
3664 the sign bit. */
3665
3666 if (lunsignedp)
3667 {
3668 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3669 fold_convert (unsigned_type, rhs),
3670 size_int (lbitsize), 0)))
3671 {
3672 warning (0, "comparison is always %d due to width of bit-field",
3673 code == NE_EXPR);
3674 return constant_boolean_node (code == NE_EXPR, compare_type);
3675 }
3676 }
3677 else
3678 {
3679 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3680 size_int (lbitsize - 1), 0);
3681 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3682 {
3683 warning (0, "comparison is always %d due to width of bit-field",
3684 code == NE_EXPR);
3685 return constant_boolean_node (code == NE_EXPR, compare_type);
3686 }
3687 }
3688
3689 /* Single-bit compares should always be against zero. */
3690 if (lbitsize == 1 && ! integer_zerop (rhs))
3691 {
3692 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3693 rhs = build_int_cst (type, 0);
3694 }
3695
3696 /* Make a new bit-field reference for the left-hand side, then shift
3697 the constant over the appropriate number of bits and mask it with
3698 the computed mask (in case this was a signed field). */
3699 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3700 if (lvolatilep)
3701 {
3702 TREE_SIDE_EFFECTS (lhs) = 1;
3703 TREE_THIS_VOLATILE (lhs) = 1;
3704 }
3705
3706 rhs = const_binop (BIT_AND_EXPR,
3707 const_binop (LSHIFT_EXPR,
3708 fold_convert (unsigned_type, rhs),
3709 size_int (lbitpos), 0),
3710 mask, 0);
3711
3712 return build2 (code, compare_type,
3713 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3714 rhs);
3715 }
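/* An illustrative sketch of the transformation (assumed field layout,
   not taken from the sources): given

     struct s { unsigned f : 3; } x;

   where x.f occupies LBITSIZE == 3 bits at LBITPOS within a word-sized
   unit W, the comparison "x.f == 5" is rewritten along the lines of

     (W & mask) == ((5 << lbitpos) & mask)

   with MASK being the 3-bit mask built above, positioned at LBITPOS:
   one word load, one AND and one compare in place of an explicit
   bit-field extraction.  */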
3716 \f
3717 /* Subroutine for fold_truthop: decode a field reference.
3718
3719 If EXP is a comparison reference, we return the innermost reference.
3720
3721 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3722 set to the starting bit number.
3723
3724 If the innermost field can be completely contained in a mode-sized
3725 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3726
3727 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3728 otherwise it is not changed.
3729
3730 *PUNSIGNEDP is set to the signedness of the field.
3731
3732 *PMASK is set to the mask used. This is either contained in a
3733 BIT_AND_EXPR or derived from the width of the field.
3734
3735 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3736
3737 Return 0 if this is not a component reference or is one that we can't
3738 do anything with. */
3739
3740 static tree
3741 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3742 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3743 int *punsignedp, int *pvolatilep,
3744 tree *pmask, tree *pand_mask)
3745 {
3746 tree outer_type = 0;
3747 tree and_mask = 0;
3748 tree mask, inner, offset;
3749 tree unsigned_type;
3750 unsigned int precision;
3751
3752 /* All the optimizations using this function assume integer fields.
3753 There are problems with FP fields since the type_for_size call
3754 below can fail for, e.g., XFmode. */
3755 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3756 return 0;
3757
3758 /* We are interested in the bare arrangement of bits, so strip everything
3759 that doesn't affect the machine mode. However, record the type of the
3760 outermost expression if it may matter below. */
3761 if (TREE_CODE (exp) == NOP_EXPR
3762 || TREE_CODE (exp) == CONVERT_EXPR
3763 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3764 outer_type = TREE_TYPE (exp);
3765 STRIP_NOPS (exp);
3766
3767 if (TREE_CODE (exp) == BIT_AND_EXPR)
3768 {
3769 and_mask = TREE_OPERAND (exp, 1);
3770 exp = TREE_OPERAND (exp, 0);
3771 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3772 if (TREE_CODE (and_mask) != INTEGER_CST)
3773 return 0;
3774 }
3775
3776 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3777 punsignedp, pvolatilep, false);
3778 if ((inner == exp && and_mask == 0)
3779 || *pbitsize < 0 || offset != 0
3780 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3781 return 0;
3782
3783 /* If the number of bits in the reference is the same as the bitsize of
3784 the outer type, then the outer type gives the signedness. Otherwise
3785 (in case of a small bitfield) the signedness is unchanged. */
3786 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3787 *punsignedp = TYPE_UNSIGNED (outer_type);
3788
3789 /* Compute the mask to access the bitfield. */
3790 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3791 precision = TYPE_PRECISION (unsigned_type);
3792
3793 mask = build_int_cst_type (unsigned_type, -1);
3794
3795 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3796 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3797
3798 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3799 if (and_mask != 0)
3800 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3801 fold_convert (unsigned_type, and_mask), mask);
3802
3803 *pmask = mask;
3804 *pand_mask = and_mask;
3805 return inner;
3806 }
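/* A worked example of the mask computation above (illustrative
   numbers): for *PBITSIZE == 5, type_for_size presumably yields an
   8-bit unsigned type, so PRECISION == 8 and

     mask = (0xff << 3) >> 3  ==  0x1f,

   five ones in the low-order bits.  If the reference was wrapped in
   "x & 0x0b", the BIT_AND_EXPR constant is folded in and *PMASK
   becomes 0x0b & 0x1f == 0x0b.  */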
3807
3808 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3809 bit positions. */
3810
3811 static int
3812 all_ones_mask_p (tree mask, int size)
3813 {
3814 tree type = TREE_TYPE (mask);
3815 unsigned int precision = TYPE_PRECISION (type);
3816 tree tmask;
3817
3818 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3819
3820 return
3821 tree_int_cst_equal (mask,
3822 const_binop (RSHIFT_EXPR,
3823 const_binop (LSHIFT_EXPR, tmask,
3824 size_int (precision - size),
3825 0),
3826 size_int (precision - size), 0));
3827 }
3828
3829 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3830 represents the sign bit of EXP's type. If EXP represents a sign
3831 or zero extension, also test VAL against the unextended type.
3832 The return value is the (sub)expression whose sign bit is VAL,
3833 or NULL_TREE otherwise. */
3834
3835 static tree
3836 sign_bit_p (tree exp, tree val)
3837 {
3838 unsigned HOST_WIDE_INT mask_lo, lo;
3839 HOST_WIDE_INT mask_hi, hi;
3840 int width;
3841 tree t;
3842
3843 /* Tree EXP must have an integral type. */
3844 t = TREE_TYPE (exp);
3845 if (! INTEGRAL_TYPE_P (t))
3846 return NULL_TREE;
3847
3848 /* Tree VAL must be an integer constant. */
3849 if (TREE_CODE (val) != INTEGER_CST
3850 || TREE_OVERFLOW (val))
3851 return NULL_TREE;
3852
3853 width = TYPE_PRECISION (t);
3854 if (width > HOST_BITS_PER_WIDE_INT)
3855 {
3856 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3857 lo = 0;
3858
3859 mask_hi = ((unsigned HOST_WIDE_INT) -1
3860 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3861 mask_lo = -1;
3862 }
3863 else
3864 {
3865 hi = 0;
3866 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3867
3868 mask_hi = 0;
3869 mask_lo = ((unsigned HOST_WIDE_INT) -1
3870 >> (HOST_BITS_PER_WIDE_INT - width));
3871 }
3872
3873 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3874 treat VAL as if it were unsigned. */
3875 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3876 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3877 return exp;
3878
3879 /* Handle extension from a narrower type. */
3880 if (TREE_CODE (exp) == NOP_EXPR
3881 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3882 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3883
3884 return NULL_TREE;
3885 }
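/* Illustrative numbers for sign_bit_p (assumed widths): for a type of
   width 8, lo == 0x80 and mask_lo == 0xff, so VAL matches exactly
   when its low byte is 0x80, the sign bit of a signed char.  Assuming
   a host where HOST_BITS_PER_WIDE_INT is 32, a width of 40 puts the
   sign bit in the high word instead: hi == 0x80, mask_hi == 0xff and
   mask_lo == 0xffffffff.  */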
3886
3887 /* Subroutine for fold_truthop: determine if an operand is simple enough
3888 to be evaluated unconditionally. */
3889
3890 static int
3891 simple_operand_p (tree exp)
3892 {
3893 /* Strip any conversions that don't change the machine mode. */
3894 STRIP_NOPS (exp);
3895
3896 return (CONSTANT_CLASS_P (exp)
3897 || TREE_CODE (exp) == SSA_NAME
3898 || (DECL_P (exp)
3899 && ! TREE_ADDRESSABLE (exp)
3900 && ! TREE_THIS_VOLATILE (exp)
3901 && ! DECL_NONLOCAL (exp)
3902 /* Don't regard global variables as simple. They may be
3903 allocated in ways unknown to the compiler (shared memory,
3904 #pragma weak, etc). */
3905 && ! TREE_PUBLIC (exp)
3906 && ! DECL_EXTERNAL (exp)
3907 /* Loading a static variable is unduly expensive, but global
3908 registers aren't expensive. */
3909 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3910 }
3911 \f
3912 /* The following functions are subroutines to fold_range_test and allow it to
3913 try to change a logical combination of comparisons into a range test.
3914
3915 For example, both
3916 X == 2 || X == 3 || X == 4 || X == 5
3917 and
3918 X >= 2 && X <= 5
3919 are converted to
3920 (unsigned) (X - 2) <= 3
3921
3922 We describe each set of comparisons as being either inside or outside
3923 a range, using a variable named like IN_P, and then describe the
3924 range with a lower and upper bound. If one of the bounds is omitted,
3925 it represents either the highest or lowest value of the type.
3926
3927 In the comments below, we represent a range by two numbers in brackets
3928 preceded by a "+" to designate being inside that range, or a "-" to
3929 designate being outside that range, so the condition can be inverted by
3930 flipping the prefix. An omitted bound is represented by a "-". For
3931 example, "- [-, 10]" means being outside the range starting at the lowest
3932 possible value and ending at 10, in other words, being greater than 10.
3933 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3934 always false.
3935
3936 We set up things so that the missing bounds are handled in a consistent
3937 manner so neither a missing bound nor "true" and "false" need to be
3938 handled using a special case. */
3939
3940 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3941 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3942 and UPPER1_P are nonzero if the respective argument is an upper bound
3943 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3944 must be specified for a comparison. ARG1 will be converted to ARG0's
3945 type if both are specified. */
3946
3947 static tree
3948 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3949 tree arg1, int upper1_p)
3950 {
3951 tree tem;
3952 int result;
3953 int sgn0, sgn1;
3954
3955 /* If neither arg represents infinity, do the normal operation.
3956 Else, if not a comparison, return infinity. Else handle the special
3957 comparison rules. Note that most of the cases below won't occur, but
3958 are handled for consistency. */
3959
3960 if (arg0 != 0 && arg1 != 0)
3961 {
3962 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3963 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3964 STRIP_NOPS (tem);
3965 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3966 }
3967
3968 if (TREE_CODE_CLASS (code) != tcc_comparison)
3969 return 0;
3970
3971 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3972 for neither. In real maths, we cannot assume open-ended ranges are
3973 the same. But this is computer arithmetic, where numbers are finite.
3974 We can therefore stand in for each missing bound with a value Z,
3975 Z being greater in magnitude than any representable number. This
3976 permits us to treat unbounded ranges as equal. */
3977 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3978 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3979 switch (code)
3980 {
3981 case EQ_EXPR:
3982 result = sgn0 == sgn1;
3983 break;
3984 case NE_EXPR:
3985 result = sgn0 != sgn1;
3986 break;
3987 case LT_EXPR:
3988 result = sgn0 < sgn1;
3989 break;
3990 case LE_EXPR:
3991 result = sgn0 <= sgn1;
3992 break;
3993 case GT_EXPR:
3994 result = sgn0 > sgn1;
3995 break;
3996 case GE_EXPR:
3997 result = sgn0 >= sgn1;
3998 break;
3999 default:
4000 gcc_unreachable ();
4001 }
4002
4003 return constant_boolean_node (result, type);
4004 }
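/* For instance (a hypothetical call): comparing a missing lower bound
   against the constant 5,

     range_binop (LT_EXPR, type, NULL_TREE, 0, five, 0)

   reaches the SGN0/SGN1 code with sgn0 == -1 and sgn1 == 0, so the
   "infinite" lower bound compares less than any finite value and the
   result is boolean true.  */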
4005 \f
4006 /* Given EXP, a logical expression, set the range it is testing into
4007 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4008 actually being tested. *PLOW and *PHIGH will be made of the same
4009 type as the returned expression. If EXP is not a comparison, we
4010 will most likely not be returning a useful value and range. Set
4011 *STRICT_OVERFLOW_P to true if the return value is only valid
4012 because signed overflow is undefined; otherwise, do not change
4013 *STRICT_OVERFLOW_P. */
4014
4015 static tree
4016 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4017 bool *strict_overflow_p)
4018 {
4019 enum tree_code code;
4020 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4021 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4022 int in_p, n_in_p;
4023 tree low, high, n_low, n_high;
4024
4025 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4026 and see if we can refine the range. Some of the cases below may not
4027 happen, but it doesn't seem worth worrying about this. We "continue"
4028 the outer loop when we've changed something; otherwise we "break"
4029 the switch, which will "break" the while. */
4030
4031 in_p = 0;
4032 low = high = build_int_cst (TREE_TYPE (exp), 0);
4033
4034 while (1)
4035 {
4036 code = TREE_CODE (exp);
4037 exp_type = TREE_TYPE (exp);
4038
4039 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4040 {
4041 if (TREE_OPERAND_LENGTH (exp) > 0)
4042 arg0 = TREE_OPERAND (exp, 0);
4043 if (TREE_CODE_CLASS (code) == tcc_comparison
4044 || TREE_CODE_CLASS (code) == tcc_unary
4045 || TREE_CODE_CLASS (code) == tcc_binary)
4046 arg0_type = TREE_TYPE (arg0);
4047 if (TREE_CODE_CLASS (code) == tcc_binary
4048 || TREE_CODE_CLASS (code) == tcc_comparison
4049 || (TREE_CODE_CLASS (code) == tcc_expression
4050 && TREE_OPERAND_LENGTH (exp) > 1))
4051 arg1 = TREE_OPERAND (exp, 1);
4052 }
4053
4054 switch (code)
4055 {
4056 case TRUTH_NOT_EXPR:
4057 in_p = ! in_p, exp = arg0;
4058 continue;
4059
4060 case EQ_EXPR: case NE_EXPR:
4061 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4062 /* We can only do something if the range is testing for zero
4063 and if the second operand is an integer constant. Note that
4064 saying something is "in" the range we make is done by
4065 complementing IN_P, since it starts out cleared for the initial
4066 case of being not equal to zero; "out" means leaving it alone. */
4067 if (low == 0 || high == 0
4068 || ! integer_zerop (low) || ! integer_zerop (high)
4069 || TREE_CODE (arg1) != INTEGER_CST)
4070 break;
4071
4072 switch (code)
4073 {
4074 case NE_EXPR: /* - [c, c] */
4075 low = high = arg1;
4076 break;
4077 case EQ_EXPR: /* + [c, c] */
4078 in_p = ! in_p, low = high = arg1;
4079 break;
4080 case GT_EXPR: /* - [-, c] */
4081 low = 0, high = arg1;
4082 break;
4083 case GE_EXPR: /* + [c, -] */
4084 in_p = ! in_p, low = arg1, high = 0;
4085 break;
4086 case LT_EXPR: /* - [c, -] */
4087 low = arg1, high = 0;
4088 break;
4089 case LE_EXPR: /* + [-, c] */
4090 in_p = ! in_p, low = 0, high = arg1;
4091 break;
4092 default:
4093 gcc_unreachable ();
4094 }
4095
4096 /* If this is an unsigned comparison, we also know that EXP is
4097 greater than or equal to zero. We base the range tests we make
4098 on that fact, so we record it here so we can parse existing
4099 range tests. We test arg0_type since often the return type
4100 of, e.g. EQ_EXPR, is boolean. */
4101 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4102 {
4103 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4104 in_p, low, high, 1,
4105 build_int_cst (arg0_type, 0),
4106 NULL_TREE))
4107 break;
4108
4109 in_p = n_in_p, low = n_low, high = n_high;
4110
4111 /* If the high bound is missing, but we have a nonzero low
4112 bound, reverse the range so it goes from zero to the low bound
4113 minus 1. */
4114 if (high == 0 && low && ! integer_zerop (low))
4115 {
4116 in_p = ! in_p;
4117 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4118 integer_one_node, 0);
4119 low = build_int_cst (arg0_type, 0);
4120 }
4121 }
4122
4123 exp = arg0;
4124 continue;
4125
4126 case NEGATE_EXPR:
4127 /* (-x) IN [a,b] -> x in [-b, -a] */
4128 n_low = range_binop (MINUS_EXPR, exp_type,
4129 build_int_cst (exp_type, 0),
4130 0, high, 1);
4131 n_high = range_binop (MINUS_EXPR, exp_type,
4132 build_int_cst (exp_type, 0),
4133 0, low, 0);
4134 low = n_low, high = n_high;
4135 exp = arg0;
4136 continue;
4137
4138 case BIT_NOT_EXPR:
4139 /* ~ X -> -X - 1 */
4140 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4141 build_int_cst (exp_type, 1));
4142 continue;
4143
4144 case PLUS_EXPR: case MINUS_EXPR:
4145 if (TREE_CODE (arg1) != INTEGER_CST)
4146 break;
4147
4148 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4149 move a constant to the other side. */
4150 if (!TYPE_UNSIGNED (arg0_type)
4151 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4152 break;
4153
4154 /* If EXP is signed, any overflow in the computation is undefined,
4155 so we don't worry about it so long as our computations on
4156 the bounds don't overflow. For unsigned, overflow is defined
4157 and this is exactly the right thing. */
4158 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4159 arg0_type, low, 0, arg1, 0);
4160 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4161 arg0_type, high, 1, arg1, 0);
4162 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4163 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4164 break;
4165
4166 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4167 *strict_overflow_p = true;
4168
4169 /* Check for an unsigned range which has wrapped around the maximum
4170 value thus making n_high < n_low, and normalize it. */
4171 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4172 {
4173 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4174 integer_one_node, 0);
4175 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4176 integer_one_node, 0);
4177
4178 /* If the range is of the form +/- [ x+1, x ], we won't
4179 be able to normalize it. But then, it represents the
4180 whole range or the empty set, so make it
4181 +/- [ -, - ]. */
4182 if (tree_int_cst_equal (n_low, low)
4183 && tree_int_cst_equal (n_high, high))
4184 low = high = 0;
4185 else
4186 in_p = ! in_p;
4187 }
4188 else
4189 low = n_low, high = n_high;
4190
4191 exp = arg0;
4192 continue;
4193
4194 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4195 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4196 break;
4197
4198 if (! INTEGRAL_TYPE_P (arg0_type)
4199 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4200 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4201 break;
4202
4203 n_low = low, n_high = high;
4204
4205 if (n_low != 0)
4206 n_low = fold_convert (arg0_type, n_low);
4207
4208 if (n_high != 0)
4209 n_high = fold_convert (arg0_type, n_high);
4210
4211
4212 /* If we're converting arg0 from an unsigned type to exp's
4213 signed type, we will be doing the comparison as unsigned.
4214 The tests above have already verified that LOW and HIGH
4215 are both positive.
4216
4217 So we have to ensure that we will handle large unsigned
4218 values the same way that the current signed bounds treat
4219 negative values. */
4220
4221 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4222 {
4223 tree high_positive;
4224 tree equiv_type = lang_hooks.types.type_for_mode
4225 (TYPE_MODE (arg0_type), 1);
4226
4227 /* A range without an upper bound is, naturally, unbounded.
4228 Since convert would have cropped a very large value, use
4229 the max value for the destination type. */
4230 high_positive
4231 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4232 : TYPE_MAX_VALUE (arg0_type);
4233
4234 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4235 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4236 fold_convert (arg0_type,
4237 high_positive),
4238 build_int_cst (arg0_type, 1));
4239
4240 /* If the low bound is specified, "and" the range with the
4241 range for which the original unsigned value will be
4242 positive. */
4243 if (low != 0)
4244 {
4245 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4246 1, n_low, n_high, 1,
4247 fold_convert (arg0_type,
4248 integer_zero_node),
4249 high_positive))
4250 break;
4251
4252 in_p = (n_in_p == in_p);
4253 }
4254 else
4255 {
4256 /* Otherwise, "or" the range with the range of the input
4257 that will be interpreted as negative. */
4258 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4259 0, n_low, n_high, 1,
4260 fold_convert (arg0_type,
4261 integer_zero_node),
4262 high_positive))
4263 break;
4264
4265 in_p = (in_p != n_in_p);
4266 }
4267 }
4268
4269 exp = arg0;
4270 low = n_low, high = n_high;
4271 continue;
4272
4273 default:
4274 break;
4275 }
4276
4277 break;
4278 }
4279
4280 /* If EXP is a constant, we can evaluate whether this is true or false. */
4281 if (TREE_CODE (exp) == INTEGER_CST)
4282 {
4283 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4284 exp, 0, low, 0))
4285 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4286 exp, 1, high, 1)));
4287 low = high = 0;
4288 exp = 0;
4289 }
4290
4291 *pin_p = in_p, *plow = low, *phigh = high;
4292 return exp;
4293 }
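/* A sketch of make_range in action (assumed 32-bit unsigned X): for
   the expression

     (X - 2) <= 3

   the LE_EXPR case first records + [-, 3] for X - 2, which the
   unsignedness handling tightens to + [0, 3]; the MINUS_EXPR case
   then moves the constant into the bounds, returning X with the
   equivalent range + [2, 5].  */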
4294 \f
4295 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4296 type, TYPE, return an expression to test if EXP is in (or out of, depending
4297 on IN_P) the range. Return 0 if the test couldn't be created. */
4298
4299 static tree
4300 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4301 {
4302 tree etype = TREE_TYPE (exp);
4303 tree value;
4304
4305 #ifdef HAVE_canonicalize_funcptr_for_compare
4306 /* Disable this optimization for function pointer expressions
4307 on targets that require function pointer canonicalization. */
4308 if (HAVE_canonicalize_funcptr_for_compare
4309 && TREE_CODE (etype) == POINTER_TYPE
4310 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4311 return NULL_TREE;
4312 #endif
4313
4314 if (! in_p)
4315 {
4316 value = build_range_check (type, exp, 1, low, high);
4317 if (value != 0)
4318 return invert_truthvalue (value);
4319
4320 return 0;
4321 }
4322
4323 if (low == 0 && high == 0)
4324 return build_int_cst (type, 1);
4325
4326 if (low == 0)
4327 return fold_build2 (LE_EXPR, type, exp,
4328 fold_convert (etype, high));
4329
4330 if (high == 0)
4331 return fold_build2 (GE_EXPR, type, exp,
4332 fold_convert (etype, low));
4333
4334 if (operand_equal_p (low, high, 0))
4335 return fold_build2 (EQ_EXPR, type, exp,
4336 fold_convert (etype, low));
4337
4338 if (integer_zerop (low))
4339 {
4340 if (! TYPE_UNSIGNED (etype))
4341 {
4342 etype = lang_hooks.types.unsigned_type (etype);
4343 high = fold_convert (etype, high);
4344 exp = fold_convert (etype, exp);
4345 }
4346 return build_range_check (type, exp, 1, 0, high);
4347 }
4348
4349 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4350 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4351 {
4352 unsigned HOST_WIDE_INT lo;
4353 HOST_WIDE_INT hi;
4354 int prec;
4355
4356 prec = TYPE_PRECISION (etype);
4357 if (prec <= HOST_BITS_PER_WIDE_INT)
4358 {
4359 hi = 0;
4360 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4361 }
4362 else
4363 {
4364 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4365 lo = (unsigned HOST_WIDE_INT) -1;
4366 }
4367
4368 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4369 {
4370 if (TYPE_UNSIGNED (etype))
4371 {
4372 etype = lang_hooks.types.signed_type (etype);
4373 exp = fold_convert (etype, exp);
4374 }
4375 return fold_build2 (GT_EXPR, type, exp,
4376 build_int_cst (etype, 0));
4377 }
4378 }
4379
4380 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4381 This requires wrap-around arithmetic for the type of the expression. */
4382 switch (TREE_CODE (etype))
4383 {
4384 case INTEGER_TYPE:
4385 /* There is no requirement that LOW be within the range of ETYPE
4386 if the latter is a subtype. It must, however, be within the base
4387 type of ETYPE. So be sure we do the subtraction in that type. */
4388 if (TREE_TYPE (etype))
4389 etype = TREE_TYPE (etype);
4390 break;
4391
4392 case ENUMERAL_TYPE:
4393 case BOOLEAN_TYPE:
4394 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4395 TYPE_UNSIGNED (etype));
4396 break;
4397
4398 default:
4399 break;
4400 }
4401
4402 /* If we don't have wrap-around arithmetic up front, try to force it. */
4403 if (TREE_CODE (etype) == INTEGER_TYPE
4404 && !TYPE_OVERFLOW_WRAPS (etype))
4405 {
4406 tree utype, minv, maxv;
4407
4408 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4409 for the type in question, as we rely on this here. */
4410 utype = lang_hooks.types.unsigned_type (etype);
4411 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4412 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4413 integer_one_node, 1);
4414 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4415
4416 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4417 minv, 1, maxv, 1)))
4418 etype = utype;
4419 else
4420 return 0;
4421 }
4422
4423 high = fold_convert (etype, high);
4424 low = fold_convert (etype, low);
4425 exp = fold_convert (etype, exp);
4426
4427 value = const_binop (MINUS_EXPR, high, low, 0);
4428
4429 if (value != 0 && !TREE_OVERFLOW (value))
4430 return build_range_check (type,
4431 fold_build2 (MINUS_EXPR, etype, exp, low),
4432 1, build_int_cst (etype, 0), value);
4433
4434 return 0;
4435 }
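/* The canonical payoff of build_range_check (assumed 32-bit types):
   testing X against + [2, 5] yields

     (unsigned) (X - 2) <= 3

   so one subtraction and one unsigned comparison replace the two
   comparisons in "X >= 2 && X <= 5"; values of X below 2 wrap around
   to huge unsigned values and correctly fail the test.  */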
4436 \f
4437 /* Return the predecessor of VAL in its type, handling the infinite case. */
4438
4439 static tree
4440 range_predecessor (tree val)
4441 {
4442 tree type = TREE_TYPE (val);
4443
4444 if (INTEGRAL_TYPE_P (type)
4445 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4446 return 0;
4447 else
4448 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4449 }
4450
4451 /* Return the successor of VAL in its type, handling the infinite case. */
4452
4453 static tree
4454 range_successor (tree val)
4455 {
4456 tree type = TREE_TYPE (val);
4457
4458 if (INTEGRAL_TYPE_P (type)
4459 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4460 return 0;
4461 else
4462 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4463 }
4464
4465 /* Given two ranges, see if we can merge them into one. Return 1 if we
4466 can, 0 if we can't. Set the output range into the specified parameters. */
4467
4468 static int
4469 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4470 tree high0, int in1_p, tree low1, tree high1)
4471 {
4472 int no_overlap;
4473 int subset;
4474 int temp;
4475 tree tem;
4476 int in_p;
4477 tree low, high;
4478 int lowequal = ((low0 == 0 && low1 == 0)
4479 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4480 low0, 0, low1, 0)));
4481 int highequal = ((high0 == 0 && high1 == 0)
4482 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4483 high0, 1, high1, 1)));
4484
4485 /* Make range 0 be the range that starts first, or ends last if they
4486 start at the same value. Swap them if that isn't the case. */
4487 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4488 low0, 0, low1, 0))
4489 || (lowequal
4490 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4491 high1, 1, high0, 1))))
4492 {
4493 temp = in0_p, in0_p = in1_p, in1_p = temp;
4494 tem = low0, low0 = low1, low1 = tem;
4495 tem = high0, high0 = high1, high1 = tem;
4496 }
4497
4498 /* Now flag two cases, whether the ranges are disjoint or whether the
4499 second range is totally subsumed in the first. Note that the tests
4500 below are simplified by the ones above. */
4501 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4502 high0, 1, low1, 0));
4503 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4504 high1, 1, high0, 1));
4505
4506 /* We now have four cases, depending on whether we are including or
4507 excluding the two ranges. */
4508 if (in0_p && in1_p)
4509 {
4510 /* If they don't overlap, the result is false. If the second range
4511 is a subset it is the result. Otherwise, the range is from the start
4512 of the second to the end of the first. */
4513 if (no_overlap)
4514 in_p = 0, low = high = 0;
4515 else if (subset)
4516 in_p = 1, low = low1, high = high1;
4517 else
4518 in_p = 1, low = low1, high = high0;
4519 }
4520
4521 else if (in0_p && ! in1_p)
4522 {
4523 /* If they don't overlap, the result is the first range. If they are
4524 equal, the result is false. If the second range is a subset of the
4525 first, and the ranges begin at the same place, we go from just after
4526 the end of the second range to the end of the first. If the second
4527 range is not a subset of the first, or if it is a subset and both
4528 ranges end at the same place, the range starts at the start of the
4529 first range and ends just before the second range.
4530 Otherwise, we can't describe this as a single range. */
4531 if (no_overlap)
4532 in_p = 1, low = low0, high = high0;
4533 else if (lowequal && highequal)
4534 in_p = 0, low = high = 0;
4535 else if (subset && lowequal)
4536 {
4537 low = range_successor (high1);
4538 high = high0;
4539 in_p = (low != 0);
4540 }
4541 else if (! subset || highequal)
4542 {
4543 low = low0;
4544 high = range_predecessor (low1);
4545 in_p = (high != 0);
4546 }
4547 else
4548 return 0;
4549 }
4550
4551 else if (! in0_p && in1_p)
4552 {
4553 /* If they don't overlap, the result is the second range. If the second
4554 is a subset of the first, the result is false. Otherwise,
4555 the range starts just after the first range and ends at the
4556 end of the second. */
4557 if (no_overlap)
4558 in_p = 1, low = low1, high = high1;
4559 else if (subset || highequal)
4560 in_p = 0, low = high = 0;
4561 else
4562 {
4563 low = range_successor (high0);
4564 high = high1;
4565 in_p = (low != 0);
4566 }
4567 }
4568
4569 else
4570 {
4571 /* The case where we are excluding both ranges. Here the complex case
4572 is if they don't overlap. In that case, the only time we have a
4573 range is if they are adjacent. If the second is a subset of the
4574 first, the result is the first. Otherwise, the range to exclude
4575 starts at the beginning of the first range and ends at the end of the
4576 second. */
4577 if (no_overlap)
4578 {
4579 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4580 range_successor (high0),
4581 1, low1, 0)))
4582 in_p = 0, low = low0, high = high1;
4583 else
4584 {
4585 /* Canonicalize - [min, x] into - [-, x]. */
4586 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4587 switch (TREE_CODE (TREE_TYPE (low0)))
4588 {
4589 case ENUMERAL_TYPE:
4590 if (TYPE_PRECISION (TREE_TYPE (low0))
4591 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4592 break;
4593 /* FALLTHROUGH */
4594 case INTEGER_TYPE:
4595 if (tree_int_cst_equal (low0,
4596 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4597 low0 = 0;
4598 break;
4599 case POINTER_TYPE:
4600 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4601 && integer_zerop (low0))
4602 low0 = 0;
4603 break;
4604 default:
4605 break;
4606 }
4607
4608 /* Canonicalize - [x, max] into - [x, -]. */
4609 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4610 switch (TREE_CODE (TREE_TYPE (high1)))
4611 {
4612 case ENUMERAL_TYPE:
4613 if (TYPE_PRECISION (TREE_TYPE (high1))
4614 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4615 break;
4616 /* FALLTHROUGH */
4617 case INTEGER_TYPE:
4618 if (tree_int_cst_equal (high1,
4619 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4620 high1 = 0;
4621 break;
4622 case POINTER_TYPE:
4623 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4624 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4625 high1, 1,
4626 integer_one_node, 1)))
4627 high1 = 0;
4628 break;
4629 default:
4630 break;
4631 }
4632
4633 /* The ranges might be also adjacent between the maximum and
4634 minimum values of the given type. For
4635 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4636 return + [x + 1, y - 1]. */
4637 if (low0 == 0 && high1 == 0)
4638 {
4639 low = range_successor (high0);
4640 high = range_predecessor (low1);
4641 if (low == 0 || high == 0)
4642 return 0;
4643
4644 in_p = 1;
4645 }
4646 else
4647 return 0;
4648 }
4649 }
4650 else if (subset)
4651 in_p = 0, low = low0, high = high0;
4652 else
4653 in_p = 0, low = low0, high = high1;
4654 }
4655
4656 *pin_p = in_p, *plow = low, *phigh = high;
4657 return 1;
4658 }
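/* Two small merge_ranges examples (made-up bounds): ANDing + [2, 9]
   with + [5, 20] overlaps without either range being a subset, so
   the result is + [5, 9].  ANDing the excluded ranges - [-, 4] and
   - [5, -] finds them adjacent via range_successor, giving - [-, -],
   the always-false range.  */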
4659 \f
4660
4661 /* Subroutine of fold, looking inside expressions of the form
4662 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4663 of the COND_EXPR. This function is being used also to optimize
4664 A op B ? C : A, by reversing the comparison first.
4665
4666 Return a folded expression whose code is not a COND_EXPR
4667 anymore, or NULL_TREE if no folding opportunity is found. */
4668
4669 static tree
4670 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4671 {
4672 enum tree_code comp_code = TREE_CODE (arg0);
4673 tree arg00 = TREE_OPERAND (arg0, 0);
4674 tree arg01 = TREE_OPERAND (arg0, 1);
4675 tree arg1_type = TREE_TYPE (arg1);
4676 tree tem;
4677
4678 STRIP_NOPS (arg1);
4679 STRIP_NOPS (arg2);
4680
4681 /* If we have A op 0 ? A : -A, consider applying the following
4682 transformations:
4683
4684 A == 0? A : -A same as -A
4685 A != 0? A : -A same as A
4686 A >= 0? A : -A same as abs (A)
4687 A > 0? A : -A same as abs (A)
4688 A <= 0? A : -A same as -abs (A)
4689 A < 0? A : -A same as -abs (A)
4690
4691 None of these transformations work for modes with signed
4692 zeros. If A is +/-0, the first two transformations will
4693 change the sign of the result (from +0 to -0, or vice
4694 versa). The last four will fix the sign of the result,
4695 even though the original expressions could be positive or
4696 negative, depending on the sign of A.
4697
4698 Note that all these transformations are correct if A is
4699 NaN, since the two alternatives (A and -A) are also NaNs. */
4700 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4701 ? real_zerop (arg01)
4702 : integer_zerop (arg01))
4703 && ((TREE_CODE (arg2) == NEGATE_EXPR
4704 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4705 /* In the case that A is of the form X-Y, '-A' (arg2) may
4706 have already been folded to Y-X; check for that. */
4707 || (TREE_CODE (arg1) == MINUS_EXPR
4708 && TREE_CODE (arg2) == MINUS_EXPR
4709 && operand_equal_p (TREE_OPERAND (arg1, 0),
4710 TREE_OPERAND (arg2, 1), 0)
4711 && operand_equal_p (TREE_OPERAND (arg1, 1),
4712 TREE_OPERAND (arg2, 0), 0))))
4713 switch (comp_code)
4714 {
4715 case EQ_EXPR:
4716 case UNEQ_EXPR:
4717 tem = fold_convert (arg1_type, arg1);
4718 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4719 case NE_EXPR:
4720 case LTGT_EXPR:
4721 return pedantic_non_lvalue (fold_convert (type, arg1));
4722 case UNGE_EXPR:
4723 case UNGT_EXPR:
4724 if (flag_trapping_math)
4725 break;
4726 /* Fall through. */
4727 case GE_EXPR:
4728 case GT_EXPR:
4729 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4730 arg1 = fold_convert (lang_hooks.types.signed_type
4731 (TREE_TYPE (arg1)), arg1);
4732 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4733 return pedantic_non_lvalue (fold_convert (type, tem));
4734 case UNLE_EXPR:
4735 case UNLT_EXPR:
4736 if (flag_trapping_math)
4737 break;
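/* Fall through. */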
4738 case LE_EXPR:
4739 case LT_EXPR:
4740 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4741 arg1 = fold_convert (lang_hooks.types.signed_type
4742 (TREE_TYPE (arg1)), arg1);
4743 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4744 return negate_expr (fold_convert (type, tem));
4745 default:
4746 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4747 break;
4748 }
4749
4750 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4751 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4752 both transformations are correct when A is NaN: A != 0
4753 is then true, and A == 0 is false. */
4754
4755 if (integer_zerop (arg01) && integer_zerop (arg2))
4756 {
4757 if (comp_code == NE_EXPR)
4758 return pedantic_non_lvalue (fold_convert (type, arg1));
4759 else if (comp_code == EQ_EXPR)
4760 return build_int_cst (type, 0);
4761 }
4762
4763 /* Try some transformations of A op B ? A : B.
4764
4765 A == B? A : B same as B
4766 A != B? A : B same as A
4767 A >= B? A : B same as max (A, B)
4768 A > B? A : B same as max (B, A)
4769 A <= B? A : B same as min (A, B)
4770 A < B? A : B same as min (B, A)
4771
4772 As above, these transformations don't work in the presence
4773 of signed zeros. For example, if A and B are zeros of
4774 opposite sign, the first two transformations will change
4775 the sign of the result. In the last four, the original
4776 expressions give different results for (A=+0, B=-0) and
4777 (A=-0, B=+0), but the transformed expressions do not.
4778
4779 The first two transformations are correct if either A or B
4780 is a NaN. In the first transformation, the condition will
4781 be false, and B will indeed be chosen. In the case of the
4782 second transformation, the condition A != B will be true,
4783 and A will be chosen.
4784
4785 The conversions to max() and min() are not correct if B is
4786 a number and A is not. The conditions in the original
4787 expressions will be false, so all four give B. The min()
4788 and max() versions would give a NaN instead. */
4789 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4790 /* Avoid these transformations if the COND_EXPR may be used
4791 as an lvalue in the C++ front-end. PR c++/19199. */
4792 && (in_gimple_form
4793 || (strcmp (lang_hooks.name, "GNU C++") != 0
4794 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4795 || ! maybe_lvalue_p (arg1)
4796 || ! maybe_lvalue_p (arg2)))
4797 {
4798 tree comp_op0 = arg00;
4799 tree comp_op1 = arg01;
4800 tree comp_type = TREE_TYPE (comp_op0);
4801
4802 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4803 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4804 {
4805 comp_type = type;
4806 comp_op0 = arg1;
4807 comp_op1 = arg2;
4808 }
4809
4810 switch (comp_code)
4811 {
4812 case EQ_EXPR:
4813 return pedantic_non_lvalue (fold_convert (type, arg2));
4814 case NE_EXPR:
4815 return pedantic_non_lvalue (fold_convert (type, arg1));
4816 case LE_EXPR:
4817 case LT_EXPR:
4818 case UNLE_EXPR:
4819 case UNLT_EXPR:
4820 /* In C++ a ?: expression can be an lvalue, so put the
4821 operand which will be used if they are equal first
4822 so that we can convert this back to the
4823 corresponding COND_EXPR. */
4824 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4825 {
4826 comp_op0 = fold_convert (comp_type, comp_op0);
4827 comp_op1 = fold_convert (comp_type, comp_op1);
4828 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4829 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4830 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4831 return pedantic_non_lvalue (fold_convert (type, tem));
4832 }
4833 break;
4834 case GE_EXPR:
4835 case GT_EXPR:
4836 case UNGE_EXPR:
4837 case UNGT_EXPR:
4838 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4839 {
4840 comp_op0 = fold_convert (comp_type, comp_op0);
4841 comp_op1 = fold_convert (comp_type, comp_op1);
4842 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4843 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4844 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4845 return pedantic_non_lvalue (fold_convert (type, tem));
4846 }
4847 break;
4848 case UNEQ_EXPR:
4849 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4850 return pedantic_non_lvalue (fold_convert (type, arg2));
4851 break;
4852 case LTGT_EXPR:
4853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4854 return pedantic_non_lvalue (fold_convert (type, arg1));
4855 break;
4856 default:
4857 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4858 break;
4859 }
4860 }
4861
4862 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4863 we might still be able to simplify this. For example,
4864 if C1 is one less or one more than C2, this might have started
4865 out as a MIN or MAX and been transformed by this function.
4866 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4867
4868 if (INTEGRAL_TYPE_P (type)
4869 && TREE_CODE (arg01) == INTEGER_CST
4870 && TREE_CODE (arg2) == INTEGER_CST)
4871 switch (comp_code)
4872 {
4873 case EQ_EXPR:
4874 /* We can replace A with C1 in this case. */
4875 arg1 = fold_convert (type, arg01);
4876 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4877
4878 case LT_EXPR:
4879 /* If C1 is C2 + 1, this is min(A, C2). */
4880 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4881 OEP_ONLY_CONST)
4882 && operand_equal_p (arg01,
4883 const_binop (PLUS_EXPR, arg2,
4884 build_int_cst (type, 1), 0),
4885 OEP_ONLY_CONST))
4886 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4887 type, arg1, arg2));
4888 break;
4889
4890 case LE_EXPR:
4891 /* If C1 is C2 - 1, this is min(A, C2). */
4892 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4893 OEP_ONLY_CONST)
4894 && operand_equal_p (arg01,
4895 const_binop (MINUS_EXPR, arg2,
4896 build_int_cst (type, 1), 0),
4897 OEP_ONLY_CONST))
4898 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4899 type, arg1, arg2));
4900 break;
4901
4902 case GT_EXPR:
4903 /* If C1 is C2 - 1, this is max(A, C2). */
4904 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4905 OEP_ONLY_CONST)
4906 && operand_equal_p (arg01,
4907 const_binop (MINUS_EXPR, arg2,
4908 build_int_cst (type, 1), 0),
4909 OEP_ONLY_CONST))
4910 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4911 type, arg1, arg2));
4912 break;
4913
4914 case GE_EXPR:
4915 /* If C1 is C2 + 1, this is max(A, C2). */
4916 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4917 OEP_ONLY_CONST)
4918 && operand_equal_p (arg01,
4919 const_binop (PLUS_EXPR, arg2,
4920 build_int_cst (type, 1), 0),
4921 OEP_ONLY_CONST))
4922 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4923 type, arg1, arg2));
4924 break;
4925 case NE_EXPR:
4926 break;
4927 default:
4928 gcc_unreachable ();
4929 }
4930
4931 return NULL_TREE;
4932 }
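/* Concrete instances of the constant cases above (illustrative
   values): with integral X, "X < 4 ? X : 3" has C1 == C2 + 1 and
   folds to MIN_EXPR <X, 3>, while "X >= 5 ? X : 4" likewise becomes
   MAX_EXPR <X, 4>; both presumably started out as min/max
   computations that were rewritten into COND_EXPR form earlier.  */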
4933
4934
4935 \f
4936 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4937 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4938 #endif
4939
4940 /* EXP is some logical combination of boolean tests. See if we can
4941 merge it into some range test. Return the new tree if so. */
4942
4943 static tree
4944 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4945 {
4946 int or_op = (code == TRUTH_ORIF_EXPR
4947 || code == TRUTH_OR_EXPR);
4948 int in0_p, in1_p, in_p;
4949 tree low0, low1, low, high0, high1, high;
4950 bool strict_overflow_p = false;
4951 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4952 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4953 tree tem;
4954 const char * const warnmsg = G_("assuming signed overflow does not occur "
4955 "when simplifying range test");
4956
4957 /* If this is an OR operation, invert both sides; we will invert
4958 again at the end. */
4959 if (or_op)
4960 in0_p = ! in0_p, in1_p = ! in1_p;
4961
4962 /* If both expressions are the same, if we can merge the ranges, and we
4963 can build the range test, return it or it inverted. If one of the
4964 ranges is always true or always false, consider it to be the same
4965 expression as the other. */
4966 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4967 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4968 in1_p, low1, high1)
4969 && 0 != (tem = (build_range_check (type,
4970 lhs != 0 ? lhs
4971 : rhs != 0 ? rhs : integer_zero_node,
4972 in_p, low, high))))
4973 {
4974 if (strict_overflow_p)
4975 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4976 return or_op ? invert_truthvalue (tem) : tem;
4977 }
4978
4979 /* On machines where the branch cost is expensive, if this is a
4980 short-circuited branch and the underlying object on both sides
4981 is the same, make a non-short-circuit operation. */
4982 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4983 && lhs != 0 && rhs != 0
4984 && (code == TRUTH_ANDIF_EXPR
4985 || code == TRUTH_ORIF_EXPR)
4986 && operand_equal_p (lhs, rhs, 0))
4987 {
4988 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4989 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4990 which cases we can't do this. */
4991 if (simple_operand_p (lhs))
4992 return build2 (code == TRUTH_ANDIF_EXPR
4993 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4994 type, op0, op1);
4995
4996 else if (lang_hooks.decls.global_bindings_p () == 0
4997 && ! CONTAINS_PLACEHOLDER_P (lhs))
4998 {
4999 tree common = save_expr (lhs);
5000
5001 if (0 != (lhs = build_range_check (type, common,
5002 or_op ? ! in0_p : in0_p,
5003 low0, high0))
5004 && (0 != (rhs = build_range_check (type, common,
5005 or_op ? ! in1_p : in1_p,
5006 low1, high1))))
5007 {
5008 if (strict_overflow_p)
5009 fold_overflow_warning (warnmsg,
5010 WARN_STRICT_OVERFLOW_COMPARISON);
5011 return build2 (code == TRUTH_ANDIF_EXPR
5012 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5013 type, lhs, rhs);
5014 }
5015 }
5016 }
5017
5018 return 0;
5019 }
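/* The classic fold_range_test result (sketch, assuming plain 8-bit
   char): for

     ch >= '0' && ch <= '9'

   make_range yields + [48, -] and + [-, 57], merge_ranges combines
   them into + [48, 57], and build_range_check then emits

     (unsigned char) (ch - 48) <= 9

   removing one of the two branches of the short-circuit form.  */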
5020 \f
5021 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5022 bit value. Arrange things so the extra bits will be set to zero if and
5023 only if C is sign-extended to its full width. If MASK is nonzero,
5024 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5025
5026 static tree
5027 unextend (tree c, int p, int unsignedp, tree mask)
5028 {
5029 tree type = TREE_TYPE (c);
5030 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5031 tree temp;
5032
5033 if (p == modesize || unsignedp)
5034 return c;
5035
5036 /* We work by getting just the sign bit into the low-order bit, then
5037 into the high-order bit, then sign-extend. We then XOR that value
5038 with C. */
5039 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5040 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5041
5042 /* We must use a signed type in order to get an arithmetic right shift.
5043 However, we must also avoid introducing accidental overflows, so that
5044 a subsequent call to integer_zerop will work. Hence we must
5045 do the type conversion here. At this point, the constant is either
5046 zero or one, and the conversion to a signed type can never overflow.
5047 We could get an overflow if this conversion is done anywhere else. */
5048 if (TYPE_UNSIGNED (type))
5049 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
5050
5051 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5052 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5053 if (mask != 0)
5054 temp = const_binop (BIT_AND_EXPR, temp,
5055 fold_convert (TREE_TYPE (c), mask), 0);
5056 /* If necessary, convert the type back to match the type of C. */
5057 if (TYPE_UNSIGNED (type))
5058 temp = fold_convert (type, temp);
5059
5060 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5061 }
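/* Tracing unextend on assumed 8-bit values with P == 4: for
   C == 0x0a, whose 4-bit sign bit is set, TEMP becomes 0xf0 after
   the shifts and C ^ TEMP == 0xfa, the 4-bit value sign-extended to
   the full 8 bits.  Feeding 0xfa back in returns 0x0a, so the extra
   bits end up zero exactly when C arrived already sign-extended, as
   promised above.  */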
5062 \f
5063 /* Find ways of folding logical expressions of LHS and RHS:
5064 Try to merge two comparisons to the same innermost item.
5065 Look for range tests like "ch >= '0' && ch <= '9'".
5066 Look for combinations of simple terms on machines with expensive branches
5067 and evaluate the RHS unconditionally.
5068
5069 For example, if we have p->a == 2 && p->b == 4 and we can make an
5070 object large enough to span both A and B, we can do this with a comparison
5071 against the object ANDed with the a mask.
5072
5073 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5074 operations to do this with one comparison.
5075
5076 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5077 function and the one above.
5078
5079 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5080 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5081
5082 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5083 two operands.
5084
5085 We return the simplified tree or 0 if no optimization is possible. */
5086
5087 static tree
5088 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5089 {
5090 /* If this is the "or" of two comparisons, we can do something if
5091 the comparisons are NE_EXPR. If this is the "and", we can do something
5092 if the comparisons are EQ_EXPR. I.e.,
5093 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5094
5095 WANTED_CODE is the comparison code we require of both operands. For
5096 single-bit fields, we can convert EQ_EXPR to NE_EXPR so we need not
5097 reject the "wrong" comparison. */
5098
5099 enum tree_code wanted_code;
5100 enum tree_code lcode, rcode;
5101 tree ll_arg, lr_arg, rl_arg, rr_arg;
5102 tree ll_inner, lr_inner, rl_inner, rr_inner;
5103 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5104 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5105 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5106 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5107 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5108 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5109 enum machine_mode lnmode, rnmode;
5110 tree ll_mask, lr_mask, rl_mask, rr_mask;
5111 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5112 tree l_const, r_const;
5113 tree lntype, rntype, result;
5114 int first_bit, end_bit;
5115 int volatilep;
5116 tree orig_lhs = lhs, orig_rhs = rhs;
5117 enum tree_code orig_code = code;
5118
5119 /* Start by getting the comparison codes. Fail if anything is volatile.
5120 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5121 it were surrounded with a NE_EXPR. */
5122
5123 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5124 return 0;
5125
5126 lcode = TREE_CODE (lhs);
5127 rcode = TREE_CODE (rhs);
5128
5129 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5130 {
5131 lhs = build2 (NE_EXPR, truth_type, lhs,
5132 build_int_cst (TREE_TYPE (lhs), 0));
5133 lcode = NE_EXPR;
5134 }
5135
5136 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5137 {
5138 rhs = build2 (NE_EXPR, truth_type, rhs,
5139 build_int_cst (TREE_TYPE (rhs), 0));
5140 rcode = NE_EXPR;
5141 }
5142
5143 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5144 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5145 return 0;
5146
5147 ll_arg = TREE_OPERAND (lhs, 0);
5148 lr_arg = TREE_OPERAND (lhs, 1);
5149 rl_arg = TREE_OPERAND (rhs, 0);
5150 rr_arg = TREE_OPERAND (rhs, 1);
5151
5152 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5153 if (simple_operand_p (ll_arg)
5154 && simple_operand_p (lr_arg))
5155 {
5156 tree result;
5157 if (operand_equal_p (ll_arg, rl_arg, 0)
5158 && operand_equal_p (lr_arg, rr_arg, 0))
5159 {
5160 result = combine_comparisons (code, lcode, rcode,
5161 truth_type, ll_arg, lr_arg);
5162 if (result)
5163 return result;
5164 }
5165 else if (operand_equal_p (ll_arg, rr_arg, 0)
5166 && operand_equal_p (lr_arg, rl_arg, 0))
5167 {
5168 result = combine_comparisons (code, lcode,
5169 swap_tree_comparison (rcode),
5170 truth_type, ll_arg, lr_arg);
5171 if (result)
5172 return result;
5173 }
5174 }
5175
5176 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5177 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5178
5179 /* If the RHS can be evaluated unconditionally and its operands are
5180 simple, it wins to evaluate the RHS unconditionally on machines
5181 with expensive branches. In this case, this isn't a comparison
5182 that can be merged. Avoid doing this if the RHS is a floating-point
5183 comparison since those can trap. */
5184
5185 if (BRANCH_COST >= 2
5186 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5187 && simple_operand_p (rl_arg)
5188 && simple_operand_p (rr_arg))
5189 {
5190 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5191 if (code == TRUTH_OR_EXPR
5192 && lcode == NE_EXPR && integer_zerop (lr_arg)
5193 && rcode == NE_EXPR && integer_zerop (rr_arg)
5194 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5195 return build2 (NE_EXPR, truth_type,
5196 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5197 ll_arg, rl_arg),
5198 build_int_cst (TREE_TYPE (ll_arg), 0));
5199
5200 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5201 if (code == TRUTH_AND_EXPR
5202 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5203 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5204 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5205 return build2 (EQ_EXPR, truth_type,
5206 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5207 ll_arg, rl_arg),
5208 build_int_cst (TREE_TYPE (ll_arg), 0));
5209
5210 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5211 {
5212 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5213 return build2 (code, truth_type, lhs, rhs);
5214 return NULL_TREE;
5215 }
5216 }
5217
5218 /* See if the comparisons can be merged. Then get all the parameters for
5219 each side. */
5220
5221 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5222 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5223 return 0;
5224
5225 volatilep = 0;
5226 ll_inner = decode_field_reference (ll_arg,
5227 &ll_bitsize, &ll_bitpos, &ll_mode,
5228 &ll_unsignedp, &volatilep, &ll_mask,
5229 &ll_and_mask);
5230 lr_inner = decode_field_reference (lr_arg,
5231 &lr_bitsize, &lr_bitpos, &lr_mode,
5232 &lr_unsignedp, &volatilep, &lr_mask,
5233 &lr_and_mask);
5234 rl_inner = decode_field_reference (rl_arg,
5235 &rl_bitsize, &rl_bitpos, &rl_mode,
5236 &rl_unsignedp, &volatilep, &rl_mask,
5237 &rl_and_mask);
5238 rr_inner = decode_field_reference (rr_arg,
5239 &rr_bitsize, &rr_bitpos, &rr_mode,
5240 &rr_unsignedp, &volatilep, &rr_mask,
5241 &rr_and_mask);
5242
5243 /* The inner operation on the lhs of each comparison must be the
5244 same if we are to be able to do anything.
5245 Then see if we have constants. If not, the same must be true for
5246 the rhs's. */
5247 if (volatilep || ll_inner == 0 || rl_inner == 0
5248 || ! operand_equal_p (ll_inner, rl_inner, 0))
5249 return 0;
5250
5251 if (TREE_CODE (lr_arg) == INTEGER_CST
5252 && TREE_CODE (rr_arg) == INTEGER_CST)
5253 l_const = lr_arg, r_const = rr_arg;
5254 else if (lr_inner == 0 || rr_inner == 0
5255 || ! operand_equal_p (lr_inner, rr_inner, 0))
5256 return 0;
5257 else
5258 l_const = r_const = 0;
5259
5260 /* If either comparison code is not correct for our logical operation,
5261 fail. However, we can convert a one-bit comparison against zero into
5262 the opposite comparison against that bit being set in the field. */
5263
5264 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5265 if (lcode != wanted_code)
5266 {
5267 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5268 {
5269 /* Make the left operand unsigned, since we are only interested
5270 in the value of one bit. Otherwise we are doing the wrong
5271 thing below. */
5272 ll_unsignedp = 1;
5273 l_const = ll_mask;
5274 }
5275 else
5276 return 0;
5277 }
5278
5279 /* This is analogous to the code for l_const above. */
5280 if (rcode != wanted_code)
5281 {
5282 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5283 {
5284 rl_unsignedp = 1;
5285 r_const = rl_mask;
5286 }
5287 else
5288 return 0;
5289 }
5290
5291 /* See if we can find a mode that contains both fields being compared on
5292 the left. If we can't, fail. Otherwise, update all constants and masks
5293 to be relative to a field of that size. */
5294 first_bit = MIN (ll_bitpos, rl_bitpos);
5295 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5296 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5297 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5298 volatilep);
5299 if (lnmode == VOIDmode)
5300 return 0;
5301
5302 lnbitsize = GET_MODE_BITSIZE (lnmode);
5303 lnbitpos = first_bit & ~ (lnbitsize - 1);
5304 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5305 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5306
5307 if (BYTES_BIG_ENDIAN)
5308 {
5309 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5310 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5311 }
5312
5313 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5314 size_int (xll_bitpos), 0);
5315 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5316 size_int (xrl_bitpos), 0);
5317
5318 if (l_const)
5319 {
5320 l_const = fold_convert (lntype, l_const);
5321 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5322 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5323 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5324 fold_build1 (BIT_NOT_EXPR,
5325 lntype, ll_mask),
5326 0)))
5327 {
5328 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5329
5330 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5331 }
5332 }
5333 if (r_const)
5334 {
5335 r_const = fold_convert (lntype, r_const);
5336 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5337 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5338 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5339 fold_build1 (BIT_NOT_EXPR,
5340 lntype, rl_mask),
5341 0)))
5342 {
5343 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5344
5345 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5346 }
5347 }
5348
5349 /* If the right sides are not constant, do the same for them. Also,
5350 disallow this optimization if a size or signedness mismatch occurs
5351 between the left and right sides. */
5352 if (l_const == 0)
5353 {
5354 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5355 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5356 /* Make sure the two fields on the right
5357 correspond to the left without being swapped. */
5358 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5359 return 0;
5360
5361 first_bit = MIN (lr_bitpos, rr_bitpos);
5362 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5363 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5364 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5365 volatilep);
5366 if (rnmode == VOIDmode)
5367 return 0;
5368
5369 rnbitsize = GET_MODE_BITSIZE (rnmode);
5370 rnbitpos = first_bit & ~ (rnbitsize - 1);
5371 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5372 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5373
5374 if (BYTES_BIG_ENDIAN)
5375 {
5376 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5377 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5378 }
5379
5380 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5381 size_int (xlr_bitpos), 0);
5382 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5383 size_int (xrr_bitpos), 0);
5384
5385 /* Make a mask that corresponds to both fields being compared.
5386 Do this for both items being compared. If the operands are the
5387 same size and the bits being compared are in the same position
5388 then we can do this by masking both and comparing the masked
5389 results. */
5390 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5391 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5392 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5393 {
5394 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5395 ll_unsignedp || rl_unsignedp);
5396 if (! all_ones_mask_p (ll_mask, lnbitsize))
5397 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5398
5399 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5400 lr_unsignedp || rr_unsignedp);
5401 if (! all_ones_mask_p (lr_mask, rnbitsize))
5402 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5403
5404 return build2 (wanted_code, truth_type, lhs, rhs);
5405 }
5406
5407 /* There is still another way we can do something: If both pairs of
5408 fields being compared are adjacent, we may be able to make a wider
5409 field containing them both.
5410
5411 Note that we still must mask the lhs/rhs expressions. Furthermore,
5412 the mask must be shifted to account for the shift done by
5413 make_bit_field_ref. */
5414 if ((ll_bitsize + ll_bitpos == rl_bitpos
5415 && lr_bitsize + lr_bitpos == rr_bitpos)
5416 || (ll_bitpos == rl_bitpos + rl_bitsize
5417 && lr_bitpos == rr_bitpos + rr_bitsize))
5418 {
5419 tree type;
5420
5421 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5422 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5423 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5424 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5425
5426 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5427 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5428 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5429 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5430
5431 /* Convert to the smaller type before masking out unwanted bits. */
5432 type = lntype;
5433 if (lntype != rntype)
5434 {
5435 if (lnbitsize > rnbitsize)
5436 {
5437 lhs = fold_convert (rntype, lhs);
5438 ll_mask = fold_convert (rntype, ll_mask);
5439 type = rntype;
5440 }
5441 else if (lnbitsize < rnbitsize)
5442 {
5443 rhs = fold_convert (lntype, rhs);
5444 lr_mask = fold_convert (lntype, lr_mask);
5445 type = lntype;
5446 }
5447 }
5448
5449 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5450 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5451
5452 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5453 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5454
5455 return build2 (wanted_code, truth_type, lhs, rhs);
5456 }
5457
5458 return 0;
5459 }
5460
5461 /* Handle the case of comparisons with constants. If there is something in
5462 common between the masks, those bits of the constants must be the same.
5463 If not, the condition is always false. Test for this to avoid generating
5464 incorrect code below. */
5465 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5466 if (! integer_zerop (result)
5467 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5468 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5469 {
5470 if (wanted_code == NE_EXPR)
5471 {
5472 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5473 return constant_boolean_node (true, truth_type);
5474 }
5475 else
5476 {
5477 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5478 return constant_boolean_node (false, truth_type);
5479 }
5480 }
5481
5482 /* Construct the expression we will return. First get the component
5483 reference we will make. Unless the mask is all ones the width of
5484 that field, perform the mask operation. Then compare with the
5485 merged constant. */
5486 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5487 ll_unsignedp || rl_unsignedp);
5488
5489 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5490 if (! all_ones_mask_p (ll_mask, lnbitsize))
5491 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5492
5493 return build2 (wanted_code, truth_type, result,
5494 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5495 }
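
/* A worked example of the transformation performed above (editorial
   illustration; the struct and the constants are hypothetical).  Given

     struct S { unsigned int a : 4; unsigned int b : 4; } s;

   and a little-endian bit layout, the test

     s.a == 2 && s.b == 3

   can be folded into a single load, mask and compare on the word
   containing both fields, conceptually

     (word_of (s) & 0xff) == 0x32

   where 0xff is the IOR of the two field masks and 0x32 the IOR of the
   two shifted constants, provided get_best_mode finds a mode covering
   both fields.  */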
5496 \f
5497 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5498 constant. */
5499
5500 static tree
5501 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5502 {
5503 tree arg0 = op0;
5504 enum tree_code op_code;
5505 tree comp_const = op1;
5506 tree minmax_const;
5507 int consts_equal, consts_lt;
5508 tree inner;
5509
5510 STRIP_SIGN_NOPS (arg0);
5511
5512 op_code = TREE_CODE (arg0);
5513 minmax_const = TREE_OPERAND (arg0, 1);
5514 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5515 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5516 inner = TREE_OPERAND (arg0, 0);
5517
5518 /* If something does not permit us to optimize, return NULL_TREE. */
5519 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5520 || TREE_CODE (comp_const) != INTEGER_CST
5521 || TREE_OVERFLOW (comp_const)
5522 || TREE_CODE (minmax_const) != INTEGER_CST
5523 || TREE_OVERFLOW (minmax_const))
5524 return NULL_TREE;
5525
5526 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5527 and GT_EXPR, doing the rest with recursive calls using logical
5528 simplifications. */
5529 switch (code)
5530 {
5531 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5532 {
5533 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5534 type, op0, op1);
5535 if (tem)
5536 return invert_truthvalue (tem);
5537 return NULL_TREE;
5538 }
5539
5540 case GE_EXPR:
5541 return
5542 fold_build2 (TRUTH_ORIF_EXPR, type,
5543 optimize_minmax_comparison
5544 (EQ_EXPR, type, arg0, comp_const),
5545 optimize_minmax_comparison
5546 (GT_EXPR, type, arg0, comp_const));
5547
5548 case EQ_EXPR:
5549 if (op_code == MAX_EXPR && consts_equal)
5550 /* MAX (X, 0) == 0 -> X <= 0 */
5551 return fold_build2 (LE_EXPR, type, inner, comp_const);
5552
5553 else if (op_code == MAX_EXPR && consts_lt)
5554 /* MAX (X, 0) == 5 -> X == 5 */
5555 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5556
5557 else if (op_code == MAX_EXPR)
5558 /* MAX (X, 0) == -1 -> false */
5559 return omit_one_operand (type, integer_zero_node, inner);
5560
5561 else if (consts_equal)
5562 /* MIN (X, 0) == 0 -> X >= 0 */
5563 return fold_build2 (GE_EXPR, type, inner, comp_const);
5564
5565 else if (consts_lt)
5566 /* MIN (X, 0) == 5 -> false */
5567 return omit_one_operand (type, integer_zero_node, inner);
5568
5569 else
5570 /* MIN (X, 0) == -1 -> X == -1 */
5571 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5572
5573 case GT_EXPR:
5574 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5575 /* MAX (X, 0) > 0 -> X > 0
5576 MAX (X, 0) > 5 -> X > 5 */
5577 return fold_build2 (GT_EXPR, type, inner, comp_const);
5578
5579 else if (op_code == MAX_EXPR)
5580 /* MAX (X, 0) > -1 -> true */
5581 return omit_one_operand (type, integer_one_node, inner);
5582
5583 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5584 /* MIN (X, 0) > 0 -> false
5585 MIN (X, 0) > 5 -> false */
5586 return omit_one_operand (type, integer_zero_node, inner);
5587
5588 else
5589 /* MIN (X, 0) > -1 -> X > -1 */
5590 return fold_build2 (GT_EXPR, type, inner, comp_const);
5591
5592 default:
5593 return NULL_TREE;
5594 }
5595 }
5596 \f
5597 /* T is an integer expression that is being multiplied or divided by, or
5598 reduced modulo, a constant C (CODE says which operation and what kind
5599 of divide or modulus). See if we can eliminate that operation by folding it with
5600 other operations already in T. WIDE_TYPE, if non-null, is a type that
5601 should be used for the computation if wider than our type.
5602
5603 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5604 (X * 2) + (Y * 4). We must, however, be assured that either the original
5605 expression would not overflow or that overflow is undefined for the type
5606 in the language in question.
5607
5608 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5609 the machine has a multiply-accumulate insn or that this is part of an
5610 addressing calculation.
5611
5612 If we return a non-null expression, it is an equivalent form of the
5613 original computation, but need not be in the original type.
5614
5615 We set *STRICT_OVERFLOW_P to true if the return value depends on
5616 signed overflow being undefined. Otherwise we do not change
5617 *STRICT_OVERFLOW_P. */
5618
5619 static tree
5620 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5621 bool *strict_overflow_p)
5622 {
5623 /* To avoid exponential search depth, refuse to allow recursion past
5624 three levels. Beyond that (1) it's highly unlikely that we'll find
5625 something interesting and (2) we've probably processed it before
5626 when we built the inner expression. */
5627
5628 static int depth;
5629 tree ret;
5630
5631 if (depth > 3)
5632 return NULL;
5633
5634 depth++;
5635 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5636 depth--;
5637
5638 return ret;
5639 }
5640
5641 static tree
5642 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5643 bool *strict_overflow_p)
5644 {
5645 tree type = TREE_TYPE (t);
5646 enum tree_code tcode = TREE_CODE (t);
5647 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5648 > GET_MODE_SIZE (TYPE_MODE (type)))
5649 ? wide_type : type);
5650 tree t1, t2;
5651 int same_p = tcode == code;
5652 tree op0 = NULL_TREE, op1 = NULL_TREE;
5653 bool sub_strict_overflow_p;
5654
5655 /* Don't deal with constants of zero here; they confuse the code below. */
5656 if (integer_zerop (c))
5657 return NULL_TREE;
5658
5659 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5660 op0 = TREE_OPERAND (t, 0);
5661
5662 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5663 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5664
5665 /* Note that we need not handle conditional operations here since fold
5666 already handles those cases. So just do arithmetic here. */
5667 switch (tcode)
5668 {
5669 case INTEGER_CST:
5670 /* For a constant, we can always simplify if we are a multiply
5671 or (for divide and modulus) if it is a multiple of our constant. */
5672 if (code == MULT_EXPR
5673 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5674 return const_binop (code, fold_convert (ctype, t),
5675 fold_convert (ctype, c), 0);
5676 break;
5677
5678 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5679 /* If op0 is an expression ... */
5680 if ((COMPARISON_CLASS_P (op0)
5681 || UNARY_CLASS_P (op0)
5682 || BINARY_CLASS_P (op0)
5683 || VL_EXP_CLASS_P (op0)
5684 || EXPRESSION_CLASS_P (op0))
5685 /* ... and is unsigned, and its type is smaller than ctype,
5686 then we cannot pass through as widening. */
5687 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5688 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5689 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5690 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5691 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5692 /* ... or this is a truncation (t is narrower than op0),
5693 then we cannot pass through this narrowing. */
5694 || (GET_MODE_SIZE (TYPE_MODE (type))
5695 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5696 /* ... or signedness changes for division or modulus,
5697 then we cannot pass through this conversion. */
5698 || (code != MULT_EXPR
5699 && (TYPE_UNSIGNED (ctype)
5700 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5701 break;
5702
5703 /* Pass the constant down and see if we can make a simplification. If
5704 we can, replace this expression with the inner simplification for
5705 possible later conversion to our or some other type. */
5706 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5707 && TREE_CODE (t2) == INTEGER_CST
5708 && !TREE_OVERFLOW (t2)
5709 && (0 != (t1 = extract_muldiv (op0, t2, code,
5710 code == MULT_EXPR
5711 ? ctype : NULL_TREE,
5712 strict_overflow_p))))
5713 return t1;
5714 break;
5715
5716 case ABS_EXPR:
5717 /* If widening the type changes it from signed to unsigned, then we
5718 must avoid building ABS_EXPR itself as unsigned. */
5719 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5720 {
5721 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5722 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5723 != 0)
5724 {
5725 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5726 return fold_convert (ctype, t1);
5727 }
5728 break;
5729 }
5730 /* FALLTHROUGH */
5731 case NEGATE_EXPR:
5732 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5733 != 0)
5734 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5735 break;
5736
5737 case MIN_EXPR: case MAX_EXPR:
5738 /* If widening the type changes the signedness, then we can't perform
5739 this optimization as that changes the result. */
5740 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5741 break;
5742
5743 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5744 sub_strict_overflow_p = false;
5745 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5746 &sub_strict_overflow_p)) != 0
5747 && (t2 = extract_muldiv (op1, c, code, wide_type,
5748 &sub_strict_overflow_p)) != 0)
5749 {
5750 if (tree_int_cst_sgn (c) < 0)
5751 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5752 if (sub_strict_overflow_p)
5753 *strict_overflow_p = true;
5754 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5755 fold_convert (ctype, t2));
5756 }
5757 break;
5758
5759 case LSHIFT_EXPR: case RSHIFT_EXPR:
5760 /* If the second operand is constant, this is a multiplication
5761 or floor division by a power of two, so we can treat it that
5762 way unless the multiplier or divisor overflows. Signed
5763 left-shift overflow is implementation-defined rather than
5764 undefined in C90, so do not convert signed left shift into
5765 multiplication. */
5766 if (TREE_CODE (op1) == INTEGER_CST
5767 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5768 /* const_binop may not detect overflow correctly,
5769 so check for it explicitly here. */
5770 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5771 && TREE_INT_CST_HIGH (op1) == 0
5772 && 0 != (t1 = fold_convert (ctype,
5773 const_binop (LSHIFT_EXPR,
5774 size_one_node,
5775 op1, 0)))
5776 && !TREE_OVERFLOW (t1))
5777 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5778 ? MULT_EXPR : FLOOR_DIV_EXPR,
5779 ctype, fold_convert (ctype, op0), t1),
5780 c, code, wide_type, strict_overflow_p);
5781 break;
5782
5783 case PLUS_EXPR: case MINUS_EXPR:
5784 /* See if we can eliminate the operation on both sides. If we can, we
5785 can return a new PLUS or MINUS. If we can't, the only remaining
5786 cases where we can do anything are if the second operand is a
5787 constant. */
5788 sub_strict_overflow_p = false;
5789 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5790 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5791 if (t1 != 0 && t2 != 0
5792 && (code == MULT_EXPR
5793 /* If not multiplication, we can only do this if both operands
5794 are divisible by c. */
5795 || (multiple_of_p (ctype, op0, c)
5796 && multiple_of_p (ctype, op1, c))))
5797 {
5798 if (sub_strict_overflow_p)
5799 *strict_overflow_p = true;
5800 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5801 fold_convert (ctype, t2));
5802 }
5803
5804 /* If this was a subtraction, negate OP1 and set it to be an addition.
5805 This simplifies the logic below. */
5806 if (tcode == MINUS_EXPR)
5807 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5808
5809 if (TREE_CODE (op1) != INTEGER_CST)
5810 break;
5811
5812 /* If either OP1 or C are negative, this optimization is not safe for
5813 some of the division and remainder types while for others we need
5814 to change the code. */
5815 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5816 {
5817 if (code == CEIL_DIV_EXPR)
5818 code = FLOOR_DIV_EXPR;
5819 else if (code == FLOOR_DIV_EXPR)
5820 code = CEIL_DIV_EXPR;
5821 else if (code != MULT_EXPR
5822 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5823 break;
5824 }
5825
5826 /* If it's a multiply, or a division/modulus where OP1 is a multiple
5827 of our constant, do the operation and verify it doesn't overflow. */
5828 if (code == MULT_EXPR
5829 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5830 {
5831 op1 = const_binop (code, fold_convert (ctype, op1),
5832 fold_convert (ctype, c), 0);
5833 /* We allow the constant to overflow with wrapping semantics. */
5834 if (op1 == 0
5835 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5836 break;
5837 }
5838 else
5839 break;
5840
5841 /* If we have an unsigned type that is not a sizetype, we cannot widen
5842 the operation since it will change the result if the original
5843 computation overflowed. */
5844 if (TYPE_UNSIGNED (ctype)
5845 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5846 && ctype != type)
5847 break;
5848
5849 /* If we were able to eliminate our operation from the first side,
5850 apply our operation to the second side and reform the PLUS. */
5851 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5852 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5853
5854 /* The last case is if we are a multiply. In that case, we can
5855 apply the distributive law to commute the multiply and addition
5856 if the multiplication of the constants doesn't overflow. */
5857 if (code == MULT_EXPR)
5858 return fold_build2 (tcode, ctype,
5859 fold_build2 (code, ctype,
5860 fold_convert (ctype, op0),
5861 fold_convert (ctype, c)),
5862 op1);
5863
5864 break;
5865
5866 case MULT_EXPR:
5867 /* We have a special case here if we are doing something like
5868 (C * 8) % 4 since we know that's zero. */
5869 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5870 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5871 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5872 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5873 return omit_one_operand (type, integer_zero_node, op0);
5874
5875 /* ... fall through ... */
5876
5877 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5878 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5879 /* If we can extract our operation from the LHS, do so and return a
5880 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5881 do something only if the second operand is a constant. */
5882 if (same_p
5883 && (t1 = extract_muldiv (op0, c, code, wide_type,
5884 strict_overflow_p)) != 0)
5885 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5886 fold_convert (ctype, op1));
5887 else if (tcode == MULT_EXPR && code == MULT_EXPR
5888 && (t1 = extract_muldiv (op1, c, code, wide_type,
5889 strict_overflow_p)) != 0)
5890 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5891 fold_convert (ctype, t1));
5892 else if (TREE_CODE (op1) != INTEGER_CST)
5893 return 0;
5894
5895 /* If these are the same operation types, we can associate them
5896 assuming no overflow. */
5897 if (tcode == code
5898 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5899 fold_convert (ctype, c), 0))
5900 && !TREE_OVERFLOW (t1))
5901 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5902
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with an
5906 operation of either CODE or TCODE.
5907
5908 If we have an unsigned type that is not a sizetype, we cannot do
5909 this since it will change the result if the original computation
5910 overflowed. */
5911 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5912 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5913 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5914 || (tcode == MULT_EXPR
5915 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5916 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5917 {
5918 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5919 {
5920 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5921 *strict_overflow_p = true;
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5923 fold_convert (ctype,
5924 const_binop (TRUNC_DIV_EXPR,
5925 op1, c, 0)));
5926 }
5927 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5928 {
5929 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5930 *strict_overflow_p = true;
5931 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5932 fold_convert (ctype,
5933 const_binop (TRUNC_DIV_EXPR,
5934 c, op1, 0)));
5935 }
5936 }
5937 break;
5938
5939 default:
5940 break;
5941 }
5942
5943 return 0;
5944 }
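
/* Some illustrative extract_muldiv results (X is assumed to be a signed
   int whose overflow behavior is undefined):

     (X * 8) / 4       ->  X * 2       (*STRICT_OVERFLOW_P is set, since
                                        this is wrong if X * 8 wrapped)
     (X * 4 + 12) / 4  ->  X + 3       (both addends are multiples of 4)
     (X * 6) % 3       ->  0           (via the MULT_EXPR special case).  */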
5945 \f
5946 /* Return a node which has the indicated constant VALUE (either 0 or
5947 1), and is of the indicated TYPE. */
5948
5949 tree
5950 constant_boolean_node (int value, tree type)
5951 {
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5956 else
5957 return build_int_cst (type, value);
5958 }
5959
5960
5961 /* Return true if expr looks like an ARRAY_REF and set base and
5962 offset to the appropriate trees. If there is no offset,
5963 offset is set to NULL_TREE. Base will be canonicalized to
5964 something you can get the element type from using
5965 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5966 in bytes from the base. */
5967
5968 static bool
5969 extract_array_ref (tree expr, tree *base, tree *offset)
5970 {
5971 /* One canonical form is a PLUS_EXPR with the first
5972 argument being an ADDR_EXPR with a possible NOP_EXPR
5973 attached. */
5974 if (TREE_CODE (expr) == PLUS_EXPR)
5975 {
5976 tree op0 = TREE_OPERAND (expr, 0);
5977 tree inner_base, dummy1;
5978 /* Strip NOP_EXPRs here because the C frontends and/or
5979 folders may present us with (int *)&x.a + 4B. */
5980 STRIP_NOPS (op0);
5981 if (extract_array_ref (op0, &inner_base, &dummy1))
5982 {
5983 *base = inner_base;
5984 if (dummy1 == NULL_TREE)
5985 *offset = TREE_OPERAND (expr, 1);
5986 else
5987 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5988 dummy1, TREE_OPERAND (expr, 1));
5989 return true;
5990 }
5991 }
5992 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5993 which we transform into an ADDR_EXPR with appropriate
5994 offset. For other arguments to the ADDR_EXPR we assume
5995 zero offset and as such do not care about the ADDR_EXPR
5996 type and strip possible nops from it. */
5997 else if (TREE_CODE (expr) == ADDR_EXPR)
5998 {
5999 tree op0 = TREE_OPERAND (expr, 0);
6000 if (TREE_CODE (op0) == ARRAY_REF)
6001 {
6002 tree idx = TREE_OPERAND (op0, 1);
6003 *base = TREE_OPERAND (op0, 0);
6004 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6005 array_ref_element_size (op0));
6006 }
6007 else
6008 {
6009 /* Handle array-to-pointer decay as &a. */
6010 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6011 *base = TREE_OPERAND (expr, 0);
6012 else
6013 *base = expr;
6014 *offset = NULL_TREE;
6015 }
6016 return true;
6017 }
6018 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6019 else if (SSA_VAR_P (expr)
6020 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6021 {
6022 *base = expr;
6023 *offset = NULL_TREE;
6024 return true;
6025 }
6026
6027 return false;
6028 }
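
/* For illustration, with the hypothetical declarations
   `int a[10]; int *p;':

     &a[4]      ->  *base = a, *offset = 4 * sizeof (int)
     &a[4] + 8  ->  *base = a, *offset = 4 * sizeof (int) + 8
     p          ->  *base = p, *offset = NULL_TREE.  */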
6029
6030
6031 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6032 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6033 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6034 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6035 COND is the first argument to CODE; otherwise (as in the example
6036 given here), it is the second argument. TYPE is the type of the
6037 original expression. Return NULL_TREE if no simplification is
6038 possible. */
6039
6040 static tree
6041 fold_binary_op_with_conditional_arg (enum tree_code code,
6042 tree type, tree op0, tree op1,
6043 tree cond, tree arg, int cond_first_p)
6044 {
6045 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6046 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6047 tree test, true_value, false_value;
6048 tree lhs = NULL_TREE;
6049 tree rhs = NULL_TREE;
6050
6051 /* This transformation is only worthwhile if we don't have to wrap
6052 arg in a SAVE_EXPR, and the operation can be simplified on at least
6053 one of the branches once it's pushed inside the COND_EXPR. */
6054 if (!TREE_CONSTANT (arg))
6055 return NULL_TREE;
6056
6057 if (TREE_CODE (cond) == COND_EXPR)
6058 {
6059 test = TREE_OPERAND (cond, 0);
6060 true_value = TREE_OPERAND (cond, 1);
6061 false_value = TREE_OPERAND (cond, 2);
6062 /* If this operand throws an exception, then it does not make
6063 sense to try to perform a logical or arithmetic operation
6064 involving it. */
6065 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6066 lhs = true_value;
6067 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6068 rhs = false_value;
6069 }
6070 else
6071 {
6072 tree testtype = TREE_TYPE (cond);
6073 test = cond;
6074 true_value = constant_boolean_node (true, testtype);
6075 false_value = constant_boolean_node (false, testtype);
6076 }
6077
6078 arg = fold_convert (arg_type, arg);
6079 if (lhs == 0)
6080 {
6081 true_value = fold_convert (cond_type, true_value);
6082 if (cond_first_p)
6083 lhs = fold_build2 (code, type, true_value, arg);
6084 else
6085 lhs = fold_build2 (code, type, arg, true_value);
6086 }
6087 if (rhs == 0)
6088 {
6089 false_value = fold_convert (cond_type, false_value);
6090 if (cond_first_p)
6091 rhs = fold_build2 (code, type, false_value, arg);
6092 else
6093 rhs = fold_build2 (code, type, arg, false_value);
6094 }
6095
6096 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6097 return fold_convert (type, test);
6098 }
6099
6100 \f
6101 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6102
6103 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6104 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6105 ADDEND is the same as X.
6106
6107 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6108 and finite. The problematic cases are when X is zero, and its mode
6109 has signed zeros. In the case of rounding towards -infinity,
6110 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6111 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6112
6113 static bool
6114 fold_real_zero_addition_p (tree type, tree addend, int negate)
6115 {
6116 if (!real_zerop (addend))
6117 return false;
6118
6119 /* Don't allow the fold with -fsignaling-nans. */
6120 if (HONOR_SNANS (TYPE_MODE (type)))
6121 return false;
6122
6123 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6124 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6125 return true;
6126
6127 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6128 if (TREE_CODE (addend) == REAL_CST
6129 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6130 negate = !negate;
6131
6132 /* The mode has signed zeros, and we have to honor their sign.
6133 In this situation, there is only one case we can return true for.
6134 X - 0 is the same as X unless rounding towards -infinity is
6135 supported. */
6136 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6137 }
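
/* Concretely: when signed zeros need not be honored, both `x + 0.0' and
   `x - 0.0' fold to `x'.  When they are honored, only `x - 0.0' (or
   equivalently `x + -0.0') qualifies, and even that is refused under
   sign-dependent rounding, because (+0.0) - 0.0 yields -0.0 when
   rounding toward -infinity.  */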
6138
6139 /* Subroutine of fold() that checks comparisons of built-in math
6140 functions against real constants.
6141
6142 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6143 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6144 is the type of the result and ARG0 and ARG1 are the operands of the
6145 comparison. ARG1 must be a TREE_REAL_CST.
6146
6147 The function returns the constant folded tree if a simplification
6148 can be made, and NULL_TREE otherwise. */
6149
6150 static tree
6151 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6152 tree type, tree arg0, tree arg1)
6153 {
6154 REAL_VALUE_TYPE c;
6155
6156 if (BUILTIN_SQRT_P (fcode))
6157 {
6158 tree arg = CALL_EXPR_ARG (arg0, 0);
6159 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6160
6161 c = TREE_REAL_CST (arg1);
6162 if (REAL_VALUE_NEGATIVE (c))
6163 {
6164 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6165 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6166 return omit_one_operand (type, integer_zero_node, arg);
6167
6168 /* sqrt(x) > y is always true, if y is negative and we
6169 don't care about NaNs, i.e. negative values of x. */
6170 if (code == NE_EXPR || !HONOR_NANS (mode))
6171 return omit_one_operand (type, integer_one_node, arg);
6172
6173 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6174 return fold_build2 (GE_EXPR, type, arg,
6175 build_real (TREE_TYPE (arg), dconst0));
6176 }
6177 else if (code == GT_EXPR || code == GE_EXPR)
6178 {
6179 REAL_VALUE_TYPE c2;
6180
6181 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6182 real_convert (&c2, mode, &c2);
6183
6184 if (REAL_VALUE_ISINF (c2))
6185 {
6186 /* sqrt(x) > y is x == +Inf, when y is very large. */
6187 if (HONOR_INFINITIES (mode))
6188 return fold_build2 (EQ_EXPR, type, arg,
6189 build_real (TREE_TYPE (arg), c2));
6190
6191 /* sqrt(x) > y is always false, when y is very large
6192 and we don't care about infinities. */
6193 return omit_one_operand (type, integer_zero_node, arg);
6194 }
6195
6196 /* sqrt(x) > c is the same as x > c*c. */
6197 return fold_build2 (code, type, arg,
6198 build_real (TREE_TYPE (arg), c2));
6199 }
6200 else if (code == LT_EXPR || code == LE_EXPR)
6201 {
6202 REAL_VALUE_TYPE c2;
6203
6204 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6205 real_convert (&c2, mode, &c2);
6206
6207 if (REAL_VALUE_ISINF (c2))
6208 {
6209 /* sqrt(x) < y is always true, when y is a very large
6210 value and we don't care about NaNs or Infinities. */
6211 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6212 return omit_one_operand (type, integer_one_node, arg);
6213
6214 /* sqrt(x) < y is x != +Inf when y is very large and we
6215 don't care about NaNs. */
6216 if (! HONOR_NANS (mode))
6217 return fold_build2 (NE_EXPR, type, arg,
6218 build_real (TREE_TYPE (arg), c2));
6219
6220 /* sqrt(x) < y is x >= 0 when y is very large and we
6221 don't care about Infinities. */
6222 if (! HONOR_INFINITIES (mode))
6223 return fold_build2 (GE_EXPR, type, arg,
6224 build_real (TREE_TYPE (arg), dconst0));
6225
6226 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6227 if (lang_hooks.decls.global_bindings_p () != 0
6228 || CONTAINS_PLACEHOLDER_P (arg))
6229 return NULL_TREE;
6230
6231 arg = save_expr (arg);
6232 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6233 fold_build2 (GE_EXPR, type, arg,
6234 build_real (TREE_TYPE (arg),
6235 dconst0)),
6236 fold_build2 (NE_EXPR, type, arg,
6237 build_real (TREE_TYPE (arg),
6238 c2)));
6239 }
6240
6241 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6242 if (! HONOR_NANS (mode))
6243 return fold_build2 (code, type, arg,
6244 build_real (TREE_TYPE (arg), c2));
6245
6246 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6247 if (lang_hooks.decls.global_bindings_p () == 0
6248 && ! CONTAINS_PLACEHOLDER_P (arg))
6249 {
6250 arg = save_expr (arg);
6251 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6252 fold_build2 (GE_EXPR, type, arg,
6253 build_real (TREE_TYPE (arg),
6254 dconst0)),
6255 fold_build2 (code, type, arg,
6256 build_real (TREE_TYPE (arg),
6257 c2)));
6258 }
6259 }
6260 }
6261
6262 return NULL_TREE;
6263 }
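
/* For example (constants illustrative):

     sqrt (x) > 2.0    ->  x > 4.0
     sqrt (x) < 2.0    ->  x < 4.0 if NaNs can be ignored, otherwise
                           x >= 0.0 && x < 4.0 with x wrapped in a
                           save_expr
     sqrt (x) < -1.0   ->  constant false, since sqrt never returns a
                           negative value.  */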
6264
6265 /* Subroutine of fold() that optimizes comparisons against Infinities,
6266 either +Inf or -Inf.
6267
6268 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6269 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6270 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6271
6272 The function returns the constant folded tree if a simplification
6273 can be made, and NULL_TREE otherwise. */
6274
6275 static tree
6276 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6277 {
6278 enum machine_mode mode;
6279 REAL_VALUE_TYPE max;
6280 tree temp;
6281 bool neg;
6282
6283 mode = TYPE_MODE (TREE_TYPE (arg0));
6284
6285 /* For negative infinity swap the sense of the comparison. */
6286 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6287 if (neg)
6288 code = swap_tree_comparison (code);
6289
6290 switch (code)
6291 {
6292 case GT_EXPR:
6293 /* x > +Inf is always false, if we ignore sNaNs. */
6294 if (HONOR_SNANS (mode))
6295 return NULL_TREE;
6296 return omit_one_operand (type, integer_zero_node, arg0);
6297
6298 case LE_EXPR:
6299 /* x <= +Inf is always true, if we don't care about NaNs. */
6300 if (! HONOR_NANS (mode))
6301 return omit_one_operand (type, integer_one_node, arg0);
6302
6303 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6304 if (lang_hooks.decls.global_bindings_p () == 0
6305 && ! CONTAINS_PLACEHOLDER_P (arg0))
6306 {
6307 arg0 = save_expr (arg0);
6308 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6309 }
6310 break;
6311
6312 case EQ_EXPR:
6313 case GE_EXPR:
6314 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6315 real_maxval (&max, neg, mode);
6316 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6317 arg0, build_real (TREE_TYPE (arg0), max));
6318
6319 case LT_EXPR:
6320 /* x < +Inf is always equal to x <= DBL_MAX. */
6321 real_maxval (&max, neg, mode);
6322 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6323 arg0, build_real (TREE_TYPE (arg0), max));
6324
6325 case NE_EXPR:
6326 /* x != +Inf is always equal to !(x > DBL_MAX). */
6327 real_maxval (&max, neg, mode);
6328 if (! HONOR_NANS (mode))
6329 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6331
6332 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6333 arg0, build_real (TREE_TYPE (arg0), max));
6334 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6335
6336 default:
6337 break;
6338 }
6339
6340 return NULL_TREE;
6341 }
6342
6343 /* Subroutine of fold() that optimizes comparisons of a division by
6344 a nonzero integer constant against an integer constant, i.e.
6345 X/C1 op C2.
6346
6347 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6348 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6349 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6350
6351 The function returns the constant folded tree if a simplification
6352 can be made, and NULL_TREE otherwise. */
6353
6354 static tree
6355 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6356 {
6357 tree prod, tmp, hi, lo;
6358 tree arg00 = TREE_OPERAND (arg0, 0);
6359 tree arg01 = TREE_OPERAND (arg0, 1);
6360 unsigned HOST_WIDE_INT lpart;
6361 HOST_WIDE_INT hpart;
6362 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6363 bool neg_overflow;
6364 int overflow;
6365
6366 /* We have to do this the hard way to detect unsigned overflow.
6367 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6368 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6369 TREE_INT_CST_HIGH (arg01),
6370 TREE_INT_CST_LOW (arg1),
6371 TREE_INT_CST_HIGH (arg1),
6372 &lpart, &hpart, unsigned_p);
6373 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6374 -1, overflow);
6375 neg_overflow = false;
6376
6377 if (unsigned_p)
6378 {
6379 tmp = int_const_binop (MINUS_EXPR, arg01,
6380 build_int_cst (TREE_TYPE (arg01), 1), 0);
6381 lo = prod;
6382
6383 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6384 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6385 TREE_INT_CST_HIGH (prod),
6386 TREE_INT_CST_LOW (tmp),
6387 TREE_INT_CST_HIGH (tmp),
6388 &lpart, &hpart, unsigned_p);
6389 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6390 -1, overflow | TREE_OVERFLOW (prod));
6391 }
6392 else if (tree_int_cst_sgn (arg01) >= 0)
6393 {
6394 tmp = int_const_binop (MINUS_EXPR, arg01,
6395 build_int_cst (TREE_TYPE (arg01), 1), 0);
6396 switch (tree_int_cst_sgn (arg1))
6397 {
6398 case -1:
6399 neg_overflow = true;
6400 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6401 hi = prod;
6402 break;
6403
6404 case 0:
6405 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6406 hi = tmp;
6407 break;
6408
6409 case 1:
6410 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6411 lo = prod;
6412 break;
6413
6414 default:
6415 gcc_unreachable ();
6416 }
6417 }
6418 else
6419 {
6420 /* A negative divisor reverses the relational operators. */
6421 code = swap_tree_comparison (code);
6422
6423 tmp = int_const_binop (PLUS_EXPR, arg01,
6424 build_int_cst (TREE_TYPE (arg01), 1), 0);
6425 switch (tree_int_cst_sgn (arg1))
6426 {
6427 case -1:
6428 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6429 lo = prod;
6430 break;
6431
6432 case 0:
6433 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6434 lo = tmp;
6435 break;
6436
6437 case 1:
6438 neg_overflow = true;
6439 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6440 hi = prod;
6441 break;
6442
6443 default:
6444 gcc_unreachable ();
6445 }
6446 }
6447
6448 switch (code)
6449 {
6450 case EQ_EXPR:
6451 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6452 return omit_one_operand (type, integer_zero_node, arg00);
6453 if (TREE_OVERFLOW (hi))
6454 return fold_build2 (GE_EXPR, type, arg00, lo);
6455 if (TREE_OVERFLOW (lo))
6456 return fold_build2 (LE_EXPR, type, arg00, hi);
6457 return build_range_check (type, arg00, 1, lo, hi);
6458
6459 case NE_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand (type, integer_one_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2 (LT_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2 (GT_EXPR, type, arg00, hi);
6466 return build_range_check (type, arg00, 0, lo, hi);
6467
6468 case LT_EXPR:
6469 if (TREE_OVERFLOW (lo))
6470 {
6471 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6472 return omit_one_operand (type, tmp, arg00);
6473 }
6474 return fold_build2 (LT_EXPR, type, arg00, lo);
6475
6476 case LE_EXPR:
6477 if (TREE_OVERFLOW (hi))
6478 {
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand (type, tmp, arg00);
6481 }
6482 return fold_build2 (LE_EXPR, type, arg00, hi);
6483
6484 case GT_EXPR:
6485 if (TREE_OVERFLOW (hi))
6486 {
6487 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6488 return omit_one_operand (type, tmp, arg00);
6489 }
6490 return fold_build2 (GT_EXPR, type, arg00, hi);
6491
6492 case GE_EXPR:
6493 if (TREE_OVERFLOW (lo))
6494 {
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand (type, tmp, arg00);
6497 }
6498 return fold_build2 (GE_EXPR, type, arg00, lo);
6499
6500 default:
6501 break;
6502 }
6503
6504 return NULL_TREE;
6505 }
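
/* Worked example for fold_div_compare, with X a signed int:

     X / 4 == 2   holds exactly for X in [8, 11] (prod = 8, tmp = 3,
                  lo = prod, hi = prod + tmp), so it becomes the range
                  check 8 <= X && X <= 11;
     X / 4 == -2  holds for X in [-11, -8];
     X / 4 > 2    becomes X > 11.  */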
6506
6507
6508 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6509 equality/inequality test, then return a simplified form of the test
6510 using a sign testing. Otherwise return NULL. TYPE is the desired
6511 result type. */
6512
6513 static tree
6514 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6515 tree result_type)
6516 {
6517 /* If this is testing a single bit, we can optimize the test. */
6518 if ((code == NE_EXPR || code == EQ_EXPR)
6519 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6521 {
6522 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6523 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6524 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6525
6526 if (arg00 != NULL_TREE
6527 /* This is only a win if casting to a signed type is cheap,
6528 i.e. when arg00's type is not a partial mode. */
6529 && TYPE_PRECISION (TREE_TYPE (arg00))
6530 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6531 {
6532 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6533 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6534 result_type, fold_convert (stype, arg00),
6535 build_int_cst (stype, 0));
6536 }
6537 }
6538
6539 return NULL_TREE;
6540 }
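
/* For instance, if x has type unsigned char, whose sign bit after
   casting to signed char is 0x80:

     (x & 0x80) != 0   ->  (signed char) x < 0
     (x & 0x80) == 0   ->  (signed char) x >= 0.  */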
6541
6542 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6543 equality/inequality test, then return a simplified form of
6544 the test using shifts and logical operations. Otherwise return
6545 NULL. TYPE is the desired result type. */
6546
6547 tree
6548 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6549 tree result_type)
6550 {
6551 /* If this is testing a single bit, we can optimize the test. */
6552 if ((code == NE_EXPR || code == EQ_EXPR)
6553 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6554 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6555 {
6556 tree inner = TREE_OPERAND (arg0, 0);
6557 tree type = TREE_TYPE (arg0);
6558 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6559 enum machine_mode operand_mode = TYPE_MODE (type);
6560 int ops_unsigned;
6561 tree signed_type, unsigned_type, intermediate_type;
6562 tree tem, one;
6563
6564 /* First, see if we can fold the single bit test into a sign-bit
6565 test. */
6566 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6567 result_type);
6568 if (tem)
6569 return tem;
6570
6571 /* Otherwise we have (A & C) != 0 where C is a single bit,
6572 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6573 Similarly for (A & C) == 0. */
6574
6575 /* If INNER is a right shift by a constant and that count plus BITNUM does
6576 not overflow, adjust BITNUM and INNER. */
6577 if (TREE_CODE (inner) == RSHIFT_EXPR
6578 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6579 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6580 && bitnum < TYPE_PRECISION (type)
6581 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6582 bitnum - TYPE_PRECISION (type)))
6583 {
6584 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6585 inner = TREE_OPERAND (inner, 0);
6586 }
6587
6588 /* If we are going to be able to omit the AND below, we must do our
6589 operations as unsigned. If we must use the AND, we have a choice.
6590 Normally unsigned is faster, but for some machines signed is. */
6591 #ifdef LOAD_EXTEND_OP
6592 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6593 && !flag_syntax_only) ? 0 : 1;
6594 #else
6595 ops_unsigned = 1;
6596 #endif
6597
6598 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6599 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6600 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6601 inner = fold_convert (intermediate_type, inner);
6602
6603 if (bitnum != 0)
6604 inner = build2 (RSHIFT_EXPR, intermediate_type,
6605 inner, size_int (bitnum));
6606
6607 one = build_int_cst (intermediate_type, 1);
6608
6609 if (code == EQ_EXPR)
6610 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6611
6612 /* Put the AND last so it can combine with more things. */
6613 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6614
6615 /* Make sure to return the proper type. */
6616 inner = fold_convert (result_type, inner);
6617
6618 return inner;
6619 }
6620 return NULL_TREE;
6621 }
6622
6623 /* Check whether we are allowed to reorder operands arg0 and arg1,
6624 such that the evaluation of arg1 occurs before arg0. */
6625
6626 static bool
6627 reorder_operands_p (tree arg0, tree arg1)
6628 {
6629 if (! flag_evaluation_order)
6630 return true;
6631 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6632 return true;
6633 return ! TREE_SIDE_EFFECTS (arg0)
6634 && ! TREE_SIDE_EFFECTS (arg1);
6635 }
6636
6637 /* Test whether it is preferable to swap two operands, ARG0 and
6638 ARG1, for example because ARG0 is an integer constant and ARG1
6639 isn't. If REORDER is true, only recommend swapping if we can
6640 evaluate the operands in reverse order. */
6641
6642 bool
6643 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6644 {
6645 STRIP_SIGN_NOPS (arg0);
6646 STRIP_SIGN_NOPS (arg1);
6647
6648 if (TREE_CODE (arg1) == INTEGER_CST)
6649 return 0;
6650 if (TREE_CODE (arg0) == INTEGER_CST)
6651 return 1;
6652
6653 if (TREE_CODE (arg1) == REAL_CST)
6654 return 0;
6655 if (TREE_CODE (arg0) == REAL_CST)
6656 return 1;
6657
6658 if (TREE_CODE (arg1) == COMPLEX_CST)
6659 return 0;
6660 if (TREE_CODE (arg0) == COMPLEX_CST)
6661 return 1;
6662
6663 if (TREE_CONSTANT (arg1))
6664 return 0;
6665 if (TREE_CONSTANT (arg0))
6666 return 1;
6667
6668 if (optimize_size)
6669 return 0;
6670
6671 if (reorder && flag_evaluation_order
6672 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6673 return 0;
6674
6675 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6676 for commutative and comparison operators. Ensuring a canonical
6677 form allows the optimizers to find additional redundancies without
6678 having to explicitly check for both orderings. */
6679 if (TREE_CODE (arg0) == SSA_NAME
6680 && TREE_CODE (arg1) == SSA_NAME
6681 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6682 return 1;
6683
6684 /* Put SSA_NAMEs last. */
6685 if (TREE_CODE (arg1) == SSA_NAME)
6686 return 0;
6687 if (TREE_CODE (arg0) == SSA_NAME)
6688 return 1;
6689
6690 /* Put variables last. */
6691 if (DECL_P (arg1))
6692 return 0;
6693 if (DECL_P (arg0))
6694 return 1;
6695
6696 return 0;
6697 }
6698
6699 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6700 ARG0 is extended to a wider type. */
6701
6702 static tree
6703 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6704 {
6705 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6706 tree arg1_unw;
6707 tree shorter_type, outer_type;
6708 tree min, max;
6709 bool above, below;
6710
6711 if (arg0_unw == arg0)
6712 return NULL_TREE;
6713 shorter_type = TREE_TYPE (arg0_unw);
6714
6715 #ifdef HAVE_canonicalize_funcptr_for_compare
6716 /* Disable this optimization if we're casting a function pointer
6717 type on targets that require function pointer canonicalization. */
6718 if (HAVE_canonicalize_funcptr_for_compare
6719 && TREE_CODE (shorter_type) == POINTER_TYPE
6720 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6721 return NULL_TREE;
6722 #endif
6723
6724 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6725 return NULL_TREE;
6726
6727 arg1_unw = get_unwidened (arg1, shorter_type);
6728
6729 /* If possible, express the comparison in the shorter mode. */
6730 if ((code == EQ_EXPR || code == NE_EXPR
6731 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6732 && (TREE_TYPE (arg1_unw) == shorter_type
6733 || (TREE_CODE (arg1_unw) == INTEGER_CST
6734 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6735 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6736 && int_fits_type_p (arg1_unw, shorter_type))))
6737 return fold_build2 (code, type, arg0_unw,
6738 fold_convert (shorter_type, arg1_unw));
6739
6740 if (TREE_CODE (arg1_unw) != INTEGER_CST
6741 || TREE_CODE (shorter_type) != INTEGER_TYPE
6742 || !int_fits_type_p (arg1_unw, shorter_type))
6743 return NULL_TREE;
6744
6745 /* If we are comparing with an integer that does not fit into the range
6746 of the shorter type, the result is known. */
6747 outer_type = TREE_TYPE (arg1_unw);
6748 min = lower_bound_in_type (outer_type, shorter_type);
6749 max = upper_bound_in_type (outer_type, shorter_type);
6750
6751 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6752 max, arg1_unw));
6753 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6754 arg1_unw, min));
6755
6756 switch (code)
6757 {
6758 case EQ_EXPR:
6759 if (above || below)
6760 return omit_one_operand (type, integer_zero_node, arg0);
6761 break;
6762
6763 case NE_EXPR:
6764 if (above || below)
6765 return omit_one_operand (type, integer_one_node, arg0);
6766 break;
6767
6768 case LT_EXPR:
6769 case LE_EXPR:
6770 if (above)
6771 return omit_one_operand (type, integer_one_node, arg0);
6772 else if (below)
6773 return omit_one_operand (type, integer_zero_node, arg0);
6774 /* ... fall through ... */
6775 case GT_EXPR:
6776 case GE_EXPR:
6777 if (above)
6778 return omit_one_operand (type, integer_zero_node, arg0);
6779 else if (below)
6780 return omit_one_operand (type, integer_one_node, arg0);
6781 /* ... fall through ... */
6782 default:
6783 break;
6784 }
6785
6786 return NULL_TREE;
6787 }
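
/* Example: if c has type signed char, then in `(int) c < 300' the
   constant 300 lies above the range [-128, 127] of the shorter type,
   so the comparison folds to constant true; `(int) c == 300' folds to
   constant false; and `(int) c < 100' is simply rewritten as the
   narrower comparison `c < (signed char) 100'.  */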
6788
6789 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6790 ARG0 just the signedness is changed. */
6791
6792 static tree
6793 fold_sign_changed_comparison (enum tree_code code, tree type,
6794 tree arg0, tree arg1)
6795 {
6796 tree arg0_inner;
6797 tree inner_type, outer_type;
6798
6799 if (TREE_CODE (arg0) != NOP_EXPR
6800 && TREE_CODE (arg0) != CONVERT_EXPR)
6801 return NULL_TREE;
6802
6803 outer_type = TREE_TYPE (arg0);
6804 arg0_inner = TREE_OPERAND (arg0, 0);
6805 inner_type = TREE_TYPE (arg0_inner);
6806
6807 #ifdef HAVE_canonicalize_funcptr_for_compare
6808 /* Disable this optimization if we're casting a function pointer
6809 type on targets that require function pointer canonicalization. */
6810 if (HAVE_canonicalize_funcptr_for_compare
6811 && TREE_CODE (inner_type) == POINTER_TYPE
6812 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6813 return NULL_TREE;
6814 #endif
6815
6816 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6817 return NULL_TREE;
6818
6819 if (TREE_CODE (arg1) != INTEGER_CST
6820 && !((TREE_CODE (arg1) == NOP_EXPR
6821 || TREE_CODE (arg1) == CONVERT_EXPR)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6823 return NULL_TREE;
6824
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 && code != NE_EXPR
6827 && code != EQ_EXPR)
6828 return NULL_TREE;
6829
6830 if (TREE_CODE (arg1) == INTEGER_CST)
6831 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6832 TREE_INT_CST_HIGH (arg1), 0,
6833 TREE_OVERFLOW (arg1));
6834 else
6835 arg1 = fold_convert (inner_type, arg1);
6836
6837 return fold_build2 (code, type, arg0_inner, arg1);
6838 }
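
/* Example: with `int x', the test `(unsigned int) x == 5u' changes only
   the signedness, not the precision, so it is rewritten as `x == 5';
   the corresponding `<' form is left alone, since a sign change does
   matter for ordering comparisons.  */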
6839
6840 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6841 the step of the array. Reconstructs s and delta in the case of s * delta
6842 being an integer constant (and thus already folded).
6843 ADDR is the address. MULT is the multiplicative expression.
6844 If the function succeeds, the new address expression is returned. Otherwise
6845 NULL_TREE is returned. */
6846
6847 static tree
6848 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6849 {
6850 tree s, delta, step;
6851 tree ref = TREE_OPERAND (addr, 0), pref;
6852 tree ret, pos;
6853 tree itype;
6854 bool mdim = false;
6855
6856 /* Canonicalize op1 into a possibly non-constant delta
6857 and an INTEGER_CST s. */
6858 if (TREE_CODE (op1) == MULT_EXPR)
6859 {
6860 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6861
6862 STRIP_NOPS (arg0);
6863 STRIP_NOPS (arg1);
6864
6865 if (TREE_CODE (arg0) == INTEGER_CST)
6866 {
6867 s = arg0;
6868 delta = arg1;
6869 }
6870 else if (TREE_CODE (arg1) == INTEGER_CST)
6871 {
6872 s = arg1;
6873 delta = arg0;
6874 }
6875 else
6876 return NULL_TREE;
6877 }
6878 else if (TREE_CODE (op1) == INTEGER_CST)
6879 {
6880 delta = op1;
6881 s = NULL_TREE;
6882 }
6883 else
6884 {
6885 /* Treat op1 as delta * 1. */
6886 delta = op1;
6887 s = integer_one_node;
6888 }
6889
6890 for (;; ref = TREE_OPERAND (ref, 0))
6891 {
6892 if (TREE_CODE (ref) == ARRAY_REF)
6893 {
6894 /* Remember if this was a multi-dimensional array. */
6895 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6896 mdim = true;
6897
6898 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6899 if (! itype)
6900 continue;
6901
6902 step = array_ref_element_size (ref);
6903 if (TREE_CODE (step) != INTEGER_CST)
6904 continue;
6905
6906 if (s)
6907 {
6908 if (! tree_int_cst_equal (step, s))
6909 continue;
6910 }
6911 else
6912 {
6913 /* See if delta is a multiple of step. */
6914 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6915 if (! tmp)
6916 continue;
6917 delta = tmp;
6918 }
6919
6920 /* Only fold here if we can verify we do not overflow one
6921 dimension of a multi-dimensional array. */
6922 if (mdim)
6923 {
6924 tree tmp;
6925
6926 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6927 || !INTEGRAL_TYPE_P (itype)
6928 || !TYPE_MAX_VALUE (itype)
6929 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6930 continue;
6931
6932 tmp = fold_binary (code, itype,
6933 fold_convert (itype,
6934 TREE_OPERAND (ref, 1)),
6935 fold_convert (itype, delta));
6936 if (!tmp
6937 || TREE_CODE (tmp) != INTEGER_CST
6938 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6939 continue;
6940 }
6941
6942 break;
6943 }
6944 else
6945 mdim = false;
6946
6947 if (!handled_component_p (ref))
6948 return NULL_TREE;
6949 }
6950
6951 /* We found a suitable array reference. So copy everything up to it,
6952 and replace the index. */
6953
6954 pref = TREE_OPERAND (addr, 0);
6955 ret = copy_node (pref);
6956 pos = ret;
6957
6958 while (pref != ref)
6959 {
6960 pref = TREE_OPERAND (pref, 0);
6961 TREE_OPERAND (pos, 0) = copy_node (pref);
6962 pos = TREE_OPERAND (pos, 0);
6963 }
6964
6965 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6966 fold_convert (itype,
6967 TREE_OPERAND (pos, 1)),
6968 fold_convert (itype, delta));
6969
6970 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6971 }
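
/* Example: given `int a[16]' with element size 4,

     &a[i] + 4 * k  ->  &a[i + k]     (s == 4 matches the array step)
     &a[i] + 8      ->  &a[i + 2]     (8 is an exact multiple of the
                                       step, so delta becomes 2).  */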
6972
6973
6974 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6975 means A >= Y && A != MAX, but in this case we know that
6976 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6977
6978 static tree
6979 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6980 {
6981 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6982
6983 if (TREE_CODE (bound) == LT_EXPR)
6984 a = TREE_OPERAND (bound, 0);
6985 else if (TREE_CODE (bound) == GT_EXPR)
6986 a = TREE_OPERAND (bound, 1);
6987 else
6988 return NULL_TREE;
6989
6990 typea = TREE_TYPE (a);
6991 if (!INTEGRAL_TYPE_P (typea)
6992 && !POINTER_TYPE_P (typea))
6993 return NULL_TREE;
6994
6995 if (TREE_CODE (ineq) == LT_EXPR)
6996 {
6997 a1 = TREE_OPERAND (ineq, 1);
6998 y = TREE_OPERAND (ineq, 0);
6999 }
7000 else if (TREE_CODE (ineq) == GT_EXPR)
7001 {
7002 a1 = TREE_OPERAND (ineq, 0);
7003 y = TREE_OPERAND (ineq, 1);
7004 }
7005 else
7006 return NULL_TREE;
7007
7008 if (TREE_TYPE (a1) != typea)
7009 return NULL_TREE;
7010
7011 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7012 if (!integer_onep (diff))
7013 return NULL_TREE;
7014
7015 return fold_build2 (GE_EXPR, type, a, y);
7016 }
7017
7018 /* Fold a sum or difference in which at least one operand is a multiplication.
7019 Returns the folded tree or NULL if no simplification could be made. */
7020
7021 static tree
7022 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7023 {
7024 tree arg00, arg01, arg10, arg11;
7025 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7026
7027 /* (A * C) +- (B * C) -> (A+-B) * C.
7028 (A * C) +- A -> A * (C+-1).
7029 We are most concerned about the case where C is a constant,
7030 but other combinations show up during loop reduction. Since
7031 it is not difficult, try all four possibilities. */
7032
7033 if (TREE_CODE (arg0) == MULT_EXPR)
7034 {
7035 arg00 = TREE_OPERAND (arg0, 0);
7036 arg01 = TREE_OPERAND (arg0, 1);
7037 }
7038 else
7039 {
7040 arg00 = arg0;
7041 arg01 = build_one_cst (type);
7042 }
7043 if (TREE_CODE (arg1) == MULT_EXPR)
7044 {
7045 arg10 = TREE_OPERAND (arg1, 0);
7046 arg11 = TREE_OPERAND (arg1, 1);
7047 }
7048 else
7049 {
7050 arg10 = arg1;
7051 arg11 = build_one_cst (type);
7052 }
7053 same = NULL_TREE;
7054
7055 if (operand_equal_p (arg01, arg11, 0))
7056 same = arg01, alt0 = arg00, alt1 = arg10;
7057 else if (operand_equal_p (arg00, arg10, 0))
7058 same = arg00, alt0 = arg01, alt1 = arg11;
7059 else if (operand_equal_p (arg00, arg11, 0))
7060 same = arg00, alt0 = arg01, alt1 = arg10;
7061 else if (operand_equal_p (arg01, arg10, 0))
7062 same = arg01, alt0 = arg00, alt1 = arg11;
7063
7064 /* No identical multiplicands; see if we can find a common
7065 power-of-two factor in non-power-of-two multiplies. This
7066 can help in multi-dimensional array access. */
7067 else if (host_integerp (arg01, 0)
7068 && host_integerp (arg11, 0))
7069 {
7070 HOST_WIDE_INT int01, int11, tmp;
7071 bool swap = false;
7072 tree maybe_same;
7073 int01 = TREE_INT_CST_LOW (arg01);
7074 int11 = TREE_INT_CST_LOW (arg11);
7075
7076 /* Move min of absolute values to int11. */
7077 if ((int01 >= 0 ? int01 : -int01)
7078 < (int11 >= 0 ? int11 : -int11))
7079 {
7080 tmp = int01, int01 = int11, int11 = tmp;
7081 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7082 maybe_same = arg01;
7083 swap = true;
7084 }
7085 else
7086 maybe_same = arg11;
7087
7088 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7089 {
7090 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7091 build_int_cst (TREE_TYPE (arg00),
7092 int01 / int11));
7093 alt1 = arg10;
7094 same = maybe_same;
7095 if (swap)
7096 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7097 }
7098 }
7099
7100 if (same)
7101 return fold_build2 (MULT_EXPR, type,
7102 fold_build2 (code, type,
7103 fold_convert (type, alt0),
7104 fold_convert (type, alt1)),
7105 fold_convert (type, same));
7106
7107 return NULL_TREE;
7108 }
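
/* Illustrative examples (not part of GCC) of the folds above:

       a * 4 + b * 4    ->  (a + b) * 4      identical multiplicands
       a * 5 + a        ->  a * (5 + 1)      the (A * C) +- A form
       a * 12 - b * 4   ->  (a * 3 - b) * 4  common power-of-two
                                             factor, 12 == 3 * 4

   The last form is produced by the host_integerp branch above and
   typically arises from multi-dimensional array address
   computations.  */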
7109
7110 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7111 specified by EXPR into the buffer PTR of length LEN bytes.
7112 Return the number of bytes placed in the buffer, or zero
7113 upon failure. */
7114
7115 static int
7116 native_encode_int (tree expr, unsigned char *ptr, int len)
7117 {
7118 tree type = TREE_TYPE (expr);
7119 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7120 int byte, offset, word, words;
7121 unsigned char value;
7122
7123 if (total_bytes > len)
7124 return 0;
7125 words = total_bytes / UNITS_PER_WORD;
7126
7127 for (byte = 0; byte < total_bytes; byte++)
7128 {
7129 int bitpos = byte * BITS_PER_UNIT;
7130 if (bitpos < HOST_BITS_PER_WIDE_INT)
7131 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7132 else
7133 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7134 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7135
7136 if (total_bytes > UNITS_PER_WORD)
7137 {
7138 word = byte / UNITS_PER_WORD;
7139 if (WORDS_BIG_ENDIAN)
7140 word = (words - 1) - word;
7141 offset = word * UNITS_PER_WORD;
7142 if (BYTES_BIG_ENDIAN)
7143 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7144 else
7145 offset += byte % UNITS_PER_WORD;
7146 }
7147 else
7148 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7149 ptr[offset] = value;
7150 }
7151 return total_bytes;
7152 }
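
/* Illustrative sketch (not part of GCC), assuming 8-bit bytes, a
   64-bit HOST_WIDE_INT, and a constant that fits in one target
   word: the loop above serializes the constant one byte at a time,

       value = TREE_INT_CST_LOW (expr)  >> (byte * 8)       bytes 0..7
       value = TREE_INT_CST_HIGH (expr) >> (byte * 8 - 64)  bytes 8..15

   and then places each byte according to the target's byte order.
   For the 32-bit constant 0x11223344 the buffer ends up as
   44 33 22 11 on a little-endian target and 11 22 33 44 on a
   big-endian one.  */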
7153
7154
7155 /* Subroutine of native_encode_expr. Encode the REAL_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7158 upon failure. */
7159
7160 static int
7161 native_encode_real (tree expr, unsigned char *ptr, int len)
7162 {
7163 tree type = TREE_TYPE (expr);
7164 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7165 int byte, offset, word, words, bitpos;
7166 unsigned char value;
7167
7168 	  /* There are always 32 bits in each long, no matter the size of
7169 	     the host's long.  We handle floating-point representations with
7170 	     up to 192 bits.  */
7171 long tmp[6];
7172
7173 if (total_bytes > len)
7174 return 0;
7175 words = 32 / UNITS_PER_WORD;
7176
7177 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7178
7179 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7180 bitpos += BITS_PER_UNIT)
7181 {
7182 byte = (bitpos / BITS_PER_UNIT) & 3;
7183 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7184
7185 if (UNITS_PER_WORD < 4)
7186 {
7187 word = byte / UNITS_PER_WORD;
7188 if (WORDS_BIG_ENDIAN)
7189 word = (words - 1) - word;
7190 offset = word * UNITS_PER_WORD;
7191 if (BYTES_BIG_ENDIAN)
7192 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7193 else
7194 offset += byte % UNITS_PER_WORD;
7195 }
7196 else
7197 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7198 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7199 }
7200 return total_bytes;
7201 }
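
/* The layout here differs from the integer case only in that
   real_to_target parcels the image into 32-bit groups, so the byte
   shuffle works within each group of four bytes.  For example
   (illustrative, assuming IEEE single precision), 1.0f has the
   image 0x3f800000 and encodes as 3f 80 00 00 on a big-endian
   target and 00 00 80 3f on a little-endian one.  */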
7202
7203 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero
7206 upon failure. */
7207
7208 static int
7209 native_encode_complex (tree expr, unsigned char *ptr, int len)
7210 {
7211 int rsize, isize;
7212 tree part;
7213
7214 part = TREE_REALPART (expr);
7215 rsize = native_encode_expr (part, ptr, len);
7216 if (rsize == 0)
7217 return 0;
7218 part = TREE_IMAGPART (expr);
7219 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7220 if (isize != rsize)
7221 return 0;
7222 return rsize + isize;
7223 }
7224
7225
7226 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7227 specified by EXPR into the buffer PTR of length LEN bytes.
7228 Return the number of bytes placed in the buffer, or zero
7229 upon failure. */
7230
7231 static int
7232 native_encode_vector (tree expr, unsigned char *ptr, int len)
7233 {
7234 int i, size, offset, count;
7235 tree itype, elem, elements;
7236
7237 offset = 0;
7238 elements = TREE_VECTOR_CST_ELTS (expr);
7239 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7240 itype = TREE_TYPE (TREE_TYPE (expr));
7241 size = GET_MODE_SIZE (TYPE_MODE (itype));
7242 for (i = 0; i < count; i++)
7243 {
7244 if (elements)
7245 {
7246 elem = TREE_VALUE (elements);
7247 elements = TREE_CHAIN (elements);
7248 }
7249 else
7250 elem = NULL_TREE;
7251
7252 if (elem)
7253 {
7254 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7255 return 0;
7256 }
7257 else
7258 {
7259 if (offset + size > len)
7260 return 0;
7261 memset (ptr+offset, 0, size);
7262 }
7263 offset += size;
7264 }
7265 return offset;
7266 }
7267
7268
7269 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7270 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7271 buffer PTR of length LEN bytes. Return the number of bytes
7272 placed in the buffer, or zero upon failure. */
7273
7274 static int
7275 native_encode_expr (tree expr, unsigned char *ptr, int len)
7276 {
7277 switch (TREE_CODE (expr))
7278 {
7279 case INTEGER_CST:
7280 return native_encode_int (expr, ptr, len);
7281
7282 case REAL_CST:
7283 return native_encode_real (expr, ptr, len);
7284
7285 case COMPLEX_CST:
7286 return native_encode_complex (expr, ptr, len);
7287
7288 case VECTOR_CST:
7289 return native_encode_vector (expr, ptr, len);
7290
7291 default:
7292 return 0;
7293 }
7294 }
7295
7296
7297 /* Subroutine of native_interpret_expr. Interpret the contents of
7298 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7299 If the buffer cannot be interpreted, return NULL_TREE. */
7300
7301 static tree
7302 native_interpret_int (tree type, unsigned char *ptr, int len)
7303 {
7304 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7305 int byte, offset, word, words;
7306 unsigned char value;
7307 	  unsigned HOST_WIDE_INT lo = 0;
7308 HOST_WIDE_INT hi = 0;
7309
7310 if (total_bytes > len)
7311 return NULL_TREE;
7312 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7313 return NULL_TREE;
7314 words = total_bytes / UNITS_PER_WORD;
7315
7316 for (byte = 0; byte < total_bytes; byte++)
7317 {
7318 int bitpos = byte * BITS_PER_UNIT;
7319 if (total_bytes > UNITS_PER_WORD)
7320 {
7321 word = byte / UNITS_PER_WORD;
7322 if (WORDS_BIG_ENDIAN)
7323 word = (words - 1) - word;
7324 offset = word * UNITS_PER_WORD;
7325 if (BYTES_BIG_ENDIAN)
7326 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7327 else
7328 offset += byte % UNITS_PER_WORD;
7329 }
7330 else
7331 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7332 value = ptr[offset];
7333
7334 if (bitpos < HOST_BITS_PER_WIDE_INT)
7335 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7336 else
7337 hi |= (unsigned HOST_WIDE_INT) value
7338 << (bitpos - HOST_BITS_PER_WIDE_INT);
7339 }
7340
7341 return build_int_cst_wide_type (type, lo, hi);
7342 }
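
/* Illustrative round trip (not part of GCC): native_interpret_int
   inverts native_encode_int, so encoding an INTEGER_CST and
   interpreting the buffer in the same type reproduces the value.
   A user-level analogue, assuming an ordinary 32-bit int:

       unsigned char buf[sizeof (int)];
       int x = 0x11223344, y;
       memcpy (buf, &x, sizeof x);   the encode direction
       memcpy (&y, buf, sizeof y);   the interpret direction
       assert (x == y);  */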
7343
7344
7345 /* Subroutine of native_interpret_expr. Interpret the contents of
7346 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7347 If the buffer cannot be interpreted, return NULL_TREE. */
7348
7349 static tree
7350 native_interpret_real (tree type, unsigned char *ptr, int len)
7351 {
7352 enum machine_mode mode = TYPE_MODE (type);
7353 int total_bytes = GET_MODE_SIZE (mode);
7354 int byte, offset, word, words, bitpos;
7355 unsigned char value;
7356 	  /* There are always 32 bits in each long, no matter the size of
7357 	     the host's long.  We handle floating-point representations with
7358 	     up to 192 bits.  */
7359 REAL_VALUE_TYPE r;
7360 long tmp[6];
7361
7363 if (total_bytes > len || total_bytes > 24)
7364 return NULL_TREE;
7365 words = 32 / UNITS_PER_WORD;
7366
7367 memset (tmp, 0, sizeof (tmp));
7368 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7369 bitpos += BITS_PER_UNIT)
7370 {
7371 byte = (bitpos / BITS_PER_UNIT) & 3;
7372 if (UNITS_PER_WORD < 4)
7373 {
7374 word = byte / UNITS_PER_WORD;
7375 if (WORDS_BIG_ENDIAN)
7376 word = (words - 1) - word;
7377 offset = word * UNITS_PER_WORD;
7378 if (BYTES_BIG_ENDIAN)
7379 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7380 else
7381 offset += byte % UNITS_PER_WORD;
7382 }
7383 else
7384 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7385 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7386
7387 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7388 }
7389
7390 real_from_target (&r, tmp, mode);
7391 return build_real (type, r);
7392 }
7393
7394
7395 /* Subroutine of native_interpret_expr. Interpret the contents of
7396 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7397 If the buffer cannot be interpreted, return NULL_TREE. */
7398
7399 static tree
7400 native_interpret_complex (tree type, unsigned char *ptr, int len)
7401 {
7402 tree etype, rpart, ipart;
7403 int size;
7404
7405 etype = TREE_TYPE (type);
7406 size = GET_MODE_SIZE (TYPE_MODE (etype));
7407 if (size * 2 > len)
7408 return NULL_TREE;
7409 rpart = native_interpret_expr (etype, ptr, size);
7410 if (!rpart)
7411 return NULL_TREE;
7412 ipart = native_interpret_expr (etype, ptr+size, size);
7413 if (!ipart)
7414 return NULL_TREE;
7415 return build_complex (type, rpart, ipart);
7416 }
7417
7418
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7422
7423 static tree
7424 native_interpret_vector (tree type, unsigned char *ptr, int len)
7425 {
7426 tree etype, elem, elements;
7427 int i, size, count;
7428
7429 etype = TREE_TYPE (type);
7430 size = GET_MODE_SIZE (TYPE_MODE (etype));
7431 count = TYPE_VECTOR_SUBPARTS (type);
7432 if (size * count > len)
7433 return NULL_TREE;
7434
7435 elements = NULL_TREE;
7436 for (i = count - 1; i >= 0; i--)
7437 {
7438 elem = native_interpret_expr (etype, ptr+(i*size), size);
7439 if (!elem)
7440 return NULL_TREE;
7441 elements = tree_cons (NULL_TREE, elem, elements);
7442 }
7443 return build_vector (type, elements);
7444 }
7445
7446
7447 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7448 the buffer PTR of length LEN as a constant of type TYPE. For
7449 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7450 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7451 return NULL_TREE. */
7452
7453 static tree
7454 native_interpret_expr (tree type, unsigned char *ptr, int len)
7455 {
7456 switch (TREE_CODE (type))
7457 {
7458 case INTEGER_TYPE:
7459 case ENUMERAL_TYPE:
7460 case BOOLEAN_TYPE:
7461 return native_interpret_int (type, ptr, len);
7462
7463 case REAL_TYPE:
7464 return native_interpret_real (type, ptr, len);
7465
7466 case COMPLEX_TYPE:
7467 return native_interpret_complex (type, ptr, len);
7468
7469 case VECTOR_TYPE:
7470 return native_interpret_vector (type, ptr, len);
7471
7472 default:
7473 return NULL_TREE;
7474 }
7475 }
7476
7477
7478 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7479 TYPE at compile-time. If we're unable to perform the conversion
7480 return NULL_TREE. */
7481
7482 static tree
7483 fold_view_convert_expr (tree type, tree expr)
7484 {
7485 /* We support up to 512-bit values (for V8DFmode). */
7486 unsigned char buffer[64];
7487 int len;
7488
7489 /* Check that the host and target are sane. */
7490 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7491 return NULL_TREE;
7492
7493 len = native_encode_expr (expr, buffer, sizeof (buffer));
7494 if (len == 0)
7495 return NULL_TREE;
7496
7497 return native_interpret_expr (type, buffer, len);
7498 }
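
/* Illustrative sketch (not part of GCC): a VIEW_CONVERT_EXPR
   reinterprets the bits of a value in another type, much like the
   memcpy idiom at the source level:

       float f = 1.0f;
       unsigned int u;
       memcpy (&u, &f, sizeof u);    u == 0x3f800000 under IEEE

   For constant EXPR the encode/interpret pair above performs this
   reinterpretation at compile time.  The CHAR_BIT/BITS_PER_UNIT
   check bails out on hosts or targets without 8-bit bytes, where
   the byte-buffer protocol would be meaningless.  */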
7499
7500
7501 /* Fold a unary expression of code CODE and type TYPE with operand
7502 OP0. Return the folded expression if folding is successful.
7503 Otherwise, return NULL_TREE. */
7504
7505 tree
7506 fold_unary (enum tree_code code, tree type, tree op0)
7507 {
7508 tree tem;
7509 tree arg0;
7510 enum tree_code_class kind = TREE_CODE_CLASS (code);
7511
7512 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7513 && TREE_CODE_LENGTH (code) == 1);
7514
7515 arg0 = op0;
7516 if (arg0)
7517 {
7518 if (code == NOP_EXPR || code == CONVERT_EXPR
7519 || code == FLOAT_EXPR || code == ABS_EXPR)
7520 {
7521 /* Don't use STRIP_NOPS, because signedness of argument type
7522 matters. */
7523 STRIP_SIGN_NOPS (arg0);
7524 }
7525 else
7526 {
7527 /* Strip any conversions that don't change the mode. This
7528 is safe for every expression, except for a comparison
7529 expression because its signedness is derived from its
7530 operands.
7531
7532 Note that this is done as an internal manipulation within
7533 the constant folder, in order to find the simplest
7534 representation of the arguments so that their form can be
7535 	     studied.  In any case, the appropriate type conversions
7536 should be put back in the tree that will get out of the
7537 constant folder. */
7538 STRIP_NOPS (arg0);
7539 }
7540 }
7541
7542 if (TREE_CODE_CLASS (code) == tcc_unary)
7543 {
7544 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7545 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7546 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7547 else if (TREE_CODE (arg0) == COND_EXPR)
7548 {
7549 tree arg01 = TREE_OPERAND (arg0, 1);
7550 tree arg02 = TREE_OPERAND (arg0, 2);
7551 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7552 arg01 = fold_build1 (code, type, arg01);
7553 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7554 arg02 = fold_build1 (code, type, arg02);
7555 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7556 arg01, arg02);
7557
7558 	  /* If this was a conversion, and all we did was to move it
7559 inside the COND_EXPR, bring it back out. But leave it if
7560 it is a conversion from integer to integer and the
7561 result precision is no wider than a word since such a
7562 conversion is cheap and may be optimized away by combine,
7563 while it couldn't if it were outside the COND_EXPR. Then return
7564 so we don't get into an infinite recursion loop taking the
7565 conversion out and then back in. */
7566
7567 if ((code == NOP_EXPR || code == CONVERT_EXPR
7568 || code == NON_LVALUE_EXPR)
7569 && TREE_CODE (tem) == COND_EXPR
7570 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7571 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7572 	      && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
7573 	      && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
7574 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7575 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7576 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7577 && (INTEGRAL_TYPE_P
7578 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7579 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7580 || flag_syntax_only))
7581 tem = build1 (code, type,
7582 build3 (COND_EXPR,
7583 TREE_TYPE (TREE_OPERAND
7584 (TREE_OPERAND (tem, 1), 0)),
7585 TREE_OPERAND (tem, 0),
7586 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7587 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7588 return tem;
7589 }
7590 else if (COMPARISON_CLASS_P (arg0))
7591 {
7592 if (TREE_CODE (type) == BOOLEAN_TYPE)
7593 {
7594 arg0 = copy_node (arg0);
7595 TREE_TYPE (arg0) = type;
7596 return arg0;
7597 }
7598 else if (TREE_CODE (type) != INTEGER_TYPE)
7599 return fold_build3 (COND_EXPR, type, arg0,
7600 fold_build1 (code, type,
7601 integer_one_node),
7602 fold_build1 (code, type,
7603 integer_zero_node));
7604 }
7605 }
7606
7607 switch (code)
7608 {
7609 case NOP_EXPR:
7610 case FLOAT_EXPR:
7611 case CONVERT_EXPR:
7612 case FIX_TRUNC_EXPR:
7613 if (TREE_TYPE (op0) == type)
7614 return op0;
7615
7616 /* If we have (type) (a CMP b) and type is an integral type, return
7617 new expression involving the new type. */
7618 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7619 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7620 TREE_OPERAND (op0, 1));
7621
7622 /* Handle cases of two conversions in a row. */
7623 if (TREE_CODE (op0) == NOP_EXPR
7624 || TREE_CODE (op0) == CONVERT_EXPR)
7625 {
7626 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7627 tree inter_type = TREE_TYPE (op0);
7628 int inside_int = INTEGRAL_TYPE_P (inside_type);
7629 int inside_ptr = POINTER_TYPE_P (inside_type);
7630 int inside_float = FLOAT_TYPE_P (inside_type);
7631 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7632 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7633 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7634 int inter_int = INTEGRAL_TYPE_P (inter_type);
7635 int inter_ptr = POINTER_TYPE_P (inter_type);
7636 int inter_float = FLOAT_TYPE_P (inter_type);
7637 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7638 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7639 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7640 int final_int = INTEGRAL_TYPE_P (type);
7641 int final_ptr = POINTER_TYPE_P (type);
7642 int final_float = FLOAT_TYPE_P (type);
7643 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7644 unsigned int final_prec = TYPE_PRECISION (type);
7645 int final_unsignedp = TYPE_UNSIGNED (type);
7646
7647 /* In addition to the cases of two conversions in a row
7648 handled below, if we are converting something to its own
7649 type via an object of identical or wider precision, neither
7650 conversion is needed. */
7651 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7652 && (((inter_int || inter_ptr) && final_int)
7653 || (inter_float && final_float))
7654 && inter_prec >= final_prec)
7655 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7656
7657 /* Likewise, if the intermediate and final types are either both
7658 float or both integer, we don't need the middle conversion if
7659 it is wider than the final type and doesn't change the signedness
7660 (for integers). Avoid this if the final type is a pointer
7661 since then we sometimes need the inner conversion. Likewise if
7662 the outer has a precision not equal to the size of its mode. */
7663 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7664 || (inter_float && inside_float)
7665 || (inter_vec && inside_vec))
7666 && inter_prec >= inside_prec
7667 && (inter_float || inter_vec
7668 || inter_unsignedp == inside_unsignedp)
7669 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7670 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7671 && ! final_ptr
7672 && (! final_vec || inter_prec == inside_prec))
7673 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7674
7675 /* If we have a sign-extension of a zero-extended value, we can
7676 replace that by a single zero-extension. */
7677 if (inside_int && inter_int && final_int
7678 && inside_prec < inter_prec && inter_prec < final_prec
7679 && inside_unsignedp && !inter_unsignedp)
7680 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7681
7682 /* Two conversions in a row are not needed unless:
7683 - some conversion is floating-point (overstrict for now), or
7684 - some conversion is a vector (overstrict for now), or
7685 - the intermediate type is narrower than both initial and
7686 final, or
7687 - the intermediate type and innermost type differ in signedness,
7688 and the outermost type is wider than the intermediate, or
7689 - the initial type is a pointer type and the precisions of the
7690 intermediate and final types differ, or
7691 - the final type is a pointer type and the precisions of the
7692 	     initial and intermediate types differ, or
7693 	   - the final type is a pointer type and the initial type is not, or
7694 	   - the initial type is a pointer to an array and the final type
7695 	     is not.  */
7696 if (! inside_float && ! inter_float && ! final_float
7697 && ! inside_vec && ! inter_vec && ! final_vec
7698 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7699 && ! (inside_int && inter_int
7700 && inter_unsignedp != inside_unsignedp
7701 && inter_prec < final_prec)
7702 && ((inter_unsignedp && inter_prec > inside_prec)
7703 == (final_unsignedp && final_prec > inter_prec))
7704 && ! (inside_ptr && inter_prec != final_prec)
7705 && ! (final_ptr && inside_prec != inter_prec)
7706 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7707 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7708 && final_ptr == inside_ptr
7709 && ! (inside_ptr
7710 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7711 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7712 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7713 }
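	  /* Examples (illustrative): (int) (long) (short) s folds to
	     (int) (short) s, the widening middle conversion being
	     redundant, whereas (int) (unsigned short) (signed char) c
	     is left alone: the intermediate conversion changes
	     signedness and the final type is wider, so dropping it
	     would change the result for negative c.  */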
7714
7715 /* Handle (T *)&A.B.C for A being of type T and B and C
7716 living at offset zero. This occurs frequently in
7717 C++ upcasting and then accessing the base. */
7718 if (TREE_CODE (op0) == ADDR_EXPR
7719 && POINTER_TYPE_P (type)
7720 && handled_component_p (TREE_OPERAND (op0, 0)))
7721 {
7722 HOST_WIDE_INT bitsize, bitpos;
7723 tree offset;
7724 enum machine_mode mode;
7725 int unsignedp, volatilep;
7726 tree base = TREE_OPERAND (op0, 0);
7727 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7728 &mode, &unsignedp, &volatilep, false);
7729 /* If the reference was to a (constant) zero offset, we can use
7730 the address of the base if it has the same base type
7731 as the result type. */
7732 if (! offset && bitpos == 0
7733 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7734 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7735 return fold_convert (type, build_fold_addr_expr (base));
7736 }
7737
7738 if ((TREE_CODE (op0) == MODIFY_EXPR
7739 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7740 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7741 /* Detect assigning a bitfield. */
7742 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7743 && DECL_BIT_FIELD
7744 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7745 {
7746 /* Don't leave an assignment inside a conversion
7747 unless assigning a bitfield. */
7748 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7749 /* First do the assignment, then return converted constant. */
7750 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7751 TREE_NO_WARNING (tem) = 1;
7752 TREE_USED (tem) = 1;
7753 return tem;
7754 }
7755
7756 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7757 constants (if x has signed type, the sign bit cannot be set
7758 in c). This folds extension into the BIT_AND_EXPR. */
7759 if (INTEGRAL_TYPE_P (type)
7760 && TREE_CODE (type) != BOOLEAN_TYPE
7761 && TREE_CODE (op0) == BIT_AND_EXPR
7762 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7763 {
7764 tree and = op0;
7765 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7766 int change = 0;
7767
7768 if (TYPE_UNSIGNED (TREE_TYPE (and))
7769 || (TYPE_PRECISION (type)
7770 <= TYPE_PRECISION (TREE_TYPE (and))))
7771 change = 1;
7772 else if (TYPE_PRECISION (TREE_TYPE (and1))
7773 <= HOST_BITS_PER_WIDE_INT
7774 && host_integerp (and1, 1))
7775 {
7776 unsigned HOST_WIDE_INT cst;
7777
7778 cst = tree_low_cst (and1, 1);
7779 cst &= (HOST_WIDE_INT) -1
7780 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7781 change = (cst == 0);
7782 #ifdef LOAD_EXTEND_OP
7783 if (change
7784 && !flag_syntax_only
7785 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7786 == ZERO_EXTEND))
7787 {
7788 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7789 and0 = fold_convert (uns, and0);
7790 and1 = fold_convert (uns, and1);
7791 }
7792 #endif
7793 }
7794 if (change)
7795 {
7796 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7797 TREE_INT_CST_HIGH (and1), 0,
7798 TREE_OVERFLOW (and1));
7799 return fold_build2 (BIT_AND_EXPR, type,
7800 fold_convert (type, and0), tem);
7801 }
7802 }
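	  /* Example (illustrative): for 16-bit unsigned short us,
	     (unsigned int) (us & 0xff) becomes (unsigned int) us & 0xff,
	     folding the widening conversion into the mask; the mask
	     clears every bit on which zero- and sign-extension could
	     disagree, which is what the checks above verify.  */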
7803
7804 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7805 T2 being pointers to types of the same size. */
7806 if (POINTER_TYPE_P (type)
7807 && BINARY_CLASS_P (arg0)
7808 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7809 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7810 {
7811 tree arg00 = TREE_OPERAND (arg0, 0);
7812 tree t0 = type;
7813 tree t1 = TREE_TYPE (arg00);
7814 tree tt0 = TREE_TYPE (t0);
7815 tree tt1 = TREE_TYPE (t1);
7816 tree s0 = TYPE_SIZE (tt0);
7817 tree s1 = TYPE_SIZE (tt1);
7818
7819 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7820 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7821 TREE_OPERAND (arg0, 1));
7822 }
7823
7824 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7825 	 of the same precision, and X is an integer type not narrower than
7826 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7827 if (INTEGRAL_TYPE_P (type)
7828 && TREE_CODE (op0) == BIT_NOT_EXPR
7829 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7830 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7831 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7832 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7833 {
7834 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7835 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7836 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7837 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7838 }
7839
7840 tem = fold_convert_const (code, type, arg0);
7841 return tem ? tem : NULL_TREE;
7842
7843 case VIEW_CONVERT_EXPR:
7844 if (TREE_TYPE (op0) == type)
7845 return op0;
7846 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7847 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7848 return fold_view_convert_expr (type, op0);
7849
7850 case NEGATE_EXPR:
7851 tem = fold_negate_expr (arg0);
7852 if (tem)
7853 return fold_convert (type, tem);
7854 return NULL_TREE;
7855
7856 case ABS_EXPR:
7857 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7858 return fold_abs_const (arg0, type);
7859 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7860 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7861 /* Convert fabs((double)float) into (double)fabsf(float). */
7862 else if (TREE_CODE (arg0) == NOP_EXPR
7863 && TREE_CODE (type) == REAL_TYPE)
7864 {
7865 tree targ0 = strip_float_extensions (arg0);
7866 if (targ0 != arg0)
7867 return fold_convert (type, fold_build1 (ABS_EXPR,
7868 TREE_TYPE (targ0),
7869 targ0));
7870 }
7871 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7872 else if (TREE_CODE (arg0) == ABS_EXPR)
7873 return arg0;
7874 else if (tree_expr_nonnegative_p (arg0))
7875 return arg0;
7876
7877 /* Strip sign ops from argument. */
7878 if (TREE_CODE (type) == REAL_TYPE)
7879 {
7880 tem = fold_strip_sign_ops (arg0);
7881 if (tem)
7882 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7883 }
7884 return NULL_TREE;
7885
7886 case CONJ_EXPR:
7887 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7888 return fold_convert (type, arg0);
7889 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7890 {
7891 tree itype = TREE_TYPE (type);
7892 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7893 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7894 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7895 }
7896 if (TREE_CODE (arg0) == COMPLEX_CST)
7897 {
7898 tree itype = TREE_TYPE (type);
7899 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7900 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7901 return build_complex (type, rpart, negate_expr (ipart));
7902 }
7903 if (TREE_CODE (arg0) == CONJ_EXPR)
7904 return fold_convert (type, TREE_OPERAND (arg0, 0));
7905 return NULL_TREE;
7906
7907 case BIT_NOT_EXPR:
7908 if (TREE_CODE (arg0) == INTEGER_CST)
7909 return fold_not_const (arg0, type);
7910 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7911 return TREE_OPERAND (arg0, 0);
7912 /* Convert ~ (-A) to A - 1. */
7913 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7914 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7915 build_int_cst (type, 1));
7916 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7917 else if (INTEGRAL_TYPE_P (type)
7918 && ((TREE_CODE (arg0) == MINUS_EXPR
7919 && integer_onep (TREE_OPERAND (arg0, 1)))
7920 || (TREE_CODE (arg0) == PLUS_EXPR
7921 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7922 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7923 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7924 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7925 && (tem = fold_unary (BIT_NOT_EXPR, type,
7926 fold_convert (type,
7927 TREE_OPERAND (arg0, 0)))))
7928 return fold_build2 (BIT_XOR_EXPR, type, tem,
7929 fold_convert (type, TREE_OPERAND (arg0, 1)));
7930 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7931 && (tem = fold_unary (BIT_NOT_EXPR, type,
7932 fold_convert (type,
7933 TREE_OPERAND (arg0, 1)))))
7934 return fold_build2 (BIT_XOR_EXPR, type,
7935 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7936
7937 return NULL_TREE;
7938
7939 case TRUTH_NOT_EXPR:
7940 /* The argument to invert_truthvalue must have Boolean type. */
7941 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7942 arg0 = fold_convert (boolean_type_node, arg0);
7943
7944 /* Note that the operand of this must be an int
7945 and its values must be 0 or 1.
7946 ("true" is a fixed value perhaps depending on the language,
7947 but we don't handle values other than 1 correctly yet.) */
7948 tem = fold_truth_not_expr (arg0);
7949 if (!tem)
7950 return NULL_TREE;
7951 return fold_convert (type, tem);
7952
7953 case REALPART_EXPR:
7954 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7955 return fold_convert (type, arg0);
7956 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7957 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7958 TREE_OPERAND (arg0, 1));
7959 if (TREE_CODE (arg0) == COMPLEX_CST)
7960 return fold_convert (type, TREE_REALPART (arg0));
7961 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7962 {
7963 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7964 tem = fold_build2 (TREE_CODE (arg0), itype,
7965 fold_build1 (REALPART_EXPR, itype,
7966 TREE_OPERAND (arg0, 0)),
7967 fold_build1 (REALPART_EXPR, itype,
7968 TREE_OPERAND (arg0, 1)));
7969 return fold_convert (type, tem);
7970 }
7971 if (TREE_CODE (arg0) == CONJ_EXPR)
7972 {
7973 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7974 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7975 return fold_convert (type, tem);
7976 }
7977 if (TREE_CODE (arg0) == CALL_EXPR)
7978 {
7979 tree fn = get_callee_fndecl (arg0);
7980 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7981 switch (DECL_FUNCTION_CODE (fn))
7982 {
7983 CASE_FLT_FN (BUILT_IN_CEXPI):
7984 fn = mathfn_built_in (type, BUILT_IN_COS);
7985 if (fn)
7986 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
7987 break;
7988
7989 default:
7990 break;
7991 }
7992 }
7993 return NULL_TREE;
7994
7995 case IMAGPART_EXPR:
7996 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7997 return fold_convert (type, integer_zero_node);
7998 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7999 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8000 TREE_OPERAND (arg0, 0));
8001 if (TREE_CODE (arg0) == COMPLEX_CST)
8002 return fold_convert (type, TREE_IMAGPART (arg0));
8003 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8004 {
8005 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8006 tem = fold_build2 (TREE_CODE (arg0), itype,
8007 fold_build1 (IMAGPART_EXPR, itype,
8008 TREE_OPERAND (arg0, 0)),
8009 fold_build1 (IMAGPART_EXPR, itype,
8010 TREE_OPERAND (arg0, 1)));
8011 return fold_convert (type, tem);
8012 }
8013 if (TREE_CODE (arg0) == CONJ_EXPR)
8014 {
8015 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8016 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8017 return fold_convert (type, negate_expr (tem));
8018 }
8019 if (TREE_CODE (arg0) == CALL_EXPR)
8020 {
8021 tree fn = get_callee_fndecl (arg0);
8022 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8023 switch (DECL_FUNCTION_CODE (fn))
8024 {
8025 CASE_FLT_FN (BUILT_IN_CEXPI):
8026 fn = mathfn_built_in (type, BUILT_IN_SIN);
8027 if (fn)
8028 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8029 break;
8030
8031 default:
8032 break;
8033 }
8034 }
8035 return NULL_TREE;
8036
8037 default:
8038 return NULL_TREE;
8039 } /* switch (code) */
8040 }
8041
8042 /* Fold a binary expression of code CODE and type TYPE with operands
8043 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8044 Return the folded expression if folding is successful. Otherwise,
8045 return NULL_TREE. */
8046
8047 static tree
8048 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8049 {
8050 enum tree_code compl_code;
8051
8052 if (code == MIN_EXPR)
8053 compl_code = MAX_EXPR;
8054 else if (code == MAX_EXPR)
8055 compl_code = MIN_EXPR;
8056 else
8057 gcc_unreachable ();
8058
8059 /* MIN (MAX (a, b), b) == b. */
8060 if (TREE_CODE (op0) == compl_code
8061 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8062 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8063
8064 /* MIN (MAX (b, a), b) == b. */
8065 if (TREE_CODE (op0) == compl_code
8066 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8067 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8068 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8069
8070 /* MIN (a, MAX (a, b)) == a. */
8071 if (TREE_CODE (op1) == compl_code
8072 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8073 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8074 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8075
8076 /* MIN (a, MAX (b, a)) == a. */
8077 if (TREE_CODE (op1) == compl_code
8078 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8079 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8080 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8081
8082 return NULL_TREE;
8083 }
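
/* Example (illustrative): MIN (MAX (a, b), b) always equals b,
   because MAX (a, b) >= b; the four cases above cover both operand
   orders, and the same code handles the dual identity
   MAX (MIN (a, b), b) == b through compl_code.  Side effects of
   the dropped operand are preserved by omit_one_operand.  */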
8084
8085 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8086 by changing CODE to reduce the magnitude of constants involved in
8087 ARG0 of the comparison.
8088 Returns a canonicalized comparison tree if a simplification was
8089 possible, otherwise returns NULL_TREE.
8090 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8091 valid if signed overflow is undefined. */
8092
8093 static tree
8094 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8095 tree arg0, tree arg1,
8096 bool *strict_overflow_p)
8097 {
8098 enum tree_code code0 = TREE_CODE (arg0);
8099 tree t, cst0 = NULL_TREE;
8100 int sgn0;
8101 bool swap = false;
8102
8103 /* Match A +- CST code arg1 and CST code arg1. */
8104 if (!(((code0 == MINUS_EXPR
8105 || code0 == PLUS_EXPR)
8106 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8107 || code0 == INTEGER_CST))
8108 return NULL_TREE;
8109
8110 /* Identify the constant in arg0 and its sign. */
8111 if (code0 == INTEGER_CST)
8112 cst0 = arg0;
8113 else
8114 cst0 = TREE_OPERAND (arg0, 1);
8115 sgn0 = tree_int_cst_sgn (cst0);
8116
8117 /* Overflowed constants and zero will cause problems. */
8118 if (integer_zerop (cst0)
8119 || TREE_OVERFLOW (cst0))
8120 return NULL_TREE;
8121
8122 /* See if we can reduce the magnitude of the constant in
8123 arg0 by changing the comparison code. */
8124 if (code0 == INTEGER_CST)
8125 {
8126 /* CST <= arg1 -> CST-1 < arg1. */
8127 if (code == LE_EXPR && sgn0 == 1)
8128 code = LT_EXPR;
8129 /* -CST < arg1 -> -CST-1 <= arg1. */
8130 else if (code == LT_EXPR && sgn0 == -1)
8131 code = LE_EXPR;
8132 /* CST > arg1 -> CST-1 >= arg1. */
8133 else if (code == GT_EXPR && sgn0 == 1)
8134 code = GE_EXPR;
8135 /* -CST >= arg1 -> -CST-1 > arg1. */
8136 else if (code == GE_EXPR && sgn0 == -1)
8137 code = GT_EXPR;
8138 else
8139 return NULL_TREE;
8140 /* arg1 code' CST' might be more canonical. */
8141 swap = true;
8142 }
8143 else
8144 {
8145 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8146 if (code == LT_EXPR
8147 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8148 code = LE_EXPR;
8149 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8150 else if (code == GT_EXPR
8151 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8152 code = GE_EXPR;
8153 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8154 else if (code == LE_EXPR
8155 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8156 code = LT_EXPR;
8157 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8158 else if (code == GE_EXPR
8159 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8160 code = GT_EXPR;
8161 else
8162 return NULL_TREE;
8163 *strict_overflow_p = true;
8164 }
8165
8166 /* Now build the constant reduced in magnitude. */
8167 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8168 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8169 if (code0 != INTEGER_CST)
8170 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8171
8172 	  /* If swapping might yield a more canonical form, do so.  */
8173 if (swap)
8174 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8175 else
8176 return fold_build2 (code, type, t, arg1);
8177 }
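
/* Examples (illustrative) of the canonicalizations above:

       5 <= x       becomes  x > 4        the CST case, then swapped
       a + 2 > x    becomes  a + 1 >= x   the A +- CST case

   The second form is only valid when signed overflow is undefined,
   since a + 2 and a + 1 can wrap differently, hence the setting of
   *strict_overflow_p.  */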
8178
8179 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8180 overflow further. Try to decrease the magnitude of constants involved
8181 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8182 and put sole constants at the second argument position.
8183 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8184
8185 static tree
8186 maybe_canonicalize_comparison (enum tree_code code, tree type,
8187 tree arg0, tree arg1)
8188 {
8189 tree t;
8190 bool strict_overflow_p;
8191 const char * const warnmsg = G_("assuming signed overflow does not occur "
8192 "when reducing constant in comparison");
8193
8194 /* In principle pointers also have undefined overflow behavior,
8195 but that causes problems elsewhere. */
8196 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8197 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8198 return NULL_TREE;
8199
8200 /* Try canonicalization by simplifying arg0. */
8201 strict_overflow_p = false;
8202 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8203 &strict_overflow_p);
8204 if (t)
8205 {
8206 if (strict_overflow_p)
8207 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8208 return t;
8209 }
8210
8211 /* Try canonicalization by simplifying arg1 using the swapped
8212 comparison. */
8213 code = swap_tree_comparison (code);
8214 strict_overflow_p = false;
8215 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8216 &strict_overflow_p);
8217 if (t && strict_overflow_p)
8218 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8219 return t;
8220 }
8221
8222 /* Subroutine of fold_binary. This routine performs all of the
8223 transformations that are common to the equality/inequality
8224 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8225    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8226    fold_binary itself should go through fold_binary.  Fold a comparison with
8227 tree code CODE and type TYPE with operands OP0 and OP1. Return
8228 the folded comparison or NULL_TREE. */
8229
8230 static tree
8231 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8232 {
8233 tree arg0, arg1, tem;
8234
8235 arg0 = op0;
8236 arg1 = op1;
8237
8238 STRIP_SIGN_NOPS (arg0);
8239 STRIP_SIGN_NOPS (arg1);
8240
8241 tem = fold_relational_const (code, type, arg0, arg1);
8242 if (tem != NULL_TREE)
8243 return tem;
8244
8245 /* If one arg is a real or integer constant, put it last. */
8246 if (tree_swap_operands_p (arg0, arg1, true))
8247 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8248
8249 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8250 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8251 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8252 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8253 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8254 && (TREE_CODE (arg1) == INTEGER_CST
8255 && !TREE_OVERFLOW (arg1)))
8256 {
8257 tree const1 = TREE_OPERAND (arg0, 1);
8258 tree const2 = arg1;
8259 tree variable = TREE_OPERAND (arg0, 0);
8260 tree lhs;
8261 int lhs_add;
8262 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8263
8264 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8265 TREE_TYPE (arg1), const2, const1);
8266
8267 /* If the constant operation overflowed this can be
8268 simplified as a comparison against INT_MAX/INT_MIN. */
8269 if (TREE_CODE (lhs) == INTEGER_CST
8270 && TREE_OVERFLOW (lhs))
8271 {
8272 int const1_sgn = tree_int_cst_sgn (const1);
8273 enum tree_code code2 = code;
8274
8275 /* Get the sign of the constant on the lhs if the
8276 operation were VARIABLE + CONST1. */
8277 if (TREE_CODE (arg0) == MINUS_EXPR)
8278 const1_sgn = -const1_sgn;
8279
8280 /* The sign of the constant determines if we overflowed
8281 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8282 Canonicalize to the INT_MIN overflow by swapping the comparison
8283 if necessary. */
8284 if (const1_sgn == -1)
8285 code2 = swap_tree_comparison (code);
8286
8287 /* We now can look at the canonicalized case
8288 VARIABLE + 1 CODE2 INT_MIN
8289 and decide on the result. */
8290 if (code2 == LT_EXPR
8291 || code2 == LE_EXPR
8292 || code2 == EQ_EXPR)
8293 return omit_one_operand (type, boolean_false_node, variable);
8294 else if (code2 == NE_EXPR
8295 || code2 == GE_EXPR
8296 || code2 == GT_EXPR)
8297 return omit_one_operand (type, boolean_true_node, variable);
8298 }
8299
8300 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8301 && (TREE_CODE (lhs) != INTEGER_CST
8302 || !TREE_OVERFLOW (lhs)))
8303 {
8304 fold_overflow_warning (("assuming signed overflow does not occur "
8305 "when changing X +- C1 cmp C2 to "
8306 "X cmp C1 +- C2"),
8307 WARN_STRICT_OVERFLOW_COMPARISON);
8308 return fold_build2 (code, type, variable, lhs);
8309 }
8310 }
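      /* Example (illustrative): x + 3 < 7 becomes x < 4.  When the
	 combined constant overflows, e.g. x - 1 < INT_MAX, the
	 comparison degenerates into one against INT_MIN/INT_MAX and
	 folds to a constant, here true since x - 1 <= INT_MAX always
	 holds when signed overflow is undefined.  */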
8311
8312 	/* For comparisons of pointers we can decompose them to a compile-time
8313 	   comparison of the base objects and the offsets into the object.
8314 This requires at least one operand being an ADDR_EXPR to do more
8315 than the operand_equal_p test below. */
8316 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8317 && (TREE_CODE (arg0) == ADDR_EXPR
8318 || TREE_CODE (arg1) == ADDR_EXPR))
8319 {
8320 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8321 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8322 enum machine_mode mode;
8323 int volatilep, unsignedp;
8324 bool indirect_base0 = false;
8325
8326 /* Get base and offset for the access. Strip ADDR_EXPR for
8327 get_inner_reference, but put it back by stripping INDIRECT_REF
8328 off the base object if possible. */
8329 base0 = arg0;
8330 if (TREE_CODE (arg0) == ADDR_EXPR)
8331 {
8332 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8333 &bitsize, &bitpos0, &offset0, &mode,
8334 &unsignedp, &volatilep, false);
8335 if (TREE_CODE (base0) == INDIRECT_REF)
8336 base0 = TREE_OPERAND (base0, 0);
8337 else
8338 indirect_base0 = true;
8339 }
8340
8341 base1 = arg1;
8342 if (TREE_CODE (arg1) == ADDR_EXPR)
8343 {
8344 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8345 &bitsize, &bitpos1, &offset1, &mode,
8346 &unsignedp, &volatilep, false);
8347 	  /* Make base1 indirect or non-indirect to match what we
8348 	     did for base0.  */
8349 if (TREE_CODE (base1) == INDIRECT_REF
8350 && !indirect_base0)
8351 base1 = TREE_OPERAND (base1, 0);
8352 else if (!indirect_base0)
8353 base1 = NULL_TREE;
8354 }
8355 else if (indirect_base0)
8356 base1 = NULL_TREE;
8357
8358 /* If we have equivalent bases we might be able to simplify. */
8359 if (base0 && base1
8360 && operand_equal_p (base0, base1, 0))
8361 {
8362 /* We can fold this expression to a constant if the non-constant
8363 offset parts are equal. */
8364 if (offset0 == offset1
8365 || (offset0 && offset1
8366 && operand_equal_p (offset0, offset1, 0)))
8367 {
8368 switch (code)
8369 {
8370 case EQ_EXPR:
8371 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8372 case NE_EXPR:
8373 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8374 case LT_EXPR:
8375 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8376 case LE_EXPR:
8377 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8378 case GE_EXPR:
8379 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8380 case GT_EXPR:
8381 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8382 default:;
8383 }
8384 }
8385 /* We can simplify the comparison to a comparison of the variable
8386 offset parts if the constant offset parts are equal.
8387 Be careful to use signed size type here because otherwise we
8388 mess with array offsets in the wrong way. This is possible
8389 	     because pointer arithmetic is restricted to remain within an
8390 object and overflow on pointer differences is undefined as of
8391 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8392 else if (bitpos0 == bitpos1)
8393 {
8394 tree signed_size_type_node;
8395 signed_size_type_node = signed_type_for (size_type_node);
8396
8397 /* By converting to signed size type we cover middle-end pointer
8398 arithmetic which operates on unsigned pointer types of size
8399 type size and ARRAY_REF offsets which are properly sign or
8400 zero extended from their type in case it is narrower than
8401 size type. */
8402 if (offset0 == NULL_TREE)
8403 offset0 = build_int_cst (signed_size_type_node, 0);
8404 else
8405 offset0 = fold_convert (signed_size_type_node, offset0);
8406 if (offset1 == NULL_TREE)
8407 offset1 = build_int_cst (signed_size_type_node, 0);
8408 else
8409 offset1 = fold_convert (signed_size_type_node, offset1);
8410
8411 return fold_build2 (code, type, offset0, offset1);
8412 }
8413 }
8414 }
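      /* Example (illustrative): given struct { int a, b; } s, the
	 comparison &s.a < &s.b decomposes into the common base s
	 with bit positions 0 and 32 (for 32-bit int) and folds to
	 the constant true.  */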
8415
8416 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8417 same object, then we can fold this to a comparison of the two offsets in
8418 signed size type. This is possible because pointer arithmetic is
8419 	   restricted to remain within an object and overflow on pointer differences
8420 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8421
8422 	   We check flag_wrapv directly because pointer types are unsigned,
8423 	   and therefore TYPE_OVERFLOW_WRAPS returns true for them.  That is
8424 	   normally what we want, in order to avoid certain odd overflow
8425 	   cases, but not here.  */
8426 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8427 && !flag_wrapv
8428 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8429 {
8430 tree base0, offset0, base1, offset1;
8431
8432 if (extract_array_ref (arg0, &base0, &offset0)
8433 && extract_array_ref (arg1, &base1, &offset1)
8434 && operand_equal_p (base0, base1, 0))
8435 {
8436 tree signed_size_type_node;
8437 signed_size_type_node = signed_type_for (size_type_node);
8438
8439 /* By converting to signed size type we cover middle-end pointer
8440 arithmetic which operates on unsigned pointer types of size
8441 type size and ARRAY_REF offsets which are properly sign or
8442 zero extended from their type in case it is narrower than
8443 size type. */
8444 if (offset0 == NULL_TREE)
8445 offset0 = build_int_cst (signed_size_type_node, 0);
8446 else
8447 offset0 = fold_convert (signed_size_type_node, offset0);
8448 if (offset1 == NULL_TREE)
8449 offset1 = build_int_cst (signed_size_type_node, 0);
8450 else
8451 offset1 = fold_convert (signed_size_type_node, offset1);
8452
8453 return fold_build2 (code, type, offset0, offset1);
8454 }
8455 }
8456
8457 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8458 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8459 the resulting offset is smaller in absolute value than the
8460 original one. */
8461 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8462 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8463 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8464 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8465 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8466 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8467 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8468 {
8469 tree const1 = TREE_OPERAND (arg0, 1);
8470 tree const2 = TREE_OPERAND (arg1, 1);
8471 tree variable1 = TREE_OPERAND (arg0, 0);
8472 tree variable2 = TREE_OPERAND (arg1, 0);
8473 tree cst;
8474 const char * const warnmsg = G_("assuming signed overflow does not "
8475 "occur when combining constants around "
8476 "a comparison");
8477
8478 /* Put the constant on the side where it doesn't overflow and is
8479 of lower absolute value than before. */
8480 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8481 ? MINUS_EXPR : PLUS_EXPR,
8482 const2, const1, 0);
8483 if (!TREE_OVERFLOW (cst)
8484 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8485 {
8486 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8487 return fold_build2 (code, type,
8488 variable1,
8489 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8490 variable2, cst));
8491 }
8492
8493 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8494 ? MINUS_EXPR : PLUS_EXPR,
8495 const1, const2, 0);
8496 if (!TREE_OVERFLOW (cst)
8497 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8498 {
8499 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8500 return fold_build2 (code, type,
8501 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8502 variable1, cst),
8503 variable2);
8504 }
8505 }
8506
8507 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8508 signed arithmetic case. That form is created by the compiler
8509 often enough for folding it to be of value. One example is in
8510 computing loop trip counts after Operator Strength Reduction. */
8511 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8512 && TREE_CODE (arg0) == MULT_EXPR
8513 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8514 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8515 && integer_zerop (arg1))
8516 {
8517 tree const1 = TREE_OPERAND (arg0, 1);
8518 tree const2 = arg1; /* zero */
8519 tree variable1 = TREE_OPERAND (arg0, 0);
8520 enum tree_code cmp_code = code;
8521
8522 gcc_assert (!integer_zerop (const1));
8523
8524 fold_overflow_warning (("assuming signed overflow does not occur when "
8525 "eliminating multiplication in comparison "
8526 "with zero"),
8527 WARN_STRICT_OVERFLOW_COMPARISON);
8528
8529 /* If const1 is negative we swap the sense of the comparison. */
8530 if (tree_int_cst_sgn (const1) < 0)
8531 cmp_code = swap_tree_comparison (cmp_code);
8532
8533 return fold_build2 (cmp_code, type, variable1, const2);
8534 }
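      /* Example (illustrative): 4 * i > 0 becomes i > 0, and
	 -4 * i > 0 becomes i < 0 via the sign swap; both rely on
	 the multiplication not wrapping, hence the strict overflow
	 warning above.  */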
8535
8536 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8537 if (tem)
8538 return tem;
8539
8540 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8541 {
8542 tree targ0 = strip_float_extensions (arg0);
8543 tree targ1 = strip_float_extensions (arg1);
8544 tree newtype = TREE_TYPE (targ0);
8545
8546 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8547 newtype = TREE_TYPE (targ1);
8548
8549 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8550 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8551 return fold_build2 (code, type, fold_convert (newtype, targ0),
8552 fold_convert (newtype, targ1));
8553
8554 /* (-a) CMP (-b) -> b CMP a */
8555 if (TREE_CODE (arg0) == NEGATE_EXPR
8556 && TREE_CODE (arg1) == NEGATE_EXPR)
8557 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8558 TREE_OPERAND (arg0, 0));
8559
8560 if (TREE_CODE (arg1) == REAL_CST)
8561 {
8562 REAL_VALUE_TYPE cst;
8563 cst = TREE_REAL_CST (arg1);
8564
8565 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8566 if (TREE_CODE (arg0) == NEGATE_EXPR)
8567 return fold_build2 (swap_tree_comparison (code), type,
8568 TREE_OPERAND (arg0, 0),
8569 build_real (TREE_TYPE (arg1),
8570 REAL_VALUE_NEGATE (cst)));
8571
8572 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8573 /* a CMP (-0) -> a CMP 0 */
8574 if (REAL_VALUE_MINUS_ZERO (cst))
8575 return fold_build2 (code, type, arg0,
8576 build_real (TREE_TYPE (arg1), dconst0));
8577
8578 /* x != NaN is always true, other ops are always false. */
8579 if (REAL_VALUE_ISNAN (cst)
8580 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8581 {
8582 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8583 return omit_one_operand (type, tem, arg0);
8584 }
8585
8586 /* Fold comparisons against infinity. */
8587 if (REAL_VALUE_ISINF (cst))
8588 {
8589 tem = fold_inf_compare (code, type, arg0, arg1);
8590 if (tem != NULL_TREE)
8591 return tem;
8592 }
8593 }
8594
8595 /* If this is a comparison of a real constant with a PLUS_EXPR
8596 or a MINUS_EXPR of a real constant, we can convert it into a
8597 comparison with a revised real constant as long as no overflow
8598 occurs when unsafe_math_optimizations are enabled. */
8599 if (flag_unsafe_math_optimizations
8600 && TREE_CODE (arg1) == REAL_CST
8601 && (TREE_CODE (arg0) == PLUS_EXPR
8602 || TREE_CODE (arg0) == MINUS_EXPR)
8603 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8604 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8605 ? MINUS_EXPR : PLUS_EXPR,
8606 arg1, TREE_OPERAND (arg0, 1), 0))
8607 && !TREE_OVERFLOW (tem))
8608 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8609
8610 /* Likewise, we can simplify a comparison of a real constant with
8611 a MINUS_EXPR whose first operand is also a real constant, i.e.
8612 (c1 - x) < c2 becomes x > c1-c2. */
8613 if (flag_unsafe_math_optimizations
8614 && TREE_CODE (arg1) == REAL_CST
8615 && TREE_CODE (arg0) == MINUS_EXPR
8616 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8617 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8618 arg1, 0))
8619 && !TREE_OVERFLOW (tem))
8620 return fold_build2 (swap_tree_comparison (code), type,
8621 TREE_OPERAND (arg0, 1), tem);
8622
8623 /* Fold comparisons against built-in math functions. */
8624 if (TREE_CODE (arg1) == REAL_CST
8625 && flag_unsafe_math_optimizations
8626 && ! flag_errno_math)
8627 {
8628 enum built_in_function fcode = builtin_mathfn_code (arg0);
8629
8630 if (fcode != END_BUILTINS)
8631 {
8632 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8633 if (tem != NULL_TREE)
8634 return tem;
8635 }
8636 }
8637 }
8638
8639 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8640 if (TREE_CONSTANT (arg1)
8641 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8642 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8643 /* This optimization is invalid for ordered comparisons
8644 if CONST+INCR overflows or if foo+incr might overflow.
8645 This optimization is invalid for floating point due to rounding.
8646 For pointer types we assume overflow doesn't happen. */
8647 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8648 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8649 && (code == EQ_EXPR || code == NE_EXPR))))
8650 {
8651 tree varop, newconst;
8652
8653 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8654 {
8655 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8656 arg1, TREE_OPERAND (arg0, 1));
8657 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8658 TREE_OPERAND (arg0, 0),
8659 TREE_OPERAND (arg0, 1));
8660 }
8661 else
8662 {
8663 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8664 arg1, TREE_OPERAND (arg0, 1));
8665 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8666 TREE_OPERAND (arg0, 0),
8667 TREE_OPERAND (arg0, 1));
8668 }
8669
8670
8671 /* If VAROP is a reference to a bitfield, we must mask
8672 the constant by the width of the field. */
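	  /* For example, if the field is 3 bits wide and the constant is 9,
	     the shift pair below reduces the constant to its low three bits
	     (here 1), sign-extending the result when VAROP's type is
	     signed.  */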
8673 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8674 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8675 && host_integerp (DECL_SIZE (TREE_OPERAND
8676 (TREE_OPERAND (varop, 0), 1)), 1))
8677 {
8678 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8679 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8680 tree folded_compare, shift;
8681
8682 /* First check whether the comparison would come out
8683 always the same. If we don't do that we would
8684 change the meaning with the masking. */
8685 folded_compare = fold_build2 (code, type,
8686 TREE_OPERAND (varop, 0), arg1);
8687 if (TREE_CODE (folded_compare) == INTEGER_CST)
8688 return omit_one_operand (type, folded_compare, varop);
8689
8690 shift = build_int_cst (NULL_TREE,
8691 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8692 shift = fold_convert (TREE_TYPE (varop), shift);
8693 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8694 newconst, shift);
8695 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8696 newconst, shift);
8697 }
8698
8699 return fold_build2 (code, type, varop, newconst);
8700 }
8701
8702 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8703 && (TREE_CODE (arg0) == NOP_EXPR
8704 || TREE_CODE (arg0) == CONVERT_EXPR))
8705 {
8706 /* If we are widening one operand of an integer comparison,
8707 see if the other operand is similarly being widened. Perhaps we
8708 can do the comparison in the narrower type. */
8709 tem = fold_widened_comparison (code, type, arg0, arg1);
8710 if (tem)
8711 return tem;
8712
8713 /* Or if we are changing signedness. */
8714 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8715 if (tem)
8716 return tem;
8717 }
8718
8719 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8720 constant, we can simplify it. */
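      /* For instance, MIN (x, 3) < 2 simplifies to x < 2: if x >= 2 the
	 minimum is at least 2, and if x < 2 the minimum is x itself.  */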
8721 if (TREE_CODE (arg1) == INTEGER_CST
8722 && (TREE_CODE (arg0) == MIN_EXPR
8723 || TREE_CODE (arg0) == MAX_EXPR)
8724 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8725 {
8726 tem = optimize_minmax_comparison (code, type, op0, op1);
8727 if (tem)
8728 return tem;
8729 }
8730
8731 /* Simplify comparison of something with itself. (For IEEE
8732 floating-point, we can only do some of these simplifications.) */
8733 if (operand_equal_p (arg0, arg1, 0))
8734 {
8735 switch (code)
8736 {
8737 case EQ_EXPR:
8738 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8739 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8740 return constant_boolean_node (1, type);
8741 break;
8742
8743 case GE_EXPR:
8744 case LE_EXPR:
8745 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8746 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8747 return constant_boolean_node (1, type);
8748 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8749
8750 case NE_EXPR:
 8751	    /* For NE, we can only do this simplification if the type is
 8752	       integer or we don't honor IEEE floating point NaNs.  */
8753 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8754 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8755 break;
8756 /* ... fall through ... */
8757 case GT_EXPR:
8758 case LT_EXPR:
8759 return constant_boolean_node (0, type);
8760 default:
8761 gcc_unreachable ();
8762 }
8763 }
8764
8765 /* If we are comparing an expression that just has comparisons
8766 of two integer values, arithmetic expressions of those comparisons,
8767 and constants, we can simplify it. There are only three cases
8768 to check: the two values can either be equal, the first can be
8769 greater, or the second can be greater. Fold the expression for
8770 those three values. Since each value must be 0 or 1, we have
8771 eight possibilities, each of which corresponds to the constant 0
8772 or 1 or one of the six possible comparisons.
8773
8774 This handles common cases like (a > b) == 0 but also handles
8775 expressions like ((x > y) - (y > x)) > 0, which supposedly
8776 occur in macroized code. */
8777
8778 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8779 {
8780 tree cval1 = 0, cval2 = 0;
8781 int save_p = 0;
8782
8783 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8784 /* Don't handle degenerate cases here; they should already
8785 have been handled anyway. */
8786 && cval1 != 0 && cval2 != 0
8787 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8788 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8789 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8790 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8791 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8792 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8793 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8794 {
8795 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8796 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8797
8798 /* We can't just pass T to eval_subst in case cval1 or cval2
8799 was the same as ARG1. */
8800
8801 tree high_result
8802 = fold_build2 (code, type,
8803 eval_subst (arg0, cval1, maxval,
8804 cval2, minval),
8805 arg1);
8806 tree equal_result
8807 = fold_build2 (code, type,
8808 eval_subst (arg0, cval1, maxval,
8809 cval2, maxval),
8810 arg1);
8811 tree low_result
8812 = fold_build2 (code, type,
8813 eval_subst (arg0, cval1, minval,
8814 cval2, maxval),
8815 arg1);
8816
8817 /* All three of these results should be 0 or 1. Confirm they are.
8818 Then use those values to select the proper code to use. */
8819
8820 if (TREE_CODE (high_result) == INTEGER_CST
8821 && TREE_CODE (equal_result) == INTEGER_CST
8822 && TREE_CODE (low_result) == INTEGER_CST)
8823 {
8824 /* Make a 3-bit mask with the high-order bit being the
 8825	       value for `>', the next for `=', and the low for `<'.  */
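	    /* For instance, for (a > b) == 0 the three folded results are
	       0, 1 and 1, giving mask 3 and hence LE_EXPR, i.e. a <= b.  */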
8826 switch ((integer_onep (high_result) * 4)
8827 + (integer_onep (equal_result) * 2)
8828 + integer_onep (low_result))
8829 {
8830 case 0:
8831 /* Always false. */
8832 return omit_one_operand (type, integer_zero_node, arg0);
8833 case 1:
8834 code = LT_EXPR;
8835 break;
8836 case 2:
8837 code = EQ_EXPR;
8838 break;
8839 case 3:
8840 code = LE_EXPR;
8841 break;
8842 case 4:
8843 code = GT_EXPR;
8844 break;
8845 case 5:
8846 code = NE_EXPR;
8847 break;
8848 case 6:
8849 code = GE_EXPR;
8850 break;
8851 case 7:
8852 /* Always true. */
8853 return omit_one_operand (type, integer_one_node, arg0);
8854 }
8855
8856 if (save_p)
8857 return save_expr (build2 (code, type, cval1, cval2));
8858 return fold_build2 (code, type, cval1, cval2);
8859 }
8860 }
8861 }
8862
8863 /* Fold a comparison of the address of COMPONENT_REFs with the same
8864 type and component to a comparison of the address of the base
 8865      object.  In short, fold &x->a OP &y->a to x OP y and
 8866      &x->a OP &y.a to x OP &y.  */
8867 if (TREE_CODE (arg0) == ADDR_EXPR
8868 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8869 && TREE_CODE (arg1) == ADDR_EXPR
8870 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8871 {
8872 tree cref0 = TREE_OPERAND (arg0, 0);
8873 tree cref1 = TREE_OPERAND (arg1, 0);
8874 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8875 {
8876 tree op0 = TREE_OPERAND (cref0, 0);
8877 tree op1 = TREE_OPERAND (cref1, 0);
8878 return fold_build2 (code, type,
8879 build_fold_addr_expr (op0),
8880 build_fold_addr_expr (op1));
8881 }
8882 }
8883
8884 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8885 into a single range test. */
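  /* For example, X / 4 == 2 becomes the range test
     8 <= X && X <= 11, those being exactly the values whose
     truncated quotient is 2.  */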
8886 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8887 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8888 && TREE_CODE (arg1) == INTEGER_CST
8889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8890 && !integer_zerop (TREE_OPERAND (arg0, 1))
8891 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8892 && !TREE_OVERFLOW (arg1))
8893 {
8894 tem = fold_div_compare (code, type, arg0, arg1);
8895 if (tem != NULL_TREE)
8896 return tem;
8897 }
8898
8899 /* Fold ~X op ~Y as Y op X. */
8900 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8901 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8902 {
8903 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8904 return fold_build2 (code, type,
8905 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8906 TREE_OPERAND (arg0, 0));
8907 }
8908
8909 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8910 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8911 && TREE_CODE (arg1) == INTEGER_CST)
8912 {
8913 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8914 return fold_build2 (swap_tree_comparison (code), type,
8915 TREE_OPERAND (arg0, 0),
8916 fold_build1 (BIT_NOT_EXPR, cmp_type,
8917 fold_convert (cmp_type, arg1)));
8918 }
8919
8920 return NULL_TREE;
8921 }
8922
8923
8924 /* Subroutine of fold_binary. Optimize complex multiplications of the
8925 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8926 argument EXPR represents the expression "z" of type TYPE. */
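/* With z = a + b*i we have conj(z) = a - b*i, so
   z * conj(z) = (a*a + b*b) + 0*i; the result is built below as a
   COMPLEX_EXPR whose imaginary part is zero.  */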
8927
8928 static tree
8929 fold_mult_zconjz (tree type, tree expr)
8930 {
8931 tree itype = TREE_TYPE (type);
8932 tree rpart, ipart, tem;
8933
8934 if (TREE_CODE (expr) == COMPLEX_EXPR)
8935 {
8936 rpart = TREE_OPERAND (expr, 0);
8937 ipart = TREE_OPERAND (expr, 1);
8938 }
8939 else if (TREE_CODE (expr) == COMPLEX_CST)
8940 {
8941 rpart = TREE_REALPART (expr);
8942 ipart = TREE_IMAGPART (expr);
8943 }
8944 else
8945 {
8946 expr = save_expr (expr);
8947 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8948 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8949 }
8950
8951 rpart = save_expr (rpart);
8952 ipart = save_expr (ipart);
8953 tem = fold_build2 (PLUS_EXPR, itype,
8954 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8955 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8956 return fold_build2 (COMPLEX_EXPR, type, tem,
8957 fold_convert (itype, integer_zero_node));
8958 }
8959
8960
8961 /* Fold a binary expression of code CODE and type TYPE with operands
8962 OP0 and OP1. Return the folded expression if folding is
8963 successful. Otherwise, return NULL_TREE. */
8964
8965 tree
8966 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8967 {
8968 enum tree_code_class kind = TREE_CODE_CLASS (code);
8969 tree arg0, arg1, tem;
8970 tree t1 = NULL_TREE;
8971 bool strict_overflow_p;
8972
8973 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8974 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8975 && TREE_CODE_LENGTH (code) == 2
8976 && op0 != NULL_TREE
8977 && op1 != NULL_TREE);
8978
8979 arg0 = op0;
8980 arg1 = op1;
8981
8982 /* Strip any conversions that don't change the mode. This is
8983 safe for every expression, except for a comparison expression
8984 because its signedness is derived from its operands. So, in
8985 the latter case, only strip conversions that don't change the
8986 signedness.
8987
8988 Note that this is done as an internal manipulation within the
8989 constant folder, in order to find the simplest representation
 8990      of the arguments so that their form can be studied.  In any
 8991      case, the appropriate type conversions should be put back in
8992 the tree that will get out of the constant folder. */
8993
8994 if (kind == tcc_comparison)
8995 {
8996 STRIP_SIGN_NOPS (arg0);
8997 STRIP_SIGN_NOPS (arg1);
8998 }
8999 else
9000 {
9001 STRIP_NOPS (arg0);
9002 STRIP_NOPS (arg1);
9003 }
9004
9005 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9006 constant but we can't do arithmetic on them. */
9007 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9008 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9009 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9010 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9011 {
9012 if (kind == tcc_binary)
9013 tem = const_binop (code, arg0, arg1, 0);
9014 else if (kind == tcc_comparison)
9015 tem = fold_relational_const (code, type, arg0, arg1);
9016 else
9017 tem = NULL_TREE;
9018
9019 if (tem != NULL_TREE)
9020 {
9021 if (TREE_TYPE (tem) != type)
9022 tem = fold_convert (type, tem);
9023 return tem;
9024 }
9025 }
9026
9027 /* If this is a commutative operation, and ARG0 is a constant, move it
9028 to ARG1 to reduce the number of tests below. */
9029 if (commutative_tree_code (code)
9030 && tree_swap_operands_p (arg0, arg1, true))
9031 return fold_build2 (code, type, op1, op0);
9032
 9033   /* ARG0 and ARG1 are the first and second operands, with any
          conversions stripped above.
9034
9035 First check for cases where an arithmetic operation is applied to a
9036 compound, conditional, or comparison operation. Push the arithmetic
9037 operation inside the compound or conditional to see if any folding
9038 can then be done. Convert comparison to conditional for this purpose.
 9039      This also optimizes non-constant cases that used to be done in
9040 expand_expr.
9041
 9042      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
 9043      where one operand is a truth value or comparison and the other is
 9044      a truth value, a comparison, or a BIT_AND_EXPR with 1.  In that case, the
9045 code below would make the expression more complex. Change it to a
9046 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9047 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
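  /* For example, (a < b) | (c < d) becomes a TRUTH_OR_EXPR of the two
     comparisons, and (a < b) == (c < d) becomes the inversion of their
     TRUTH_XOR_EXPR.  */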
9048
9049 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9050 || code == EQ_EXPR || code == NE_EXPR)
9051 && ((truth_value_p (TREE_CODE (arg0))
9052 && (truth_value_p (TREE_CODE (arg1))
9053 || (TREE_CODE (arg1) == BIT_AND_EXPR
9054 && integer_onep (TREE_OPERAND (arg1, 1)))))
9055 || (truth_value_p (TREE_CODE (arg1))
9056 && (truth_value_p (TREE_CODE (arg0))
9057 || (TREE_CODE (arg0) == BIT_AND_EXPR
9058 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9059 {
9060 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9061 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9062 : TRUTH_XOR_EXPR,
9063 boolean_type_node,
9064 fold_convert (boolean_type_node, arg0),
9065 fold_convert (boolean_type_node, arg1));
9066
9067 if (code == EQ_EXPR)
9068 tem = invert_truthvalue (tem);
9069
9070 return fold_convert (type, tem);
9071 }
9072
9073 if (TREE_CODE_CLASS (code) == tcc_binary
9074 || TREE_CODE_CLASS (code) == tcc_comparison)
9075 {
9076 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9077 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9078 fold_build2 (code, type,
9079 TREE_OPERAND (arg0, 1), op1));
9080 if (TREE_CODE (arg1) == COMPOUND_EXPR
9081 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9082 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9083 fold_build2 (code, type,
9084 op0, TREE_OPERAND (arg1, 1)));
9085
9086 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9087 {
9088 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9089 arg0, arg1,
9090 /*cond_first_p=*/1);
9091 if (tem != NULL_TREE)
9092 return tem;
9093 }
9094
9095 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9096 {
9097 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9098 arg1, arg0,
9099 /*cond_first_p=*/0);
9100 if (tem != NULL_TREE)
9101 return tem;
9102 }
9103 }
9104
9105 switch (code)
9106 {
9107 case PLUS_EXPR:
9108 /* A + (-B) -> A - B */
9109 if (TREE_CODE (arg1) == NEGATE_EXPR)
9110 return fold_build2 (MINUS_EXPR, type,
9111 fold_convert (type, arg0),
9112 fold_convert (type, TREE_OPERAND (arg1, 0)));
9113 /* (-A) + B -> B - A */
9114 if (TREE_CODE (arg0) == NEGATE_EXPR
9115 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9116 return fold_build2 (MINUS_EXPR, type,
9117 fold_convert (type, arg1),
9118 fold_convert (type, TREE_OPERAND (arg0, 0)));
9119 /* Convert ~A + 1 to -A. */
9120 if (INTEGRAL_TYPE_P (type)
9121 && TREE_CODE (arg0) == BIT_NOT_EXPR
9122 && integer_onep (arg1))
9123 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9124
 9125      /* Handle (A1 * C1) + (A2 * C2) where A1 and A2 or C1 and C2 are
 9126         equal, or where one of them is 1.  */
9127 if ((TREE_CODE (arg0) == MULT_EXPR
9128 || TREE_CODE (arg1) == MULT_EXPR)
9129 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9130 {
9131 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9132 if (tem)
9133 return tem;
9134 }
9135
9136 if (! FLOAT_TYPE_P (type))
9137 {
9138 if (integer_zerop (arg1))
9139 return non_lvalue (fold_convert (type, arg0));
9140
9141 /* ~X + X is -1. */
9142 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9143 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9144 && !TYPE_OVERFLOW_TRAPS (type))
9145 {
9146 t1 = build_int_cst_type (type, -1);
9147 return omit_one_operand (type, t1, arg1);
9148 }
9149
9150 /* X + ~X is -1. */
9151 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9152 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9153 && !TYPE_OVERFLOW_TRAPS (type))
9154 {
9155 t1 = build_int_cst_type (type, -1);
9156 return omit_one_operand (type, t1, arg0);
9157 }
9158
9159 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9160 with a constant, and the two constants have no bits in common,
9161 we should treat this as a BIT_IOR_EXPR since this may produce more
9162 simplifications. */
9163 if (TREE_CODE (arg0) == BIT_AND_EXPR
9164 && TREE_CODE (arg1) == BIT_AND_EXPR
9165 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9166 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9167 && integer_zerop (const_binop (BIT_AND_EXPR,
9168 TREE_OPERAND (arg0, 1),
9169 TREE_OPERAND (arg1, 1), 0)))
9170 {
9171 code = BIT_IOR_EXPR;
9172 goto bit_ior;
9173 }
9174
9175 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9176 (plus (plus (mult) (mult)) (foo)) so that we can
9177 take advantage of the factoring cases below. */
9178 if (((TREE_CODE (arg0) == PLUS_EXPR
9179 || TREE_CODE (arg0) == MINUS_EXPR)
9180 && TREE_CODE (arg1) == MULT_EXPR)
9181 || ((TREE_CODE (arg1) == PLUS_EXPR
9182 || TREE_CODE (arg1) == MINUS_EXPR)
9183 && TREE_CODE (arg0) == MULT_EXPR))
9184 {
9185 tree parg0, parg1, parg, marg;
9186 enum tree_code pcode;
9187
9188 if (TREE_CODE (arg1) == MULT_EXPR)
9189 parg = arg0, marg = arg1;
9190 else
9191 parg = arg1, marg = arg0;
9192 pcode = TREE_CODE (parg);
9193 parg0 = TREE_OPERAND (parg, 0);
9194 parg1 = TREE_OPERAND (parg, 1);
9195 STRIP_NOPS (parg0);
9196 STRIP_NOPS (parg1);
9197
9198 if (TREE_CODE (parg0) == MULT_EXPR
9199 && TREE_CODE (parg1) != MULT_EXPR)
9200 return fold_build2 (pcode, type,
9201 fold_build2 (PLUS_EXPR, type,
9202 fold_convert (type, parg0),
9203 fold_convert (type, marg)),
9204 fold_convert (type, parg1));
9205 if (TREE_CODE (parg0) != MULT_EXPR
9206 && TREE_CODE (parg1) == MULT_EXPR)
9207 return fold_build2 (PLUS_EXPR, type,
9208 fold_convert (type, parg0),
9209 fold_build2 (pcode, type,
9210 fold_convert (type, marg),
9211 fold_convert (type,
9212 parg1)));
9213 }
9214
 9215	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
 9216	     of the array.  The loop optimizer sometimes produces this type of
 9217	     expression.  */
9218 if (TREE_CODE (arg0) == ADDR_EXPR)
9219 {
9220 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9221 if (tem)
9222 return fold_convert (type, tem);
9223 }
9224 else if (TREE_CODE (arg1) == ADDR_EXPR)
9225 {
9226 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9227 if (tem)
9228 return fold_convert (type, tem);
9229 }
9230 }
9231 else
9232 {
9233 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9234 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9235 return non_lvalue (fold_convert (type, arg0));
9236
9237 /* Likewise if the operands are reversed. */
9238 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9239 return non_lvalue (fold_convert (type, arg1));
9240
9241 /* Convert X + -C into X - C. */
9242 if (TREE_CODE (arg1) == REAL_CST
9243 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9244 {
9245 tem = fold_negate_const (arg1, type);
9246 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9247 return fold_build2 (MINUS_EXPR, type,
9248 fold_convert (type, arg0),
9249 fold_convert (type, tem));
9250 }
9251
9252 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9253 to __complex__ ( x, y ). This is not the same for SNaNs or
9254 if signed zeros are involved. */
9255 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9256 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9257 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9258 {
9259 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9260 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9261 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9262 bool arg0rz = false, arg0iz = false;
9263 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9264 || (arg0i && (arg0iz = real_zerop (arg0i))))
9265 {
9266 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9267 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9268 if (arg0rz && arg1i && real_zerop (arg1i))
9269 {
9270 tree rp = arg1r ? arg1r
9271 : build1 (REALPART_EXPR, rtype, arg1);
9272 tree ip = arg0i ? arg0i
9273 : build1 (IMAGPART_EXPR, rtype, arg0);
9274 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9275 }
9276 else if (arg0iz && arg1r && real_zerop (arg1r))
9277 {
9278 tree rp = arg0r ? arg0r
9279 : build1 (REALPART_EXPR, rtype, arg0);
9280 tree ip = arg1i ? arg1i
9281 : build1 (IMAGPART_EXPR, rtype, arg1);
9282 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9283 }
9284 }
9285 }
9286
9287 if (flag_unsafe_math_optimizations
9288 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9289 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9290 && (tem = distribute_real_division (code, type, arg0, arg1)))
9291 return tem;
9292
9293 /* Convert x+x into x*2.0. */
9294 if (operand_equal_p (arg0, arg1, 0)
9295 && SCALAR_FLOAT_TYPE_P (type))
9296 return fold_build2 (MULT_EXPR, type, arg0,
9297 build_real (type, dconst2));
9298
9299 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9300 if (flag_unsafe_math_optimizations
9301 && TREE_CODE (arg1) == PLUS_EXPR
9302 && TREE_CODE (arg0) != MULT_EXPR)
9303 {
9304 tree tree10 = TREE_OPERAND (arg1, 0);
9305 tree tree11 = TREE_OPERAND (arg1, 1);
9306 if (TREE_CODE (tree11) == MULT_EXPR
9307 && TREE_CODE (tree10) == MULT_EXPR)
9308 {
9309 tree tree0;
9310 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9311 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9312 }
9313 }
 9314	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
9315 if (flag_unsafe_math_optimizations
9316 && TREE_CODE (arg0) == PLUS_EXPR
9317 && TREE_CODE (arg1) != MULT_EXPR)
9318 {
9319 tree tree00 = TREE_OPERAND (arg0, 0);
9320 tree tree01 = TREE_OPERAND (arg0, 1);
9321 if (TREE_CODE (tree01) == MULT_EXPR
9322 && TREE_CODE (tree00) == MULT_EXPR)
9323 {
9324 tree tree0;
9325 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9326 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9327 }
9328 }
9329 }
9330
9331 bit_rotate:
9332 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9333 is a rotate of A by C1 bits. */
9334 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9335 is a rotate of A by B bits. */
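      /* For example, with 32-bit unsigned A, (A << 3) + (A >> 29) becomes
	 A rotated left by 3, and (A << B) + (A >> (32 - B)) becomes
	 A rotated left by B.  */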
9336 {
9337 enum tree_code code0, code1;
9338 code0 = TREE_CODE (arg0);
9339 code1 = TREE_CODE (arg1);
9340 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9341 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9342 && operand_equal_p (TREE_OPERAND (arg0, 0),
9343 TREE_OPERAND (arg1, 0), 0)
9344 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9345 {
9346 tree tree01, tree11;
9347 enum tree_code code01, code11;
9348
9349 tree01 = TREE_OPERAND (arg0, 1);
9350 tree11 = TREE_OPERAND (arg1, 1);
9351 STRIP_NOPS (tree01);
9352 STRIP_NOPS (tree11);
9353 code01 = TREE_CODE (tree01);
9354 code11 = TREE_CODE (tree11);
9355 if (code01 == INTEGER_CST
9356 && code11 == INTEGER_CST
9357 && TREE_INT_CST_HIGH (tree01) == 0
9358 && TREE_INT_CST_HIGH (tree11) == 0
9359 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9360 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9361 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9362 code0 == LSHIFT_EXPR ? tree01 : tree11);
9363 else if (code11 == MINUS_EXPR)
9364 {
9365 tree tree110, tree111;
9366 tree110 = TREE_OPERAND (tree11, 0);
9367 tree111 = TREE_OPERAND (tree11, 1);
9368 STRIP_NOPS (tree110);
9369 STRIP_NOPS (tree111);
9370 if (TREE_CODE (tree110) == INTEGER_CST
9371 && 0 == compare_tree_int (tree110,
9372 TYPE_PRECISION
9373 (TREE_TYPE (TREE_OPERAND
9374 (arg0, 0))))
9375 && operand_equal_p (tree01, tree111, 0))
9376 return build2 ((code0 == LSHIFT_EXPR
9377 ? LROTATE_EXPR
9378 : RROTATE_EXPR),
9379 type, TREE_OPERAND (arg0, 0), tree01);
9380 }
9381 else if (code01 == MINUS_EXPR)
9382 {
9383 tree tree010, tree011;
9384 tree010 = TREE_OPERAND (tree01, 0);
9385 tree011 = TREE_OPERAND (tree01, 1);
9386 STRIP_NOPS (tree010);
9387 STRIP_NOPS (tree011);
9388 if (TREE_CODE (tree010) == INTEGER_CST
9389 && 0 == compare_tree_int (tree010,
9390 TYPE_PRECISION
9391 (TREE_TYPE (TREE_OPERAND
9392 (arg0, 0))))
9393 && operand_equal_p (tree11, tree011, 0))
9394 return build2 ((code0 != LSHIFT_EXPR
9395 ? LROTATE_EXPR
9396 : RROTATE_EXPR),
9397 type, TREE_OPERAND (arg0, 0), tree11);
9398 }
9399 }
9400 }
9401
9402 associate:
 9403      /* In most languages, we can't associate operations on floats through
 9404         parentheses.  Rather than remember where the parentheses were, we
9405 don't associate floats at all, unless the user has specified
9406 -funsafe-math-optimizations. */
9407
9408 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9409 {
9410 tree var0, con0, lit0, minus_lit0;
9411 tree var1, con1, lit1, minus_lit1;
9412 bool ok = true;
9413
9414 /* Split both trees into variables, constants, and literals. Then
9415 associate each group together, the constants with literals,
9416 then the result with variables. This increases the chances of
9417 literals being recombined later and of generating relocatable
9418 expressions for the sum of a constant and literal. */
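	/* For example, with unsigned x and y, (x + 3) + (y + 7) splits into
	   the variables x and y and the literals 3 and 7, and re-associates
	   to (x + y) + 10.  */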
9419 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9420 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9421 code == MINUS_EXPR);
9422
9423 /* With undefined overflow we can only associate constants
9424 with one variable. */
9425 if ((POINTER_TYPE_P (type)
9426 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9427 && var0 && var1)
9428 {
9429 tree tmp0 = var0;
9430 tree tmp1 = var1;
9431
9432 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9433 tmp0 = TREE_OPERAND (tmp0, 0);
9434 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9435 tmp1 = TREE_OPERAND (tmp1, 0);
9436 /* The only case we can still associate with two variables
9437 is if they are the same, modulo negation. */
9438 if (!operand_equal_p (tmp0, tmp1, 0))
9439 ok = false;
9440 }
9441
9442 /* Only do something if we found more than two objects. Otherwise,
9443 nothing has changed and we risk infinite recursion. */
9444 if (ok
9445 && (2 < ((var0 != 0) + (var1 != 0)
9446 + (con0 != 0) + (con1 != 0)
9447 + (lit0 != 0) + (lit1 != 0)
9448 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9449 {
9450 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9451 if (code == MINUS_EXPR)
9452 code = PLUS_EXPR;
9453
9454 var0 = associate_trees (var0, var1, code, type);
9455 con0 = associate_trees (con0, con1, code, type);
9456 lit0 = associate_trees (lit0, lit1, code, type);
9457 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9458
9459 /* Preserve the MINUS_EXPR if the negative part of the literal is
9460 greater than the positive part. Otherwise, the multiplicative
 9461	       folding code (i.e. extract_muldiv) may be fooled when
 9462	       unsigned constants are subtracted, as in the following
9463 example: ((X*2 + 4) - 8U)/2. */
9464 if (minus_lit0 && lit0)
9465 {
9466 if (TREE_CODE (lit0) == INTEGER_CST
9467 && TREE_CODE (minus_lit0) == INTEGER_CST
9468 && tree_int_cst_lt (lit0, minus_lit0))
9469 {
9470 minus_lit0 = associate_trees (minus_lit0, lit0,
9471 MINUS_EXPR, type);
9472 lit0 = 0;
9473 }
9474 else
9475 {
9476 lit0 = associate_trees (lit0, minus_lit0,
9477 MINUS_EXPR, type);
9478 minus_lit0 = 0;
9479 }
9480 }
9481 if (minus_lit0)
9482 {
9483 if (con0 == 0)
9484 return fold_convert (type,
9485 associate_trees (var0, minus_lit0,
9486 MINUS_EXPR, type));
9487 else
9488 {
9489 con0 = associate_trees (con0, minus_lit0,
9490 MINUS_EXPR, type);
9491 return fold_convert (type,
9492 associate_trees (var0, con0,
9493 PLUS_EXPR, type));
9494 }
9495 }
9496
9497 con0 = associate_trees (con0, lit0, code, type);
9498 return fold_convert (type, associate_trees (var0, con0,
9499 code, type));
9500 }
9501 }
9502
9503 return NULL_TREE;
9504
9505 case MINUS_EXPR:
9506 /* A - (-B) -> A + B */
9507 if (TREE_CODE (arg1) == NEGATE_EXPR)
9508 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9509 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9510 if (TREE_CODE (arg0) == NEGATE_EXPR
9511 && (FLOAT_TYPE_P (type)
9512 || INTEGRAL_TYPE_P (type))
9513 && negate_expr_p (arg1)
9514 && reorder_operands_p (arg0, arg1))
9515 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9516 TREE_OPERAND (arg0, 0));
9517 /* Convert -A - 1 to ~A. */
9518 if (INTEGRAL_TYPE_P (type)
9519 && TREE_CODE (arg0) == NEGATE_EXPR
9520 && integer_onep (arg1)
9521 && !TYPE_OVERFLOW_TRAPS (type))
9522 return fold_build1 (BIT_NOT_EXPR, type,
9523 fold_convert (type, TREE_OPERAND (arg0, 0)));
9524
9525 /* Convert -1 - A to ~A. */
9526 if (INTEGRAL_TYPE_P (type)
9527 && integer_all_onesp (arg0))
9528 return fold_build1 (BIT_NOT_EXPR, type, op1);
9529
9530 if (! FLOAT_TYPE_P (type))
9531 {
9532 if (integer_zerop (arg0))
9533 return negate_expr (fold_convert (type, arg1));
9534 if (integer_zerop (arg1))
9535 return non_lvalue (fold_convert (type, arg0));
9536
9537 /* Fold A - (A & B) into ~B & A. */
9538 if (!TREE_SIDE_EFFECTS (arg0)
9539 && TREE_CODE (arg1) == BIT_AND_EXPR)
9540 {
9541 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9542 return fold_build2 (BIT_AND_EXPR, type,
9543 fold_build1 (BIT_NOT_EXPR, type,
9544 TREE_OPERAND (arg1, 0)),
9545 arg0);
9546 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9547 return fold_build2 (BIT_AND_EXPR, type,
9548 fold_build1 (BIT_NOT_EXPR, type,
9549 TREE_OPERAND (arg1, 1)),
9550 arg0);
9551 }
9552
9553 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9554 any power of 2 minus 1. */
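	  /* For instance, with B == 3 and A == 5: (A & ~B) - (A & B) is
	     4 - 1 == 3 and (A ^ B) - B is 6 - 3 == 3; both compute
	     A - 2*(A & B).  */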
9555 if (TREE_CODE (arg0) == BIT_AND_EXPR
9556 && TREE_CODE (arg1) == BIT_AND_EXPR
9557 && operand_equal_p (TREE_OPERAND (arg0, 0),
9558 TREE_OPERAND (arg1, 0), 0))
9559 {
9560 tree mask0 = TREE_OPERAND (arg0, 1);
9561 tree mask1 = TREE_OPERAND (arg1, 1);
9562 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9563
9564 if (operand_equal_p (tem, mask1, 0))
9565 {
9566 tem = fold_build2 (BIT_XOR_EXPR, type,
9567 TREE_OPERAND (arg0, 0), mask1);
9568 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9569 }
9570 }
9571 }
9572
9573 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9574 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9575 return non_lvalue (fold_convert (type, arg0));
9576
9577 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9578 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9579 (-ARG1 + ARG0) reduces to -ARG1. */
9580 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9581 return negate_expr (fold_convert (type, arg1));
9582
9583 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9584 __complex__ ( x, -y ). This is not the same for SNaNs or if
9585 signed zeros are involved. */
9586 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9587 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9588 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9589 {
9590 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9591 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9592 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9593 bool arg0rz = false, arg0iz = false;
9594 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9595 || (arg0i && (arg0iz = real_zerop (arg0i))))
9596 {
9597 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9598 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9599 if (arg0rz && arg1i && real_zerop (arg1i))
9600 {
9601 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9602 arg1r ? arg1r
9603 : build1 (REALPART_EXPR, rtype, arg1));
9604 tree ip = arg0i ? arg0i
9605 : build1 (IMAGPART_EXPR, rtype, arg0);
9606 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9607 }
9608 else if (arg0iz && arg1r && real_zerop (arg1r))
9609 {
9610 tree rp = arg0r ? arg0r
9611 : build1 (REALPART_EXPR, rtype, arg0);
9612 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9613 arg1i ? arg1i
9614 : build1 (IMAGPART_EXPR, rtype, arg1));
9615 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9616 }
9617 }
9618 }
9619
9620 /* Fold &x - &x. This can happen from &x.foo - &x.
9621 This is unsafe for certain floats even in non-IEEE formats.
9622 In IEEE, it is unsafe because it does wrong for NaNs.
9623 Also note that operand_equal_p is always false if an operand
9624 is volatile. */
9625
9626 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9627 && operand_equal_p (arg0, arg1, 0))
9628 return fold_convert (type, integer_zero_node);
9629
9630 /* A - B -> A + (-B) if B is easily negatable. */
9631 if (negate_expr_p (arg1)
9632 && ((FLOAT_TYPE_P (type)
9633 /* Avoid this transformation if B is a positive REAL_CST. */
9634 && (TREE_CODE (arg1) != REAL_CST
9635 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9636 || INTEGRAL_TYPE_P (type)))
9637 return fold_build2 (PLUS_EXPR, type,
9638 fold_convert (type, arg0),
9639 fold_convert (type, negate_expr (arg1)));
9640
9641 /* Try folding difference of addresses. */
9642 {
9643 HOST_WIDE_INT diff;
9644
9645 if ((TREE_CODE (arg0) == ADDR_EXPR
9646 || TREE_CODE (arg1) == ADDR_EXPR)
9647 && ptr_difference_const (arg0, arg1, &diff))
9648 return build_int_cst_type (type, diff);
9649 }
9650
9651 /* Fold &a[i] - &a[j] to i-j. */
9652 if (TREE_CODE (arg0) == ADDR_EXPR
9653 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9654 && TREE_CODE (arg1) == ADDR_EXPR
9655 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9656 {
9657 tree aref0 = TREE_OPERAND (arg0, 0);
9658 tree aref1 = TREE_OPERAND (arg1, 0);
9659 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9660 TREE_OPERAND (aref1, 0), 0))
9661 {
9662 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9663 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9664 tree esz = array_ref_element_size (aref0);
9665 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9666 return fold_build2 (MULT_EXPR, type, diff,
9667 fold_convert (type, esz));
9668
9669 }
9670 }
9671
 9672	/* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
 9673	   of the array.  The loop optimizer sometimes produces this type of
 9674	   expression.  */
9675 if (TREE_CODE (arg0) == ADDR_EXPR)
9676 {
9677 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9678 if (tem)
9679 return fold_convert (type, tem);
9680 }
9681
9682 if (flag_unsafe_math_optimizations
9683 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9684 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9685 && (tem = distribute_real_division (code, type, arg0, arg1)))
9686 return tem;
9687
 9688	/* Handle (A1 * C1) - (A2 * C2) where A1 and A2 or C1 and C2 are
 9689	   equal, or where one of them is 1.  */
9690 if ((TREE_CODE (arg0) == MULT_EXPR
9691 || TREE_CODE (arg1) == MULT_EXPR)
9692 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9693 {
9694 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9695 if (tem)
9696 return tem;
9697 }
9698
9699 goto associate;
9700
9701 case MULT_EXPR:
9702 /* (-A) * (-B) -> A * B */
9703 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9704 return fold_build2 (MULT_EXPR, type,
9705 fold_convert (type, TREE_OPERAND (arg0, 0)),
9706 fold_convert (type, negate_expr (arg1)));
9707 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9708 return fold_build2 (MULT_EXPR, type,
9709 fold_convert (type, negate_expr (arg0)),
9710 fold_convert (type, TREE_OPERAND (arg1, 0)));
9711
9712 if (! FLOAT_TYPE_P (type))
9713 {
9714 if (integer_zerop (arg1))
9715 return omit_one_operand (type, arg1, arg0);
9716 if (integer_onep (arg1))
9717 return non_lvalue (fold_convert (type, arg0));
9718 /* Transform x * -1 into -x. */
9719 if (integer_all_onesp (arg1))
9720 return fold_convert (type, negate_expr (arg0));
9721 /* Transform x * -C into -x * C if x is easily negatable. */
9722 if (TREE_CODE (arg1) == INTEGER_CST
9723 && tree_int_cst_sgn (arg1) == -1
9724 && negate_expr_p (arg0)
9725 && (tem = negate_expr (arg1)) != arg1
9726 && !TREE_OVERFLOW (tem))
9727 return fold_build2 (MULT_EXPR, type,
9728 negate_expr (arg0), tem);
9729
9730 /* (a * (1 << b)) is (a << b) */
9731 if (TREE_CODE (arg1) == LSHIFT_EXPR
9732 && integer_onep (TREE_OPERAND (arg1, 0)))
9733 return fold_build2 (LSHIFT_EXPR, type, arg0,
9734 TREE_OPERAND (arg1, 1));
9735 if (TREE_CODE (arg0) == LSHIFT_EXPR
9736 && integer_onep (TREE_OPERAND (arg0, 0)))
9737 return fold_build2 (LSHIFT_EXPR, type, arg1,
9738 TREE_OPERAND (arg0, 1));
9739
9740 strict_overflow_p = false;
9741 if (TREE_CODE (arg1) == INTEGER_CST
9742 && 0 != (tem = extract_muldiv (op0,
9743 fold_convert (type, arg1),
9744 code, NULL_TREE,
9745 &strict_overflow_p)))
9746 {
9747 if (strict_overflow_p)
9748 fold_overflow_warning (("assuming signed overflow does not "
9749 "occur when simplifying "
9750 "multiplication"),
9751 WARN_STRICT_OVERFLOW_MISC);
9752 return fold_convert (type, tem);
9753 }
9754
9755 /* Optimize z * conj(z) for integer complex numbers. */
9756 if (TREE_CODE (arg0) == CONJ_EXPR
9757 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9758 return fold_mult_zconjz (type, arg1);
9759 if (TREE_CODE (arg1) == CONJ_EXPR
9760 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9761 return fold_mult_zconjz (type, arg0);
9762 }
9763 else
9764 {
9765 /* Maybe fold x * 0 to 0. The expressions aren't the same
9766 when x is NaN, since x * 0 is also NaN. Nor are they the
9767 same in modes with signed zeros, since multiplying a
9768 negative value by 0 gives -0, not +0. */
9769 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9770 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9771 && real_zerop (arg1))
9772 return omit_one_operand (type, arg1, arg0);
9773 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9774 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9775 && real_onep (arg1))
9776 return non_lvalue (fold_convert (type, arg0));
9777
9778 /* Transform x * -1.0 into -x. */
9779 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9780 && real_minus_onep (arg1))
9781 return fold_convert (type, negate_expr (arg0));
9782
9783 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9784 if (flag_unsafe_math_optimizations
9785 && TREE_CODE (arg0) == RDIV_EXPR
9786 && TREE_CODE (arg1) == REAL_CST
9787 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9788 {
9789 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9790 arg1, 0);
9791 if (tem)
9792 return fold_build2 (RDIV_EXPR, type, tem,
9793 TREE_OPERAND (arg0, 1));
9794 }
9795
9796 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9797 if (operand_equal_p (arg0, arg1, 0))
9798 {
9799 tree tem = fold_strip_sign_ops (arg0);
9800 if (tem != NULL_TREE)
9801 {
9802 tem = fold_convert (type, tem);
9803 return fold_build2 (MULT_EXPR, type, tem, tem);
9804 }
9805 }
9806
9807 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9808 This is not the same for NaNs or if signed zeros are
9809 involved. */
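	  /* For example, (a + b*i) * i is -b + a*i and (a + b*i) * -i is
	     b - a*i, matching the two cases below.  */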
9810 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9811 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9812 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9813 && TREE_CODE (arg1) == COMPLEX_CST
9814 && real_zerop (TREE_REALPART (arg1)))
9815 {
9816 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9817 if (real_onep (TREE_IMAGPART (arg1)))
9818 return fold_build2 (COMPLEX_EXPR, type,
9819 negate_expr (fold_build1 (IMAGPART_EXPR,
9820 rtype, arg0)),
9821 fold_build1 (REALPART_EXPR, rtype, arg0));
9822 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9823 return fold_build2 (COMPLEX_EXPR, type,
9824 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9825 negate_expr (fold_build1 (REALPART_EXPR,
9826 rtype, arg0)));
9827 }
9828
9829 /* Optimize z * conj(z) for floating point complex numbers.
9830 Guarded by flag_unsafe_math_optimizations as non-finite
9831 imaginary components don't produce scalar results. */
9832 if (flag_unsafe_math_optimizations
9833 && TREE_CODE (arg0) == CONJ_EXPR
9834 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9835 return fold_mult_zconjz (type, arg1);
9836 if (flag_unsafe_math_optimizations
9837 && TREE_CODE (arg1) == CONJ_EXPR
9838 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9839 return fold_mult_zconjz (type, arg0);
9840
9841 if (flag_unsafe_math_optimizations)
9842 {
9843 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9844 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9845
9846 /* Optimizations of root(...)*root(...). */
9847 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9848 {
9849 tree rootfn, arg;
9850 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9851 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9852
9853 /* Optimize sqrt(x)*sqrt(x) as x. */
9854 if (BUILTIN_SQRT_P (fcode0)
9855 && operand_equal_p (arg00, arg10, 0)
9856 && ! HONOR_SNANS (TYPE_MODE (type)))
9857 return arg00;
9858
9859 /* Optimize root(x)*root(y) as root(x*y). */
9860 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9861 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9862 return build_call_expr (rootfn, 1, arg);
9863 }
9864
9865 /* Optimize expN(x)*expN(y) as expN(x+y). */
9866 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9867 {
9868 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9869 tree arg = fold_build2 (PLUS_EXPR, type,
9870 CALL_EXPR_ARG (arg0, 0),
9871 CALL_EXPR_ARG (arg1, 0));
9872 return build_call_expr (expfn, 1, arg);
9873 }
9874
9875 /* Optimizations of pow(...)*pow(...). */
9876 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9877 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9878 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9879 {
9880 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9881 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9882 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9883 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9884
9885 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9886 if (operand_equal_p (arg01, arg11, 0))
9887 {
9888 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9889 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9890 return build_call_expr (powfn, 2, arg, arg01);
9891 }
9892
9893 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9894 if (operand_equal_p (arg00, arg10, 0))
9895 {
9896 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9897 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9898 return build_call_expr (powfn, 2, arg00, arg);
9899 }
9900 }
9901
9902 /* Optimize tan(x)*cos(x) as sin(x). */
9903 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9904 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9905 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9906 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9907 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9908 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9909 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9910 CALL_EXPR_ARG (arg1, 0), 0))
9911 {
9912 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9913
9914 if (sinfn != NULL_TREE)
9915 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9916 }
9917
9918 /* Optimize x*pow(x,c) as pow(x,c+1). */
9919 if (fcode1 == BUILT_IN_POW
9920 || fcode1 == BUILT_IN_POWF
9921 || fcode1 == BUILT_IN_POWL)
9922 {
9923 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9924 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9925 if (TREE_CODE (arg11) == REAL_CST
9926 && !TREE_OVERFLOW (arg11)
9927 && operand_equal_p (arg0, arg10, 0))
9928 {
9929 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9930 REAL_VALUE_TYPE c;
9931 tree arg;
9932
9933 c = TREE_REAL_CST (arg11);
9934 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9935 arg = build_real (type, c);
9936 return build_call_expr (powfn, 2, arg0, arg);
9937 }
9938 }
9939
9940 /* Optimize pow(x,c)*x as pow(x,c+1). */
9941 if (fcode0 == BUILT_IN_POW
9942 || fcode0 == BUILT_IN_POWF
9943 || fcode0 == BUILT_IN_POWL)
9944 {
9945 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9946 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9947 if (TREE_CODE (arg01) == REAL_CST
9948 && !TREE_OVERFLOW (arg01)
9949 && operand_equal_p (arg1, arg00, 0))
9950 {
9951 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9952 REAL_VALUE_TYPE c;
9953 tree arg;
9954
9955 c = TREE_REAL_CST (arg01);
9956 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9957 arg = build_real (type, c);
9958 return build_call_expr (powfn, 2, arg1, arg);
9959 }
9960 }
9961
9962 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9963 if (! optimize_size
9964 && operand_equal_p (arg0, arg1, 0))
9965 {
9966 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9967
9968 if (powfn)
9969 {
9970 tree arg = build_real (type, dconst2);
9971 return build_call_expr (powfn, 2, arg0, arg);
9972 }
9973 }
9974 }
9975 }
9976 goto associate;
9977
9978 case BIT_IOR_EXPR:
9979 bit_ior:
9980 if (integer_all_onesp (arg1))
9981 return omit_one_operand (type, arg1, arg0);
9982 if (integer_zerop (arg1))
9983 return non_lvalue (fold_convert (type, arg0));
9984 if (operand_equal_p (arg0, arg1, 0))
9985 return non_lvalue (fold_convert (type, arg0));
9986
9987 /* ~X | X is -1. */
9988 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9989 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9990 {
9991 t1 = build_int_cst_type (type, -1);
9992 return omit_one_operand (type, t1, arg1);
9993 }
9994
9995 /* X | ~X is -1. */
9996 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9997 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9998 {
9999 t1 = build_int_cst_type (type, -1);
10000 return omit_one_operand (type, t1, arg0);
10001 }
10002
10003 /* Canonicalize (X & C1) | C2. */
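      /* For example, (X & 3) | 7 becomes (X, 7) since C2 covers all bits
	 of C1; with 8-bit X, (X & 0xF0) | 0x0F becomes X | 0x0F since
	 C1 | C2 is all ones; and (X & 3) | 1 becomes (X & 2) | 1.  */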
10004 if (TREE_CODE (arg0) == BIT_AND_EXPR
10005 && TREE_CODE (arg1) == INTEGER_CST
10006 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10007 {
10008 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10009 int width = TYPE_PRECISION (type);
10010 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10011 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10012 hi2 = TREE_INT_CST_HIGH (arg1);
10013 lo2 = TREE_INT_CST_LOW (arg1);
10014
10015 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10016 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10017 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10018
10019 if (width > HOST_BITS_PER_WIDE_INT)
10020 {
10021 mhi = (unsigned HOST_WIDE_INT) -1
10022 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10023 mlo = -1;
10024 }
10025 else
10026 {
10027 mhi = 0;
10028 mlo = (unsigned HOST_WIDE_INT) -1
10029 >> (HOST_BITS_PER_WIDE_INT - width);
10030 }
10031
10032 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10033 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10034 return fold_build2 (BIT_IOR_EXPR, type,
10035 TREE_OPERAND (arg0, 0), arg1);
10036
10037 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10038 hi1 &= mhi;
10039 lo1 &= mlo;
10040 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10041 return fold_build2 (BIT_IOR_EXPR, type,
10042 fold_build2 (BIT_AND_EXPR, type,
10043 TREE_OPERAND (arg0, 0),
10044 build_int_cst_wide (type,
10045 lo1 & ~lo2,
10046 hi1 & ~hi2)),
10047 arg1);
10048 }
10049
10050 /* (X & Y) | Y is (X, Y). */
10051 if (TREE_CODE (arg0) == BIT_AND_EXPR
10052 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10053 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10054 /* (X & Y) | X is (Y, X). */
10055 if (TREE_CODE (arg0) == BIT_AND_EXPR
10056 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10057 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10058 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10059 /* X | (X & Y) is (Y, X). */
10060 if (TREE_CODE (arg1) == BIT_AND_EXPR
10061 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10062 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10063 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10064 /* X | (Y & X) is (Y, X). */
10065 if (TREE_CODE (arg1) == BIT_AND_EXPR
10066 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10067 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10068 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10069
10070 t1 = distribute_bit_expr (code, type, arg0, arg1);
10071 if (t1 != NULL_TREE)
10072 return t1;
10073
10074 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10075
10076 This results in more efficient code for machines without a NAND
10077 instruction. Combine will canonicalize to the first form
10078 which will allow use of NAND instructions provided by the
10079 backend if they exist. */
10080 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10081 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10082 {
10083 return fold_build1 (BIT_NOT_EXPR, type,
10084 build2 (BIT_AND_EXPR, type,
10085 TREE_OPERAND (arg0, 0),
10086 TREE_OPERAND (arg1, 0)));
10087 }
10088
10089 /* See if this can be simplified into a rotate first. If that
10090 is unsuccessful continue in the association code. */
10091 goto bit_rotate;
10092
10093 case BIT_XOR_EXPR:
10094 if (integer_zerop (arg1))
10095 return non_lvalue (fold_convert (type, arg0));
10096 if (integer_all_onesp (arg1))
10097 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10098 if (operand_equal_p (arg0, arg1, 0))
10099 return omit_one_operand (type, integer_zero_node, arg0);
10100
10101 /* ~X ^ X is -1. */
10102 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10103 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10104 {
10105 t1 = build_int_cst_type (type, -1);
10106 return omit_one_operand (type, t1, arg1);
10107 }
10108
10109 /* X ^ ~X is -1. */
10110 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10111 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10112 {
10113 t1 = build_int_cst_type (type, -1);
10114 return omit_one_operand (type, t1, arg0);
10115 }
10116
10117 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10118 with a constant, and the two constants have no bits in common,
10119 we should treat this as a BIT_IOR_EXPR since this may produce more
10120 simplifications. */
10121 if (TREE_CODE (arg0) == BIT_AND_EXPR
10122 && TREE_CODE (arg1) == BIT_AND_EXPR
10123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10124 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10125 && integer_zerop (const_binop (BIT_AND_EXPR,
10126 TREE_OPERAND (arg0, 1),
10127 TREE_OPERAND (arg1, 1), 0)))
10128 {
10129 code = BIT_IOR_EXPR;
10130 goto bit_ior;
10131 }
10132
 10133	      /* (X | Y) ^ X -> Y & ~X.  */
10134 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10135 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10136 {
10137 tree t2 = TREE_OPERAND (arg0, 1);
10138 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10139 arg1);
10140 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10141 fold_convert (type, t1));
10142 return t1;
10143 }
10144
 10145	      /* (Y | X) ^ X -> Y & ~X.  */
10146 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10147 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10148 {
10149 tree t2 = TREE_OPERAND (arg0, 0);
10150 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10151 arg1);
10152 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10153 fold_convert (type, t1));
10154 return t1;
10155 }
10156
 10157	      /* X ^ (X | Y) -> Y & ~X.  */
10158 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10159 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10160 {
10161 tree t2 = TREE_OPERAND (arg1, 1);
10162 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10163 arg0);
10164 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10165 fold_convert (type, t1));
10166 return t1;
10167 }
10168
 10169	      /* X ^ (Y | X) -> Y & ~X.  */
10170 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10171 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10172 {
10173 tree t2 = TREE_OPERAND (arg1, 0);
10174 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10175 arg0);
10176 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10177 fold_convert (type, t1));
10178 return t1;
10179 }
10180
10181 /* Convert ~X ^ ~Y to X ^ Y. */
10182 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10183 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10184 return fold_build2 (code, type,
10185 fold_convert (type, TREE_OPERAND (arg0, 0)),
10186 fold_convert (type, TREE_OPERAND (arg1, 0)));
10187
10188 /* Convert ~X ^ C to X ^ ~C. */
10189 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10190 && TREE_CODE (arg1) == INTEGER_CST)
10191 return fold_build2 (code, type,
10192 fold_convert (type, TREE_OPERAND (arg0, 0)),
10193 fold_build1 (BIT_NOT_EXPR, type, arg1));
10194
10195 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10196 if (TREE_CODE (arg0) == BIT_AND_EXPR
10197 && integer_onep (TREE_OPERAND (arg0, 1))
10198 && integer_onep (arg1))
10199 return fold_build2 (EQ_EXPR, type, arg0,
10200 build_int_cst (TREE_TYPE (arg0), 0));
10201
10202 /* Fold (X & Y) ^ Y as ~X & Y. */
10203 if (TREE_CODE (arg0) == BIT_AND_EXPR
10204 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10205 {
10206 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10207 return fold_build2 (BIT_AND_EXPR, type,
10208 fold_build1 (BIT_NOT_EXPR, type, tem),
10209 fold_convert (type, arg1));
10210 }
10211 /* Fold (X & Y) ^ X as ~Y & X. */
10212 if (TREE_CODE (arg0) == BIT_AND_EXPR
10213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10214 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10215 {
10216 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10217 return fold_build2 (BIT_AND_EXPR, type,
10218 fold_build1 (BIT_NOT_EXPR, type, tem),
10219 fold_convert (type, arg1));
10220 }
10221 /* Fold X ^ (X & Y) as X & ~Y. */
10222 if (TREE_CODE (arg1) == BIT_AND_EXPR
10223 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10224 {
10225 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10226 return fold_build2 (BIT_AND_EXPR, type,
10227 fold_convert (type, arg0),
10228 fold_build1 (BIT_NOT_EXPR, type, tem));
10229 }
10230 /* Fold X ^ (Y & X) as ~Y & X. */
10231 if (TREE_CODE (arg1) == BIT_AND_EXPR
10232 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10233 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10234 {
10235 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10236 return fold_build2 (BIT_AND_EXPR, type,
10237 fold_build1 (BIT_NOT_EXPR, type, tem),
10238 fold_convert (type, arg0));
10239 }
10240
10241 /* See if this can be simplified into a rotate first. If that
10242 is unsuccessful continue in the association code. */
10243 goto bit_rotate;
10244
10245 case BIT_AND_EXPR:
10246 if (integer_all_onesp (arg1))
10247 return non_lvalue (fold_convert (type, arg0));
10248 if (integer_zerop (arg1))
10249 return omit_one_operand (type, arg1, arg0);
10250 if (operand_equal_p (arg0, arg1, 0))
10251 return non_lvalue (fold_convert (type, arg0));
10252
10253 /* ~X & X is always zero. */
10254 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10256 return omit_one_operand (type, integer_zero_node, arg1);
10257
10258 /* X & ~X is always zero. */
10259 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10260 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10261 return omit_one_operand (type, integer_zero_node, arg0);
10262
10263 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10264 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10265 && TREE_CODE (arg1) == INTEGER_CST
10266 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10267 return fold_build2 (BIT_IOR_EXPR, type,
10268 fold_build2 (BIT_AND_EXPR, type,
10269 TREE_OPERAND (arg0, 0), arg1),
10270 fold_build2 (BIT_AND_EXPR, type,
10271 TREE_OPERAND (arg0, 1), arg1));
10272
10273 /* (X | Y) & Y is (X, Y). */
10274 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10275 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10276 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10277 /* (X | Y) & X is (Y, X). */
10278 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10279 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10280 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10281 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10282 /* X & (X | Y) is (Y, X). */
10283 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10284 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10285 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10286 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10287 /* X & (Y | X) is (Y, X). */
10288 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10289 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10290 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10291 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10292
10293 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10294 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10295 && integer_onep (TREE_OPERAND (arg0, 1))
10296 && integer_onep (arg1))
10297 {
10298 tem = TREE_OPERAND (arg0, 0);
10299 return fold_build2 (EQ_EXPR, type,
10300 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10301 build_int_cst (TREE_TYPE (tem), 1)),
10302 build_int_cst (TREE_TYPE (tem), 0));
10303 }
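/* This holds because XOR with 1 inverts the low bit: (X ^ 1) & 1
is 1 exactly when the low bit of X is 0, which is what
(X & 1) == 0 tests. */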
10304 /* Fold ~X & 1 as (X & 1) == 0. */
10305 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10306 && integer_onep (arg1))
10307 {
10308 tem = TREE_OPERAND (arg0, 0);
10309 return fold_build2 (EQ_EXPR, type,
10310 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10311 build_int_cst (TREE_TYPE (tem), 1)),
10312 build_int_cst (TREE_TYPE (tem), 0));
10313 }
10314
10315 /* Fold (X ^ Y) & Y as ~X & Y. */
10316 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10317 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10318 {
10319 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10320 return fold_build2 (BIT_AND_EXPR, type,
10321 fold_build1 (BIT_NOT_EXPR, type, tem),
10322 fold_convert (type, arg1));
10323 }
10324 /* Fold (X ^ Y) & X as ~Y & X. */
10325 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10326 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10327 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10328 {
10329 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10330 return fold_build2 (BIT_AND_EXPR, type,
10331 fold_build1 (BIT_NOT_EXPR, type, tem),
10332 fold_convert (type, arg1));
10333 }
10334 /* Fold X & (X ^ Y) as X & ~Y. */
10335 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10336 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10337 {
10338 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10339 return fold_build2 (BIT_AND_EXPR, type,
10340 fold_convert (type, arg0),
10341 fold_build1 (BIT_NOT_EXPR, type, tem));
10342 }
10343 /* Fold X & (Y ^ X) as ~Y & X. */
10344 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10345 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10346 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10347 {
10348 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10349 return fold_build2 (BIT_AND_EXPR, type,
10350 fold_build1 (BIT_NOT_EXPR, type, tem),
10351 fold_convert (type, arg0));
10352 }
10353
10354 t1 = distribute_bit_expr (code, type, arg0, arg1);
10355 if (t1 != NULL_TREE)
10356 return t1;
10357 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10358 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10359 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10360 {
10361 unsigned int prec
10362 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10363
10364 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10365 && (~TREE_INT_CST_LOW (arg1)
10366 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10367 return fold_convert (type, TREE_OPERAND (arg0, 0));
10368 }
10369
10370 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10371
10372 This results in more efficient code for machines without a NOR
10373 instruction. Combine will canonicalize to the first form
10374 which will allow use of NOR instructions provided by the
10375 backend if they exist. */
10376 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10377 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10378 {
10379 return fold_build1 (BIT_NOT_EXPR, type,
10380 build2 (BIT_IOR_EXPR, type,
10381 TREE_OPERAND (arg0, 0),
10382 TREE_OPERAND (arg1, 0)));
10383 }
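/* This is just De Morgan's law: for example, ~a & ~b becomes
~(a | b), replacing two NOTs and an AND with an OR and a single
NOT. */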
10384
10385 goto associate;
10386
10387 case RDIV_EXPR:
10388 /* Don't touch a floating-point divide by zero unless the mode
10389 of the constant can represent infinity. */
10390 if (TREE_CODE (arg1) == REAL_CST
10391 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10392 && real_zerop (arg1))
10393 return NULL_TREE;
10394
10395 /* Optimize A / A to 1.0 if we don't care about
10396 NaNs or Infinities. Skip the transformation
10397 for non-real operands. */
10398 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10399 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10400 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10401 && operand_equal_p (arg0, arg1, 0))
10402 {
10403 tree r = build_real (TREE_TYPE (arg0), dconst1);
10404
10405 return omit_two_operands (type, r, arg0, arg1);
10406 }
10407
10408 /* The complex version of the above A / A optimization. */
10409 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10410 && operand_equal_p (arg0, arg1, 0))
10411 {
10412 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10413 if (! HONOR_NANS (TYPE_MODE (elem_type))
10414 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10415 {
10416 tree r = build_real (elem_type, dconst1);
10417 /* omit_two_operands will call fold_convert for us. */
10418 return omit_two_operands (type, r, arg0, arg1);
10419 }
10420 }
10421
10422 /* (-A) / (-B) -> A / B */
10423 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10424 return fold_build2 (RDIV_EXPR, type,
10425 TREE_OPERAND (arg0, 0),
10426 negate_expr (arg1));
10427 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10428 return fold_build2 (RDIV_EXPR, type,
10429 negate_expr (arg0),
10430 TREE_OPERAND (arg1, 0));
10431
10432 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10433 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10434 && real_onep (arg1))
10435 return non_lvalue (fold_convert (type, arg0));
10436
10437 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10438 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10439 && real_minus_onep (arg1))
10440 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10441
10442 /* If ARG1 is a constant, we can convert this to a multiply by the
10443 reciprocal. This does not have the same rounding properties,
10444 so only do this if -funsafe-math-optimizations. We can actually
10445 always safely do it if ARG1 is a power of two, but it's hard to
10446 tell if it is or not in a portable manner. */
10447 if (TREE_CODE (arg1) == REAL_CST)
10448 {
10449 if (flag_unsafe_math_optimizations
10450 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10451 arg1, 0)))
10452 return fold_build2 (MULT_EXPR, type, arg0, tem);
10453 /* Find the reciprocal if optimizing and the result is exact. */
10454 if (optimize)
10455 {
10456 REAL_VALUE_TYPE r;
10457 r = TREE_REAL_CST (arg1);
10458 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
10459 {
10460 tem = build_real (type, r);
10461 return fold_build2 (MULT_EXPR, type,
10462 fold_convert (type, arg0), tem);
10463 }
10464 }
10465 }
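/* For example, with -funsafe-math-optimizations, x / 3.0 becomes
x * (1.0 / 3.0) with the reciprocal folded to a constant. Even
without that flag, x / 2.0 becomes x * 0.5 when optimizing,
because the reciprocal of a power of two is exact and
exact_real_inverse verifies this. */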
10466 /* Convert A/B/C to A/(B*C). */
10467 if (flag_unsafe_math_optimizations
10468 && TREE_CODE (arg0) == RDIV_EXPR)
10469 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10470 fold_build2 (MULT_EXPR, type,
10471 TREE_OPERAND (arg0, 1), arg1));
10472
10473 /* Convert A/(B/C) to (A/B)*C. */
10474 if (flag_unsafe_math_optimizations
10475 && TREE_CODE (arg1) == RDIV_EXPR)
10476 return fold_build2 (MULT_EXPR, type,
10477 fold_build2 (RDIV_EXPR, type, arg0,
10478 TREE_OPERAND (arg1, 0)),
10479 TREE_OPERAND (arg1, 1));
10480
10481 /* Convert C1/(X*C2) into (C1/C2)/X. */
10482 if (flag_unsafe_math_optimizations
10483 && TREE_CODE (arg1) == MULT_EXPR
10484 && TREE_CODE (arg0) == REAL_CST
10485 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10486 {
10487 tree tem = const_binop (RDIV_EXPR, arg0,
10488 TREE_OPERAND (arg1, 1), 0);
10489 if (tem)
10490 return fold_build2 (RDIV_EXPR, type, tem,
10491 TREE_OPERAND (arg1, 0));
10492 }
10493
10494 if (flag_unsafe_math_optimizations)
10495 {
10496 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10497 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10498
10499 /* Optimize sin(x)/cos(x) as tan(x). */
10500 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10501 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10502 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10503 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10504 CALL_EXPR_ARG (arg1, 0), 0))
10505 {
10506 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10507
10508 if (tanfn != NULL_TREE)
10509 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10510 }
10511
10512 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10513 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10514 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10515 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10516 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10517 CALL_EXPR_ARG (arg1, 0), 0))
10518 {
10519 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10520
10521 if (tanfn != NULL_TREE)
10522 {
10523 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10524 return fold_build2 (RDIV_EXPR, type,
10525 build_real (type, dconst1), tmp);
10526 }
10527 }
10528
10529 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10530 NaNs or Infinities. */
10531 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10532 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10533 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10534 {
10535 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10536 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10537
10538 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10539 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10540 && operand_equal_p (arg00, arg01, 0))
10541 {
10542 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10543
10544 if (cosfn != NULL_TREE)
10545 return build_call_expr (cosfn, 1, arg00);
10546 }
10547 }
10548
10549 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10550 NaNs or Infinities. */
10551 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10552 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10553 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10554 {
10555 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10556 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10557
10558 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10559 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10560 && operand_equal_p (arg00, arg01, 0))
10561 {
10562 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10563
10564 if (cosfn != NULL_TREE)
10565 {
10566 tree tmp = build_call_expr (cosfn, 1, arg00);
10567 return fold_build2 (RDIV_EXPR, type,
10568 build_real (type, dconst1),
10569 tmp);
10570 }
10571 }
10572 }
10573
10574 /* Optimize pow(x,c)/x as pow(x,c-1). */
10575 if (fcode0 == BUILT_IN_POW
10576 || fcode0 == BUILT_IN_POWF
10577 || fcode0 == BUILT_IN_POWL)
10578 {
10579 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10580 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10581 if (TREE_CODE (arg01) == REAL_CST
10582 && !TREE_OVERFLOW (arg01)
10583 && operand_equal_p (arg1, arg00, 0))
10584 {
10585 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10586 REAL_VALUE_TYPE c;
10587 tree arg;
10588
10589 c = TREE_REAL_CST (arg01);
10590 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10591 arg = build_real (type, c);
10592 return build_call_expr (powfn, 2, arg1, arg);
10593 }
10594 }
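/* For example, pow (x, 3.0) / x becomes pow (x, 2.0): the
exponent is decremented by one and the division disappears. */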
10595
10596 /* Optimize x/expN(y) into x*expN(-y). */
10597 if (BUILTIN_EXPONENT_P (fcode1))
10598 {
10599 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10600 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10601 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10602 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10603 }
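/* For example, x / exp (y) becomes x * exp (-y), trading the
division for a multiplication. */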
10604
10605 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10606 if (fcode1 == BUILT_IN_POW
10607 || fcode1 == BUILT_IN_POWF
10608 || fcode1 == BUILT_IN_POWL)
10609 {
10610 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10611 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10612 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10613 tree neg11 = fold_convert (type, negate_expr (arg11));
10614 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10615 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10616 }
10617 }
10618 return NULL_TREE;
10619
10620 case TRUNC_DIV_EXPR:
10621 case FLOOR_DIV_EXPR:
10622 /* Simplify A / (B << N) where A and B are positive and B is
10623 a power of 2, to A >> (N + log2(B)). */
10624 strict_overflow_p = false;
10625 if (TREE_CODE (arg1) == LSHIFT_EXPR
10626 && (TYPE_UNSIGNED (type)
10627 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10628 {
10629 tree sval = TREE_OPERAND (arg1, 0);
10630 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10631 {
10632 tree sh_cnt = TREE_OPERAND (arg1, 1);
10633 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10634
10635 if (strict_overflow_p)
10636 fold_overflow_warning (("assuming signed overflow does not "
10637 "occur when simplifying A / (B << N)"),
10638 WARN_STRICT_OVERFLOW_MISC);
10639
10640 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10641 sh_cnt, build_int_cst (NULL_TREE, pow2));
10642 return fold_build2 (RSHIFT_EXPR, type,
10643 fold_convert (type, arg0), sh_cnt);
10644 }
10645 }
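/* For example, for unsigned a, a / (4u << n) becomes
a >> (n + 2), since log2 (4) == 2. */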
10646 /* Fall thru */
10647
10648 case ROUND_DIV_EXPR:
10649 case CEIL_DIV_EXPR:
10650 case EXACT_DIV_EXPR:
10651 if (integer_onep (arg1))
10652 return non_lvalue (fold_convert (type, arg0));
10653 if (integer_zerop (arg1))
10654 return NULL_TREE;
10655 /* X / -1 is -X. */
10656 if (!TYPE_UNSIGNED (type)
10657 && TREE_CODE (arg1) == INTEGER_CST
10658 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10659 && TREE_INT_CST_HIGH (arg1) == -1)
10660 return fold_convert (type, negate_expr (arg0));
10661
10662 /* Convert -A / -B to A / B when the type is signed and overflow is
10663 undefined. */
10664 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10665 && TREE_CODE (arg0) == NEGATE_EXPR
10666 && negate_expr_p (arg1))
10667 {
10668 if (INTEGRAL_TYPE_P (type))
10669 fold_overflow_warning (("assuming signed overflow does not occur "
10670 "when distributing negation across "
10671 "division"),
10672 WARN_STRICT_OVERFLOW_MISC);
10673 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10674 negate_expr (arg1));
10675 }
10676 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10677 && TREE_CODE (arg1) == NEGATE_EXPR
10678 && negate_expr_p (arg0))
10679 {
10680 if (INTEGRAL_TYPE_P (type))
10681 fold_overflow_warning (("assuming signed overflow does not occur "
10682 "when distributing negation across "
10683 "division"),
10684 WARN_STRICT_OVERFLOW_MISC);
10685 return fold_build2 (code, type, negate_expr (arg0),
10686 TREE_OPERAND (arg1, 0));
10687 }
10688
10689 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10690 operation, EXACT_DIV_EXPR.
10691
10692 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10693 At one time others generated faster code, it's not clear if they do
10694 after the last round of changes to the DIV code in expmed.c. */
10695 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10696 && multiple_of_p (type, arg0, arg1))
10697 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10698
10699 strict_overflow_p = false;
10700 if (TREE_CODE (arg1) == INTEGER_CST
10701 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10702 &strict_overflow_p)))
10703 {
10704 if (strict_overflow_p)
10705 fold_overflow_warning (("assuming signed overflow does not occur "
10706 "when simplifying division"),
10707 WARN_STRICT_OVERFLOW_MISC);
10708 return fold_convert (type, tem);
10709 }
10710
10711 return NULL_TREE;
10712
10713 case CEIL_MOD_EXPR:
10714 case FLOOR_MOD_EXPR:
10715 case ROUND_MOD_EXPR:
10716 case TRUNC_MOD_EXPR:
10717 /* X % 1 is always zero, but be sure to preserve any side
10718 effects in X. */
10719 if (integer_onep (arg1))
10720 return omit_one_operand (type, integer_zero_node, arg0);
10721
10722 /* For X % 0, return X % 0 unchanged so that we can get the
10723 proper warnings and errors. */
10724 if (integer_zerop (arg1))
10725 return NULL_TREE;
10726
10727 /* 0 % X is always zero, but be sure to preserve any side
10728 effects in X. Place this after checking for X == 0. */
10729 if (integer_zerop (arg0))
10730 return omit_one_operand (type, integer_zero_node, arg1);
10731
10732 /* X % -1 is zero. */
10733 if (!TYPE_UNSIGNED (type)
10734 && TREE_CODE (arg1) == INTEGER_CST
10735 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10736 && TREE_INT_CST_HIGH (arg1) == -1)
10737 return omit_one_operand (type, integer_zero_node, arg0);
10738
10739 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10740 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10741 strict_overflow_p = false;
10742 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10743 && (TYPE_UNSIGNED (type)
10744 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10745 {
10746 tree c = arg1;
10747 /* Also optimize A % (C << N) where C is a power of 2,
10748 to A & ((C << N) - 1). */
10749 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10750 c = TREE_OPERAND (arg1, 0);
10751
10752 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10753 {
10754 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10755 build_int_cst (TREE_TYPE (arg1), 1));
10756 if (strict_overflow_p)
10757 fold_overflow_warning (("assuming signed overflow does not "
10758 "occur when simplifying "
10759 "X % (power of two)"),
10760 WARN_STRICT_OVERFLOW_MISC);
10761 return fold_build2 (BIT_AND_EXPR, type,
10762 fold_convert (type, arg0),
10763 fold_convert (type, mask));
10764 }
10765 }
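/* For example, for unsigned x, x % 8 becomes x & 7, and
x % (2u << n) becomes x & ((2u << n) - 1). */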
10766
10767 /* X % -C is the same as X % C. */
10768 if (code == TRUNC_MOD_EXPR
10769 && !TYPE_UNSIGNED (type)
10770 && TREE_CODE (arg1) == INTEGER_CST
10771 && !TREE_OVERFLOW (arg1)
10772 && TREE_INT_CST_HIGH (arg1) < 0
10773 && !TYPE_OVERFLOW_TRAPS (type)
10774 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10775 && !sign_bit_p (arg1, arg1))
10776 return fold_build2 (code, type, fold_convert (type, arg0),
10777 fold_convert (type, negate_expr (arg1)));
10778
10779 /* X % -Y is the same as X % Y. */
10780 if (code == TRUNC_MOD_EXPR
10781 && !TYPE_UNSIGNED (type)
10782 && TREE_CODE (arg1) == NEGATE_EXPR
10783 && !TYPE_OVERFLOW_TRAPS (type))
10784 return fold_build2 (code, type, fold_convert (type, arg0),
10785 fold_convert (type, TREE_OPERAND (arg1, 0)));
10786
10787 if (TREE_CODE (arg1) == INTEGER_CST
10788 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10789 &strict_overflow_p)))
10790 {
10791 if (strict_overflow_p)
10792 fold_overflow_warning (("assuming signed overflow does not occur "
10793 "when simplifying modulos"),
10794 WARN_STRICT_OVERFLOW_MISC);
10795 return fold_convert (type, tem);
10796 }
10797
10798 return NULL_TREE;
10799
10800 case LROTATE_EXPR:
10801 case RROTATE_EXPR:
10802 if (integer_all_onesp (arg0))
10803 return omit_one_operand (type, arg0, arg1);
10804 goto shift;
10805
10806 case RSHIFT_EXPR:
10807 /* Optimize -1 >> x for arithmetic right shifts. */
10808 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10809 return omit_one_operand (type, arg0, arg1);
10810 /* ... fall through ... */
10811
10812 case LSHIFT_EXPR:
10813 shift:
10814 if (integer_zerop (arg1))
10815 return non_lvalue (fold_convert (type, arg0));
10816 if (integer_zerop (arg0))
10817 return omit_one_operand (type, arg0, arg1);
10818
10819 /* Since a negative shift count is not well-defined,
10820 don't try to compute it in the compiler. */
10821 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10822 return NULL_TREE;
10823
10824 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10825 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10826 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10827 && host_integerp (TREE_OPERAND (arg0, 1), false)
10828 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10829 {
10830 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10831 + TREE_INT_CST_LOW (arg1));
10832
10833 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10834 being well defined. */
10835 if (low >= TYPE_PRECISION (type))
10836 {
10837 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10838 low = low % TYPE_PRECISION (type);
10839 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10840 return build_int_cst (type, 0);
10841 else
10842 low = TYPE_PRECISION (type) - 1;
10843 }
10844
10845 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10846 build_int_cst (type, low));
10847 }
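/* For example, on a 32-bit type, (x >> 3) >> 5 becomes x >> 8.
If the combined count is out of range, the result folds
outright: (x << 20) << 20 becomes 0 for an unsigned 32-bit x,
while for a signed x, (x >> 20) >> 20 clamps to x >> 31. */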
10848
10849 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10850 into x & ((unsigned)-1 >> c) for unsigned types. */
10851 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10852 || (TYPE_UNSIGNED (type)
10853 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10854 && host_integerp (arg1, false)
10855 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10856 && host_integerp (TREE_OPERAND (arg0, 1), false)
10857 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10858 {
10859 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10860 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10861 tree lshift;
10862 tree arg00;
10863
10864 if (low0 == low1)
10865 {
10866 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10867
10868 lshift = build_int_cst (type, -1);
10869 lshift = int_const_binop (code, lshift, arg1, 0);
10870
10871 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10872 }
10873 }
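/* For example, for unsigned 32-bit x, (x >> 4) << 4 becomes
x & 0xfffffff0, clearing the low four bits without the two
shifts. */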
10874
10875 /* Rewrite an LROTATE_EXPR by a constant into an
10876 RROTATE_EXPR by a new constant. */
10877 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10878 {
10879 tree tem = build_int_cst (TREE_TYPE (arg1),
10880 GET_MODE_BITSIZE (TYPE_MODE (type)));
10881 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10882 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10883 }
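/* For example, on a 32-bit type a left-rotate by 8 becomes a
right-rotate by 24. */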
10884
10885 /* If we have a rotate of a bit operation with the rotate count and
10886 the second operand of the bit operation both constant,
10887 permute the two operations. */
10888 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10889 && (TREE_CODE (arg0) == BIT_AND_EXPR
10890 || TREE_CODE (arg0) == BIT_IOR_EXPR
10891 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10893 return fold_build2 (TREE_CODE (arg0), type,
10894 fold_build2 (code, type,
10895 TREE_OPERAND (arg0, 0), arg1),
10896 fold_build2 (code, type,
10897 TREE_OPERAND (arg0, 1), arg1));
10898
10899 /* Two consecutive rotates adding up to the width of the mode can
10900 be ignored. */
10901 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10902 && TREE_CODE (arg0) == RROTATE_EXPR
10903 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10904 && TREE_INT_CST_HIGH (arg1) == 0
10905 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10906 && ((TREE_INT_CST_LOW (arg1)
10907 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10908 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10909 return TREE_OPERAND (arg0, 0);
10910
10911 return NULL_TREE;
10912
10913 case MIN_EXPR:
10914 if (operand_equal_p (arg0, arg1, 0))
10915 return omit_one_operand (type, arg0, arg1);
10916 if (INTEGRAL_TYPE_P (type)
10917 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10918 return omit_one_operand (type, arg1, arg0);
10919 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10920 if (tem)
10921 return tem;
10922 goto associate;
10923
10924 case MAX_EXPR:
10925 if (operand_equal_p (arg0, arg1, 0))
10926 return omit_one_operand (type, arg0, arg1);
10927 if (INTEGRAL_TYPE_P (type)
10928 && TYPE_MAX_VALUE (type)
10929 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10930 return omit_one_operand (type, arg1, arg0);
10931 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10932 if (tem)
10933 return tem;
10934 goto associate;
10935
10936 case TRUTH_ANDIF_EXPR:
10937 /* Note that the operands of this must be ints
10938 and their values must be 0 or 1.
10939 ("true" is a fixed value perhaps depending on the language.) */
10940 /* If first arg is constant zero, return it. */
10941 if (integer_zerop (arg0))
10942 return fold_convert (type, arg0);
10943 case TRUTH_AND_EXPR:
10944 /* If either arg is constant true, drop it. */
10945 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10946 return non_lvalue (fold_convert (type, arg1));
10947 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10948 /* Preserve sequence points. */
10949 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10950 return non_lvalue (fold_convert (type, arg0));
10951 /* If second arg is constant zero, result is zero, but first arg
10952 must be evaluated. */
10953 if (integer_zerop (arg1))
10954 return omit_one_operand (type, arg1, arg0);
10955 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10956 case will be handled here. */
10957 if (integer_zerop (arg0))
10958 return omit_one_operand (type, arg0, arg1);
10959
10960 /* !X && X is always false. */
10961 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10962 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10963 return omit_one_operand (type, integer_zero_node, arg1);
10964 /* X && !X is always false. */
10965 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10966 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10967 return omit_one_operand (type, integer_zero_node, arg0);
10968
10969 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10970 means A >= Y && A != MAX, but in this case we know that
10971 A < X <= MAX. */
10972
10973 if (!TREE_SIDE_EFFECTS (arg0)
10974 && !TREE_SIDE_EFFECTS (arg1))
10975 {
10976 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10977 if (tem && !operand_equal_p (tem, arg0, 0))
10978 return fold_build2 (code, type, tem, arg1);
10979
10980 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10981 if (tem && !operand_equal_p (tem, arg1, 0))
10982 return fold_build2 (code, type, arg0, tem);
10983 }
10984
10985 truth_andor:
10986 /* We only do these simplifications if we are optimizing. */
10987 if (!optimize)
10988 return NULL_TREE;
10989
10990 /* Check for things like (A || B) && (A || C). We can convert this
10991 to A || (B && C). Note that either operator can be any of the four
10992 truth and/or operations and the transformation will still be
10993 valid. Also note that we only care about order for the
10994 ANDIF and ORIF operators. If B contains side effects, this
10995 might change the truth-value of A. */
10996 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10997 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10998 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10999 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11000 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11001 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11002 {
11003 tree a00 = TREE_OPERAND (arg0, 0);
11004 tree a01 = TREE_OPERAND (arg0, 1);
11005 tree a10 = TREE_OPERAND (arg1, 0);
11006 tree a11 = TREE_OPERAND (arg1, 1);
11007 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11008 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11009 && (code == TRUTH_AND_EXPR
11010 || code == TRUTH_OR_EXPR));
11011
11012 if (operand_equal_p (a00, a10, 0))
11013 return fold_build2 (TREE_CODE (arg0), type, a00,
11014 fold_build2 (code, type, a01, a11));
11015 else if (commutative && operand_equal_p (a00, a11, 0))
11016 return fold_build2 (TREE_CODE (arg0), type, a00,
11017 fold_build2 (code, type, a01, a10));
11018 else if (commutative && operand_equal_p (a01, a10, 0))
11019 return fold_build2 (TREE_CODE (arg0), type, a01,
11020 fold_build2 (code, type, a00, a11));
11021
11022 /* This case is tricky because we must either have commutative
11023 operators or else A10 must not have side-effects. */
11024
11025 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11026 && operand_equal_p (a01, a11, 0))
11027 return fold_build2 (TREE_CODE (arg0), type,
11028 fold_build2 (code, type, a00, a10),
11029 a01);
11030 }
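/* For example, (a || b) && (a || c) becomes a || (b && c),
provided b has no side effects. */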
11031
11032 /* See if we can build a range comparison. */
11033 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11034 return tem;
11035
11036 /* Check for the possibility of merging component references. If our
11037 lhs is another similar operation, try to merge its rhs with our
11038 rhs. Then try to merge our lhs and rhs. */
11039 if (TREE_CODE (arg0) == code
11040 && 0 != (tem = fold_truthop (code, type,
11041 TREE_OPERAND (arg0, 1), arg1)))
11042 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11043
11044 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11045 return tem;
11046
11047 return NULL_TREE;
11048
11049 case TRUTH_ORIF_EXPR:
11050 /* Note that the operands of this must be ints
11051 and their values must be 0 or true.
11052 ("true" is a fixed value perhaps depending on the language.) */
11053 /* If first arg is constant true, return it. */
11054 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11055 return fold_convert (type, arg0);
11056 case TRUTH_OR_EXPR:
11057 /* If either arg is constant zero, drop it. */
11058 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11059 return non_lvalue (fold_convert (type, arg1));
11060 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11061 /* Preserve sequence points. */
11062 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11063 return non_lvalue (fold_convert (type, arg0));
11064 /* If second arg is constant true, result is true, but we must
11065 evaluate first arg. */
11066 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11067 return omit_one_operand (type, arg1, arg0);
11068 /* Likewise for first arg, but note this only occurs here for
11069 TRUTH_OR_EXPR. */
11070 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11071 return omit_one_operand (type, arg0, arg1);
11072
11073 /* !X || X is always true. */
11074 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11075 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11076 return omit_one_operand (type, integer_one_node, arg1);
11077 /* X || !X is always true. */
11078 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11079 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11080 return omit_one_operand (type, integer_one_node, arg0);
11081
11082 goto truth_andor;
11083
11084 case TRUTH_XOR_EXPR:
11085 /* If the second arg is constant zero, drop it. */
11086 if (integer_zerop (arg1))
11087 return non_lvalue (fold_convert (type, arg0));
11088 /* If the second arg is constant true, this is a logical inversion. */
11089 if (integer_onep (arg1))
11090 {
11091 /* Only call invert_truthvalue if operand is a truth value. */
11092 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11093 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11094 else
11095 tem = invert_truthvalue (arg0);
11096 return non_lvalue (fold_convert (type, tem));
11097 }
11098 /* Identical arguments cancel to zero. */
11099 if (operand_equal_p (arg0, arg1, 0))
11100 return omit_one_operand (type, integer_zero_node, arg0);
11101
11102 /* !X ^ X is always true. */
11103 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11105 return omit_one_operand (type, integer_one_node, arg1);
11106
11107 /* X ^ !X is always true. */
11108 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11109 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11110 return omit_one_operand (type, integer_one_node, arg0);
11111
11112 return NULL_TREE;
11113
11114 case EQ_EXPR:
11115 case NE_EXPR:
11116 tem = fold_comparison (code, type, op0, op1);
11117 if (tem != NULL_TREE)
11118 return tem;
11119
11120 /* bool_var != 0 becomes bool_var. */
11121 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11122 && code == NE_EXPR)
11123 return non_lvalue (fold_convert (type, arg0));
11124
11125 /* bool_var == 1 becomes bool_var. */
11126 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11127 && code == EQ_EXPR)
11128 return non_lvalue (fold_convert (type, arg0));
11129
11130 /* bool_var != 1 becomes !bool_var. */
11131 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11132 && code == NE_EXPR)
11133 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11134
11135 /* bool_var == 0 becomes !bool_var. */
11136 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11137 && code == EQ_EXPR)
11138 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11139
11140 /* If this is an equality comparison of the address of two non-weak,
11141 unaliased symbols neither of which are extern (since we do not
11142 have access to attributes for externs), then we know the result. */
11143 if (TREE_CODE (arg0) == ADDR_EXPR
11144 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11145 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11146 && ! lookup_attribute ("alias",
11147 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11148 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11149 && TREE_CODE (arg1) == ADDR_EXPR
11150 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11151 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11152 && ! lookup_attribute ("alias",
11153 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11154 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11155 {
11156 /* We know that we're looking at the address of two
11157 non-weak, unaliased, static _DECL nodes.
11158
11159 It is both wasteful and incorrect to call operand_equal_p
11160 to compare the two ADDR_EXPR nodes. It is wasteful in that
11161 all we need to do is test pointer equality for the arguments
11162 to the two ADDR_EXPR nodes. It is incorrect to use
11163 operand_equal_p as that function is NOT equivalent to a
11164 C equality test. It can in fact return false for two
11165 objects which would test as equal using the C equality
11166 operator. */
11167 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11168 return constant_boolean_node (equal
11169 ? code == EQ_EXPR : code != EQ_EXPR,
11170 type);
11171 }
11172
11173 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11174 a MINUS_EXPR of a constant, we can convert it into a comparison with
11175 a revised constant as long as no overflow occurs. */
11176 if (TREE_CODE (arg1) == INTEGER_CST
11177 && (TREE_CODE (arg0) == PLUS_EXPR
11178 || TREE_CODE (arg0) == MINUS_EXPR)
11179 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11180 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11181 ? MINUS_EXPR : PLUS_EXPR,
11182 fold_convert (TREE_TYPE (arg0), arg1),
11183 TREE_OPERAND (arg0, 1), 0))
11184 && !TREE_OVERFLOW (tem))
11185 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11186
11187 /* Similarly for a NEGATE_EXPR. */
11188 if (TREE_CODE (arg0) == NEGATE_EXPR
11189 && TREE_CODE (arg1) == INTEGER_CST
11190 && 0 != (tem = negate_expr (arg1))
11191 && TREE_CODE (tem) == INTEGER_CST
11192 && !TREE_OVERFLOW (tem))
11193 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11194
11195 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11196 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11197 && TREE_CODE (arg1) == INTEGER_CST
11198 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11199 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11200 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11201 fold_convert (TREE_TYPE (arg0), arg1),
11202 TREE_OPERAND (arg0, 1)));
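/* For example, (x ^ 5) == 3 becomes x == 6, since 5 ^ 3
constant-folds to 6. */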
11203
11204 /* Transform comparisons of the form X +- C CMP X. */
11205 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11206 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11207 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11208 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11209 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11210 {
11211 tree cst = TREE_OPERAND (arg0, 1);
11212
11213 if (code == EQ_EXPR
11214 && !integer_zerop (cst))
11215 return omit_two_operands (type, boolean_false_node,
11216 TREE_OPERAND (arg0, 0), arg1);
11217 else
11218 return omit_two_operands (type, boolean_true_node,
11219 TREE_OPERAND (arg0, 0), arg1);
11220 }
11221
11222 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11223 for !=. Don't do this for ordered comparisons due to overflow. */
11224 if (TREE_CODE (arg0) == MINUS_EXPR
11225 && integer_zerop (arg1))
11226 return fold_build2 (code, type,
11227 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11228
11229 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11230 if (TREE_CODE (arg0) == ABS_EXPR
11231 && (integer_zerop (arg1) || real_zerop (arg1)))
11232 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11233
11234 /* If this is an EQ or NE comparison with zero and ARG0 is
11235 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11236 two operations, but the latter can be done in one less insn
11237 on machines that have only two-operand insns or on which a
11238 constant cannot be the first operand. */
11239 if (TREE_CODE (arg0) == BIT_AND_EXPR
11240 && integer_zerop (arg1))
11241 {
11242 tree arg00 = TREE_OPERAND (arg0, 0);
11243 tree arg01 = TREE_OPERAND (arg0, 1);
11244 if (TREE_CODE (arg00) == LSHIFT_EXPR
11245 && integer_onep (TREE_OPERAND (arg00, 0)))
11246 return
11247 fold_build2 (code, type,
11248 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11249 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11250 arg01, TREE_OPERAND (arg00, 1)),
11251 fold_convert (TREE_TYPE (arg0),
11252 integer_one_node)),
11253 arg1);
11254 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11255 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11256 return
11257 fold_build2 (code, type,
11258 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11259 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11260 arg00, TREE_OPERAND (arg01, 1)),
11261 fold_convert (TREE_TYPE (arg0),
11262 integer_one_node)),
11263 arg1);
11264 }
11265
11266 /* If this is an NE or EQ comparison of zero against the result of a
11267 signed MOD operation whose second operand is a power of 2, make
11268 the MOD operation unsigned since it is simpler and equivalent. */
11269 if (integer_zerop (arg1)
11270 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11271 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11272 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11273 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11274 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11275 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11276 {
11277 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11278 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11279 fold_convert (newtype,
11280 TREE_OPERAND (arg0, 0)),
11281 fold_convert (newtype,
11282 TREE_OPERAND (arg0, 1)));
11283
11284 return fold_build2 (code, type, newmod,
11285 fold_convert (newtype, arg1));
11286 }
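/* For example, for signed x, x % 16 == 0 becomes
(unsigned) x % 16 == 0, which the power-of-two rule for
TRUNC_MOD_EXPR can then reduce to ((unsigned) x & 15) == 0. */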
11287
11288 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11289 C1 is a valid shift constant, and C2 is a power of two, i.e.
11290 a single bit. */
11291 if (TREE_CODE (arg0) == BIT_AND_EXPR
11292 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11293 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11294 == INTEGER_CST
11295 && integer_pow2p (TREE_OPERAND (arg0, 1))
11296 && integer_zerop (arg1))
11297 {
11298 tree itype = TREE_TYPE (arg0);
11299 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11300 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11301
11302 /* Check for a valid shift count. */
11303 if (TREE_INT_CST_HIGH (arg001) == 0
11304 && TREE_INT_CST_LOW (arg001) < prec)
11305 {
11306 tree arg01 = TREE_OPERAND (arg0, 1);
11307 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11308 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11309 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11310 can be rewritten as (X & (C2 << C1)) != 0. */
11311 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11312 {
11313 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11314 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11315 return fold_build2 (code, type, tem, arg1);
11316 }
11317 /* Otherwise, for signed (arithmetic) shifts,
11318 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11319 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11320 else if (!TYPE_UNSIGNED (itype))
11321 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11322 arg000, build_int_cst (itype, 0));
11323 /* Otherwise, for unsigned (logical) shifts,
11324 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11325 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11326 else
11327 return omit_one_operand (type,
11328 code == EQ_EXPR ? integer_one_node
11329 : integer_zero_node,
11330 arg000);
11331 }
11332 }
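/* For example, ((x >> 3) & 4) != 0 becomes (x & 32) != 0, since
4 << 3 does not overflow. Where the shifted constant would
overflow, e.g. ((x >> 31) & 2) != 0 on a signed 32-bit x, the
test collapses to x < 0. */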
11333
11334 /* If this is an NE comparison of zero with an AND of one, remove the
11335 comparison since the AND will give the correct value. */
11336 if (code == NE_EXPR
11337 && integer_zerop (arg1)
11338 && TREE_CODE (arg0) == BIT_AND_EXPR
11339 && integer_onep (TREE_OPERAND (arg0, 1)))
11340 return fold_convert (type, arg0);
11341
11342 /* If we have (A & C) == C where C is a power of 2, convert this into
11343 (A & C) != 0. Similarly for NE_EXPR. */
11344 if (TREE_CODE (arg0) == BIT_AND_EXPR
11345 && integer_pow2p (TREE_OPERAND (arg0, 1))
11346 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11347 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11348 arg0, fold_convert (TREE_TYPE (arg0),
11349 integer_zero_node));
11350
11351 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11352 bit, then fold the expression into A < 0 or A >= 0. */
11353 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11354 if (tem)
11355 return tem;
11356
11357 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11358 Similarly for NE_EXPR. */
11359 if (TREE_CODE (arg0) == BIT_AND_EXPR
11360 && TREE_CODE (arg1) == INTEGER_CST
11361 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11362 {
11363 tree notc = fold_build1 (BIT_NOT_EXPR,
11364 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11365 TREE_OPERAND (arg0, 1));
11366 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11367 arg1, notc);
11368 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11369 if (integer_nonzerop (dandnotc))
11370 return omit_one_operand (type, rslt, arg0);
11371 }
11372
11373 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11374 Similarly for NE_EXPR. */
11375 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11376 && TREE_CODE (arg1) == INTEGER_CST
11377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11378 {
11379 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11380 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11381 TREE_OPERAND (arg0, 1), notd);
11382 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11383 if (integer_nonzerop (candnotd))
11384 return omit_one_operand (type, rslt, arg0);
11385 }
11386
11387 /* If this is a comparison of a field, we may be able to simplify it. */
11388 if ((TREE_CODE (arg0) == COMPONENT_REF
11389 || TREE_CODE (arg0) == BIT_FIELD_REF)
11390 /* Handle the constant case even without -O
11391 to make sure the warnings are given. */
11392 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11393 {
11394 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11395 if (t1)
11396 return t1;
11397 }
11398
11399 /* Optimize comparisons of strlen vs zero to a compare of the
11400 first character of the string vs zero. To wit,
11401 strlen(ptr) == 0 => *ptr == 0
11402 strlen(ptr) != 0 => *ptr != 0
11403 Other cases should reduce to one of these two (or a constant)
11404 due to the return value of strlen being unsigned. */
11405 if (TREE_CODE (arg0) == CALL_EXPR
11406 && integer_zerop (arg1))
11407 {
11408 tree fndecl = get_callee_fndecl (arg0);
11409
11410 if (fndecl
11411 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11412 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11413 && call_expr_nargs (arg0) == 1
11414 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11415 {
11416 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11417 return fold_build2 (code, type, iref,
11418 build_int_cst (TREE_TYPE (iref), 0));
11419 }
11420 }
11421
11422 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11423 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11424 if (TREE_CODE (arg0) == RSHIFT_EXPR
11425 && integer_zerop (arg1)
11426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11427 {
11428 tree arg00 = TREE_OPERAND (arg0, 0);
11429 tree arg01 = TREE_OPERAND (arg0, 1);
11430 tree itype = TREE_TYPE (arg00);
11431 if (TREE_INT_CST_HIGH (arg01) == 0
11432 && TREE_INT_CST_LOW (arg01)
11433 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11434 {
11435 if (TYPE_UNSIGNED (itype))
11436 {
11437 itype = lang_hooks.types.signed_type (itype);
11438 arg00 = fold_convert (itype, arg00);
11439 }
11440 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11441 type, arg00, build_int_cst (itype, 0));
11442 }
11443 }
11444
11445 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11446 if (integer_zerop (arg1)
11447 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11448 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11449 TREE_OPERAND (arg0, 1));
11450
11451 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11452 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11453 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11454 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11455 build_int_cst (TREE_TYPE (arg1), 0));
11456 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11457 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11459 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11460 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11461 build_int_cst (TREE_TYPE (arg1), 0));
11462
11463 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11464 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11465 && TREE_CODE (arg1) == INTEGER_CST
11466 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11467 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11468 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11469 TREE_OPERAND (arg0, 1), arg1));
11470
11471 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11472 (X & C) == 0 when C is a single bit. */
11473 if (TREE_CODE (arg0) == BIT_AND_EXPR
11474 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11475 && integer_zerop (arg1)
11476 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11477 {
11478 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11479 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11480 TREE_OPERAND (arg0, 1));
11481 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11482 type, tem, arg1);
11483 }
11484
11485 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11486 constant C is a power of two, i.e. a single bit. */
11487 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11488 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11489 && integer_zerop (arg1)
11490 && integer_pow2p (TREE_OPERAND (arg0, 1))
11491 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11492 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11493 {
11494 tree arg00 = TREE_OPERAND (arg0, 0);
11495 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11496 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11497 }
11498
11499 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11500 when C is a power of two, i.e. a single bit. */
11501 if (TREE_CODE (arg0) == BIT_AND_EXPR
11502 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11503 && integer_zerop (arg1)
11504 && integer_pow2p (TREE_OPERAND (arg0, 1))
11505 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11506 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11507 {
11508 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11509 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11510 arg000, TREE_OPERAND (arg0, 1));
11511 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11512 tem, build_int_cst (TREE_TYPE (tem), 0));
11513 }
11514
11515 if (integer_zerop (arg1)
11516 && tree_expr_nonzero_p (arg0))
11517 {
11518 tree res = constant_boolean_node (code == NE_EXPR, type);
11519 return omit_one_operand (type, res, arg0);
11520 }
11521
11522 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11523 if (TREE_CODE (arg0) == NEGATE_EXPR
11524 && TREE_CODE (arg1) == NEGATE_EXPR)
11525 return fold_build2 (code, type,
11526 TREE_OPERAND (arg0, 0),
11527 TREE_OPERAND (arg1, 0));
11528
11529 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11530 if (TREE_CODE (arg0) == BIT_AND_EXPR
11531 && TREE_CODE (arg1) == BIT_AND_EXPR)
11532 {
11533 tree arg00 = TREE_OPERAND (arg0, 0);
11534 tree arg01 = TREE_OPERAND (arg0, 1);
11535 tree arg10 = TREE_OPERAND (arg1, 0);
11536 tree arg11 = TREE_OPERAND (arg1, 1);
11537 tree itype = TREE_TYPE (arg0);
11538
11539 if (operand_equal_p (arg01, arg11, 0))
11540 return fold_build2 (code, type,
11541 fold_build2 (BIT_AND_EXPR, itype,
11542 fold_build2 (BIT_XOR_EXPR, itype,
11543 arg00, arg10),
11544 arg01),
11545 build_int_cst (itype, 0));
11546
11547 if (operand_equal_p (arg01, arg10, 0))
11548 return fold_build2 (code, type,
11549 fold_build2 (BIT_AND_EXPR, itype,
11550 fold_build2 (BIT_XOR_EXPR, itype,
11551 arg00, arg11),
11552 arg01),
11553 build_int_cst (itype, 0));
11554
11555 if (operand_equal_p (arg00, arg11, 0))
11556 return fold_build2 (code, type,
11557 fold_build2 (BIT_AND_EXPR, itype,
11558 fold_build2 (BIT_XOR_EXPR, itype,
11559 arg01, arg10),
11560 arg00),
11561 build_int_cst (itype, 0));
11562
11563 if (operand_equal_p (arg00, arg10, 0))
11564 return fold_build2 (code, type,
11565 fold_build2 (BIT_AND_EXPR, itype,
11566 fold_build2 (BIT_XOR_EXPR, itype,
11567 arg01, arg11),
11568 arg00),
11569 build_int_cst (itype, 0));
11570 }
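/* For example, (x & 12) == (y & 12) becomes
((x ^ y) & 12) == 0. */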
11571
11572 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11573 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11574 {
11575 tree arg00 = TREE_OPERAND (arg0, 0);
11576 tree arg01 = TREE_OPERAND (arg0, 1);
11577 tree arg10 = TREE_OPERAND (arg1, 0);
11578 tree arg11 = TREE_OPERAND (arg1, 1);
11579 tree itype = TREE_TYPE (arg0);
11580
11581 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11582 operand_equal_p guarantees no side-effects so we don't need
11583 to use omit_one_operand on Z. */
11584 if (operand_equal_p (arg01, arg11, 0))
11585 return fold_build2 (code, type, arg00, arg10);
11586 if (operand_equal_p (arg01, arg10, 0))
11587 return fold_build2 (code, type, arg00, arg11);
11588 if (operand_equal_p (arg00, arg11, 0))
11589 return fold_build2 (code, type, arg01, arg10);
11590 if (operand_equal_p (arg00, arg10, 0))
11591 return fold_build2 (code, type, arg01, arg11);
11592
11593 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11594 if (TREE_CODE (arg01) == INTEGER_CST
11595 && TREE_CODE (arg11) == INTEGER_CST)
11596 return fold_build2 (code, type,
11597 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11598 fold_build2 (BIT_XOR_EXPR, itype,
11599 arg01, arg11)),
11600 arg10);
11601 }
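/* For example, (x ^ 2) == (y ^ 5) becomes (x ^ 7) == y. */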
11602
11603 /* Attempt to simplify equality/inequality comparisons of complex
11604 values. Only lower the comparison if the result is known or
11605 can be simplified to a single scalar comparison. */
11606 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11607 || TREE_CODE (arg0) == COMPLEX_CST)
11608 && (TREE_CODE (arg1) == COMPLEX_EXPR
11609 || TREE_CODE (arg1) == COMPLEX_CST))
11610 {
11611 tree real0, imag0, real1, imag1;
11612 tree rcond, icond;
11613
11614 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11615 {
11616 real0 = TREE_OPERAND (arg0, 0);
11617 imag0 = TREE_OPERAND (arg0, 1);
11618 }
11619 else
11620 {
11621 real0 = TREE_REALPART (arg0);
11622 imag0 = TREE_IMAGPART (arg0);
11623 }
11624
11625 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11626 {
11627 real1 = TREE_OPERAND (arg1, 0);
11628 imag1 = TREE_OPERAND (arg1, 1);
11629 }
11630 else
11631 {
11632 real1 = TREE_REALPART (arg1);
11633 imag1 = TREE_IMAGPART (arg1);
11634 }
11635
11636 rcond = fold_binary (code, type, real0, real1);
11637 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11638 {
11639 if (integer_zerop (rcond))
11640 {
11641 if (code == EQ_EXPR)
11642 return omit_two_operands (type, boolean_false_node,
11643 imag0, imag1);
11644 return fold_build2 (NE_EXPR, type, imag0, imag1);
11645 }
11646 else
11647 {
11648 if (code == NE_EXPR)
11649 return omit_two_operands (type, boolean_true_node,
11650 imag0, imag1);
11651 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11652 }
11653 }
11654
11655 icond = fold_binary (code, type, imag0, imag1);
11656 if (icond && TREE_CODE (icond) == INTEGER_CST)
11657 {
11658 if (integer_zerop (icond))
11659 {
11660 if (code == EQ_EXPR)
11661 return omit_two_operands (type, boolean_false_node,
11662 real0, real1);
11663 return fold_build2 (NE_EXPR, type, real0, real1);
11664 }
11665 else
11666 {
11667 if (code == NE_EXPR)
11668 return omit_two_operands (type, boolean_true_node,
11669 real0, real1);
11670 return fold_build2 (EQ_EXPR, type, real0, real1);
11671 }
11672 }
11673 }
11674
11675 return NULL_TREE;
11676
11677 case LT_EXPR:
11678 case GT_EXPR:
11679 case LE_EXPR:
11680 case GE_EXPR:
11681 tem = fold_comparison (code, type, op0, op1);
11682 if (tem != NULL_TREE)
11683 return tem;
11684
11685 /* Transform comparisons of the form X +- C CMP X. */
11686 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11688 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11689 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11690 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11691 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11692 {
11693 tree arg01 = TREE_OPERAND (arg0, 1);
11694 enum tree_code code0 = TREE_CODE (arg0);
11695 int is_positive;
11696
11697 if (TREE_CODE (arg01) == REAL_CST)
11698 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11699 else
11700 is_positive = tree_int_cst_sgn (arg01);
11701
11702 /* (X - c) > X becomes false. */
11703 if (code == GT_EXPR
11704 && ((code0 == MINUS_EXPR && is_positive >= 0)
11705 || (code0 == PLUS_EXPR && is_positive <= 0)))
11706 {
11707 if (TREE_CODE (arg01) == INTEGER_CST
11708 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11709 fold_overflow_warning (("assuming signed overflow does not "
11710 "occur when assuming that (X - c) > X "
11711 "is always false"),
11712 WARN_STRICT_OVERFLOW_ALL);
11713 return constant_boolean_node (0, type);
11714 }
11715
11716 /* Likewise (X + c) < X becomes false. */
11717 if (code == LT_EXPR
11718 && ((code0 == PLUS_EXPR && is_positive >= 0)
11719 || (code0 == MINUS_EXPR && is_positive <= 0)))
11720 {
11721 if (TREE_CODE (arg01) == INTEGER_CST
11722 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11723 fold_overflow_warning (("assuming signed overflow does not "
11724 "occur when assuming that "
11725 "(X + c) < X is always false"),
11726 WARN_STRICT_OVERFLOW_ALL);
11727 return constant_boolean_node (0, type);
11728 }
11729
11730 /* Convert (X - c) <= X to true. */
11731 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11732 && code == LE_EXPR
11733 && ((code0 == MINUS_EXPR && is_positive >= 0)
11734 || (code0 == PLUS_EXPR && is_positive <= 0)))
11735 {
11736 if (TREE_CODE (arg01) == INTEGER_CST
11737 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11738 fold_overflow_warning (("assuming signed overflow does not "
11739 "occur when assuming that "
11740 "(X - c) <= X is always true"),
11741 WARN_STRICT_OVERFLOW_ALL);
11742 return constant_boolean_node (1, type);
11743 }
11744
11745 /* Convert (X + c) >= X to true. */
11746 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11747 && code == GE_EXPR
11748 && ((code0 == PLUS_EXPR && is_positive >= 0)
11749 || (code0 == MINUS_EXPR && is_positive <= 0)))
11750 {
11751 if (TREE_CODE (arg01) == INTEGER_CST
11752 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11753 fold_overflow_warning (("assuming signed overflow does not "
11754 "occur when assuming that "
11755 "(X + c) >= X is always true"),
11756 WARN_STRICT_OVERFLOW_ALL);
11757 return constant_boolean_node (1, type);
11758 }
11759
11760 if (TREE_CODE (arg01) == INTEGER_CST)
11761 {
11762 /* Convert X + c > X and X - c < X to true for integers. */
11763 if (code == GT_EXPR
11764 && ((code0 == PLUS_EXPR && is_positive > 0)
11765 || (code0 == MINUS_EXPR && is_positive < 0)))
11766 {
11767 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11768 fold_overflow_warning (("assuming signed overflow does "
11769 "not occur when assuming that "
11770 "(X + c) > X is always true"),
11771 WARN_STRICT_OVERFLOW_ALL);
11772 return constant_boolean_node (1, type);
11773 }
11774
11775 if (code == LT_EXPR
11776 && ((code0 == MINUS_EXPR && is_positive > 0)
11777 || (code0 == PLUS_EXPR && is_positive < 0)))
11778 {
11779 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11780 fold_overflow_warning (("assuming signed overflow does "
11781 "not occur when assuming that "
11782 "(X - c) < X is always true"),
11783 WARN_STRICT_OVERFLOW_ALL);
11784 return constant_boolean_node (1, type);
11785 }
11786
11787 /* Convert X + c <= X and X - c >= X to false for integers. */
11788 if (code == LE_EXPR
11789 && ((code0 == PLUS_EXPR && is_positive > 0)
11790 || (code0 == MINUS_EXPR && is_positive < 0)))
11791 {
11792 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11793 fold_overflow_warning (("assuming signed overflow does "
11794 "not occur when assuming that "
11795 "(X + c) <= X is always false"),
11796 WARN_STRICT_OVERFLOW_ALL);
11797 return constant_boolean_node (0, type);
11798 }
11799
11800 if (code == GE_EXPR
11801 && ((code0 == MINUS_EXPR && is_positive > 0)
11802 || (code0 == PLUS_EXPR && is_positive < 0)))
11803 {
11804 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11805 fold_overflow_warning (("assuming signed overflow does "
11806 "not occur when assuming that "
11807 "(X - c) >= X is always false"),
11808 WARN_STRICT_OVERFLOW_ALL);
11809 return constant_boolean_node (0, type);
11810 }
11811 }
11812 }
11813
11814 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11815 This transformation affects the cases which are handled in later
11816 optimizations involving comparisons with non-negative constants. */
11817 if (TREE_CODE (arg1) == INTEGER_CST
11818 && TREE_CODE (arg0) != INTEGER_CST
11819 && tree_int_cst_sgn (arg1) > 0)
11820 {
11821 if (code == GE_EXPR)
11822 {
11823 arg1 = const_binop (MINUS_EXPR, arg1,
11824 build_int_cst (TREE_TYPE (arg1), 1), 0);
11825 return fold_build2 (GT_EXPR, type, arg0,
11826 fold_convert (TREE_TYPE (arg0), arg1));
11827 }
11828 if (code == LT_EXPR)
11829 {
11830 arg1 = const_binop (MINUS_EXPR, arg1,
11831 build_int_cst (TREE_TYPE (arg1), 1), 0);
11832 return fold_build2 (LE_EXPR, type, arg0,
11833 fold_convert (TREE_TYPE (arg0), arg1));
11834 }
11835 }
11836
11837 /* Comparisons with the highest or lowest possible integer of
11838 the specified precision will have known values. */
11839 {
11840 tree arg1_type = TREE_TYPE (arg1);
11841 unsigned int width = TYPE_PRECISION (arg1_type);
11842
11843 if (TREE_CODE (arg1) == INTEGER_CST
11844 && !TREE_OVERFLOW (arg1)
11845 && width <= 2 * HOST_BITS_PER_WIDE_INT
11846 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11847 {
11848 HOST_WIDE_INT signed_max_hi;
11849 unsigned HOST_WIDE_INT signed_max_lo;
11850 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11851
11852 if (width <= HOST_BITS_PER_WIDE_INT)
11853 {
11854 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11855 - 1;
11856 signed_max_hi = 0;
11857 max_hi = 0;
11858
11859 if (TYPE_UNSIGNED (arg1_type))
11860 {
11861 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11862 min_lo = 0;
11863 min_hi = 0;
11864 }
11865 else
11866 {
11867 max_lo = signed_max_lo;
11868 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11869 min_hi = -1;
11870 }
11871 }
11872 else
11873 {
11874 width -= HOST_BITS_PER_WIDE_INT;
11875 signed_max_lo = -1;
11876 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11877 - 1;
11878 max_lo = -1;
11879 min_lo = 0;
11880
11881 if (TYPE_UNSIGNED (arg1_type))
11882 {
11883 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11884 min_hi = 0;
11885 }
11886 else
11887 {
11888 max_hi = signed_max_hi;
11889 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11890 }
11891 }
11892
11893 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11894 && TREE_INT_CST_LOW (arg1) == max_lo)
11895 switch (code)
11896 {
11897 case GT_EXPR:
11898 return omit_one_operand (type, integer_zero_node, arg0);
11899
11900 case GE_EXPR:
11901 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11902
11903 case LE_EXPR:
11904 return omit_one_operand (type, integer_one_node, arg0);
11905
11906 case LT_EXPR:
11907 return fold_build2 (NE_EXPR, type, arg0, arg1);
11908
11909 /* The GE_EXPR and LT_EXPR cases above are not normally
11910 reached because of previous transformations. */
11911
11912 default:
11913 break;
11914 }
11915 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11916 == max_hi
11917 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11918 switch (code)
11919 {
11920 case GT_EXPR:
11921 arg1 = const_binop (PLUS_EXPR, arg1,
11922 build_int_cst (TREE_TYPE (arg1), 1), 0);
11923 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11924 case LE_EXPR:
11925 arg1 = const_binop (PLUS_EXPR, arg1,
11926 build_int_cst (TREE_TYPE (arg1), 1), 0);
11927 return fold_build2 (NE_EXPR, type, arg0, arg1);
11928 default:
11929 break;
11930 }
11931 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11932 == min_hi
11933 && TREE_INT_CST_LOW (arg1) == min_lo)
11934 switch (code)
11935 {
11936 case LT_EXPR:
11937 return omit_one_operand (type, integer_zero_node, arg0);
11938
11939 case LE_EXPR:
11940 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11941
11942 case GE_EXPR:
11943 return omit_one_operand (type, integer_one_node, arg0);
11944
11945 case GT_EXPR:
11946 return fold_build2 (NE_EXPR, type, op0, op1);
11947
11948 default:
11949 break;
11950 }
11951 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11952 == min_hi
11953 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11954 switch (code)
11955 {
11956 case GE_EXPR:
11957 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11958 return fold_build2 (NE_EXPR, type, arg0, arg1);
11959 case LT_EXPR:
11960 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11961 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11962 default:
11963 break;
11964 }
11965
11966 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11967 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11968 && TYPE_UNSIGNED (arg1_type)
11969 /* We will flip the signedness of the comparison operator
11970 associated with the mode of arg1, so the sign bit is
11971 specified by this mode. Check that arg1 is the signed
11972 max associated with this sign bit. */
11973 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11974 /* signed_type does not work on pointer types. */
11975 && INTEGRAL_TYPE_P (arg1_type))
11976 {
11977 		  /* The following case also applies to X < signed_max+1
11978 		     and X >= signed_max+1 because of previous transformations.  */
11979 if (code == LE_EXPR || code == GT_EXPR)
11980 {
11981 tree st0, st1;
11982 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11983 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11984 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11985 type, fold_convert (st0, arg0),
11986 build_int_cst (st1, 0));
11987 }
11988 }
11989 }
11990 }
11991
11992   /* If we are comparing an ABS_EXPR with a constant, we can
11993      convert all the cases into explicit comparisons, but they may
11994      well not be faster than doing the ABS and one comparison.
11995      But ABS (X) <= C is a range comparison, which becomes a subtraction
11996      and a comparison, and is probably faster; e.g. ABS (X) <= 5 becomes X >= -5 && X <= 5.  */
11997 if (code == LE_EXPR
11998 && TREE_CODE (arg1) == INTEGER_CST
11999 && TREE_CODE (arg0) == ABS_EXPR
12000 && ! TREE_SIDE_EFFECTS (arg0)
12001 && (0 != (tem = negate_expr (arg1)))
12002 && TREE_CODE (tem) == INTEGER_CST
12003 && !TREE_OVERFLOW (tem))
12004 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12005 build2 (GE_EXPR, type,
12006 TREE_OPERAND (arg0, 0), tem),
12007 build2 (LE_EXPR, type,
12008 TREE_OPERAND (arg0, 0), arg1));
12009
12010 /* Convert ABS_EXPR<x> >= 0 to true. */
12011 strict_overflow_p = false;
12012 if (code == GE_EXPR
12013 && (integer_zerop (arg1)
12014 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12015 && real_zerop (arg1)))
12016 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12017 {
12018 if (strict_overflow_p)
12019 fold_overflow_warning (("assuming signed overflow does not occur "
12020 "when simplifying comparison of "
12021 "absolute value and zero"),
12022 WARN_STRICT_OVERFLOW_CONDITIONAL);
12023 return omit_one_operand (type, integer_one_node, arg0);
12024 }
12025
12026 /* Convert ABS_EXPR<x> < 0 to false. */
12027 strict_overflow_p = false;
12028 if (code == LT_EXPR
12029 && (integer_zerop (arg1) || real_zerop (arg1))
12030 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12031 {
12032 if (strict_overflow_p)
12033 fold_overflow_warning (("assuming signed overflow does not occur "
12034 "when simplifying comparison of "
12035 "absolute value and zero"),
12036 WARN_STRICT_OVERFLOW_CONDITIONAL);
12037 return omit_one_operand (type, integer_zero_node, arg0);
12038 }
12039
12040 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12041 and similarly for >= into !=. */
12042 if ((code == LT_EXPR || code == GE_EXPR)
12043 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12044 && TREE_CODE (arg1) == LSHIFT_EXPR
12045 && integer_onep (TREE_OPERAND (arg1, 0)))
12046 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12047 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12048 TREE_OPERAND (arg1, 1)),
12049 build_int_cst (TREE_TYPE (arg0), 0));
12050
12051 if ((code == LT_EXPR || code == GE_EXPR)
12052 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12053 && (TREE_CODE (arg1) == NOP_EXPR
12054 || TREE_CODE (arg1) == CONVERT_EXPR)
12055 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12056 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12057 return
12058 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12059 fold_convert (TREE_TYPE (arg0),
12060 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12061 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12062 1))),
12063 build_int_cst (TREE_TYPE (arg0), 0));
12064
12065 return NULL_TREE;
12066
12067 case UNORDERED_EXPR:
12068 case ORDERED_EXPR:
12069 case UNLT_EXPR:
12070 case UNLE_EXPR:
12071 case UNGT_EXPR:
12072 case UNGE_EXPR:
12073 case UNEQ_EXPR:
12074 case LTGT_EXPR:
12075 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12076 {
12077 t1 = fold_relational_const (code, type, arg0, arg1);
12078 if (t1 != NULL_TREE)
12079 return t1;
12080 }
12081
12082 /* If the first operand is NaN, the result is constant. */
12083 if (TREE_CODE (arg0) == REAL_CST
12084 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12085 && (code != LTGT_EXPR || ! flag_trapping_math))
12086 {
12087 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12088 ? integer_zero_node
12089 : integer_one_node;
12090 return omit_one_operand (type, t1, arg1);
12091 }
12092
12093 /* If the second operand is NaN, the result is constant. */
12094 if (TREE_CODE (arg1) == REAL_CST
12095 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12096 && (code != LTGT_EXPR || ! flag_trapping_math))
12097 {
12098 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12099 ? integer_zero_node
12100 : integer_one_node;
12101 return omit_one_operand (type, t1, arg0);
12102 }
12103
12104 /* Simplify unordered comparison of something with itself. */
12105 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12106 && operand_equal_p (arg0, arg1, 0))
12107 return constant_boolean_node (1, type);
12108
12109 if (code == LTGT_EXPR
12110 && !flag_trapping_math
12111 && operand_equal_p (arg0, arg1, 0))
12112 return constant_boolean_node (0, type);
12113
12114 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12115 {
12116 tree targ0 = strip_float_extensions (arg0);
12117 tree targ1 = strip_float_extensions (arg1);
12118 tree newtype = TREE_TYPE (targ0);
12119
12120 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12121 newtype = TREE_TYPE (targ1);
12122
12123 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12124 return fold_build2 (code, type, fold_convert (newtype, targ0),
12125 fold_convert (newtype, targ1));
12126 }
12127
12128 return NULL_TREE;
12129
12130 case COMPOUND_EXPR:
12131 /* When pedantic, a compound expression can be neither an lvalue
12132 nor an integer constant expression. */
12133 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12134 return NULL_TREE;
12135       /* Don't let (0, 0) be a null pointer constant.  */
12136 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12137 : fold_convert (type, arg1);
12138 return pedantic_non_lvalue (tem);
12139
12140 case COMPLEX_EXPR:
12141 if ((TREE_CODE (arg0) == REAL_CST
12142 && TREE_CODE (arg1) == REAL_CST)
12143 || (TREE_CODE (arg0) == INTEGER_CST
12144 && TREE_CODE (arg1) == INTEGER_CST))
12145 return build_complex (type, arg0, arg1);
12146 return NULL_TREE;
12147
12148 case ASSERT_EXPR:
12149 /* An ASSERT_EXPR should never be passed to fold_binary. */
12150 gcc_unreachable ();
12151
12152 default:
12153 return NULL_TREE;
12154 } /* switch (code) */
12155 }
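
/* An illustrative sketch, kept out of the build with #if 0 and not part
   of the GCC sources: it assumes X is a previously built expression of
   signed integral type, and shows the "(X - c) > X becomes false" fold
   above firing.  The helper name is hypothetical.  */
#if 0
static tree
example_fold_x_minus_1_gt_x (tree x)
{
  tree type = TREE_TYPE (x);
  tree one = build_int_cst (type, 1);
  /* Build (X - 1) > X.  When signed overflow is undefined for TYPE,
     fold_build2 reaches the MINUS_EXPR/GT_EXPR case above and returns
     constant false (possibly after a strict-overflow warning).  */
  tree lhs = fold_build2 (MINUS_EXPR, type, x, one);
  return fold_build2 (GT_EXPR, boolean_type_node, lhs, x);
}
#endif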
12156
12157 /* Callback for walk_tree, looking for a LABEL_EXPR.  Return *TP if it
12158    is a LABEL_EXPR; otherwise return NULL_TREE.  Do not walk into the
12159    sub-tree of a GOTO_EXPR.  */
12160
12161 static tree
12162 contains_label_1 (tree *tp,
12163 int *walk_subtrees,
12164 void *data ATTRIBUTE_UNUSED)
12165 {
12166 switch (TREE_CODE (*tp))
12167 {
12168 case LABEL_EXPR:
12169 return *tp;
12170 case GOTO_EXPR:
12171 *walk_subtrees = 0;
12172       /* FALLTHRU */
12173 default:
12174 return NULL_TREE;
12175 }
12176 }
12177
12178 /* Return true if the sub-tree ST contains a label LABEL_EXPR that is
12179    reachable from outside the sub-tree, and false otherwise.  */
12181
12182 static bool
12183 contains_label_p (tree st)
12184 {
12185   return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
12186 }
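
/* An illustrative sketch of the same walk_tree callback pattern, kept
   out of the build and not part of the GCC sources: a hypothetical
   counter of GOTO_EXPR nodes, with DATA carrying the caller's counter.  */
#if 0
static tree
count_gotos_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == GOTO_EXPR)
    ++*(int *) data;
  return NULL_TREE;	/* NULL_TREE means "keep walking".  */
}

static int
count_gotos (tree st)
{
  int n = 0;
  walk_tree (&st, count_gotos_1, &n, NULL);
  return n;
}
#endif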
12187
12188 /* Fold a ternary expression of code CODE and type TYPE with operands
12189 OP0, OP1, and OP2. Return the folded expression if folding is
12190 successful. Otherwise, return NULL_TREE. */
12191
12192 tree
12193 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12194 {
12195 tree tem;
12196 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12197 enum tree_code_class kind = TREE_CODE_CLASS (code);
12198
12199 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12200 && TREE_CODE_LENGTH (code) == 3);
12201
12202 /* Strip any conversions that don't change the mode. This is safe
12203 for every expression, except for a comparison expression because
12204 its signedness is derived from its operands. So, in the latter
12205 case, only strip conversions that don't change the signedness.
12206
12207 Note that this is done as an internal manipulation within the
12208 constant folder, in order to find the simplest representation of
12209    the arguments so that their form can be studied.  In any case,
12210 the appropriate type conversions should be put back in the tree
12211 that will get out of the constant folder. */
12212 if (op0)
12213 {
12214 arg0 = op0;
12215 STRIP_NOPS (arg0);
12216 }
12217
12218 if (op1)
12219 {
12220 arg1 = op1;
12221 STRIP_NOPS (arg1);
12222 }
12223
12224 switch (code)
12225 {
12226 case COMPONENT_REF:
12227 if (TREE_CODE (arg0) == CONSTRUCTOR
12228 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12229 {
12230 unsigned HOST_WIDE_INT idx;
12231 tree field, value;
12232 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12233 if (field == arg1)
12234 return value;
12235 }
12236 return NULL_TREE;
12237
12238 case COND_EXPR:
12239 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12240 so all simple results must be passed through pedantic_non_lvalue. */
12241 if (TREE_CODE (arg0) == INTEGER_CST)
12242 {
12243 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12244 tem = integer_zerop (arg0) ? op2 : op1;
12245 /* Only optimize constant conditions when the selected branch
12246 has the same type as the COND_EXPR. This avoids optimizing
12247 away "c ? x : throw", where the throw has a void type.
12248 	 Avoid throwing away the operand that contains a label.  */
12249 if ((!TREE_SIDE_EFFECTS (unused_op)
12250 || !contains_label_p (unused_op))
12251 && (! VOID_TYPE_P (TREE_TYPE (tem))
12252 || VOID_TYPE_P (type)))
12253 return pedantic_non_lvalue (tem);
12254 return NULL_TREE;
12255 }
12256 if (operand_equal_p (arg1, op2, 0))
12257 return pedantic_omit_one_operand (type, arg1, arg0);
12258
12259 /* If we have A op B ? A : C, we may be able to convert this to a
12260 simpler expression, depending on the operation and the values
12261 of B and C. Signed zeros prevent all of these transformations,
12262 for reasons given above each one.
12263
12264 Also try swapping the arguments and inverting the conditional. */
12265 if (COMPARISON_CLASS_P (arg0)
12266 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12267 arg1, TREE_OPERAND (arg0, 1))
12268 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12269 {
12270 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12271 if (tem)
12272 return tem;
12273 }
12274
12275 if (COMPARISON_CLASS_P (arg0)
12276 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12277 op2,
12278 TREE_OPERAND (arg0, 1))
12279 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12280 {
12281 tem = fold_truth_not_expr (arg0);
12282 if (tem && COMPARISON_CLASS_P (tem))
12283 {
12284 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12285 if (tem)
12286 return tem;
12287 }
12288 }
12289
12290 /* If the second operand is simpler than the third, swap them
12291 since that produces better jump optimization results. */
12292 if (truth_value_p (TREE_CODE (arg0))
12293 && tree_swap_operands_p (op1, op2, false))
12294 {
12295 /* See if this can be inverted. If it can't, possibly because
12296 it was a floating-point inequality comparison, don't do
12297 anything. */
12298 tem = fold_truth_not_expr (arg0);
12299 if (tem)
12300 return fold_build3 (code, type, tem, op2, op1);
12301 }
12302
12303 /* Convert A ? 1 : 0 to simply A. */
12304 if (integer_onep (op1)
12305 && integer_zerop (op2)
12306 /* If we try to convert OP0 to our type, the
12307 call to fold will try to move the conversion inside
12308 a COND, which will recurse. In that case, the COND_EXPR
12309 is probably the best choice, so leave it alone. */
12310 && type == TREE_TYPE (arg0))
12311 return pedantic_non_lvalue (arg0);
12312
12313 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12314 over COND_EXPR in cases such as floating point comparisons. */
12315 if (integer_zerop (op1)
12316 && integer_onep (op2)
12317 && truth_value_p (TREE_CODE (arg0)))
12318 return pedantic_non_lvalue (fold_convert (type,
12319 invert_truthvalue (arg0)));
12320
12321 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12322 if (TREE_CODE (arg0) == LT_EXPR
12323 && integer_zerop (TREE_OPERAND (arg0, 1))
12324 && integer_zerop (op2)
12325 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12326 {
12327 /* sign_bit_p only checks ARG1 bits within A's precision.
12328 If <sign bit of A> has wider type than A, bits outside
12329 of A's precision in <sign bit of A> need to be checked.
12330 	     If they are all 0, this optimization needs to be done
12331 	     in A's unsigned type; if they are all 1, in A's signed type;
12332 	     otherwise this can't be done.  */
12333 if (TYPE_PRECISION (TREE_TYPE (tem))
12334 < TYPE_PRECISION (TREE_TYPE (arg1))
12335 && TYPE_PRECISION (TREE_TYPE (tem))
12336 < TYPE_PRECISION (type))
12337 {
12338 unsigned HOST_WIDE_INT mask_lo;
12339 HOST_WIDE_INT mask_hi;
12340 int inner_width, outer_width;
12341 tree tem_type;
12342
12343 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12344 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12345 if (outer_width > TYPE_PRECISION (type))
12346 outer_width = TYPE_PRECISION (type);
12347
12348 if (outer_width > HOST_BITS_PER_WIDE_INT)
12349 {
12350 mask_hi = ((unsigned HOST_WIDE_INT) -1
12351 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12352 mask_lo = -1;
12353 }
12354 else
12355 {
12356 mask_hi = 0;
12357 mask_lo = ((unsigned HOST_WIDE_INT) -1
12358 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12359 }
12360 if (inner_width > HOST_BITS_PER_WIDE_INT)
12361 {
12362 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12363 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12364 mask_lo = 0;
12365 }
12366 else
12367 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12368 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12369
12370 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12371 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12372 {
12373 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12374 tem = fold_convert (tem_type, tem);
12375 }
12376 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12377 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12378 {
12379 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12380 tem = fold_convert (tem_type, tem);
12381 }
12382 else
12383 tem = NULL;
12384 }
12385
12386 if (tem)
12387 return fold_convert (type,
12388 fold_build2 (BIT_AND_EXPR,
12389 TREE_TYPE (tem), tem,
12390 fold_convert (TREE_TYPE (tem),
12391 arg1)));
12392 }
12393
12394 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12395 already handled above. */
12396 if (TREE_CODE (arg0) == BIT_AND_EXPR
12397 && integer_onep (TREE_OPERAND (arg0, 1))
12398 && integer_zerop (op2)
12399 && integer_pow2p (arg1))
12400 {
12401 tree tem = TREE_OPERAND (arg0, 0);
12402 STRIP_NOPS (tem);
12403 if (TREE_CODE (tem) == RSHIFT_EXPR
12404 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12405 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12406 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12407 return fold_build2 (BIT_AND_EXPR, type,
12408 TREE_OPERAND (tem, 0), arg1);
12409 }
12410
12411 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12412 is probably obsolete because the first operand should be a
12413 truth value (that's why we have the two cases above), but let's
12414 leave it in until we can confirm this for all front-ends. */
12415 if (integer_zerop (op2)
12416 && TREE_CODE (arg0) == NE_EXPR
12417 && integer_zerop (TREE_OPERAND (arg0, 1))
12418 && integer_pow2p (arg1)
12419 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12420 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12421 arg1, OEP_ONLY_CONST))
12422 return pedantic_non_lvalue (fold_convert (type,
12423 TREE_OPERAND (arg0, 0)));
12424
12425 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12426 if (integer_zerop (op2)
12427 && truth_value_p (TREE_CODE (arg0))
12428 && truth_value_p (TREE_CODE (arg1)))
12429 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12430 fold_convert (type, arg0),
12431 arg1);
12432
12433 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12434 if (integer_onep (op2)
12435 && truth_value_p (TREE_CODE (arg0))
12436 && truth_value_p (TREE_CODE (arg1)))
12437 {
12438 /* Only perform transformation if ARG0 is easily inverted. */
12439 tem = fold_truth_not_expr (arg0);
12440 if (tem)
12441 return fold_build2 (TRUTH_ORIF_EXPR, type,
12442 fold_convert (type, tem),
12443 arg1);
12444 }
12445
12446 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12447 if (integer_zerop (arg1)
12448 && truth_value_p (TREE_CODE (arg0))
12449 && truth_value_p (TREE_CODE (op2)))
12450 {
12451 /* Only perform transformation if ARG0 is easily inverted. */
12452 tem = fold_truth_not_expr (arg0);
12453 if (tem)
12454 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12455 fold_convert (type, tem),
12456 op2);
12457 }
12458
12459 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12460 if (integer_onep (arg1)
12461 && truth_value_p (TREE_CODE (arg0))
12462 && truth_value_p (TREE_CODE (op2)))
12463 return fold_build2 (TRUTH_ORIF_EXPR, type,
12464 fold_convert (type, arg0),
12465 op2);
12466
12467 return NULL_TREE;
12468
12469 case CALL_EXPR:
12470 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12471 of fold_ternary on them. */
12472 gcc_unreachable ();
12473
12474 case BIT_FIELD_REF:
12475 if ((TREE_CODE (arg0) == VECTOR_CST
12476 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12477 && type == TREE_TYPE (TREE_TYPE (arg0))
12478 && host_integerp (arg1, 1)
12479 && host_integerp (op2, 1))
12480 {
12481 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12482 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12483
12484 if (width != 0
12485 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12486 && (idx % width) == 0
12487 && (idx = idx / width)
12488 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12489 {
12490 tree elements = NULL_TREE;
12491
12492 if (TREE_CODE (arg0) == VECTOR_CST)
12493 elements = TREE_VECTOR_CST_ELTS (arg0);
12494 else
12495 {
12496 unsigned HOST_WIDE_INT idx;
12497 tree value;
12498
12499 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12500 elements = tree_cons (NULL_TREE, value, elements);
12501 }
12502 while (idx-- > 0 && elements)
12503 elements = TREE_CHAIN (elements);
12504 if (elements)
12505 return TREE_VALUE (elements);
12506 else
12507 return fold_convert (type, integer_zero_node);
12508 }
12509 }
12510 return NULL_TREE;
12511
12512 default:
12513 return NULL_TREE;
12514 } /* switch (code) */
12515 }
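
/* An illustrative sketch, kept out of the build and not part of the
   GCC sources: building A ? 1 : 0 and letting the COND_EXPR case above
   fold it back to A.  It assumes A is a non-constant truth value whose
   integral type is the type requested, as that case requires.  */
#if 0
static tree
example_fold_cond_a_1_0 (tree a)
{
  tree type = TREE_TYPE (a);
  return fold_build3 (COND_EXPR, type, a,
		      build_int_cst (type, 1), build_int_cst (type, 0));
}
#endif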
12516
12517 /* Perform constant folding and related simplification of EXPR.
12518 The related simplifications include x*1 => x, x*0 => 0, etc.,
12519 and application of the associative law.
12520 NOP_EXPR conversions may be removed freely (as long as we
12521 are careful not to change the type of the overall expression).
12522 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12523 but we can constant-fold them if they have constant operands. */
12524
12525 #ifdef ENABLE_FOLD_CHECKING
12526 # define fold(x) fold_1 (x)
12527 static tree fold_1 (tree);
12528 static
12529 #endif
12530 tree
12531 fold (tree expr)
12532 {
12533 const tree t = expr;
12534 enum tree_code code = TREE_CODE (t);
12535 enum tree_code_class kind = TREE_CODE_CLASS (code);
12536 tree tem;
12537
12538 /* Return right away if a constant. */
12539 if (kind == tcc_constant)
12540 return t;
12541
12542 /* CALL_EXPR-like objects with variable numbers of operands are
12543 treated specially. */
12544 if (kind == tcc_vl_exp)
12545 {
12546 if (code == CALL_EXPR)
12547 {
12548 tem = fold_call_expr (expr, false);
12549 return tem ? tem : expr;
12550 }
12551 return expr;
12552 }
12553
12554 if (IS_EXPR_CODE_CLASS (kind)
12555 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12556 {
12557 tree type = TREE_TYPE (t);
12558 tree op0, op1, op2;
12559
12560 switch (TREE_CODE_LENGTH (code))
12561 {
12562 case 1:
12563 op0 = TREE_OPERAND (t, 0);
12564 tem = fold_unary (code, type, op0);
12565 return tem ? tem : expr;
12566 case 2:
12567 op0 = TREE_OPERAND (t, 0);
12568 op1 = TREE_OPERAND (t, 1);
12569 tem = fold_binary (code, type, op0, op1);
12570 return tem ? tem : expr;
12571 case 3:
12572 op0 = TREE_OPERAND (t, 0);
12573 op1 = TREE_OPERAND (t, 1);
12574 op2 = TREE_OPERAND (t, 2);
12575 tem = fold_ternary (code, type, op0, op1, op2);
12576 return tem ? tem : expr;
12577 default:
12578 break;
12579 }
12580 }
12581
12582 switch (code)
12583 {
12584 case CONST_DECL:
12585 return fold (DECL_INITIAL (t));
12586
12587 default:
12588 return t;
12589 } /* switch (code) */
12590 }
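
/* An illustrative sketch, kept out of the build and not part of the
   GCC sources: the usual pattern of building a node and handing it to
   fold, which dispatches on the operand count to fold_binary here.  */
#if 0
static tree
example_fold_sum (tree a, tree b)
{
  /* If A and B are both INTEGER_CSTs the result is a folded constant;
     otherwise fold returns the tree unchanged or simplified (e.g.
     A + 0 becomes A).  */
  return fold (build2 (PLUS_EXPR, TREE_TYPE (a), a, b));
}
#endif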
12591
12592 #ifdef ENABLE_FOLD_CHECKING
12593 #undef fold
12594
12595 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12596 static void fold_check_failed (tree, tree);
12597 void print_fold_checksum (tree);
12598
12599 /* When --enable-checking=fold, compute a digest of EXPR before
12600    and after the actual fold call to verify that fold did not
12601    accidentally change the original expr.  */
12602
12603 tree
12604 fold (tree expr)
12605 {
12606 tree ret;
12607 struct md5_ctx ctx;
12608 unsigned char checksum_before[16], checksum_after[16];
12609 htab_t ht;
12610
12611 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12612 md5_init_ctx (&ctx);
12613 fold_checksum_tree (expr, &ctx, ht);
12614 md5_finish_ctx (&ctx, checksum_before);
12615 htab_empty (ht);
12616
12617 ret = fold_1 (expr);
12618
12619 md5_init_ctx (&ctx);
12620 fold_checksum_tree (expr, &ctx, ht);
12621 md5_finish_ctx (&ctx, checksum_after);
12622 htab_delete (ht);
12623
12624 if (memcmp (checksum_before, checksum_after, 16))
12625 fold_check_failed (expr, ret);
12626
12627 return ret;
12628 }
12629
12630 void
12631 print_fold_checksum (tree expr)
12632 {
12633 struct md5_ctx ctx;
12634 unsigned char checksum[16], cnt;
12635 htab_t ht;
12636
12637 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12638 md5_init_ctx (&ctx);
12639 fold_checksum_tree (expr, &ctx, ht);
12640 md5_finish_ctx (&ctx, checksum);
12641 htab_delete (ht);
12642 for (cnt = 0; cnt < 16; ++cnt)
12643 fprintf (stderr, "%02x", checksum[cnt]);
12644 putc ('\n', stderr);
12645 }
12646
12647 static void
12648 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12649 {
12650 internal_error ("fold check: original tree changed by fold");
12651 }
12652
12653 static void
12654 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12655 {
12656 void **slot;
12657 enum tree_code code;
12658 struct tree_function_decl buf;
12659 int i, len;
12660
12661 recursive_label:
12662
12663 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12664 <= sizeof (struct tree_function_decl))
12665 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12666 if (expr == NULL)
12667 return;
12668 slot = htab_find_slot (ht, expr, INSERT);
12669 if (*slot != NULL)
12670 return;
12671 *slot = expr;
12672 code = TREE_CODE (expr);
12673 if (TREE_CODE_CLASS (code) == tcc_declaration
12674 && DECL_ASSEMBLER_NAME_SET_P (expr))
12675 {
12676 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12677 memcpy ((char *) &buf, expr, tree_size (expr));
12678 expr = (tree) &buf;
12679 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12680 }
12681 else if (TREE_CODE_CLASS (code) == tcc_type
12682 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12683 || TYPE_CACHED_VALUES_P (expr)
12684 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12685 {
12686 /* Allow these fields to be modified. */
12687 memcpy ((char *) &buf, expr, tree_size (expr));
12688 expr = (tree) &buf;
12689 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12690 TYPE_POINTER_TO (expr) = NULL;
12691 TYPE_REFERENCE_TO (expr) = NULL;
12692 if (TYPE_CACHED_VALUES_P (expr))
12693 {
12694 TYPE_CACHED_VALUES_P (expr) = 0;
12695 TYPE_CACHED_VALUES (expr) = NULL;
12696 }
12697 }
12698 md5_process_bytes (expr, tree_size (expr), ctx);
12699 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12700 if (TREE_CODE_CLASS (code) != tcc_type
12701 && TREE_CODE_CLASS (code) != tcc_declaration
12702 && code != TREE_LIST)
12703 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12704 switch (TREE_CODE_CLASS (code))
12705 {
12706 case tcc_constant:
12707 switch (code)
12708 {
12709 case STRING_CST:
12710 md5_process_bytes (TREE_STRING_POINTER (expr),
12711 TREE_STRING_LENGTH (expr), ctx);
12712 break;
12713 case COMPLEX_CST:
12714 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12715 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12716 break;
12717 case VECTOR_CST:
12718 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12719 break;
12720 default:
12721 break;
12722 }
12723 break;
12724 case tcc_exceptional:
12725 switch (code)
12726 {
12727 case TREE_LIST:
12728 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12729 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12730 expr = TREE_CHAIN (expr);
12731 goto recursive_label;
12732 break;
12733 case TREE_VEC:
12734 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12735 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12736 break;
12737 default:
12738 break;
12739 }
12740 break;
12741 case tcc_expression:
12742 case tcc_reference:
12743 case tcc_comparison:
12744 case tcc_unary:
12745 case tcc_binary:
12746 case tcc_statement:
12747 case tcc_vl_exp:
12748 len = TREE_OPERAND_LENGTH (expr);
12749 for (i = 0; i < len; ++i)
12750 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12751 break;
12752 case tcc_declaration:
12753 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12754 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12755 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12756 {
12757 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12758 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12759 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12760 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12761 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12762 }
12763 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12764 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12765
12766 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12767 {
12768 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12769 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12770 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12771 }
12772 break;
12773 case tcc_type:
12774 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12775 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12776 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12777 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12778 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12779 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12780 if (INTEGRAL_TYPE_P (expr)
12781 || SCALAR_FLOAT_TYPE_P (expr))
12782 {
12783 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12784 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12785 }
12786 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12787 if (TREE_CODE (expr) == RECORD_TYPE
12788 || TREE_CODE (expr) == UNION_TYPE
12789 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12790 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12791 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12792 break;
12793 default:
12794 break;
12795 }
12796 }
12797
12798 #endif
12799
12800 /* Fold a unary tree expression with code CODE of type TYPE with an
12801 operand OP0. Return a folded expression if successful. Otherwise,
12802 return a tree expression with code CODE of type TYPE with an
12803 operand OP0. */
12804
12805 tree
12806 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12807 {
12808 tree tem;
12809 #ifdef ENABLE_FOLD_CHECKING
12810 unsigned char checksum_before[16], checksum_after[16];
12811 struct md5_ctx ctx;
12812 htab_t ht;
12813
12814 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12815 md5_init_ctx (&ctx);
12816 fold_checksum_tree (op0, &ctx, ht);
12817 md5_finish_ctx (&ctx, checksum_before);
12818 htab_empty (ht);
12819 #endif
12820
12821 tem = fold_unary (code, type, op0);
12822 if (!tem)
12823 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12824
12825 #ifdef ENABLE_FOLD_CHECKING
12826 md5_init_ctx (&ctx);
12827 fold_checksum_tree (op0, &ctx, ht);
12828 md5_finish_ctx (&ctx, checksum_after);
12829 htab_delete (ht);
12830
12831 if (memcmp (checksum_before, checksum_after, 16))
12832 fold_check_failed (op0, tem);
12833 #endif
12834 return tem;
12835 }
12836
12837 /* Fold a binary tree expression with code CODE of type TYPE with
12838 operands OP0 and OP1. Return a folded expression if successful.
12839 Otherwise, return a tree expression with code CODE of type TYPE
12840 with operands OP0 and OP1. */
12841
12842 tree
12843 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12844 MEM_STAT_DECL)
12845 {
12846 tree tem;
12847 #ifdef ENABLE_FOLD_CHECKING
12848 unsigned char checksum_before_op0[16],
12849 checksum_before_op1[16],
12850 checksum_after_op0[16],
12851 checksum_after_op1[16];
12852 struct md5_ctx ctx;
12853 htab_t ht;
12854
12855 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12856 md5_init_ctx (&ctx);
12857 fold_checksum_tree (op0, &ctx, ht);
12858 md5_finish_ctx (&ctx, checksum_before_op0);
12859 htab_empty (ht);
12860
12861 md5_init_ctx (&ctx);
12862 fold_checksum_tree (op1, &ctx, ht);
12863 md5_finish_ctx (&ctx, checksum_before_op1);
12864 htab_empty (ht);
12865 #endif
12866
12867 tem = fold_binary (code, type, op0, op1);
12868 if (!tem)
12869 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12870
12871 #ifdef ENABLE_FOLD_CHECKING
12872 md5_init_ctx (&ctx);
12873 fold_checksum_tree (op0, &ctx, ht);
12874 md5_finish_ctx (&ctx, checksum_after_op0);
12875 htab_empty (ht);
12876
12877 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12878 fold_check_failed (op0, tem);
12879
12880 md5_init_ctx (&ctx);
12881 fold_checksum_tree (op1, &ctx, ht);
12882 md5_finish_ctx (&ctx, checksum_after_op1);
12883 htab_delete (ht);
12884
12885 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12886 fold_check_failed (op1, tem);
12887 #endif
12888 return tem;
12889 }
12890
12891 /* Fold a ternary tree expression with code CODE of type TYPE with
12892 operands OP0, OP1, and OP2. Return a folded expression if
12893 successful. Otherwise, return a tree expression with code CODE of
12894 type TYPE with operands OP0, OP1, and OP2. */
12895
12896 tree
12897 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12898 MEM_STAT_DECL)
12899 {
12900 tree tem;
12901 #ifdef ENABLE_FOLD_CHECKING
12902 unsigned char checksum_before_op0[16],
12903 checksum_before_op1[16],
12904 checksum_before_op2[16],
12905 checksum_after_op0[16],
12906 checksum_after_op1[16],
12907 checksum_after_op2[16];
12908 struct md5_ctx ctx;
12909 htab_t ht;
12910
12911 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12912 md5_init_ctx (&ctx);
12913 fold_checksum_tree (op0, &ctx, ht);
12914 md5_finish_ctx (&ctx, checksum_before_op0);
12915 htab_empty (ht);
12916
12917 md5_init_ctx (&ctx);
12918 fold_checksum_tree (op1, &ctx, ht);
12919 md5_finish_ctx (&ctx, checksum_before_op1);
12920 htab_empty (ht);
12921
12922 md5_init_ctx (&ctx);
12923 fold_checksum_tree (op2, &ctx, ht);
12924 md5_finish_ctx (&ctx, checksum_before_op2);
12925 htab_empty (ht);
12926 #endif
12927
12928 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12929 tem = fold_ternary (code, type, op0, op1, op2);
12930 if (!tem)
12931 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12932
12933 #ifdef ENABLE_FOLD_CHECKING
12934 md5_init_ctx (&ctx);
12935 fold_checksum_tree (op0, &ctx, ht);
12936 md5_finish_ctx (&ctx, checksum_after_op0);
12937 htab_empty (ht);
12938
12939 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12940 fold_check_failed (op0, tem);
12941
12942 md5_init_ctx (&ctx);
12943 fold_checksum_tree (op1, &ctx, ht);
12944 md5_finish_ctx (&ctx, checksum_after_op1);
12945 htab_empty (ht);
12946
12947 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12948 fold_check_failed (op1, tem);
12949
12950 md5_init_ctx (&ctx);
12951 fold_checksum_tree (op2, &ctx, ht);
12952 md5_finish_ctx (&ctx, checksum_after_op2);
12953 htab_delete (ht);
12954
12955 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12956 fold_check_failed (op2, tem);
12957 #endif
12958 return tem;
12959 }
12960
12961 /* Fold a CALL_EXPR of type TYPE with function FN, the NARGS
12962    arguments in ARGARRAY, and a null static chain.
12963 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12964 of type TYPE from the given operands as constructed by build_call_array. */
12965
12966 tree
12967 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
12968 {
12969 tree tem;
12970 #ifdef ENABLE_FOLD_CHECKING
12971 unsigned char checksum_before_fn[16],
12972 checksum_before_arglist[16],
12973 checksum_after_fn[16],
12974 checksum_after_arglist[16];
12975 struct md5_ctx ctx;
12976 htab_t ht;
12977 int i;
12978
12979 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12980 md5_init_ctx (&ctx);
12981 fold_checksum_tree (fn, &ctx, ht);
12982 md5_finish_ctx (&ctx, checksum_before_fn);
12983 htab_empty (ht);
12984
12985 md5_init_ctx (&ctx);
12986 for (i = 0; i < nargs; i++)
12987 fold_checksum_tree (argarray[i], &ctx, ht);
12988 md5_finish_ctx (&ctx, checksum_before_arglist);
12989 htab_empty (ht);
12990 #endif
12991
12992 tem = fold_builtin_call_array (type, fn, nargs, argarray);
12993
12994 #ifdef ENABLE_FOLD_CHECKING
12995 md5_init_ctx (&ctx);
12996 fold_checksum_tree (fn, &ctx, ht);
12997 md5_finish_ctx (&ctx, checksum_after_fn);
12998 htab_empty (ht);
12999
13000 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13001 fold_check_failed (fn, tem);
13002
13003 md5_init_ctx (&ctx);
13004 for (i = 0; i < nargs; i++)
13005 fold_checksum_tree (argarray[i], &ctx, ht);
13006 md5_finish_ctx (&ctx, checksum_after_arglist);
13007 htab_delete (ht);
13008
13009 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13010 fold_check_failed (NULL_TREE, tem);
13011 #endif
13012 return tem;
13013 }
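
/* An illustrative sketch, kept out of the build and not part of the
   GCC sources: folding a call through the array-based entry point
   above.  FNDECL is assumed to be a FUNCTION_DECL for a foldable
   builtin such as abs.  */
#if 0
static tree
example_fold_call (tree fndecl, tree arg)
{
  tree fn = build_fold_addr_expr (fndecl);
  return fold_build_call_array (TREE_TYPE (TREE_TYPE (fndecl)),
				fn, 1, &arg);
}
#endif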
13014
13015 /* Perform constant folding and related simplification of initializer
13016 expression EXPR. These behave identically to "fold_buildN" but ignore
13017 potential run-time traps and exceptions that fold must preserve. */
13018
13019 #define START_FOLD_INIT \
13020 int saved_signaling_nans = flag_signaling_nans;\
13021 int saved_trapping_math = flag_trapping_math;\
13022 int saved_rounding_math = flag_rounding_math;\
13023 int saved_trapv = flag_trapv;\
13024 int saved_folding_initializer = folding_initializer;\
13025 flag_signaling_nans = 0;\
13026 flag_trapping_math = 0;\
13027 flag_rounding_math = 0;\
13028 flag_trapv = 0;\
13029 folding_initializer = 1;
13030
13031 #define END_FOLD_INIT \
13032 flag_signaling_nans = saved_signaling_nans;\
13033 flag_trapping_math = saved_trapping_math;\
13034 flag_rounding_math = saved_rounding_math;\
13035 flag_trapv = saved_trapv;\
13036 folding_initializer = saved_folding_initializer;
13037
13038 tree
13039 fold_build1_initializer (enum tree_code code, tree type, tree op)
13040 {
13041 tree result;
13042 START_FOLD_INIT;
13043
13044 result = fold_build1 (code, type, op);
13045
13046 END_FOLD_INIT;
13047 return result;
13048 }
13049
13050 tree
13051 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13052 {
13053 tree result;
13054 START_FOLD_INIT;
13055
13056 result = fold_build2 (code, type, op0, op1);
13057
13058 END_FOLD_INIT;
13059 return result;
13060 }
13061
13062 tree
13063 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13064 tree op2)
13065 {
13066 tree result;
13067 START_FOLD_INIT;
13068
13069 result = fold_build3 (code, type, op0, op1, op2);
13070
13071 END_FOLD_INIT;
13072 return result;
13073 }
13074
13075 tree
13076 fold_build_call_array_initializer (tree type, tree fn,
13077 int nargs, tree *argarray)
13078 {
13079 tree result;
13080 START_FOLD_INIT;
13081
13082 result = fold_build_call_array (type, fn, nargs, argarray);
13083
13084 END_FOLD_INIT;
13085 return result;
13086 }
13087
13088 #undef START_FOLD_INIT
13089 #undef END_FOLD_INIT
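
/* An illustrative sketch, kept out of the build and not part of the
   GCC sources: folding inside a static initializer with the
   trap-insensitive wrappers above.  Unlike plain fold_build2, this
   folds e.g. floating divisions that flag_trapping_math would
   otherwise force fold to preserve.  */
#if 0
static tree
example_fold_initializer_div (tree num, tree den)
{
  return fold_build2_initializer (RDIV_EXPR, TREE_TYPE (num), num, den);
}
#endif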
13090
13091 /* Determine whether the first argument is a multiple of the second.
13092    Return 0 if it is not, or if we cannot easily determine that it is.
13093
13094 An example of the sort of thing we care about (at this point; this routine
13095 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13096 fold cases do now) is discovering that
13097
13098 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13099
13100 is a multiple of
13101
13102 SAVE_EXPR (J * 8)
13103
13104 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13105
13106 This code also handles discovering that
13107
13108 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13109
13110 is a multiple of 8 so we don't have to worry about dealing with a
13111 possible remainder.
13112
13113 Note that we *look* inside a SAVE_EXPR only to determine how it was
13114 calculated; it is not safe for fold to do much of anything else with the
13115 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13116 at run time. For example, the latter example above *cannot* be implemented
13117 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13118 evaluation time of the original SAVE_EXPR is not necessarily the same at
13119 the time the new expression is evaluated. The only optimization of this
13120 sort that would be valid is changing
13121
13122 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13123
13124 divided by 8 to
13125
13126 SAVE_EXPR (I) * SAVE_EXPR (J)
13127
13128 (where the same SAVE_EXPR (J) is used in the original and the
13129 transformed version). */
13130
13131 int
13132 multiple_of_p (tree type, tree top, tree bottom)
13133 {
13134 if (operand_equal_p (top, bottom, 0))
13135 return 1;
13136
13137 if (TREE_CODE (type) != INTEGER_TYPE)
13138 return 0;
13139
13140 switch (TREE_CODE (top))
13141 {
13142 case BIT_AND_EXPR:
13143 /* Bitwise and provides a power of two multiple. If the mask is
13144 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13145 if (!integer_pow2p (bottom))
13146 return 0;
13147 /* FALLTHRU */
13148
13149 case MULT_EXPR:
13150 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13151 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13152
13153 case PLUS_EXPR:
13154 case MINUS_EXPR:
13155 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13156 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13157
13158 case LSHIFT_EXPR:
13159 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13160 {
13161 tree op1, t1;
13162
13163 op1 = TREE_OPERAND (top, 1);
13164 /* const_binop may not detect overflow correctly,
13165 so check for it explicitly here. */
13166 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13167 > TREE_INT_CST_LOW (op1)
13168 && TREE_INT_CST_HIGH (op1) == 0
13169 && 0 != (t1 = fold_convert (type,
13170 const_binop (LSHIFT_EXPR,
13171 size_one_node,
13172 op1, 0)))
13173 && !TREE_OVERFLOW (t1))
13174 return multiple_of_p (type, t1, bottom);
13175 }
13176 return 0;
13177
13178 case NOP_EXPR:
13179 /* Can't handle conversions from non-integral or wider integral type. */
13180 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13181 || (TYPE_PRECISION (type)
13182 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13183 return 0;
13184
13185       /* ... fall through ... */
13186
13187 case SAVE_EXPR:
13188 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13189
13190 case INTEGER_CST:
13191 if (TREE_CODE (bottom) != INTEGER_CST
13192 || (TYPE_UNSIGNED (type)
13193 && (tree_int_cst_sgn (top) < 0
13194 || tree_int_cst_sgn (bottom) < 0)))
13195 return 0;
13196 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13197 top, bottom, 0));
13198
13199 default:
13200 return 0;
13201 }
13202 }
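
/* An illustrative sketch, kept out of the build and not part of the
   GCC sources: the kind of query multiple_of_p answers.  J is assumed
   to have INTEGER_TYPE, so the MULT_EXPR case recurses into the
   constant factor 8, which is a multiple of 4, and the call returns 1.  */
#if 0
static int
example_j_times_8_multiple_of_4 (tree j)
{
  tree type = TREE_TYPE (j);
  tree top = build2 (MULT_EXPR, type, j, build_int_cst (type, 8));
  return multiple_of_p (type, top, build_int_cst (type, 4));
}
#endif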
13203
13204 /* Return true if `t' is known to be non-negative. If the return
13205 value is based on the assumption that signed overflow is undefined,
13206 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13207 *STRICT_OVERFLOW_P. */
13208
13209 bool
13210 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13211 {
13212 if (t == error_mark_node)
13213 return false;
13214
13215 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13216 return true;
13217
13218 switch (TREE_CODE (t))
13219 {
13220 case SSA_NAME:
13221 /* Query VRP to see if it has recorded any information about
13222 the range of this object. */
13223 return ssa_name_nonnegative_p (t);
13224
13225 case ABS_EXPR:
13226       /* We can't return true when wrapping overflow is in effect,
13227 	 because ABS_EXPR<INT_MIN> == INT_MIN.  */
13228 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13229 return true;
13230 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13231 {
13232 *strict_overflow_p = true;
13233 return true;
13234 }
13235 break;
13236
13237 case INTEGER_CST:
13238 return tree_int_cst_sgn (t) >= 0;
13239
13240 case REAL_CST:
13241 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13242
13243 case PLUS_EXPR:
13244 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13245 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13246 strict_overflow_p)
13247 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13248 strict_overflow_p));
13249
13250 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13251 both unsigned and at least 2 bits shorter than the result. */
13252 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13253 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13254 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13255 {
13256 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13257 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13258 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13259 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13260 {
13261 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13262 TYPE_PRECISION (inner2)) + 1;
13263 return prec < TYPE_PRECISION (TREE_TYPE (t));
13264 }
13265 }
13266 break;
13267
13268 case MULT_EXPR:
13269 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13270 {
13271 /* x * x for floating point x is always non-negative. */
13272 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13273 return true;
13274 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13275 strict_overflow_p)
13276 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13277 strict_overflow_p));
13278 }
13279
13280 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13281 	 both unsigned and the sum of their precisions is less than that of the result.  */
13282 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13283 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13284 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13285 {
13286 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13287 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13288 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13289 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13290 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13291 < TYPE_PRECISION (TREE_TYPE (t));
13292 }
13293 return false;
13294
13295 case BIT_AND_EXPR:
13296 case MAX_EXPR:
13297 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13298 strict_overflow_p)
13299 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13300 strict_overflow_p));
13301
13302 case BIT_IOR_EXPR:
13303 case BIT_XOR_EXPR:
13304 case MIN_EXPR:
13305 case RDIV_EXPR:
13306 case TRUNC_DIV_EXPR:
13307 case CEIL_DIV_EXPR:
13308 case FLOOR_DIV_EXPR:
13309 case ROUND_DIV_EXPR:
13310 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13311 strict_overflow_p)
13312 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13313 strict_overflow_p));
13314
13315 case TRUNC_MOD_EXPR:
13316 case CEIL_MOD_EXPR:
13317 case FLOOR_MOD_EXPR:
13318 case ROUND_MOD_EXPR:
13319 case SAVE_EXPR:
13320 case NON_LVALUE_EXPR:
13321 case FLOAT_EXPR:
13322 case FIX_TRUNC_EXPR:
13323 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13324 strict_overflow_p);
13325
13326 case COMPOUND_EXPR:
13327 case MODIFY_EXPR:
13328 case GIMPLE_MODIFY_STMT:
13329 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13330 strict_overflow_p);
13331
13332 case BIND_EXPR:
13333 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13334 strict_overflow_p);
13335
13336 case COND_EXPR:
13337 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13338 strict_overflow_p)
13339 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13340 strict_overflow_p));
13341
13342 case NOP_EXPR:
13343 {
13344 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13345 tree outer_type = TREE_TYPE (t);
13346
13347 if (TREE_CODE (outer_type) == REAL_TYPE)
13348 {
13349 if (TREE_CODE (inner_type) == REAL_TYPE)
13350 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13351 strict_overflow_p);
13352 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13353 {
13354 if (TYPE_UNSIGNED (inner_type))
13355 return true;
13356 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13357 strict_overflow_p);
13358 }
13359 }
13360 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13361 {
13362 if (TREE_CODE (inner_type) == REAL_TYPE)
13363 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13364 strict_overflow_p);
13365 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13366 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13367 && TYPE_UNSIGNED (inner_type);
13368 }
13369 }
13370 break;
13371
13372 case TARGET_EXPR:
13373 {
13374 tree temp = TARGET_EXPR_SLOT (t);
13375 t = TARGET_EXPR_INITIAL (t);
13376
13377 /* If the initializer is non-void, then it's a normal expression
13378 that will be assigned to the slot. */
13379 if (!VOID_TYPE_P (t))
13380 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13381
13382 /* Otherwise, the initializer sets the slot in some way. One common
13383 way is an assignment statement at the end of the initializer. */
13384 while (1)
13385 {
13386 if (TREE_CODE (t) == BIND_EXPR)
13387 t = expr_last (BIND_EXPR_BODY (t));
13388 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13389 || TREE_CODE (t) == TRY_CATCH_EXPR)
13390 t = expr_last (TREE_OPERAND (t, 0));
13391 else if (TREE_CODE (t) == STATEMENT_LIST)
13392 t = expr_last (t);
13393 else
13394 break;
13395 }
13396 if ((TREE_CODE (t) == MODIFY_EXPR
13397 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13398 && GENERIC_TREE_OPERAND (t, 0) == temp)
13399 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13400 strict_overflow_p);
13401
13402 return false;
13403 }
13404
13405 case CALL_EXPR:
13406 {
13407 tree fndecl = get_callee_fndecl (t);
13408 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13409 switch (DECL_FUNCTION_CODE (fndecl))
13410 {
13411 CASE_FLT_FN (BUILT_IN_ACOS):
13412 CASE_FLT_FN (BUILT_IN_ACOSH):
13413 CASE_FLT_FN (BUILT_IN_CABS):
13414 CASE_FLT_FN (BUILT_IN_COSH):
13415 CASE_FLT_FN (BUILT_IN_ERFC):
13416 CASE_FLT_FN (BUILT_IN_EXP):
13417 CASE_FLT_FN (BUILT_IN_EXP10):
13418 CASE_FLT_FN (BUILT_IN_EXP2):
13419 CASE_FLT_FN (BUILT_IN_FABS):
13420 CASE_FLT_FN (BUILT_IN_FDIM):
13421 CASE_FLT_FN (BUILT_IN_HYPOT):
13422 CASE_FLT_FN (BUILT_IN_POW10):
13423 CASE_INT_FN (BUILT_IN_FFS):
13424 CASE_INT_FN (BUILT_IN_PARITY):
13425 CASE_INT_FN (BUILT_IN_POPCOUNT):
13426 case BUILT_IN_BSWAP32:
13427 case BUILT_IN_BSWAP64:
13428 /* Always true. */
13429 return true;
13430
13431 CASE_FLT_FN (BUILT_IN_SQRT):
13432 /* sqrt(-0.0) is -0.0. */
13433 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13434 return true;
13435 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13436 strict_overflow_p);
13437
13438 CASE_FLT_FN (BUILT_IN_ASINH):
13439 CASE_FLT_FN (BUILT_IN_ATAN):
13440 CASE_FLT_FN (BUILT_IN_ATANH):
13441 CASE_FLT_FN (BUILT_IN_CBRT):
13442 CASE_FLT_FN (BUILT_IN_CEIL):
13443 CASE_FLT_FN (BUILT_IN_ERF):
13444 CASE_FLT_FN (BUILT_IN_EXPM1):
13445 CASE_FLT_FN (BUILT_IN_FLOOR):
13446 CASE_FLT_FN (BUILT_IN_FMOD):
13447 CASE_FLT_FN (BUILT_IN_FREXP):
13448 CASE_FLT_FN (BUILT_IN_LCEIL):
13449 CASE_FLT_FN (BUILT_IN_LDEXP):
13450 CASE_FLT_FN (BUILT_IN_LFLOOR):
13451 CASE_FLT_FN (BUILT_IN_LLCEIL):
13452 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13453 CASE_FLT_FN (BUILT_IN_LLRINT):
13454 CASE_FLT_FN (BUILT_IN_LLROUND):
13455 CASE_FLT_FN (BUILT_IN_LRINT):
13456 CASE_FLT_FN (BUILT_IN_LROUND):
13457 CASE_FLT_FN (BUILT_IN_MODF):
13458 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13459 CASE_FLT_FN (BUILT_IN_RINT):
13460 CASE_FLT_FN (BUILT_IN_ROUND):
13461 CASE_FLT_FN (BUILT_IN_SCALB):
13462 CASE_FLT_FN (BUILT_IN_SCALBLN):
13463 CASE_FLT_FN (BUILT_IN_SCALBN):
13464 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13465 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13466 CASE_FLT_FN (BUILT_IN_SINH):
13467 CASE_FLT_FN (BUILT_IN_TANH):
13468 CASE_FLT_FN (BUILT_IN_TRUNC):
13469 /* True if the 1st argument is nonnegative. */
13470 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13471 strict_overflow_p);
13472
13473 CASE_FLT_FN (BUILT_IN_FMAX):
13474 /* True if the 1st OR 2nd arguments are nonnegative. */
13475 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13476 strict_overflow_p)
13477 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13478 strict_overflow_p)));
13479
13480 CASE_FLT_FN (BUILT_IN_FMIN):
13481 /* True if the 1st AND 2nd arguments are nonnegative. */
13482 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13483 strict_overflow_p)
13484 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13485 strict_overflow_p)));
13486
13487 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13488 /* True if the 2nd argument is nonnegative. */
13489 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13490 strict_overflow_p);
13491
13492 CASE_FLT_FN (BUILT_IN_POWI):
13493 /* True if the 1st argument is nonnegative or the second
13494 argument is an even integer. */
13495 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13496 {
13497 tree arg1 = CALL_EXPR_ARG (t, 1);
13498 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13499 return true;
13500 }
13501 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13502 strict_overflow_p);
13503
13504 CASE_FLT_FN (BUILT_IN_POW):
13505 /* True if the 1st argument is nonnegative or the second
13506 argument is an even integer valued real. */
13507 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13508 {
13509 REAL_VALUE_TYPE c;
13510 HOST_WIDE_INT n;
13511
13512 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13513 n = real_to_integer (&c);
13514 if ((n & 1) == 0)
13515 {
13516 REAL_VALUE_TYPE cint;
13517 real_from_integer (&cint, VOIDmode, n,
13518 n < 0 ? -1 : 0, 0);
13519 if (real_identical (&c, &cint))
13520 return true;
13521 }
13522 }
13523 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13524 strict_overflow_p);
13525
13526 default:
13527 break;
13528 }
13529 }
13530
13531 /* ... fall through ... */
13532
13533 default:
13534 if (truth_value_p (TREE_CODE (t)))
13535 /* Truth values evaluate to 0 or 1, which is nonnegative. */
13536 return true;
13537 }
13538
13539 /* We don't know sign of `t', so be conservative and return false. */
13540 return false;
13541 }
13542
13543 /* Return true if `t' is known to be non-negative. Handle warnings
13544 about undefined signed overflow. */
13545
13546 bool
13547 tree_expr_nonnegative_p (tree t)
13548 {
13549 bool ret, strict_overflow_p;
13550
13551 strict_overflow_p = false;
13552 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13553 if (strict_overflow_p)
13554 fold_overflow_warning (("assuming signed overflow does not occur when "
13555 "determining that expression is always "
13556 "non-negative"),
13557 WARN_STRICT_OVERFLOW_MISC);
13558 return ret;
13559 }
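
/* Usage sketch (hypothetical caller, not from this file): a fold routine
   can use tree_expr_nonnegative_p to justify dropping an ABS_EXPR, since
   abs (x) == x whenever X is provably nonnegative:

     static tree
     fold_abs_of (tree x)
     {
       if (tree_expr_nonnegative_p (x))
         return x;
       return fold_build1 (ABS_EXPR, TREE_TYPE (x), x);
     }

   Any strict-overflow warning is emitted by tree_expr_nonnegative_p
   itself, so this caller need not track *STRICT_OVERFLOW_P.  */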
13560
13561 /* Return true when T is known to be nonzero.  Floating point is not
13562 handled here; doing something useful for it would need more work.
13563 Similar logic is present in nonzero_address in rtlanal.c.
13564
13565 If the return value is based on the assumption that signed overflow
13566 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13567 change *STRICT_OVERFLOW_P. */
13568
13569 bool
13570 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13571 {
13572 tree type = TREE_TYPE (t);
13573 bool sub_strict_overflow_p;
13574
13575 /* Doing something useful for floating point would need more work. */
13576 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13577 return false;
13578
13579 switch (TREE_CODE (t))
13580 {
13581 case SSA_NAME:
13582 /* Query VRP to see if it has recorded any information about
13583 the range of this object. */
13584 return ssa_name_nonzero_p (t);
13585
13586 case ABS_EXPR:
13587 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13588 strict_overflow_p);
13589
13590 case INTEGER_CST:
13591 return !integer_zerop (t);
13592
13593 case PLUS_EXPR:
13594 if (TYPE_OVERFLOW_UNDEFINED (type))
13595 {
13596 /* In the presence of negative values it is hard
13597 to say anything definite. */
13598 sub_strict_overflow_p = false;
13599 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13600 &sub_strict_overflow_p)
13601 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13602 &sub_strict_overflow_p))
13603 return false;
13604 /* One of the operands must be positive and the other non-negative. */
13605 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13606 overflows, on a two's-complement machine the sum of two
13607 nonnegative numbers can never be zero. */
13608 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13609 strict_overflow_p)
13610 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13611 strict_overflow_p));
13612 }
13613 break;
13614
13615 case MULT_EXPR:
13616 if (TYPE_OVERFLOW_UNDEFINED (type))
13617 {
13618 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13619 strict_overflow_p)
13620 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13621 strict_overflow_p))
13622 {
13623 *strict_overflow_p = true;
13624 return true;
13625 }
13626 }
13627 break;
13628
13629 case NOP_EXPR:
13630 {
13631 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13632 tree outer_type = TREE_TYPE (t);
13633
13634 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13635 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13636 strict_overflow_p));
13637 }
13638 break;
13639
13640 case ADDR_EXPR:
13641 {
13642 tree base = get_base_address (TREE_OPERAND (t, 0));
13643
13644 if (!base)
13645 return false;
13646
13647 /* Weak declarations may link to NULL. */
13648 if (VAR_OR_FUNCTION_DECL_P (base))
13649 return !DECL_WEAK (base);
13650
13651 /* Constants are never weak. */
13652 if (CONSTANT_CLASS_P (base))
13653 return true;
13654
13655 return false;
13656 }
13657
13658 case COND_EXPR:
13659 sub_strict_overflow_p = false;
13660 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13661 &sub_strict_overflow_p)
13662 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13663 &sub_strict_overflow_p))
13664 {
13665 if (sub_strict_overflow_p)
13666 *strict_overflow_p = true;
13667 return true;
13668 }
13669 break;
13670
13671 case MIN_EXPR:
13672 sub_strict_overflow_p = false;
13673 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13674 &sub_strict_overflow_p)
13675 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13676 &sub_strict_overflow_p))
13677 {
13678 if (sub_strict_overflow_p)
13679 *strict_overflow_p = true;
/* MIN yields one of its operands, so the minimum of two nonzero
   values is itself nonzero. */
return true;
13680 }
13681 break;
13682
13683 case MAX_EXPR:
13684 sub_strict_overflow_p = false;
13685 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13686 &sub_strict_overflow_p))
13687 {
13688 if (sub_strict_overflow_p)
13689 *strict_overflow_p = true;
13690
13691 /* When both operands are nonzero, then MAX must be too. */
13692 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13693 strict_overflow_p))
13694 return true;
13695
13696 /* MAX where operand 0 is positive is positive. */
13697 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13698 strict_overflow_p);
13699 }
13700 /* MAX where operand 1 is positive is positive. */
13701 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13702 &sub_strict_overflow_p)
13703 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13704 &sub_strict_overflow_p))
13705 {
13706 if (sub_strict_overflow_p)
13707 *strict_overflow_p = true;
13708 return true;
13709 }
13710 break;
13711
13712 case COMPOUND_EXPR:
13713 case MODIFY_EXPR:
13714 case GIMPLE_MODIFY_STMT:
13715 case BIND_EXPR:
13716 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13717 strict_overflow_p);
13718
13719 case SAVE_EXPR:
13720 case NON_LVALUE_EXPR:
13721 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13722 strict_overflow_p);
13723
13724 case BIT_IOR_EXPR:
13725 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13726 strict_overflow_p)
13727 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13728 strict_overflow_p));
13729
13730 case CALL_EXPR:
13731 return alloca_call_p (t);
13732
13733 default:
13734 break;
13735 }
13736 return false;
13737 }
13738
13739 /* Return true when T is known to be nonzero.  Handle warnings about
13740 undefined signed overflow. */
13741
13742 bool
13743 tree_expr_nonzero_p (tree t)
13744 {
13745 bool ret, strict_overflow_p;
13746
13747 strict_overflow_p = false;
13748 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13749 if (strict_overflow_p)
13750 fold_overflow_warning (("assuming signed overflow does not occur when "
13751 "determining that expression is always "
13752 "non-zero"),
13753 WARN_STRICT_OVERFLOW_MISC);
13754 return ret;
13755 }
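
/* Usage sketch (hypothetical caller, not from this file): the _warnv
   variant lets a caller combine several tests and decide afterwards
   whether the combined answer really rested on the undefined-overflow
   assumption:

     bool strict = false;
     if (tree_expr_nonzero_warnv_p (op0, &strict)
         && tree_expr_nonzero_warnv_p (op1, &strict))
       {
         ... both operands proven nonzero; warn only if STRICT is set ...
       }  */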
13756
13757 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13758 attempt to fold the expression to a constant without modifying TYPE,
13759 OP0 or OP1.
13760
13761 If the expression could be simplified to a constant, then return
13762 the constant. If the expression would not be simplified to a
13763 constant, then return NULL_TREE. */
13764
13765 tree
13766 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13767 {
13768 tree tem = fold_binary (code, type, op0, op1);
13769 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13770 }
13771
13772 /* Given the components of a unary expression CODE, TYPE and OP0,
13773 attempt to fold the expression to a constant without modifying
13774 TYPE or OP0.
13775
13776 If the expression could be simplified to a constant, then return
13777 the constant. If the expression would not be simplified to a
13778 constant, then return NULL_TREE. */
13779
13780 tree
13781 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13782 {
13783 tree tem = fold_unary (code, type, op0);
13784 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13785 }
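
/* Usage sketch (hypothetical constants): both helpers return NULL_TREE
   unless folding produced a genuine compile-time constant, so the result
   can be tested directly:

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree s = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);
     tree n = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, a);

   Here S is the INTEGER_CST 13 and N is the INTEGER_CST -6; with a
   non-constant operand either call would yield NULL_TREE instead.  */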
13786
13787 /* If EXP represents referencing an element in a constant string
13788 (either via pointer arithmetic or array indexing), return the
13789 tree representing the value accessed, otherwise return NULL. */
13790
13791 tree
13792 fold_read_from_constant_string (tree exp)
13793 {
13794 if ((TREE_CODE (exp) == INDIRECT_REF
13795 || TREE_CODE (exp) == ARRAY_REF)
13796 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13797 {
13798 tree exp1 = TREE_OPERAND (exp, 0);
13799 tree index;
13800 tree string;
13801
13802 if (TREE_CODE (exp) == INDIRECT_REF)
13803 string = string_constant (exp1, &index);
13804 else
13805 {
13806 tree low_bound = array_ref_low_bound (exp);
13807 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13808
13809 /* Optimize the special-case of a zero lower bound.
13810
13811 We convert the low_bound to sizetype to avoid some problems
13812 with constant folding. (E.g. suppose the lower bound is 1,
13813 and its mode is QI. Without the conversion, (ARRAY
13814 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13815 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13816 if (! integer_zerop (low_bound))
13817 index = size_diffop (index, fold_convert (sizetype, low_bound));
13818
13819 string = exp1;
13820 }
13821
13822 if (string
13823 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13824 && TREE_CODE (string) == STRING_CST
13825 && TREE_CODE (index) == INTEGER_CST
13826 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13827 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13828 == MODE_INT)
13829 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13830 return fold_convert (TREE_TYPE (exp),
13831 build_int_cst (NULL_TREE,
13832 (TREE_STRING_POINTER (string)
13833 [TREE_INT_CST_LOW (index)])));
13834 }
13835 return NULL;
13836 }
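
/* For example (hypothetical trees): for the C expression "abc"[1], EXP
   is an ARRAY_REF whose base is the STRING_CST "abc" and whose index is
   the INTEGER_CST 1, and the routine above folds it to the character
   constant 'b'.  Likewise *("abc" + 2) folds to 'c' through the
   INDIRECT_REF path.  A non-constant or out-of-range index falls
   through and returns NULL.  */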
13837
13838 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13839 an integer constant or real constant.
13840
13841 TYPE is the type of the result. */
13842
13843 static tree
13844 fold_negate_const (tree arg0, tree type)
13845 {
13846 tree t = NULL_TREE;
13847
13848 switch (TREE_CODE (arg0))
13849 {
13850 case INTEGER_CST:
13851 {
13852 unsigned HOST_WIDE_INT low;
13853 HOST_WIDE_INT high;
13854 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13855 TREE_INT_CST_HIGH (arg0),
13856 &low, &high);
13857 t = force_fit_type_double (type, low, high, 1,
13858 (overflow | TREE_OVERFLOW (arg0))
13859 && !TYPE_UNSIGNED (type));
13860 break;
13861 }
13862
13863 case REAL_CST:
13864 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13865 break;
13866
13867 default:
13868 gcc_unreachable ();
13869 }
13870
13871 return t;
13872 }
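
/* For instance (hypothetical 32-bit int): negating the INTEGER_CST
   -2147483648 wraps back to itself, so neg_double reports overflow and
   force_fit_type_double sets TREE_OVERFLOW on the result for the signed
   type; that is how callers learn that -(INT_MIN) did not fit.  Negating
   an ordinary constant such as 5 simply yields -5 with no overflow
   flag.  */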
13873
13874 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13875 an integer constant or real constant.
13876
13877 TYPE is the type of the result. */
13878
13879 tree
13880 fold_abs_const (tree arg0, tree type)
13881 {
13882 tree t = NULL_TREE;
13883
13884 switch (TREE_CODE (arg0))
13885 {
13886 case INTEGER_CST:
13887 /* If the value is unsigned, then the absolute value is
13888 the same as the ordinary value. */
13889 if (TYPE_UNSIGNED (type))
13890 t = arg0;
13891 /* Similarly, if the value is non-negative. */
13892 else if (INT_CST_LT (integer_minus_one_node, arg0))
13893 t = arg0;
13894 /* If the value is negative, then the absolute value is
13895 its negation. */
13896 else
13897 {
13898 unsigned HOST_WIDE_INT low;
13899 HOST_WIDE_INT high;
13900 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13901 TREE_INT_CST_HIGH (arg0),
13902 &low, &high);
13903 t = force_fit_type_double (type, low, high, -1,
13904 overflow | TREE_OVERFLOW (arg0));
13905 }
13906 break;
13907
13908 case REAL_CST:
13909 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13910 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13911 else
13912 t = arg0;
13913 break;
13914
13915 default:
13916 gcc_unreachable ();
13917 }
13918
13919 return t;
13920 }
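
/* As with fold_negate_const above, the one delicate case is
   abs (INT_MIN) for a signed type (hypothetical 32-bit int): the
   negation wraps, so the result carries TREE_OVERFLOW.  Unsigned and
   nonnegative inputs are returned unchanged.  */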
13921
13922 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13923 constant. TYPE is the type of the result. */
13924
13925 static tree
13926 fold_not_const (tree arg0, tree type)
13927 {
13928 tree t = NULL_TREE;
13929
13930 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13931
13932 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13933 ~TREE_INT_CST_HIGH (arg0), 0,
13934 TREE_OVERFLOW (arg0));
13935
13936 return t;
13937 }
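
/* E.g. for a 32-bit int, fold_not_const on the INTEGER_CST 5 yields -6:
   both HOST_WIDE_INT halves are inverted bitwise and the result is then
   refit to TYPE by force_fit_type_double.  */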
13938
13939 /* Given CODE, a relational operator, the target type, TYPE and two
13940 constant operands OP0 and OP1, return the result of the
13941 relational operation. If the result is not a compile time
13942 constant, then return NULL_TREE. */
13943
13944 static tree
13945 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13946 {
13947 int result, invert;
13948
13949 /* From here on, the only cases we handle are when the result is
13950 known to be a constant. */
13951
13952 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13953 {
13954 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13955 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13956
13957 /* Handle the cases where either operand is a NaN. */
13958 if (real_isnan (c0) || real_isnan (c1))
13959 {
13960 switch (code)
13961 {
13962 case EQ_EXPR:
13963 case ORDERED_EXPR:
13964 result = 0;
13965 break;
13966
13967 case NE_EXPR:
13968 case UNORDERED_EXPR:
13969 case UNLT_EXPR:
13970 case UNLE_EXPR:
13971 case UNGT_EXPR:
13972 case UNGE_EXPR:
13973 case UNEQ_EXPR:
13974 result = 1;
13975 break;
13976
13977 case LT_EXPR:
13978 case LE_EXPR:
13979 case GT_EXPR:
13980 case GE_EXPR:
13981 case LTGT_EXPR:
13982 if (flag_trapping_math)
13983 return NULL_TREE;
13984 result = 0;
13985 break;
13986
13987 default:
13988 gcc_unreachable ();
13989 }
13990
13991 return constant_boolean_node (result, type);
13992 }
13993
13994 return constant_boolean_node (real_compare (code, c0, c1), type);
13995 }
13996
13997 /* Handle equality/inequality of complex constants. */
13998 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13999 {
14000 tree rcond = fold_relational_const (code, type,
14001 TREE_REALPART (op0),
14002 TREE_REALPART (op1));
14003 tree icond = fold_relational_const (code, type,
14004 TREE_IMAGPART (op0),
14005 TREE_IMAGPART (op1));
14006 if (code == EQ_EXPR)
14007 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14008 else if (code == NE_EXPR)
14009 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14010 else
14011 return NULL_TREE;
14012 }
14013
14014 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14015
14016 To compute GT, swap the arguments and do LT.
14017 To compute GE, do LT and invert the result.
14018 To compute LE, swap the arguments, do LT and invert the result.
14019 To compute NE, do EQ and invert the result.
14020
14021 Therefore, the code below must handle only EQ and LT. */
14022
14023 if (code == LE_EXPR || code == GT_EXPR)
14024 {
14025 tree tem = op0;
14026 op0 = op1;
14027 op1 = tem;
14028 code = swap_tree_comparison (code);
14029 }
14030
14031 /* Note that it is safe to invert for real values here because we
14032 have already handled the one case where it matters. */
14033
14034 invert = 0;
14035 if (code == NE_EXPR || code == GE_EXPR)
14036 {
14037 invert = 1;
14038 code = invert_tree_comparison (code, false);
14039 }
14040
14041 /* Compute a result for LT or EQ if args permit;
14042 otherwise return NULL_TREE. */
14043 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14044 {
14045 if (code == EQ_EXPR)
14046 result = tree_int_cst_equal (op0, op1);
14047 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14048 result = INT_CST_LT_UNSIGNED (op0, op1);
14049 else
14050 result = INT_CST_LT (op0, op1);
14051 }
14052 else
14053 return NULL_TREE;
14054
14055 if (invert)
14056 result ^= 1;
14057 return constant_boolean_node (result, type);
14058 }
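
/* Worked example (hypothetical constants): folding 3 >= 5.  GE is not
   handled directly, so the code above rewrites it as LT plus an
   inversion (invert = 1), leaving the comparison 3 < 5; INT_CST_LT
   answers 1, and the final result is 1 ^ 1 == 0, i.e. boolean false,
   as expected.  Similarly 3 > 5 is computed by swapping the operands
   and testing 5 < 3.  */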
14059
14060 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
14061 Don't build a cleanup point expression for an EXPR which doesn't have
14062 side effects. */
14063
14064 tree
14065 fold_build_cleanup_point_expr (tree type, tree expr)
14066 {
14067 /* If the expression does not have side effects then we don't have to wrap
14068 it with a cleanup point expression. */
14069 if (!TREE_SIDE_EFFECTS (expr))
14070 return expr;
14071
14072 /* If the expression is a return, check whether the expression inside the
14073 return, or the right-hand side of the modify expression inside the
14074 return, has side effects. If it doesn't, we don't need to wrap the
14075 expression in a cleanup point expression. Note we don't check the
14076 left-hand side of the modify because it should always be a return decl. */
14077 if (TREE_CODE (expr) == RETURN_EXPR)
14078 {
14079 tree op = TREE_OPERAND (expr, 0);
14080 if (!op || !TREE_SIDE_EFFECTS (op))
14081 return expr;
14082 op = TREE_OPERAND (op, 1);
14083 if (!TREE_SIDE_EFFECTS (op))
14084 return expr;
14085 }
14086
14087 return build1 (CLEANUP_POINT_EXPR, type, expr);
14088 }
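
/* For example (hypothetical GENERIC): "return x" needs no cleanup point,
   because the right-hand side X of the modify expression inside the
   return has no side effects, whereas "return f ()" is wrapped, since
   the call may create temporaries whose cleanups must run.  */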
14089
14090 /* Build an expression for the address of T. Folds away INDIRECT_REF to
14091 avoid confusing the gimplify process. */
14092
14093 tree
14094 build_fold_addr_expr_with_type (tree t, tree ptrtype)
14095 {
14096 /* The size of the object is not relevant when talking about its address. */
14097 if (TREE_CODE (t) == WITH_SIZE_EXPR)
14098 t = TREE_OPERAND (t, 0);
14099
14100 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
14101 if (TREE_CODE (t) == INDIRECT_REF
14102 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
14103 {
14104 t = TREE_OPERAND (t, 0);
14105 if (TREE_TYPE (t) != ptrtype)
14106 t = build1 (NOP_EXPR, ptrtype, t);
14107 }
14108 else
14109 {
14110 tree base = t;
14111
14112 while (handled_component_p (base))
14113 base = TREE_OPERAND (base, 0);
14114 if (DECL_P (base))
14115 TREE_ADDRESSABLE (base) = 1;
14116
14117 t = build1 (ADDR_EXPR, ptrtype, t);
14118 }
14119
14120 return t;
14121 }
14122
14123 tree
14124 build_fold_addr_expr (tree t)
14125 {
14126 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
14127 }
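
/* Usage sketch (hypothetical trees V, a VAR_DECL, and P, a pointer):

     tree a1 = build_fold_addr_expr (v);
     tree deref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (p)), p);
     tree a2 = build_fold_addr_expr (deref);

   A1 is the ADDR_EXPR &v, and V is marked TREE_ADDRESSABLE; A2 folds
   &*p back to P (wrapped in a NOP_EXPR if the pointer types differ),
   which keeps the gimplifier from ever seeing &*p.  */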
14128
14129 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14130 of an indirection through OP0, or NULL_TREE if no simplification is
14131 possible. */
14132
14133 tree
14134 fold_indirect_ref_1 (tree type, tree op0)
14135 {
14136 tree sub = op0;
14137 tree subtype;
14138
14139 STRIP_NOPS (sub);
14140 subtype = TREE_TYPE (sub);
14141 if (!POINTER_TYPE_P (subtype))
14142 return NULL_TREE;
14143
14144 if (TREE_CODE (sub) == ADDR_EXPR)
14145 {
14146 tree op = TREE_OPERAND (sub, 0);
14147 tree optype = TREE_TYPE (op);
14148 /* *&CONST_DECL -> the value of the const decl. */
14149 if (TREE_CODE (op) == CONST_DECL)
14150 return DECL_INITIAL (op);
14151 /* *&p => p; make sure to handle *&"str"[cst] here. */
14152 if (type == optype)
14153 {
14154 tree fop = fold_read_from_constant_string (op);
14155 if (fop)
14156 return fop;
14157 else
14158 return op;
14159 }
14160 /* *(foo *)&fooarray => fooarray[0] */
14161 else if (TREE_CODE (optype) == ARRAY_TYPE
14162 && type == TREE_TYPE (optype))
14163 {
14164 tree type_domain = TYPE_DOMAIN (optype);
14165 tree min_val = size_zero_node;
14166 if (type_domain && TYPE_MIN_VALUE (type_domain))
14167 min_val = TYPE_MIN_VALUE (type_domain);
14168 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14169 }
14170 /* *(foo *)&complexfoo => __real__ complexfoo */
14171 else if (TREE_CODE (optype) == COMPLEX_TYPE
14172 && type == TREE_TYPE (optype))
14173 return fold_build1 (REALPART_EXPR, type, op);
14174 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14175 else if (TREE_CODE (optype) == VECTOR_TYPE
14176 && type == TREE_TYPE (optype))
14177 {
14178 tree part_width = TYPE_SIZE (type);
14179 tree index = bitsize_int (0);
14180 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14181 }
14182 }
14183
14184 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14185 if (TREE_CODE (sub) == PLUS_EXPR
14186 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14187 {
14188 tree op00 = TREE_OPERAND (sub, 0);
14189 tree op01 = TREE_OPERAND (sub, 1);
14190 tree op00type;
14191
14192 STRIP_NOPS (op00);
14193 op00type = TREE_TYPE (op00);
14194 if (TREE_CODE (op00) == ADDR_EXPR
14195 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14196 && type == TREE_TYPE (TREE_TYPE (op00type)))
14197 {
14198 tree size = TYPE_SIZE_UNIT (type);
14199 if (tree_int_cst_equal (size, op01))
14200 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14201 }
14202 }
14203
14204 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14205 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14206 && type == TREE_TYPE (TREE_TYPE (subtype)))
14207 {
14208 tree type_domain;
14209 tree min_val = size_zero_node;
14210 sub = build_fold_indirect_ref (sub);
14211 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14212 if (type_domain && TYPE_MIN_VALUE (type_domain))
14213 min_val = TYPE_MIN_VALUE (type_domain);
14214 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14215 }
14216
14217 return NULL_TREE;
14218 }
14219
14220 /* Builds an expression for an indirection through T, simplifying some
14221 cases. */
14222
14223 tree
14224 build_fold_indirect_ref (tree t)
14225 {
14226 tree type = TREE_TYPE (TREE_TYPE (t));
14227 tree sub = fold_indirect_ref_1 (type, t);
14228
14229 if (sub)
14230 return sub;
14231 else
14232 return build1 (INDIRECT_REF, type, t);
14233 }
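
/* For example (hypothetical trees): *(int *)&a, where A is an array of
   int, simplifies to a[0]; *(float *)&c, where C is a _Complex float,
   becomes __real__ c; and *&x becomes X itself.  When none of the
   patterns in fold_indirect_ref_1 match, a plain INDIRECT_REF is
   built.  */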
14234
14235 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14236
14237 tree
14238 fold_indirect_ref (tree t)
14239 {
14240 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14241
14242 if (sub)
14243 return sub;
14244 else
14245 return t;
14246 }
14247
14248 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14249 whose result is ignored. The type of the returned tree need not be
14250 the same as the original expression. */
14251
14252 tree
14253 fold_ignored_result (tree t)
14254 {
14255 if (!TREE_SIDE_EFFECTS (t))
14256 return integer_zero_node;
14257
14258 for (;;)
14259 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14260 {
14261 case tcc_unary:
14262 t = TREE_OPERAND (t, 0);
14263 break;
14264
14265 case tcc_binary:
14266 case tcc_comparison:
14267 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14268 t = TREE_OPERAND (t, 0);
14269 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14270 t = TREE_OPERAND (t, 1);
14271 else
14272 return t;
14273 break;
14274
14275 case tcc_expression:
14276 switch (TREE_CODE (t))
14277 {
14278 case COMPOUND_EXPR:
14279 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14280 return t;
14281 t = TREE_OPERAND (t, 0);
14282 break;
14283
14284 case COND_EXPR:
14285 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14286 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14287 return t;
14288 t = TREE_OPERAND (t, 0);
14289 break;
14290
14291 default:
14292 return t;
14293 }
14294 break;
14295
14296 default:
14297 return t;
14298 }
14299 }
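
/* For example (hypothetical trees): with the result ignored, (x + y++)
   reduces to y++, since the side-effect-free operand of the binary
   expression is dropped, and (f (), 42) reduces to f () through the
   COMPOUND_EXPR case.  An expression with no side effects at all
   collapses to integer_zero_node.  */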
14300
14301 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14302 This can only be applied to objects of a sizetype. */
14303
14304 tree
14305 round_up (tree value, int divisor)
14306 {
14307 tree div = NULL_TREE;
14308
14309 gcc_assert (divisor > 0);
14310 if (divisor == 1)
14311 return value;
14312
14313 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14314 have to do anything. Only do this when we are not given a const,
14315 because in that case, this check is more expensive than just
14316 doing it. */
14317 if (TREE_CODE (value) != INTEGER_CST)
14318 {
14319 div = build_int_cst (TREE_TYPE (value), divisor);
14320
14321 if (multiple_of_p (TREE_TYPE (value), value, div))
14322 return value;
14323 }
14324
14325 /* If divisor is a power of two, simplify this to bit manipulation. */
14326 if (divisor == (divisor & -divisor))
14327 {
14328 if (TREE_CODE (value) == INTEGER_CST)
14329 {
14330 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14331 unsigned HOST_WIDE_INT high;
14332 bool overflow_p;
14333
14334 if ((low & (divisor - 1)) == 0)
14335 return value;
14336
14337 overflow_p = TREE_OVERFLOW (value);
14338 high = TREE_INT_CST_HIGH (value);
14339 low &= ~(divisor - 1);
14340 low += divisor;
14341 if (low == 0)
14342 {
14343 high++;
14344 if (high == 0)
14345 overflow_p = true;
14346 }
14347
14348 return force_fit_type_double (TREE_TYPE (value), low, high,
14349 -1, overflow_p);
14350 }
14351 else
14352 {
14353 tree t;
14354
14355 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14356 value = size_binop (PLUS_EXPR, value, t);
14357 t = build_int_cst (TREE_TYPE (value), -divisor);
14358 value = size_binop (BIT_AND_EXPR, value, t);
14359 }
14360 }
14361 else
14362 {
14363 if (!div)
14364 div = build_int_cst (TREE_TYPE (value), divisor);
14365 value = size_binop (CEIL_DIV_EXPR, value, div);
14366 value = size_binop (MULT_EXPR, value, div);
14367 }
14368
14369 return value;
14370 }
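
/* Worked example (hypothetical sizetype constant): rounding VALUE == 10
   up to a multiple of 8 takes the power-of-two path: (10 & ~7) + 8
   == 16.  For a non-power-of-two divisor such as 12, the result is
   CEIL_DIV_EXPR (10, 12) * 12 == 1 * 12 == 12.  A non-constant VALUE
   gets the same treatment symbolically through size_binop.  */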
14371
14372 /* Likewise, but round down. */
14373
14374 tree
14375 round_down (tree value, int divisor)
14376 {
14377 tree div = NULL_TREE;
14378
14379 gcc_assert (divisor > 0);
14380 if (divisor == 1)
14381 return value;
14382
14383 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14384 have to do anything. Only do this when we are not given a const,
14385 because in that case, this check is more expensive than just
14386 doing it. */
14387 if (TREE_CODE (value) != INTEGER_CST)
14388 {
14389 div = build_int_cst (TREE_TYPE (value), divisor);
14390
14391 if (multiple_of_p (TREE_TYPE (value), value, div))
14392 return value;
14393 }
14394
14395 /* If divisor is a power of two, simplify this to bit manipulation. */
14396 if (divisor == (divisor & -divisor))
14397 {
14398 tree t;
14399
14400 t = build_int_cst (TREE_TYPE (value), -divisor);
14401 value = size_binop (BIT_AND_EXPR, value, t);
14402 }
14403 else
14404 {
14405 if (!div)
14406 div = build_int_cst (TREE_TYPE (value), divisor);
14407 value = size_binop (FLOOR_DIV_EXPR, value, div);
14408 value = size_binop (MULT_EXPR, value, div);
14409 }
14410
14411 return value;
14412 }
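
/* E.g. rounding 10 down to a multiple of 8 yields 10 & -8 == 8, while
   rounding 10 down to a multiple of 12 yields
   FLOOR_DIV_EXPR (10, 12) * 12 == 0.  */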
14413
14414 /* Returns a pointer to the base of the object addressed by EXP and
14415 extracts the information about the offset of the access, storing it
14416 in *PBITPOS and *POFFSET. */
14417
14418 static tree
14419 split_address_to_core_and_offset (tree exp,
14420 HOST_WIDE_INT *pbitpos, tree *poffset)
14421 {
14422 tree core;
14423 enum machine_mode mode;
14424 int unsignedp, volatilep;
14425 HOST_WIDE_INT bitsize;
14426
14427 if (TREE_CODE (exp) == ADDR_EXPR)
14428 {
14429 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14430 poffset, &mode, &unsignedp, &volatilep,
14431 false);
14432 core = build_fold_addr_expr (core);
14433 }
14434 else
14435 {
14436 core = exp;
14437 *pbitpos = 0;
14438 *poffset = NULL_TREE;
14439 }
14440
14441 return core;
14442 }
14443
14444 /* Returns true if addresses of E1 and E2 differ by a constant, false
14445 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14446
14447 bool
14448 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14449 {
14450 tree core1, core2;
14451 HOST_WIDE_INT bitpos1, bitpos2;
14452 tree toffset1, toffset2, tdiff, type;
14453
14454 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14455 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14456
14457 if (bitpos1 % BITS_PER_UNIT != 0
14458 || bitpos2 % BITS_PER_UNIT != 0
14459 || !operand_equal_p (core1, core2, 0))
14460 return false;
14461
14462 if (toffset1 && toffset2)
14463 {
14464 type = TREE_TYPE (toffset1);
14465 if (type != TREE_TYPE (toffset2))
14466 toffset2 = fold_convert (type, toffset2);
14467
14468 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14469 if (!cst_and_fits_in_hwi (tdiff))
14470 return false;
14471
14472 *diff = int_cst_value (tdiff);
14473 }
14474 else if (toffset1 || toffset2)
14475 {
14476 /* If only one of the offsets is non-constant, the difference cannot
14477 be a constant. */
14478 return false;
14479 }
14480 else
14481 *diff = 0;
14482
14483 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14484 return true;
14485 }
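
/* Usage sketch (hypothetical array A of 4-byte ints): for E1 == &a[3]
   and E2 == &a[1] both addresses share the core &a and the offsets are
   constant, so the routine stores (3 - 1) * 4 == 8 in *DIFF and returns
   true.  If either offset involved a variable index it would return
   false instead.  */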
14486
14487 /* Simplify the floating point expression EXP when the sign of the
14488 result is not significant. Return NULL_TREE if no simplification
14489 is possible. */
14490
14491 tree
14492 fold_strip_sign_ops (tree exp)
14493 {
14494 tree arg0, arg1;
14495
14496 switch (TREE_CODE (exp))
14497 {
14498 case ABS_EXPR:
14499 case NEGATE_EXPR:
14500 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14501 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14502
14503 case MULT_EXPR:
14504 case RDIV_EXPR:
14505 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14506 return NULL_TREE;
14507 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14508 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14509 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14510 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14511 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14512 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14513 break;
14514
14515 case COMPOUND_EXPR:
14516 arg0 = TREE_OPERAND (exp, 0);
14517 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14518 if (arg1)
14519 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14520 break;
14521
14522 case COND_EXPR:
14523 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14524 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14525 if (arg0 || arg1)
14526 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14527 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14528 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14529 break;
14530
14531 case CALL_EXPR:
14532 {
14533 const enum built_in_function fcode = builtin_mathfn_code (exp);
14534 switch (fcode)
14535 {
14536 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14537 /* Strip copysign function call, return the 1st argument. */
14538 arg0 = CALL_EXPR_ARG (exp, 0);
14539 arg1 = CALL_EXPR_ARG (exp, 1);
14540 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14541
14542 default:
14543 /* Strip sign ops from the argument of "odd" math functions. */
14544 if (negate_mathfn_p (fcode))
14545 {
14546 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14547 if (arg0)
14548 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
14549 }
14550 break;
14551 }
14552 }
14553 break;
14554
14555 default:
14556 break;
14557 }
14558 return NULL_TREE;
14559 }
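
/* For example (hypothetical trees): when only the magnitude of the
   result matters -- say while folding fabs (-x * y) -- the routine
   above rewrites -x * y as x * y and copysign (x, y) as x, so the
   caller can fold the simpler fabs (x * y) instead.  It returns
   NULL_TREE when nothing could be stripped, e.g. for a plain x + y.  */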