/* Fold a constant sub-tree into a single node for the C compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not, see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern tree build_range_check (tree, tree, int, tree, tree);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
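
/* Illustrative sketch only (not part of the original source): a minimal
   self-check of OVERFLOW_SUM_SIGN.  With A the largest power of two that
   fits in a signed HOST_WIDE_INT, A + A wraps into the sign bit, so the
   macro reports overflow; A + 0 does not.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  /* Compute the wrapped sum in unsigned arithmetic so that the example
     itself avoids undefined signed overflow.  */
  HOST_WIDE_INT sum = (HOST_WIDE_INT) (2 * (unsigned HOST_WIDE_INT) a);

  gcc_assert (OVERFLOW_SUM_SIGN (a, a, sum));  /* wraps: overflow */
  gcc_assert (!OVERFLOW_SUM_SIGN (a, 0, a));   /* fits: no overflow */
}
#endif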
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
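
/* Illustrative sketch only: round-trip a double-word value through the
   four half-word representation used by encode and decode.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;

  encode (words, (unsigned HOST_WIDE_INT) 0xdeadbeef, -1);
  decode (words, &low, &high);
  gcc_assert (low == (unsigned HOST_WIDE_INT) 0xdeadbeef && high == -1);
}
#endif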
\f
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
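
/* Illustrative sketch only: forcing 0x1ff into an 8-bit signed type
   (signed_char_type_node) keeps the low eight bits and sign-extends,
   yielding -1 in both halves; since the result differs from the input,
   fit_double_type reports overflow.  */
#if 0
static void
fit_double_type_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = fit_double_type (0x1ff, 0, &lv, &hv, signed_char_type_node);

  gcc_assert (ovf && lv == (unsigned HOST_WIDE_INT) -1 && hv == -1);
}
#endif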

/* We force the double-word integer HIGH:LOW to the range of the type TYPE
   by sign- or zero-extending it (that is, by setting all the bits outside
   the type's range to 0 or 1 as appropriate).  OVERFLOWABLE says how
   interested we are in overflow of the value: when > 0 we are only
   interested in signed overflow, when < 0 we are interested in any
   overflow.  OVERFLOWED indicates whether overflow has already occurred.
   We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is > 0 and signed overflow occurs,
     or OVERFLOWABLE is < 0 and any overflow occurs.
   We return a new tree node for the extended double-word integer.  The
   node is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
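
/* Illustrative sketch only: an all-ones low word plus one carries into
   the high word; neither the unsigned nor the signed overflow condition
   triggers for this double-word result.  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double_with_sign (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0,
                                  &lv, &hv, false);

  gcc_assert (!ovf && lv == 0 && hv == 1);
}
#endif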

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
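
/* Illustrative sketch only: BASE * BASE is exactly 2^HOST_BITS_PER_WIDE_INT,
   which lands in the high word of the double-word result, so no overflow
   beyond the double word occurs.  */
#if 0
static void
mul_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = mul_double_with_sign (BASE, 0, BASE, 0, &lv, &hv, false);

  gcc_assert (!ovf && lv == 0 && hv == 1);
}
#endif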
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
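
/* Illustrative sketch only: arithmetically right-shifting the double-word
   value -2 by one bit at full double-word precision yields -1, because the
   sign is replicated into the vacated bit.  */
#if 0
static void
rshift_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  rshift_double ((unsigned HOST_WIDE_INT) -2, -1, 1,
                 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 1);
  gcc_assert (lv == (unsigned HOST_WIDE_INT) -1 && hv == -1);
}
#endif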
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num); /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the (I - 1)st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
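
/* Illustrative sketch only: the rounding codes differ on negative
   operands.  Signed -7 / 2 gives quotient -3 under TRUNC_DIV_EXPR but -4
   under FLOOR_DIV_EXPR, which rounds toward negative infinity.  */
#if 0
static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
                        2, 0, &lquo, &hquo, &lrem, &hrem);
  gcc_assert (lquo == (unsigned HOST_WIDE_INT) -3 && hquo == -1);

  div_and_round_double (FLOOR_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
                        2, 0, &lquo, &hquo, &lrem, &hrem);
  gcc_assert (lquo == (unsigned HOST_WIDE_INT) -4 && hquo == -1);
}
#endif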

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
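
/* Illustrative sketch only: 8 divides evenly by 4, so the exact quotient
   comes back as a constant; 8 does not divide evenly by 3, so the result
   is NULL_TREE.  */
#if 0
static void
div_if_zero_remainder_example (void)
{
  tree eight = build_int_cst (integer_type_node, 8);
  tree four = build_int_cst (integer_type_node, 4);
  tree three = build_int_cst (integer_type_node, 3);
  tree q = div_if_zero_remainder (EXACT_DIV_EXPR, eight, four);

  gcc_assert (q && tree_low_cst (q, 0) == 2);
  gcc_assert (div_if_zero_remainder (EXACT_DIV_EXPR, eight, three)
              == NULL_TREE);
}
#endif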
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
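
/* Illustrative usage sketch only (not part of the original source):
   callers that fold speculatively bracket the work with the defer /
   undefer pair so that -Wstrict-overflow warnings fire only when the
   folded result is actually kept.  */
#if 0
static tree
fold_speculatively (tree expr)
{
  tree res;

  fold_defer_overflow_warnings ();
  res = fold (expr);
  /* Issue any deferred warning only if we are going to use RES.  */
  fold_undefer_overflow_warnings (res != expr, NULL, 0);
  return res;
}
#endif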

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
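
/* Illustrative sketch only: the most negative value of a signed type has
   no positive counterpart, so it is the one integer constant that cannot
   be negated without overflow.  */
#if 0
static void
may_negate_example (void)
{
  gcc_assert (!may_negate_without_overflow_p
              (TYPE_MIN_VALUE (integer_type_node)));
  gcc_assert (may_negate_without_overflow_p (integer_one_node));
}
#endif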

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
   simplification is possible.  If negate_expr_p would return true for T,
   NULL_TREE will never be returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
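
/* Illustrative sketch only, assuming A and B are side-effect-free
   variables of integer_type_node: negate_expr rewrites - (A - B) as
   B - A rather than wrapping the difference in a NEGATE_EXPR.  */
#if 0
static void
negate_expr_example (tree a, tree b)
{
  tree diff = build2 (MINUS_EXPR, integer_type_node, a, b);
  tree neg = negate_expr (diff);

  gcc_assert (neg != NULL_TREE && TREE_CODE (neg) == MINUS_EXPR);
}
#endif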
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
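
/* Illustrative sketch only, assuming X is a side-effect-free variable of
   integer_type_node: splitting X + 3 under PLUS_EXPR returns the variable
   part X, stores the literal 3 in *LITP, and leaves the other parts null.  */
#if 0
static void
split_tree_example (tree x)
{
  tree con, lit, minus_lit;
  tree sum = build2 (PLUS_EXPR, integer_type_node, x,
                     build_int_cst (integer_type_node, 3));
  tree var = split_tree (sum, PLUS_EXPR, &con, &lit, &minus_lit, 0);

  gcc_assert (var == x && con == 0 && minus_lit == 0
              && lit && tree_low_cst (lit, 0) == 3);
}
#endif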

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
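
/* Illustrative sketch only: folding the constant sum 2 + 3 with truncation
   enabled produces the INTEGER_CST 5 with no overflow flag.  */
#if 0
static void
int_const_binop_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree five = int_const_binop (PLUS_EXPR, two, three, 0);

  gcc_assert (five && tree_low_cst (five, 0) == 5 && !TREE_OVERFLOW (five));
}
#endif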
1800
1801 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1802 constant. We assume ARG1 and ARG2 have the same data type, or at least
1803 are the same kind of constant and the same machine mode. Return zero if
1804 combining the constants is not allowed in the current operating mode.
1805
1806 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1807
1808 static tree
1809 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1810 {
1811 /* Sanity check for the recursive cases. */
1812 if (!arg1 || !arg2)
1813 return NULL_TREE;
1814
1815 STRIP_NOPS (arg1);
1816 STRIP_NOPS (arg2);
1817
1818 if (TREE_CODE (arg1) == INTEGER_CST)
1819 return int_const_binop (code, arg1, arg2, notrunc);
1820
1821 if (TREE_CODE (arg1) == REAL_CST)
1822 {
1823 enum machine_mode mode;
1824 REAL_VALUE_TYPE d1;
1825 REAL_VALUE_TYPE d2;
1826 REAL_VALUE_TYPE value;
1827 REAL_VALUE_TYPE result;
1828 bool inexact;
1829 tree t, type;
1830
1831 /* The following codes are handled by real_arithmetic. */
1832 switch (code)
1833 {
1834 case PLUS_EXPR:
1835 case MINUS_EXPR:
1836 case MULT_EXPR:
1837 case RDIV_EXPR:
1838 case MIN_EXPR:
1839 case MAX_EXPR:
1840 break;
1841
1842 default:
1843 return NULL_TREE;
1844 }
1845
1846 d1 = TREE_REAL_CST (arg1);
1847 d2 = TREE_REAL_CST (arg2);
1848
1849 type = TREE_TYPE (arg1);
1850 mode = TYPE_MODE (type);
1851
1852 /* Don't perform the operation if we honor signaling NaNs and
1853 either operand is a NaN. */
1854 if (HONOR_SNANS (mode)
1855 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1856 return NULL_TREE;
1857
1858 /* Don't perform the operation if it would raise a division
1859 by zero exception. */
1860 if (code == RDIV_EXPR
1861 && REAL_VALUES_EQUAL (d2, dconst0)
1862 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1863 return NULL_TREE;
1864
1865 /* If either operand is a NaN, just return it. Otherwise, set up
1866 for floating-point trap; we return an overflow. */
1867 if (REAL_VALUE_ISNAN (d1))
1868 return arg1;
1869 else if (REAL_VALUE_ISNAN (d2))
1870 return arg2;
1871
1872 inexact = real_arithmetic (&value, code, &d1, &d2);
1873 real_convert (&result, mode, &value);
1874
1875 /* Don't constant fold this floating point operation if
1876 the result has overflowed and flag_trapping_math is set. */
1877 if (flag_trapping_math
1878 && MODE_HAS_INFINITIES (mode)
1879 && REAL_VALUE_ISINF (result)
1880 && !REAL_VALUE_ISINF (d1)
1881 && !REAL_VALUE_ISINF (d2))
1882 return NULL_TREE;
1883
1884 /* Don't constant fold this floating point operation if the
1885 result may depend upon the run-time rounding mode and
1886 flag_rounding_math is set, or if GCC's software emulation
1887 is unable to accurately represent the result. */
1888 if ((flag_rounding_math
1889 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1890 && (inexact || !real_identical (&result, &value)))
1891 return NULL_TREE;
1892
1893 t = build_real (type, result);
1894
1895 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1896 return t;
1897 }
1898
1899 if (TREE_CODE (arg1) == FIXED_CST)
1900 {
1901 FIXED_VALUE_TYPE f1;
1902 FIXED_VALUE_TYPE f2;
1903 FIXED_VALUE_TYPE result;
1904 tree t, type;
1905 int sat_p;
1906 bool overflow_p;
1907
1908 /* The following codes are handled by fixed_arithmetic. */
1909 switch (code)
1910 {
1911 case PLUS_EXPR:
1912 case MINUS_EXPR:
1913 case MULT_EXPR:
1914 case TRUNC_DIV_EXPR:
1915 f2 = TREE_FIXED_CST (arg2);
1916 break;
1917
1918 case LSHIFT_EXPR:
1919 case RSHIFT_EXPR:
1920 f2.data.high = TREE_INT_CST_HIGH (arg2);
1921 f2.data.low = TREE_INT_CST_LOW (arg2);
1922 f2.mode = SImode;
1923 break;
1924
1925 default:
1926 return NULL_TREE;
1927 }
1928
1929 f1 = TREE_FIXED_CST (arg1);
1930 type = TREE_TYPE (arg1);
1931 sat_p = TYPE_SATURATING (type);
1932 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1933 t = build_fixed (type, result);
1934 /* Propagate overflow flags. */
1935 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 return t;
1938 }
1939
1940 if (TREE_CODE (arg1) == COMPLEX_CST)
1941 {
1942 tree type = TREE_TYPE (arg1);
1943 tree r1 = TREE_REALPART (arg1);
1944 tree i1 = TREE_IMAGPART (arg1);
1945 tree r2 = TREE_REALPART (arg2);
1946 tree i2 = TREE_IMAGPART (arg2);
1947 tree real, imag;
1948
1949 switch (code)
1950 {
1951 case PLUS_EXPR:
1952 case MINUS_EXPR:
1953 real = const_binop (code, r1, r2, notrunc);
1954 imag = const_binop (code, i1, i2, notrunc);
1955 break;
1956
1957 case MULT_EXPR:
1958 real = const_binop (MINUS_EXPR,
1959 const_binop (MULT_EXPR, r1, r2, notrunc),
1960 const_binop (MULT_EXPR, i1, i2, notrunc),
1961 notrunc);
1962 imag = const_binop (PLUS_EXPR,
1963 const_binop (MULT_EXPR, r1, i2, notrunc),
1964 const_binop (MULT_EXPR, i1, r2, notrunc),
1965 notrunc);
1966 break;
1967
1968 case RDIV_EXPR:
1969 {
1970 tree magsquared
1971 = const_binop (PLUS_EXPR,
1972 const_binop (MULT_EXPR, r2, r2, notrunc),
1973 const_binop (MULT_EXPR, i2, i2, notrunc),
1974 notrunc);
1975 tree t1
1976 = const_binop (PLUS_EXPR,
1977 const_binop (MULT_EXPR, r1, r2, notrunc),
1978 const_binop (MULT_EXPR, i1, i2, notrunc),
1979 notrunc);
1980 tree t2
1981 = const_binop (MINUS_EXPR,
1982 const_binop (MULT_EXPR, i1, r2, notrunc),
1983 const_binop (MULT_EXPR, r1, i2, notrunc),
1984 notrunc);
1985
1986 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1987 code = TRUNC_DIV_EXPR;
1988
1989 real = const_binop (code, t1, magsquared, notrunc);
1990 imag = const_binop (code, t2, magsquared, notrunc);
1991 }
1992 break;
1993
1994 default:
1995 return NULL_TREE;
1996 }
1997
1998 if (real && imag)
1999 return build_complex (type, real, imag);
2000 }
2001
2002 if (TREE_CODE (arg1) == VECTOR_CST)
2003 {
2004 tree type = TREE_TYPE (arg1);
2005 int count = TYPE_VECTOR_SUBPARTS (type), i;
2006 tree elements1, elements2, list = NULL_TREE;
2007
2008 if (TREE_CODE (arg2) != VECTOR_CST)
2009 return NULL_TREE;
2010
2011 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2012 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2013
2014 for (i = 0; i < count; i++)
2015 {
2016 tree elem1, elem2, elem;
2017
2018 /* The trailing elements can be empty and should be treated as 0.  */
2019 if (!elements1)
2020 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2021 else
2022 {
2023 elem1 = TREE_VALUE (elements1);
2024 elements1 = TREE_CHAIN (elements1);
2025 }
2026
2027 if (!elements2)
2028 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2029 else
2030 {
2031 elem2 = TREE_VALUE (elements2);
2032 elements2 = TREE_CHAIN (elements2);
2033 }
2034
2035 elem = const_binop (code, elem1, elem2, notrunc);
2036
2037 /* It is possible that const_binop cannot handle the given
2038 code and returns NULL_TREE.  */
2039 if (elem == NULL_TREE)
2040 return NULL_TREE;
2041
2042 list = tree_cons (NULL_TREE, elem, list);
2043 }
2044 return build_vector (type, nreverse (list));
2045 }
2046 return NULL_TREE;
2047 }
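
/* Usage sketch (illustrative; not part of the original source): const_binop
   dispatches on the constant kind of its first operand, so one entry point
   folds integer, real, fixed-point, complex and vector constants.

     tree half = build_real (double_type_node, dconsthalf);
     tree sum = const_binop (PLUS_EXPR, half, half, 0);

   SUM is a REAL_CST with value 1.0, or NULL_TREE when folding is refused
   (e.g. signaling NaNs with HONOR_SNANS, or a trapping division by zero).  */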
2048
2049 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2050 indicates which particular sizetype to create. */
2051
2052 tree
2053 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2054 {
2055 return build_int_cst (sizetype_tab[(int) kind], number);
2056 }
2057 \f
2058 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2059 is a tree code. The type of the result is taken from the operands.
2060 Both must be equivalent integer types, a la int_binop_types_match_p.
2061 If the operands are constant, so is the result. */
2062
2063 tree
2064 size_binop (enum tree_code code, tree arg0, tree arg1)
2065 {
2066 tree type = TREE_TYPE (arg0);
2067
2068 if (arg0 == error_mark_node || arg1 == error_mark_node)
2069 return error_mark_node;
2070
2071 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2072 TREE_TYPE (arg1)));
2073
2074 /* Handle the special case of two integer constants faster. */
2075 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2076 {
2077 /* And some specific cases even faster than that. */
2078 if (code == PLUS_EXPR)
2079 {
2080 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2081 return arg1;
2082 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2083 return arg0;
2084 }
2085 else if (code == MINUS_EXPR)
2086 {
2087 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2088 return arg0;
2089 }
2090 else if (code == MULT_EXPR)
2091 {
2092 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2093 return arg1;
2094 }
2095
2096 /* Handle general case of two integer constants. */
2097 return int_const_binop (code, arg0, arg1, 0);
2098 }
2099
2100 return fold_build2 (code, type, arg0, arg1);
2101 }
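
/* Usage sketch (illustrative; not part of the original source): adding two
   size expressions; both operands must already have matching sizetypes.

     tree sz = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   SZ is the sizetype constant 12, computed through int_const_binop; with
   non-constant operands the result is a folded PLUS_EXPR instead.  */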
2102
2103 /* Given two values, either both of sizetype or both of bitsizetype,
2104 compute the difference between the two values. Return the value
2105 in signed type corresponding to the type of the operands. */
2106
2107 tree
2108 size_diffop (tree arg0, tree arg1)
2109 {
2110 tree type = TREE_TYPE (arg0);
2111 tree ctype;
2112
2113 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2114 TREE_TYPE (arg1)));
2115
2116 /* If the type is already signed, just do the simple thing. */
2117 if (!TYPE_UNSIGNED (type))
2118 return size_binop (MINUS_EXPR, arg0, arg1);
2119
2120 if (type == sizetype)
2121 ctype = ssizetype;
2122 else if (type == bitsizetype)
2123 ctype = sbitsizetype;
2124 else
2125 ctype = signed_type_for (type);
2126
2127 /* If either operand is not a constant, do the conversions to the signed
2128 type and subtract. The hardware will do the right thing with any
2129 overflow in the subtraction. */
2130 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2131 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2132 fold_convert (ctype, arg1));
2133
2134 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2135 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2136 overflow) and negate (which can't either). Special-case a result
2137 of zero while we're here. */
2138 if (tree_int_cst_equal (arg0, arg1))
2139 return build_int_cst (ctype, 0);
2140 else if (tree_int_cst_lt (arg1, arg0))
2141 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2142 else
2143 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2144 fold_convert (ctype, size_binop (MINUS_EXPR,
2145 arg1, arg0)));
2146 }
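
/* Usage sketch (illustrative; not part of the original source): since
   sizetype is unsigned, size_diffop returns the difference in the
   corresponding signed type so a "negative" result is representable.

     tree d = size_diffop (size_int (4), size_int (8));

   D is the ssizetype constant -4, built by the subtract-and-negate
   path above.  */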
2147 \f
2148 /* A subroutine of fold_convert_const handling conversions of an
2149 INTEGER_CST to another integer type. */
2150
2151 static tree
2152 fold_convert_const_int_from_int (tree type, const_tree arg1)
2153 {
2154 tree t;
2155
2156 /* Given an integer constant, make new constant with new type,
2157 appropriately sign-extended or truncated. */
2158 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2159 TREE_INT_CST_HIGH (arg1),
2160 /* Don't set the overflow when
2161 converting from a pointer, */
2162 !POINTER_TYPE_P (TREE_TYPE (arg1))
2163 /* or to a sizetype with same signedness
2164 and the precision is unchanged.
2165 ??? sizetype is always sign-extended,
2166 but its signedness depends on the
2167 frontend. Thus we see spurious overflows
2168 here if we do not check this. */
2169 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2170 == TYPE_PRECISION (type))
2171 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2172 == TYPE_UNSIGNED (type))
2173 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2174 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2175 || (TREE_CODE (type) == INTEGER_TYPE
2176 && TYPE_IS_SIZETYPE (type)))),
2177 (TREE_INT_CST_HIGH (arg1) < 0
2178 && (TYPE_UNSIGNED (type)
2179 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2180 | TREE_OVERFLOW (arg1));
2181
2182 return t;
2183 }
2184
2185 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2186 to an integer type. */
2187
2188 static tree
2189 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2190 {
2191 int overflow = 0;
2192 tree t;
2193
2194 /* The following code implements the floating point to integer
2195 conversion rules required by the Java Language Specification,
2196 that IEEE NaNs are mapped to zero and values that overflow
2197 the target precision saturate, i.e. values greater than
2198 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2199 are mapped to INT_MIN. These semantics are allowed by the
2200 C and C++ standards that simply state that the behavior of
2201 FP-to-integer conversion is unspecified upon overflow. */
2202
2203 HOST_WIDE_INT high, low;
2204 REAL_VALUE_TYPE r;
2205 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2206
2207 switch (code)
2208 {
2209 case FIX_TRUNC_EXPR:
2210 real_trunc (&r, VOIDmode, &x);
2211 break;
2212
2213 default:
2214 gcc_unreachable ();
2215 }
2216
2217 /* If R is NaN, return zero and show we have an overflow. */
2218 if (REAL_VALUE_ISNAN (r))
2219 {
2220 overflow = 1;
2221 high = 0;
2222 low = 0;
2223 }
2224
2225 /* See if R is less than the lower bound or greater than the
2226 upper bound. */
2227
2228 if (! overflow)
2229 {
2230 tree lt = TYPE_MIN_VALUE (type);
2231 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2232 if (REAL_VALUES_LESS (r, l))
2233 {
2234 overflow = 1;
2235 high = TREE_INT_CST_HIGH (lt);
2236 low = TREE_INT_CST_LOW (lt);
2237 }
2238 }
2239
2240 if (! overflow)
2241 {
2242 tree ut = TYPE_MAX_VALUE (type);
2243 if (ut)
2244 {
2245 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2246 if (REAL_VALUES_LESS (u, r))
2247 {
2248 overflow = 1;
2249 high = TREE_INT_CST_HIGH (ut);
2250 low = TREE_INT_CST_LOW (ut);
2251 }
2252 }
2253 }
2254
2255 if (! overflow)
2256 REAL_VALUE_TO_INT (&low, &high, r);
2257
2258 t = force_fit_type_double (type, low, high, -1,
2259 overflow | TREE_OVERFLOW (arg1));
2260 return t;
2261 }
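
/* Usage sketch (illustrative; not part of the original source): under the
   saturating rules above, a NaN converts to 0 and out-of-range values
   saturate to TYPE_MIN_VALUE/TYPE_MAX_VALUE, with TREE_OVERFLOW set.

     tree r = build_real (double_type_node, dconst2);
     tree i = fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
                                                integer_type_node, r);

   I is the INTEGER_CST 2, with no overflow recorded.  */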
2262
2263 /* A subroutine of fold_convert_const handling conversions of a
2264 FIXED_CST to an integer type. */
2265
2266 static tree
2267 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2268 {
2269 tree t;
2270 double_int temp, temp_trunc;
2271 unsigned int mode;
2272
2273 /* Right shift FIXED_CST to temp by fbit. */
2274 temp = TREE_FIXED_CST (arg1).data;
2275 mode = TREE_FIXED_CST (arg1).mode;
2276 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2277 {
2278 lshift_double (temp.low, temp.high,
2279 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2280 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2281
2282 /* Left shift temp to temp_trunc by fbit. */
2283 lshift_double (temp.low, temp.high,
2284 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2285 &temp_trunc.low, &temp_trunc.high,
2286 SIGNED_FIXED_POINT_MODE_P (mode));
2287 }
2288 else
2289 {
2290 temp.low = 0;
2291 temp.high = 0;
2292 temp_trunc.low = 0;
2293 temp_trunc.high = 0;
2294 }
2295
2296 /* If FIXED_CST is negative, we need to round the value toward 0:
2297 if the discarded fractional bits are nonzero, add 1 to TEMP.  */
2298 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2299 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2300 {
2301 double_int one;
2302 one.low = 1;
2303 one.high = 0;
2304 temp = double_int_add (temp, one);
2305 }
2306
2307 /* Given a fixed-point constant, make new constant with new type,
2308 appropriately sign-extended or truncated. */
2309 t = force_fit_type_double (type, temp.low, temp.high, -1,
2310 (temp.high < 0
2311 && (TYPE_UNSIGNED (type)
2312 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2313 | TREE_OVERFLOW (arg1));
2314
2315 return t;
2316 }
2317
2318 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2319 to another floating point type. */
2320
2321 static tree
2322 fold_convert_const_real_from_real (tree type, const_tree arg1)
2323 {
2324 REAL_VALUE_TYPE value;
2325 tree t;
2326
2327 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2328 t = build_real (type, value);
2329
2330 /* If converting an infinity or NAN to a representation that doesn't
2331 have one, set the overflow bit so that we can produce some kind of
2332 error message at the appropriate point if necessary. It's not the
2333 most user-friendly message, but it's better than nothing. */
2334 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2335 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2336 TREE_OVERFLOW (t) = 1;
2337 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2338 && !MODE_HAS_NANS (TYPE_MODE (type)))
2339 TREE_OVERFLOW (t) = 1;
2340 /* Regular overflow, conversion produced an infinity in a mode that
2341 can't represent them. */
2342 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2343 && REAL_VALUE_ISINF (value)
2344 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2345 TREE_OVERFLOW (t) = 1;
2346 else
2347 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2348 return t;
2349 }
2350
2351 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2352 to a floating point type. */
2353
2354 static tree
2355 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2356 {
2357 REAL_VALUE_TYPE value;
2358 tree t;
2359
2360 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2361 t = build_real (type, value);
2362
2363 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2364 return t;
2365 }
2366
2367 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2368 to another fixed-point type. */
2369
2370 static tree
2371 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2372 {
2373 FIXED_VALUE_TYPE value;
2374 tree t;
2375 bool overflow_p;
2376
2377 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2378 TYPE_SATURATING (type));
2379 t = build_fixed (type, value);
2380
2381 /* Propagate overflow flags. */
2382 if (overflow_p | TREE_OVERFLOW (arg1))
2383 TREE_OVERFLOW (t) = 1;
2384 return t;
2385 }
2386
2387 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2388 to a fixed-point type. */
2389
2390 static tree
2391 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2392 {
2393 FIXED_VALUE_TYPE value;
2394 tree t;
2395 bool overflow_p;
2396
2397 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2398 TREE_INT_CST (arg1),
2399 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2400 TYPE_SATURATING (type));
2401 t = build_fixed (type, value);
2402
2403 /* Propagate overflow flags. */
2404 if (overflow_p | TREE_OVERFLOW (arg1))
2405 TREE_OVERFLOW (t) = 1;
2406 return t;
2407 }
2408
2409 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2410 to a fixed-point type. */
2411
2412 static tree
2413 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2414 {
2415 FIXED_VALUE_TYPE value;
2416 tree t;
2417 bool overflow_p;
2418
2419 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2420 &TREE_REAL_CST (arg1),
2421 TYPE_SATURATING (type));
2422 t = build_fixed (type, value);
2423
2424 /* Propagate overflow flags. */
2425 if (overflow_p | TREE_OVERFLOW (arg1))
2426 TREE_OVERFLOW (t) = 1;
2427 return t;
2428 }
2429
2430 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2431 type TYPE. If no simplification can be done return NULL_TREE. */
2432
2433 static tree
2434 fold_convert_const (enum tree_code code, tree type, tree arg1)
2435 {
2436 if (TREE_TYPE (arg1) == type)
2437 return arg1;
2438
2439 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2440 || TREE_CODE (type) == OFFSET_TYPE)
2441 {
2442 if (TREE_CODE (arg1) == INTEGER_CST)
2443 return fold_convert_const_int_from_int (type, arg1);
2444 else if (TREE_CODE (arg1) == REAL_CST)
2445 return fold_convert_const_int_from_real (code, type, arg1);
2446 else if (TREE_CODE (arg1) == FIXED_CST)
2447 return fold_convert_const_int_from_fixed (type, arg1);
2448 }
2449 else if (TREE_CODE (type) == REAL_TYPE)
2450 {
2451 if (TREE_CODE (arg1) == INTEGER_CST)
2452 return build_real_from_int_cst (type, arg1);
2453 else if (TREE_CODE (arg1) == REAL_CST)
2454 return fold_convert_const_real_from_real (type, arg1);
2455 else if (TREE_CODE (arg1) == FIXED_CST)
2456 return fold_convert_const_real_from_fixed (type, arg1);
2457 }
2458 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2459 {
2460 if (TREE_CODE (arg1) == FIXED_CST)
2461 return fold_convert_const_fixed_from_fixed (type, arg1);
2462 else if (TREE_CODE (arg1) == INTEGER_CST)
2463 return fold_convert_const_fixed_from_int (type, arg1);
2464 else if (TREE_CODE (arg1) == REAL_CST)
2465 return fold_convert_const_fixed_from_real (type, arg1);
2466 }
2467 return NULL_TREE;
2468 }
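
/* Usage sketch (illustrative; not part of the original source): the
   dispatch above means a single entry point folds any constant-to-type
   conversion it understands.

     tree t = fold_convert_const (FLOAT_EXPR, double_type_node,
                                  build_int_cst (integer_type_node, 5));

   T is the REAL_CST 5.0 (via build_real_from_int_cst); unsupported
   combinations simply return NULL_TREE.  */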
2469
2470 /* Construct a vector of zero elements of vector type TYPE. */
2471
2472 static tree
2473 build_zero_vector (tree type)
2474 {
2475 tree elem, list;
2476 int i, units;
2477
2478 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2479 units = TYPE_VECTOR_SUBPARTS (type);
2480
2481 list = NULL_TREE;
2482 for (i = 0; i < units; i++)
2483 list = tree_cons (NULL_TREE, elem, list);
2484 return build_vector (type, list);
2485 }
2486
2487 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2488
2489 bool
2490 fold_convertible_p (const_tree type, const_tree arg)
2491 {
2492 tree orig = TREE_TYPE (arg);
2493
2494 if (type == orig)
2495 return true;
2496
2497 if (TREE_CODE (arg) == ERROR_MARK
2498 || TREE_CODE (type) == ERROR_MARK
2499 || TREE_CODE (orig) == ERROR_MARK)
2500 return false;
2501
2502 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2503 return true;
2504
2505 switch (TREE_CODE (type))
2506 {
2507 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2508 case POINTER_TYPE: case REFERENCE_TYPE:
2509 case OFFSET_TYPE:
2510 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2511 || TREE_CODE (orig) == OFFSET_TYPE)
2512 return true;
2513 return (TREE_CODE (orig) == VECTOR_TYPE
2514 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2515
2516 case REAL_TYPE:
2517 case FIXED_POINT_TYPE:
2518 case COMPLEX_TYPE:
2519 case VECTOR_TYPE:
2520 case VOID_TYPE:
2521 return TREE_CODE (type) == TREE_CODE (orig);
2522
2523 default:
2524 return false;
2525 }
2526 }
2527
2528 /* Convert expression ARG to type TYPE. Used by the middle-end for
2529 simple conversions in preference to calling the front-end's convert. */
2530
2531 tree
2532 fold_convert (tree type, tree arg)
2533 {
2534 tree orig = TREE_TYPE (arg);
2535 tree tem;
2536
2537 if (type == orig)
2538 return arg;
2539
2540 if (TREE_CODE (arg) == ERROR_MARK
2541 || TREE_CODE (type) == ERROR_MARK
2542 || TREE_CODE (orig) == ERROR_MARK)
2543 return error_mark_node;
2544
2545 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2546 return fold_build1 (NOP_EXPR, type, arg);
2547
2548 switch (TREE_CODE (type))
2549 {
2550 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2551 case POINTER_TYPE: case REFERENCE_TYPE:
2552 case OFFSET_TYPE:
2553 if (TREE_CODE (arg) == INTEGER_CST)
2554 {
2555 tem = fold_convert_const (NOP_EXPR, type, arg);
2556 if (tem != NULL_TREE)
2557 return tem;
2558 }
2559 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2560 || TREE_CODE (orig) == OFFSET_TYPE)
2561 return fold_build1 (NOP_EXPR, type, arg);
2562 if (TREE_CODE (orig) == COMPLEX_TYPE)
2563 {
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2566 }
2567 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2568 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2569 return fold_build1 (NOP_EXPR, type, arg);
2570
2571 case REAL_TYPE:
2572 if (TREE_CODE (arg) == INTEGER_CST)
2573 {
2574 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2575 if (tem != NULL_TREE)
2576 return tem;
2577 }
2578 else if (TREE_CODE (arg) == REAL_CST)
2579 {
2580 tem = fold_convert_const (NOP_EXPR, type, arg);
2581 if (tem != NULL_TREE)
2582 return tem;
2583 }
2584 else if (TREE_CODE (arg) == FIXED_CST)
2585 {
2586 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2587 if (tem != NULL_TREE)
2588 return tem;
2589 }
2590
2591 switch (TREE_CODE (orig))
2592 {
2593 case INTEGER_TYPE:
2594 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2595 case POINTER_TYPE: case REFERENCE_TYPE:
2596 return fold_build1 (FLOAT_EXPR, type, arg);
2597
2598 case REAL_TYPE:
2599 return fold_build1 (NOP_EXPR, type, arg);
2600
2601 case FIXED_POINT_TYPE:
2602 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2603
2604 case COMPLEX_TYPE:
2605 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2606 return fold_convert (type, tem);
2607
2608 default:
2609 gcc_unreachable ();
2610 }
2611
2612 case FIXED_POINT_TYPE:
2613 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2614 || TREE_CODE (arg) == REAL_CST)
2615 {
2616 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2617 if (tem != NULL_TREE)
2618 return tem;
2619 }
2620
2621 switch (TREE_CODE (orig))
2622 {
2623 case FIXED_POINT_TYPE:
2624 case INTEGER_TYPE:
2625 case ENUMERAL_TYPE:
2626 case BOOLEAN_TYPE:
2627 case REAL_TYPE:
2628 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2629
2630 case COMPLEX_TYPE:
2631 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2632 return fold_convert (type, tem);
2633
2634 default:
2635 gcc_unreachable ();
2636 }
2637
2638 case COMPLEX_TYPE:
2639 switch (TREE_CODE (orig))
2640 {
2641 case INTEGER_TYPE:
2642 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2643 case POINTER_TYPE: case REFERENCE_TYPE:
2644 case REAL_TYPE:
2645 case FIXED_POINT_TYPE:
2646 return build2 (COMPLEX_EXPR, type,
2647 fold_convert (TREE_TYPE (type), arg),
2648 fold_convert (TREE_TYPE (type), integer_zero_node));
2649 case COMPLEX_TYPE:
2650 {
2651 tree rpart, ipart;
2652
2653 if (TREE_CODE (arg) == COMPLEX_EXPR)
2654 {
2655 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2656 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2657 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2658 }
2659
2660 arg = save_expr (arg);
2661 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2662 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2663 rpart = fold_convert (TREE_TYPE (type), rpart);
2664 ipart = fold_convert (TREE_TYPE (type), ipart);
2665 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2666 }
2667
2668 default:
2669 gcc_unreachable ();
2670 }
2671
2672 case VECTOR_TYPE:
2673 if (integer_zerop (arg))
2674 return build_zero_vector (type);
2675 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2676 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2677 || TREE_CODE (orig) == VECTOR_TYPE);
2678 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2679
2680 case VOID_TYPE:
2681 tem = fold_ignored_result (arg);
2682 if (TREE_CODE (tem) == MODIFY_EXPR)
2683 return tem;
2684 return fold_build1 (NOP_EXPR, type, tem);
2685
2686 default:
2687 gcc_unreachable ();
2688 }
2689 }
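
/* Usage sketch (illustrative; not part of the original source): unlike the
   front ends' convert, fold_convert folds eagerly where it can.

     tree d = fold_convert (double_type_node,
                            build_int_cst (integer_type_node, 5));
     tree c = fold_convert (complex_double_type_node, d);

   D is the REAL_CST 5.0; C is a COMPLEX_EXPR pairing 5.0 with an
   imaginary part of 0.0, per the COMPLEX_TYPE case above.  */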
2690 \f
2691 /* Return false if expr can be assumed not to be an lvalue, true
2692 otherwise. */
2693
2694 static bool
2695 maybe_lvalue_p (const_tree x)
2696 {
2697 /* We only need to wrap lvalue tree codes. */
2698 switch (TREE_CODE (x))
2699 {
2700 case VAR_DECL:
2701 case PARM_DECL:
2702 case RESULT_DECL:
2703 case LABEL_DECL:
2704 case FUNCTION_DECL:
2705 case SSA_NAME:
2706
2707 case COMPONENT_REF:
2708 case INDIRECT_REF:
2709 case ALIGN_INDIRECT_REF:
2710 case MISALIGNED_INDIRECT_REF:
2711 case ARRAY_REF:
2712 case ARRAY_RANGE_REF:
2713 case BIT_FIELD_REF:
2714 case OBJ_TYPE_REF:
2715
2716 case REALPART_EXPR:
2717 case IMAGPART_EXPR:
2718 case PREINCREMENT_EXPR:
2719 case PREDECREMENT_EXPR:
2720 case SAVE_EXPR:
2721 case TRY_CATCH_EXPR:
2722 case WITH_CLEANUP_EXPR:
2723 case COMPOUND_EXPR:
2724 case MODIFY_EXPR:
2725 case TARGET_EXPR:
2726 case COND_EXPR:
2727 case BIND_EXPR:
2728 case MIN_EXPR:
2729 case MAX_EXPR:
2730 break;
2731
2732 default:
2733 /* Assume the worst for front-end tree codes. */
2734 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2735 break;
2736 return false;
2737 }
2738
2739 return true;
2740 }
2741
2742 /* Return an expr equal to X but certainly not valid as an lvalue. */
2743
2744 tree
2745 non_lvalue (tree x)
2746 {
2747 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2748 us. */
2749 if (in_gimple_form)
2750 return x;
2751
2752 if (! maybe_lvalue_p (x))
2753 return x;
2754 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2755 }
2756
2757 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2758 Zero means allow extended lvalues. */
2759
2760 int pedantic_lvalues;
2761
2762 /* When pedantic, return an expr equal to X but certainly not valid as a
2763 pedantic lvalue. Otherwise, return X. */
2764
2765 static tree
2766 pedantic_non_lvalue (tree x)
2767 {
2768 if (pedantic_lvalues)
2769 return non_lvalue (x);
2770 else
2771 return x;
2772 }
2773 \f
2774 /* Given a tree comparison code, return the code that is the logical inverse
2775 of the given code. It is not safe to do this for floating-point
2776 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2777 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2778
2779 enum tree_code
2780 invert_tree_comparison (enum tree_code code, bool honor_nans)
2781 {
2782 if (honor_nans && flag_trapping_math)
2783 return ERROR_MARK;
2784
2785 switch (code)
2786 {
2787 case EQ_EXPR:
2788 return NE_EXPR;
2789 case NE_EXPR:
2790 return EQ_EXPR;
2791 case GT_EXPR:
2792 return honor_nans ? UNLE_EXPR : LE_EXPR;
2793 case GE_EXPR:
2794 return honor_nans ? UNLT_EXPR : LT_EXPR;
2795 case LT_EXPR:
2796 return honor_nans ? UNGE_EXPR : GE_EXPR;
2797 case LE_EXPR:
2798 return honor_nans ? UNGT_EXPR : GT_EXPR;
2799 case LTGT_EXPR:
2800 return UNEQ_EXPR;
2801 case UNEQ_EXPR:
2802 return LTGT_EXPR;
2803 case UNGT_EXPR:
2804 return LE_EXPR;
2805 case UNGE_EXPR:
2806 return LT_EXPR;
2807 case UNLT_EXPR:
2808 return GE_EXPR;
2809 case UNLE_EXPR:
2810 return GT_EXPR;
2811 case ORDERED_EXPR:
2812 return UNORDERED_EXPR;
2813 case UNORDERED_EXPR:
2814 return ORDERED_EXPR;
2815 default:
2816 gcc_unreachable ();
2817 }
2818 }
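
/* Usage sketch (illustrative; not part of the original source): with NaNs
   honored, the inverse of a < b is the unordered-or-greater-equal test,
   not a >= b.

     enum tree_code inv = invert_tree_comparison (LT_EXPR, true);

   INV is UNGE_EXPR when -ftrapping-math is off; with HONOR_NANS false it
   would be GE_EXPR, and with both NaNs and trapping math it is ERROR_MARK.  */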
2819
2820 /* Similar, but return the comparison that results if the operands are
2821 swapped. This is safe for floating-point. */
2822
2823 enum tree_code
2824 swap_tree_comparison (enum tree_code code)
2825 {
2826 switch (code)
2827 {
2828 case EQ_EXPR:
2829 case NE_EXPR:
2830 case ORDERED_EXPR:
2831 case UNORDERED_EXPR:
2832 case LTGT_EXPR:
2833 case UNEQ_EXPR:
2834 return code;
2835 case GT_EXPR:
2836 return LT_EXPR;
2837 case GE_EXPR:
2838 return LE_EXPR;
2839 case LT_EXPR:
2840 return GT_EXPR;
2841 case LE_EXPR:
2842 return GE_EXPR;
2843 case UNGT_EXPR:
2844 return UNLT_EXPR;
2845 case UNGE_EXPR:
2846 return UNLE_EXPR;
2847 case UNLT_EXPR:
2848 return UNGT_EXPR;
2849 case UNLE_EXPR:
2850 return UNGE_EXPR;
2851 default:
2852 gcc_unreachable ();
2853 }
2854 }
2855
2856
2857 /* Convert a comparison tree code from an enum tree_code representation
2858 into a compcode bit-based encoding. This function is the inverse of
2859 compcode_to_comparison. */
2860
2861 static enum comparison_code
2862 comparison_to_compcode (enum tree_code code)
2863 {
2864 switch (code)
2865 {
2866 case LT_EXPR:
2867 return COMPCODE_LT;
2868 case EQ_EXPR:
2869 return COMPCODE_EQ;
2870 case LE_EXPR:
2871 return COMPCODE_LE;
2872 case GT_EXPR:
2873 return COMPCODE_GT;
2874 case NE_EXPR:
2875 return COMPCODE_NE;
2876 case GE_EXPR:
2877 return COMPCODE_GE;
2878 case ORDERED_EXPR:
2879 return COMPCODE_ORD;
2880 case UNORDERED_EXPR:
2881 return COMPCODE_UNORD;
2882 case UNLT_EXPR:
2883 return COMPCODE_UNLT;
2884 case UNEQ_EXPR:
2885 return COMPCODE_UNEQ;
2886 case UNLE_EXPR:
2887 return COMPCODE_UNLE;
2888 case UNGT_EXPR:
2889 return COMPCODE_UNGT;
2890 case LTGT_EXPR:
2891 return COMPCODE_LTGT;
2892 case UNGE_EXPR:
2893 return COMPCODE_UNGE;
2894 default:
2895 gcc_unreachable ();
2896 }
2897 }
2898
2899 /* Convert a compcode bit-based encoding of a comparison operator back
2900 to GCC's enum tree_code representation. This function is the
2901 inverse of comparison_to_compcode. */
2902
2903 static enum tree_code
2904 compcode_to_comparison (enum comparison_code code)
2905 {
2906 switch (code)
2907 {
2908 case COMPCODE_LT:
2909 return LT_EXPR;
2910 case COMPCODE_EQ:
2911 return EQ_EXPR;
2912 case COMPCODE_LE:
2913 return LE_EXPR;
2914 case COMPCODE_GT:
2915 return GT_EXPR;
2916 case COMPCODE_NE:
2917 return NE_EXPR;
2918 case COMPCODE_GE:
2919 return GE_EXPR;
2920 case COMPCODE_ORD:
2921 return ORDERED_EXPR;
2922 case COMPCODE_UNORD:
2923 return UNORDERED_EXPR;
2924 case COMPCODE_UNLT:
2925 return UNLT_EXPR;
2926 case COMPCODE_UNEQ:
2927 return UNEQ_EXPR;
2928 case COMPCODE_UNLE:
2929 return UNLE_EXPR;
2930 case COMPCODE_UNGT:
2931 return UNGT_EXPR;
2932 case COMPCODE_LTGT:
2933 return LTGT_EXPR;
2934 case COMPCODE_UNGE:
2935 return UNGE_EXPR;
2936 default:
2937 gcc_unreachable ();
2938 }
2939 }
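
/* Illustrative note (not part of the original source): the bit-based
   encoding turns logical combination of comparisons into bitwise
   arithmetic, e.g.

     comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR)
       == COMPCODE_LE

   since COMPCODE_LT (1) | COMPCODE_EQ (2) == COMPCODE_LE (3).  */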
2940
2941 /* Return a tree for the comparison which is the combination of
2942 doing the AND or OR (depending on CODE) of the two operations LCODE
2943 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2944 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2945 if this makes the transformation invalid. */
2946
2947 tree
2948 combine_comparisons (enum tree_code code, enum tree_code lcode,
2949 enum tree_code rcode, tree truth_type,
2950 tree ll_arg, tree lr_arg)
2951 {
2952 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2953 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2954 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2955 int compcode;
2956
2957 switch (code)
2958 {
2959 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2960 compcode = lcompcode & rcompcode;
2961 break;
2962
2963 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2964 compcode = lcompcode | rcompcode;
2965 break;
2966
2967 default:
2968 return NULL_TREE;
2969 }
2970
2971 if (!honor_nans)
2972 {
2973 /* Eliminate unordered comparisons, as well as LTGT and ORD
2974 which are not used unless the mode has NaNs. */
2975 compcode &= ~COMPCODE_UNORD;
2976 if (compcode == COMPCODE_LTGT)
2977 compcode = COMPCODE_NE;
2978 else if (compcode == COMPCODE_ORD)
2979 compcode = COMPCODE_TRUE;
2980 }
2981 else if (flag_trapping_math)
2982 {
2983 /* Check that the original operation and the optimized ones will trap
2984 under the same condition. */
2985 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2986 && (lcompcode != COMPCODE_EQ)
2987 && (lcompcode != COMPCODE_ORD);
2988 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2989 && (rcompcode != COMPCODE_EQ)
2990 && (rcompcode != COMPCODE_ORD);
2991 bool trap = (compcode & COMPCODE_UNORD) == 0
2992 && (compcode != COMPCODE_EQ)
2993 && (compcode != COMPCODE_ORD);
2994
2995 /* In a short-circuited boolean expression the LHS might be
2996 such that the RHS, if evaluated, will never trap. For
2997 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2998 if neither x nor y is NaN. (This is a mixed blessing: for
2999 example, the expression above will never trap, hence
3000 optimizing it to x < y would be invalid). */
3001 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3002 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3003 rtrap = false;
3004
3005 /* If the comparison was short-circuited, and only the RHS
3006 trapped, we may now generate a spurious trap. */
3007 if (rtrap && !ltrap
3008 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3009 return NULL_TREE;
3010
3011 /* If we changed the conditions that cause a trap, we lose. */
3012 if ((ltrap || rtrap) != trap)
3013 return NULL_TREE;
3014 }
3015
3016 if (compcode == COMPCODE_TRUE)
3017 return constant_boolean_node (true, truth_type);
3018 else if (compcode == COMPCODE_FALSE)
3019 return constant_boolean_node (false, truth_type);
3020 else
3021 {
3022 enum tree_code tcode;
3023
3024 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3025 return fold_build2 (tcode, truth_type, ll_arg, lr_arg);
3026 }
3027 }
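
/* Usage sketch (illustrative; not part of the original source): merging
   two comparisons of the same operands into one.  X and Y are
   hypothetical operand trees.

     tree t = combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                                   boolean_type_node, x, y);

   For non-NaN operands T is the tree x <= y, because
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE; when the combination would
   change trap semantics the result is NULL_TREE.  */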
3028 \f
3029 /* Return nonzero if two operands (typically of the same tree node)
3030 are necessarily equal. If either argument has side-effects this
3031 function returns zero. FLAGS modifies behavior as follows:
3032
3033 If OEP_ONLY_CONST is set, only return nonzero for constants.
3034 This function tests whether the operands are indistinguishable;
3035 it does not test whether they are equal using C's == operation.
3036 The distinction is important for IEEE floating point, because
3037 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3038 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3039
3040 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3041 even though it may hold multiple values during a function.
3042 This is because a GCC tree node guarantees that nothing else is
3043 executed between the evaluation of its "operands" (which may often
3044 be evaluated in arbitrary order). Hence if the operands themselves
3045 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3046 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3047 unset means assuming isochronic (or instantaneous) tree equivalence.
3048 Unless comparing arbitrary expression trees, such as from different
3049 statements, this flag can usually be left unset.
3050
3051 If OEP_PURE_SAME is set, then pure functions with identical arguments
3052 are considered the same. It is used when the caller has other ways
3053 to ensure that global memory is unchanged in between. */
3054
3055 int
3056 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3057 {
3058 /* If either is ERROR_MARK, they aren't equal. */
3059 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3060 return 0;
3061
3062 /* Check equality of integer constants before bailing out due to
3063 precision differences. */
3064 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3065 return tree_int_cst_equal (arg0, arg1);
3066
3067 /* If both types don't have the same signedness, then we can't consider
3068 them equal. We must check this before the STRIP_NOPS calls
3069 because they may change the signedness of the arguments. As pointers
3070 strictly don't have a signedness, require either two pointers or
3071 two non-pointers as well. */
3072 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3073 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3074 return 0;
3075
3076 /* If both types don't have the same precision, then it is not safe
3077 to strip NOPs. */
3078 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3079 return 0;
3080
3081 STRIP_NOPS (arg0);
3082 STRIP_NOPS (arg1);
3083
3084 /* In case both args are comparisons but with different comparison
3085 code, try to swap the comparison operands of one arg to produce
3086 a match and compare that variant. */
3087 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3088 && COMPARISON_CLASS_P (arg0)
3089 && COMPARISON_CLASS_P (arg1))
3090 {
3091 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3092
3093 if (TREE_CODE (arg0) == swap_code)
3094 return operand_equal_p (TREE_OPERAND (arg0, 0),
3095 TREE_OPERAND (arg1, 1), flags)
3096 && operand_equal_p (TREE_OPERAND (arg0, 1),
3097 TREE_OPERAND (arg1, 0), flags);
3098 }
3099
3100 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3101 /* This is needed for conversions and for COMPONENT_REF.
3102 Might as well play it safe and always test this. */
3103 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3104 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3105 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3106 return 0;
3107
3108 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3109 We don't care about side effects in that case because the SAVE_EXPR
3110 takes care of that for us. In all other cases, two expressions are
3111 equal if they have no side effects. If we have two identical
3112 expressions with side effects that should be treated the same due
3113 to the only side effects being identical SAVE_EXPR's, that will
3114 be detected in the recursive calls below. */
3115 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3116 && (TREE_CODE (arg0) == SAVE_EXPR
3117 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3118 return 1;
3119
3120 /* Next handle constant cases, those for which we can return 1 even
3121 if ONLY_CONST is set. */
3122 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3123 switch (TREE_CODE (arg0))
3124 {
3125 case INTEGER_CST:
3126 return tree_int_cst_equal (arg0, arg1);
3127
3128 case FIXED_CST:
3129 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3130 TREE_FIXED_CST (arg1));
3131
3132 case REAL_CST:
3133 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3134 TREE_REAL_CST (arg1)))
3135 return 1;
3136
3137
3138 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3139 {
3140 /* If we do not distinguish between signed and unsigned zero,
3141 consider them equal. */
3142 if (real_zerop (arg0) && real_zerop (arg1))
3143 return 1;
3144 }
3145 return 0;
3146
3147 case VECTOR_CST:
3148 {
3149 tree v1, v2;
3150
3151 v1 = TREE_VECTOR_CST_ELTS (arg0);
3152 v2 = TREE_VECTOR_CST_ELTS (arg1);
3153 while (v1 && v2)
3154 {
3155 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3156 flags))
3157 return 0;
3158 v1 = TREE_CHAIN (v1);
3159 v2 = TREE_CHAIN (v2);
3160 }
3161
3162 return v1 == v2;
3163 }
3164
3165 case COMPLEX_CST:
3166 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3167 flags)
3168 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3169 flags));
3170
3171 case STRING_CST:
3172 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3173 && ! memcmp (TREE_STRING_POINTER (arg0),
3174 TREE_STRING_POINTER (arg1),
3175 TREE_STRING_LENGTH (arg0)));
3176
3177 case ADDR_EXPR:
3178 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3179 0);
3180 default:
3181 break;
3182 }
3183
3184 if (flags & OEP_ONLY_CONST)
3185 return 0;
3186
3187 /* Define macros to test an operand from arg0 and arg1 for equality and a
3188 variant that allows null and views null as being different from any
3189 non-null value. In the latter case, if either is null, then both
3190 must be; otherwise, do the normal comparison. */
3191 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3192 TREE_OPERAND (arg1, N), flags)
3193
3194 #define OP_SAME_WITH_NULL(N) \
3195 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3196 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3197
3198 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3199 {
3200 case tcc_unary:
3201 /* Two conversions are equal only if signedness and modes match. */
3202 switch (TREE_CODE (arg0))
3203 {
3204 CASE_CONVERT:
3205 case FIX_TRUNC_EXPR:
3206 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3207 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3208 return 0;
3209 break;
3210 default:
3211 break;
3212 }
3213
3214 return OP_SAME (0);
3215
3216
3217 case tcc_comparison:
3218 case tcc_binary:
3219 if (OP_SAME (0) && OP_SAME (1))
3220 return 1;
3221
3222 /* For commutative ops, allow the other order. */
3223 return (commutative_tree_code (TREE_CODE (arg0))
3224 && operand_equal_p (TREE_OPERAND (arg0, 0),
3225 TREE_OPERAND (arg1, 1), flags)
3226 && operand_equal_p (TREE_OPERAND (arg0, 1),
3227 TREE_OPERAND (arg1, 0), flags));
3228
3229 case tcc_reference:
3230 /* If either of the pointer (or reference) expressions we are
3231 dereferencing contain a side effect, these cannot be equal. */
3232 if (TREE_SIDE_EFFECTS (arg0)
3233 || TREE_SIDE_EFFECTS (arg1))
3234 return 0;
3235
3236 switch (TREE_CODE (arg0))
3237 {
3238 case INDIRECT_REF:
3239 case ALIGN_INDIRECT_REF:
3240 case MISALIGNED_INDIRECT_REF:
3241 case REALPART_EXPR:
3242 case IMAGPART_EXPR:
3243 return OP_SAME (0);
3244
3245 case ARRAY_REF:
3246 case ARRAY_RANGE_REF:
3247 /* Operands 2 and 3 may be null.
3248 Compare the array index by value if it is constant first as we
3249 may have different types but same value here. */
3250 return (OP_SAME (0)
3251 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3252 TREE_OPERAND (arg1, 1))
3253 || OP_SAME (1))
3254 && OP_SAME_WITH_NULL (2)
3255 && OP_SAME_WITH_NULL (3));
3256
3257 case COMPONENT_REF:
3258 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3259 may be NULL when we're called to compare MEM_EXPRs. */
3260 return OP_SAME_WITH_NULL (0)
3261 && OP_SAME (1)
3262 && OP_SAME_WITH_NULL (2);
3263
3264 case BIT_FIELD_REF:
3265 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3266
3267 default:
3268 return 0;
3269 }
3270
3271 case tcc_expression:
3272 switch (TREE_CODE (arg0))
3273 {
3274 case ADDR_EXPR:
3275 case TRUTH_NOT_EXPR:
3276 return OP_SAME (0);
3277
3278 case TRUTH_ANDIF_EXPR:
3279 case TRUTH_ORIF_EXPR:
3280 return OP_SAME (0) && OP_SAME (1);
3281
3282 case TRUTH_AND_EXPR:
3283 case TRUTH_OR_EXPR:
3284 case TRUTH_XOR_EXPR:
3285 if (OP_SAME (0) && OP_SAME (1))
3286 return 1;
3287
3288 /* Otherwise take into account this is a commutative operation. */
3289 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3290 TREE_OPERAND (arg1, 1), flags)
3291 && operand_equal_p (TREE_OPERAND (arg0, 1),
3292 TREE_OPERAND (arg1, 0), flags));
3293
3294 case COND_EXPR:
3295 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3296
3297 default:
3298 return 0;
3299 }
3300
3301 case tcc_vl_exp:
3302 switch (TREE_CODE (arg0))
3303 {
3304 case CALL_EXPR:
3305 /* If the CALL_EXPRs call different functions, then they
3306 clearly cannot be equal. */
3307 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3308 flags))
3309 return 0;
3310
3311 {
3312 unsigned int cef = call_expr_flags (arg0);
3313 if (flags & OEP_PURE_SAME)
3314 cef &= ECF_CONST | ECF_PURE;
3315 else
3316 cef &= ECF_CONST;
3317 if (!cef)
3318 return 0;
3319 }
3320
3321 /* Now see if all the arguments are the same. */
3322 {
3323 const_call_expr_arg_iterator iter0, iter1;
3324 const_tree a0, a1;
3325 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3326 a1 = first_const_call_expr_arg (arg1, &iter1);
3327 a0 && a1;
3328 a0 = next_const_call_expr_arg (&iter0),
3329 a1 = next_const_call_expr_arg (&iter1))
3330 if (! operand_equal_p (a0, a1, flags))
3331 return 0;
3332
3333 /* If we get here and both argument lists are exhausted
3334 then the CALL_EXPRs are equal. */
3335 return ! (a0 || a1);
3336 }
3337 default:
3338 return 0;
3339 }
3340
3341 case tcc_declaration:
3342 /* Consider __builtin_sqrt equal to sqrt. */
3343 return (TREE_CODE (arg0) == FUNCTION_DECL
3344 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3345 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3346 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3347
3348 default:
3349 return 0;
3350 }
3351
3352 #undef OP_SAME
3353 #undef OP_SAME_WITH_NULL
3354 }
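
/* Usage sketch (illustrative; not part of the original source):
   OEP_ONLY_CONST restricts the test to constants, which matters for IEEE
   values as the comment above explains.

     tree one_a = build_int_cst (integer_type_node, 1);
     tree one_b = build_int_cst (integer_type_node, 1);
     int eq = operand_equal_p (one_a, one_b, OEP_ONLY_CONST);

   EQ is 1: distinct INTEGER_CST nodes of equal value and type compare
   equal through tree_int_cst_equal.  */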
3355 \f
3356 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3357 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3358
3359 When in doubt, return 0. */
3360
3361 static int
3362 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3363 {
3364 int unsignedp1, unsignedpo;
3365 tree primarg0, primarg1, primother;
3366 unsigned int correct_width;
3367
3368 if (operand_equal_p (arg0, arg1, 0))
3369 return 1;
3370
3371 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3372 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3373 return 0;
3374
3375 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3376 and see if the inner values are the same. This removes any
3377 signedness comparison, which doesn't matter here. */
3378 primarg0 = arg0, primarg1 = arg1;
3379 STRIP_NOPS (primarg0);
3380 STRIP_NOPS (primarg1);
3381 if (operand_equal_p (primarg0, primarg1, 0))
3382 return 1;
3383
3384 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3385 actual comparison operand, ARG0.
3386
3387 First throw away any conversions to wider types
3388 already present in the operands. */
3389
3390 primarg1 = get_narrower (arg1, &unsignedp1);
3391 primother = get_narrower (other, &unsignedpo);
3392
3393 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3394 if (unsignedp1 == unsignedpo
3395 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3396 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3397 {
3398 tree type = TREE_TYPE (arg0);
3399
3400 /* Make sure shorter operand is extended the right way
3401 to match the longer operand. */
3402 primarg1 = fold_convert (signed_or_unsigned_type_for
3403 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3404
3405 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3406 return 1;
3407 }
3408
3409 return 0;
3410 }
3411 \f
3412 /* See if ARG is an expression that is either a comparison or is performing
3413 arithmetic on comparisons. The comparisons must only be comparing
3414 two different values, which will be stored in *CVAL1 and *CVAL2; if
3415 they are nonzero it means that some operands have already been found.
3416 No variables may be used anywhere else in the expression except in the
3417 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3418 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3419
3420 If this is true, return 1. Otherwise, return zero. */
3421
3422 static int
3423 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3424 {
3425 enum tree_code code = TREE_CODE (arg);
3426 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3427
3428 /* We can handle some of the tcc_expression cases here. */
3429 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3430 tclass = tcc_unary;
3431 else if (tclass == tcc_expression
3432 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3433 || code == COMPOUND_EXPR))
3434 tclass = tcc_binary;
3435
3436 else if (tclass == tcc_expression && code == SAVE_EXPR
3437 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3438 {
3439 /* If we've already found a CVAL1 or CVAL2, this expression is
3440 too complex to handle. */
3441 if (*cval1 || *cval2)
3442 return 0;
3443
3444 tclass = tcc_unary;
3445 *save_p = 1;
3446 }
3447
3448 switch (tclass)
3449 {
3450 case tcc_unary:
3451 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3452
3453 case tcc_binary:
3454 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3455 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3456 cval1, cval2, save_p));
3457
3458 case tcc_constant:
3459 return 1;
3460
3461 case tcc_expression:
3462 if (code == COND_EXPR)
3463 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3464 cval1, cval2, save_p)
3465 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3466 cval1, cval2, save_p)
3467 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3468 cval1, cval2, save_p));
3469 return 0;
3470
3471 case tcc_comparison:
3472 /* First see if we can handle the first operand, then the second. For
3473 the second operand, we know *CVAL1 can't be zero. It must be that
3474 one side of the comparison is each of the values; test for the
3475 case where this isn't true by failing if the two operands
3476 are the same. */
3477
3478 if (operand_equal_p (TREE_OPERAND (arg, 0),
3479 TREE_OPERAND (arg, 1), 0))
3480 return 0;
3481
3482 if (*cval1 == 0)
3483 *cval1 = TREE_OPERAND (arg, 0);
3484 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3485 ;
3486 else if (*cval2 == 0)
3487 *cval2 = TREE_OPERAND (arg, 0);
3488 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3489 ;
3490 else
3491 return 0;
3492
3493 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3494 ;
3495 else if (*cval2 == 0)
3496 *cval2 = TREE_OPERAND (arg, 1);
3497 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3498 ;
3499 else
3500 return 0;
3501
3502 return 1;
3503
3504 default:
3505 return 0;
3506 }
3507 }
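
/* Usage sketch (illustrative; not part of the original source): for ARG
   representing (x < y) | (x == y), the two compared values are collected.

     tree cval1 = NULL_TREE, cval2 = NULL_TREE;
     int save_p = 0;
     int ok = twoval_comparison_p (arg, &cval1, &cval2, &save_p);

   OK is 1 with CVAL1 == x and CVAL2 == y; introducing a third variable
   anywhere in ARG makes the function return 0.  */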
3508 \f
3509 /* ARG is a tree that is known to contain just arithmetic operations and
3510 comparisons. Evaluate the operations in the tree substituting NEW0 for
3511 any occurrence of OLD0 as an operand of a comparison and likewise for
3512 NEW1 and OLD1. */
3513
3514 static tree
3515 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3516 {
3517 tree type = TREE_TYPE (arg);
3518 enum tree_code code = TREE_CODE (arg);
3519 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3520
3521 /* We can handle some of the tcc_expression cases here. */
3522 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3523 tclass = tcc_unary;
3524 else if (tclass == tcc_expression
3525 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3526 tclass = tcc_binary;
3527
3528 switch (tclass)
3529 {
3530 case tcc_unary:
3531 return fold_build1 (code, type,
3532 eval_subst (TREE_OPERAND (arg, 0),
3533 old0, new0, old1, new1));
3534
3535 case tcc_binary:
3536 return fold_build2 (code, type,
3537 eval_subst (TREE_OPERAND (arg, 0),
3538 old0, new0, old1, new1),
3539 eval_subst (TREE_OPERAND (arg, 1),
3540 old0, new0, old1, new1));
3541
3542 case tcc_expression:
3543 switch (code)
3544 {
3545 case SAVE_EXPR:
3546 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3547
3548 case COMPOUND_EXPR:
3549 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3550
3551 case COND_EXPR:
3552 return fold_build3 (code, type,
3553 eval_subst (TREE_OPERAND (arg, 0),
3554 old0, new0, old1, new1),
3555 eval_subst (TREE_OPERAND (arg, 1),
3556 old0, new0, old1, new1),
3557 eval_subst (TREE_OPERAND (arg, 2),
3558 old0, new0, old1, new1));
3559 default:
3560 break;
3561 }
3562 /* Fall through - ??? */
3563
3564 case tcc_comparison:
3565 {
3566 tree arg0 = TREE_OPERAND (arg, 0);
3567 tree arg1 = TREE_OPERAND (arg, 1);
3568
3569 /* We need to check both for exact equality and tree equality. The
3570 former will be true if the operand has a side-effect. In that
3571 case, we know the operand occurred exactly once. */
3572
3573 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3574 arg0 = new0;
3575 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3576 arg0 = new1;
3577
3578 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3579 arg1 = new0;
3580 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3581 arg1 = new1;
3582
3583 return fold_build2 (code, type, arg0, arg1);
3584 }
3585
3586 default:
3587 return arg;
3588 }
3589 }
3590 \f
3591 /* Return a tree for the case when the result of an expression is RESULT
3592 converted to TYPE and OMITTED was previously an operand of the expression
3593 but is now not needed (e.g., we folded OMITTED * 0).
3594
3595 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3596 the conversion of RESULT to TYPE. */
3597
3598 tree
3599 omit_one_operand (tree type, tree result, tree omitted)
3600 {
3601 tree t = fold_convert (type, result);
3602
3603 /* If the resulting operand is an empty statement, just return the omitted
3604 statement cast to void. */
3605 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3606 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3607
3608 if (TREE_SIDE_EFFECTS (omitted))
3609 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3610
3611 return non_lvalue (t);
3612 }
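
/* Usage sketch (illustrative; not part of the original source): folding
   f () * 0 must still evaluate the call, so the omitted operand is kept
   in a COMPOUND_EXPR.  CALL is a hypothetical side-effecting CALL_EXPR.

     tree t = omit_one_operand (integer_type_node, integer_zero_node, call);

   T is the tree (call, 0); with a side-effect-free omitted operand it is
   simply a non-lvalue 0.  */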
3613
3614 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3615
3616 static tree
3617 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3618 {
3619 tree t = fold_convert (type, result);
3620
3621 /* If the resulting operand is an empty statement, just return the omitted
3622 statement cast to void. */
3623 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3624 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3625
3626 if (TREE_SIDE_EFFECTS (omitted))
3627 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3628
3629 return pedantic_non_lvalue (t);
3630 }
3631
3632 /* Return a tree for the case when the result of an expression is RESULT
3633 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3634 of the expression but are now not needed.
3635
3636 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3637 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3638 evaluated before OMITTED2. Otherwise, if neither has side effects,
3639 just do the conversion of RESULT to TYPE. */
3640
3641 tree
3642 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3643 {
3644 tree t = fold_convert (type, result);
3645
3646 if (TREE_SIDE_EFFECTS (omitted2))
3647 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3648 if (TREE_SIDE_EFFECTS (omitted1))
3649 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3650
3651 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3652 }
3653
3654 \f
3655 /* Return a simplified tree node for the truth-negation of ARG. This
3656 never alters ARG itself. We assume that ARG is an operation that
3657 returns a truth value (0 or 1).
3658
3659 FIXME: one would think we would fold the result, but it causes
3660 problems with the dominator optimizer. */
3661
3662 tree
3663 fold_truth_not_expr (tree arg)
3664 {
3665 tree t, type = TREE_TYPE (arg);
3666 enum tree_code code = TREE_CODE (arg);
3667
3668 /* If this is a comparison, we can simply invert it, except for
3669 floating-point non-equality comparisons, in which case we just
3670 enclose a TRUTH_NOT_EXPR around what we have. */
3671
3672 if (TREE_CODE_CLASS (code) == tcc_comparison)
3673 {
3674 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3675 if (FLOAT_TYPE_P (op_type)
3676 && flag_trapping_math
3677 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3678 && code != NE_EXPR && code != EQ_EXPR)
3679 return NULL_TREE;
3680
3681 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3682 if (code == ERROR_MARK)
3683 return NULL_TREE;
3684
3685 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3686 if (EXPR_HAS_LOCATION (arg))
3687 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3688 return t;
3689 }
3690
3691 switch (code)
3692 {
3693 case INTEGER_CST:
3694 return constant_boolean_node (integer_zerop (arg), type);
3695
3696 case TRUTH_AND_EXPR:
3697 t = build2 (TRUTH_OR_EXPR, type,
3698 invert_truthvalue (TREE_OPERAND (arg, 0)),
3699 invert_truthvalue (TREE_OPERAND (arg, 1)));
3700 break;
3701
3702 case TRUTH_OR_EXPR:
3703 t = build2 (TRUTH_AND_EXPR, type,
3704 invert_truthvalue (TREE_OPERAND (arg, 0)),
3705 invert_truthvalue (TREE_OPERAND (arg, 1)));
3706 break;
3707
3708 case TRUTH_XOR_EXPR:
3709 /* Here we can invert either operand. We invert the first operand
3710 unless the second operand is a TRUTH_NOT_EXPR in which case our
3711 result is the XOR of the first operand with the inside of the
3712 negation of the second operand. */
3713
3714 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3715 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3716 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3717 else
3718 t = build2 (TRUTH_XOR_EXPR, type,
3719 invert_truthvalue (TREE_OPERAND (arg, 0)),
3720 TREE_OPERAND (arg, 1));
3721 break;
3722
3723 case TRUTH_ANDIF_EXPR:
3724 t = build2 (TRUTH_ORIF_EXPR, type,
3725 invert_truthvalue (TREE_OPERAND (arg, 0)),
3726 invert_truthvalue (TREE_OPERAND (arg, 1)));
3727 break;
3728
3729 case TRUTH_ORIF_EXPR:
3730 t = build2 (TRUTH_ANDIF_EXPR, type,
3731 invert_truthvalue (TREE_OPERAND (arg, 0)),
3732 invert_truthvalue (TREE_OPERAND (arg, 1)));
3733 break;
3734
3735 case TRUTH_NOT_EXPR:
3736 return TREE_OPERAND (arg, 0);
3737
3738 case COND_EXPR:
3739 {
3740 tree arg1 = TREE_OPERAND (arg, 1);
3741 tree arg2 = TREE_OPERAND (arg, 2);
3742 /* A COND_EXPR may have a throw as one operand, which
3743 then has void type. Just leave void operands
3744 as they are. */
3745 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3746 VOID_TYPE_P (TREE_TYPE (arg1))
3747 ? arg1 : invert_truthvalue (arg1),
3748 VOID_TYPE_P (TREE_TYPE (arg2))
3749 ? arg2 : invert_truthvalue (arg2));
3750 break;
3751 }
3752
3753 case COMPOUND_EXPR:
3754 t = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3755 invert_truthvalue (TREE_OPERAND (arg, 1)));
3756 break;
3757
3758 case NON_LVALUE_EXPR:
3759 return invert_truthvalue (TREE_OPERAND (arg, 0));
3760
3761 CASE_CONVERT:
3762 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3763 {
3764 t = build1 (TRUTH_NOT_EXPR, type, arg);
3765 break;
3766 }
3767
3768 /* ... fall through ... */
3769
3770 case FLOAT_EXPR:
3771 t = build1 (TREE_CODE (arg), type,
3772 invert_truthvalue (TREE_OPERAND (arg, 0)));
3773 break;
3774
3775 case BIT_AND_EXPR:
3776 if (!integer_onep (TREE_OPERAND (arg, 1)))
3777 return NULL_TREE;
3778 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3779 break;
3780
3781 case SAVE_EXPR:
3782 t = build1 (TRUTH_NOT_EXPR, type, arg);
3783 break;
3784
3785 case CLEANUP_POINT_EXPR:
3786 t = build1 (CLEANUP_POINT_EXPR, type,
3787 invert_truthvalue (TREE_OPERAND (arg, 0)));
3788 break;
3789
3790 default:
3791 t = NULL_TREE;
3792 break;
3793 }
3794
3795 if (t && EXPR_HAS_LOCATION (arg))
3796 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3797
3798 return t;
3799 }
3800
3801 /* Return a simplified tree node for the truth-negation of ARG. This
3802 never alters ARG itself. We assume that ARG is an operation that
3803 returns a truth value (0 or 1).
3804
3805 FIXME: one would think we would fold the result, but it causes
3806 problems with the dominator optimizer. */
3807
3808 tree
3809 invert_truthvalue (tree arg)
3810 {
3811 tree tem;
3812
3813 if (TREE_CODE (arg) == ERROR_MARK)
3814 return arg;
3815
3816 tem = fold_truth_not_expr (arg);
3817 if (!tem)
3818 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3819
3820 return tem;
3821 }
3822
3823 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3824 operands are another bit-wise operation with a common input. If so,
3825 distribute the bit operations to save an operation and possibly two if
3826 constants are involved. For example, convert
3827 (A | B) & (A | C) into A | (B & C)
3828 Further simplification will occur if B and C are constants.
3829
3830 If this optimization cannot be done, 0 will be returned. */
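
/* A worked instance of the example above, with constants:
       (x | 4) & (x | 1)  ->  x | (4 & 1)  ->  x | 0  ->  x  */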
3831
3832 static tree
3833 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3834 {
3835 tree common;
3836 tree left, right;
3837
3838 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3839 || TREE_CODE (arg0) == code
3840 || (TREE_CODE (arg0) != BIT_AND_EXPR
3841 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3842 return 0;
3843
3844 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3845 {
3846 common = TREE_OPERAND (arg0, 0);
3847 left = TREE_OPERAND (arg0, 1);
3848 right = TREE_OPERAND (arg1, 1);
3849 }
3850 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3851 {
3852 common = TREE_OPERAND (arg0, 0);
3853 left = TREE_OPERAND (arg0, 1);
3854 right = TREE_OPERAND (arg1, 0);
3855 }
3856 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3857 {
3858 common = TREE_OPERAND (arg0, 1);
3859 left = TREE_OPERAND (arg0, 0);
3860 right = TREE_OPERAND (arg1, 1);
3861 }
3862 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3863 {
3864 common = TREE_OPERAND (arg0, 1);
3865 left = TREE_OPERAND (arg0, 0);
3866 right = TREE_OPERAND (arg1, 0);
3867 }
3868 else
3869 return 0;
3870
3871 common = fold_convert (type, common);
3872 left = fold_convert (type, left);
3873 right = fold_convert (type, right);
3874 return fold_build2 (TREE_CODE (arg0), type, common,
3875 fold_build2 (code, type, left, right));
3876 }
3877
3878 /* Knowing that ARG0 and ARG1 are each a MULT_EXPR or an RDIV_EXPR, simplify
3879 a binary operation with code CODE. This optimization is unsafe for IEEE floating point. */
3880 static tree
3881 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3882 {
3883 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3884 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3885
3886 /* (A / C) +- (B / C) -> (A +- B) / C. */
3887 if (mul0 == mul1
3888 && operand_equal_p (TREE_OPERAND (arg0, 1),
3889 TREE_OPERAND (arg1, 1), 0))
3890 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3891 fold_build2 (code, type,
3892 TREE_OPERAND (arg0, 0),
3893 TREE_OPERAND (arg1, 0)),
3894 TREE_OPERAND (arg0, 1));
3895
3896 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3897 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3898 TREE_OPERAND (arg1, 0), 0)
3899 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3900 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3901 {
3902 REAL_VALUE_TYPE r0, r1;
3903 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3904 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3905 if (!mul0)
3906 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3907 if (!mul1)
3908 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3909 real_arithmetic (&r0, code, &r0, &r1);
3910 return fold_build2 (MULT_EXPR, type,
3911 TREE_OPERAND (arg0, 0),
3912 build_real (type, r0));
3913 }
3914
3915 return NULL_TREE;
3916 }
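
/* Illustrative cases of the two patterns above (valid only under the
   unsafe-math assumption noted in the function comment):
       (a / 5.0) + (b / 5.0)  ->  (a + b) / 5.0
       (a / 2.0) - (a / 4.0)  ->  a * (1/2.0 - 1/4.0)  ->  a * 0.25  */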
3917 \f
3918 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3919 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3920
3921 static tree
3922 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3923 HOST_WIDE_INT bitpos, int unsignedp)
3924 {
3925 tree result, bftype;
3926
3927 if (bitpos == 0)
3928 {
3929 tree size = TYPE_SIZE (TREE_TYPE (inner));
3930 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3931 || POINTER_TYPE_P (TREE_TYPE (inner)))
3932 && host_integerp (size, 0)
3933 && tree_low_cst (size, 0) == bitsize)
3934 return fold_convert (type, inner);
3935 }
3936
3937 bftype = type;
3938 if (TYPE_PRECISION (bftype) != bitsize
3939 || TYPE_UNSIGNED (bftype) == !unsignedp)
3940 bftype = build_nonstandard_integer_type (bitsize, 0);
3941
3942 result = build3 (BIT_FIELD_REF, bftype, inner,
3943 size_int (bitsize), bitsize_int (bitpos));
3944
3945 if (bftype != type)
3946 result = fold_convert (type, result);
3947
3948 return result;
3949 }
3950
3951 /* Optimize a bit-field compare.
3952
3953 There are two cases: First is a compare against a constant and the
3954 second is a comparison of two items where the fields are at the same
3955 bit position relative to the start of a chunk (byte, halfword, word)
3956 large enough to contain it. In these cases we can avoid the shift
3957 implicit in bitfield extractions.
3958
3959 For constants, we emit a compare of the shifted constant with the
3960 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3961 compared. For two fields at the same position, we do the ANDs with a
3962 similar mask and compare the result of the ANDs.
3963
3964 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3965 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3966 are the left and right operands of the comparison, respectively.
3967
3968 If the optimization described above can be done, we return the resulting
3969 tree. Otherwise we return zero. */
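
/* As an illustrative sketch, assuming the hypothetical declaration
       struct s { unsigned f : 3; unsigned g : 5; } x;
   on a little-endian target the test "x.f == 5" can be done as
       (CHUNK_OF (x) & 0x7) == 5
   where CHUNK_OF stands for a byte-sized load of the containing unit,
   avoiding the shift that a plain bit-field extraction would need.  */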
3970
3971 static tree
3972 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3973 tree lhs, tree rhs)
3974 {
3975 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3976 tree type = TREE_TYPE (lhs);
3977 tree signed_type, unsigned_type;
3978 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3979 enum machine_mode lmode, rmode, nmode;
3980 int lunsignedp, runsignedp;
3981 int lvolatilep = 0, rvolatilep = 0;
3982 tree linner, rinner = NULL_TREE;
3983 tree mask;
3984 tree offset;
3985
3986 /* Get all the information about the extractions being done. If the bit size
3987 is the same as the size of the underlying object, we aren't doing an
3988 extraction at all and so can do nothing. We also don't want to
3989 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3990 then will no longer be able to replace it. */
3991 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3992 &lunsignedp, &lvolatilep, false);
3993 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3994 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3995 return 0;
3996
3997 if (!const_p)
3998 {
3999 /* If this is not a constant, we can only do something if bit positions,
4000 sizes, and signedness are the same. */
4001 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4002 &runsignedp, &rvolatilep, false);
4003
4004 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4005 || lunsignedp != runsignedp || offset != 0
4006 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4007 return 0;
4008 }
4009
4010 /* See if we can find a mode to refer to this field. We should be able to,
4011 but fail if we can't. */
4012 nmode = get_best_mode (lbitsize, lbitpos,
4013 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4014 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4015 TYPE_ALIGN (TREE_TYPE (rinner))),
4016 word_mode, lvolatilep || rvolatilep);
4017 if (nmode == VOIDmode)
4018 return 0;
4019
4020 /* Set signed and unsigned types of the precision of this mode for the
4021 shifts below. */
4022 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4023 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4024
4025 /* Compute the bit position and size for the new reference and our offset
4026 within it. If the new reference is the same size as the original, we
4027 won't optimize anything, so return zero. */
4028 nbitsize = GET_MODE_BITSIZE (nmode);
4029 nbitpos = lbitpos & ~ (nbitsize - 1);
4030 lbitpos -= nbitpos;
4031 if (nbitsize == lbitsize)
4032 return 0;
4033
4034 if (BYTES_BIG_ENDIAN)
4035 lbitpos = nbitsize - lbitsize - lbitpos;
4036
4037 /* Make the mask to be used against the extracted field. */
4038 mask = build_int_cst_type (unsigned_type, -1);
4039 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4040 mask = const_binop (RSHIFT_EXPR, mask,
4041 size_int (nbitsize - lbitsize - lbitpos), 0);
4042
4043 if (! const_p)
4044 /* If not comparing with constant, just rework the comparison
4045 and return. */
4046 return fold_build2 (code, compare_type,
4047 fold_build2 (BIT_AND_EXPR, unsigned_type,
4048 make_bit_field_ref (linner,
4049 unsigned_type,
4050 nbitsize, nbitpos,
4051 1),
4052 mask),
4053 fold_build2 (BIT_AND_EXPR, unsigned_type,
4054 make_bit_field_ref (rinner,
4055 unsigned_type,
4056 nbitsize, nbitpos,
4057 1),
4058 mask));
4059
4060 /* Otherwise, we are handling the constant case. See if the constant is too
4061 big for the field. Warn and return a tree for 0 (false) if so. We do
4062 this not only for its own sake, but to avoid having to test for this
4063 error case below. If we didn't, we might generate wrong code.
4064
4065 For unsigned fields, the constant shifted right by the field length should
4066 be all zero. For signed fields, the high-order bits should agree with
4067 the sign bit. */
4068
4069 if (lunsignedp)
4070 {
4071 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4072 fold_convert (unsigned_type, rhs),
4073 size_int (lbitsize), 0)))
4074 {
4075 warning (0, "comparison is always %d due to width of bit-field",
4076 code == NE_EXPR);
4077 return constant_boolean_node (code == NE_EXPR, compare_type);
4078 }
4079 }
4080 else
4081 {
4082 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4083 size_int (lbitsize - 1), 0);
4084 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4085 {
4086 warning (0, "comparison is always %d due to width of bit-field",
4087 code == NE_EXPR);
4088 return constant_boolean_node (code == NE_EXPR, compare_type);
4089 }
4090 }
4091
4092 /* Single-bit compares should always be against zero. */
4093 if (lbitsize == 1 && ! integer_zerop (rhs))
4094 {
4095 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4096 rhs = build_int_cst (type, 0);
4097 }
4098
4099 /* Make a new bitfield reference, shift the constant over the
4100 appropriate number of bits and mask it with the computed mask
4101 (in case this was a signed field). */
4102 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4103 if (lvolatilep)
4104 {
4105 TREE_SIDE_EFFECTS (lhs) = 1;
4106 TREE_THIS_VOLATILE (lhs) = 1;
4107 }
4108
4109 rhs = const_binop (BIT_AND_EXPR,
4110 const_binop (LSHIFT_EXPR,
4111 fold_convert (unsigned_type, rhs),
4112 size_int (lbitpos), 0),
4113 mask, 0);
4114
4115 return build2 (code, compare_type,
4116 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4117 rhs);
4118 }
4119 \f
4120 /* Subroutine for fold_truthop: decode a field reference.
4121
4122 If EXP is a component reference, we return the innermost reference.
4123
4124 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4125 set to the starting bit number.
4126
4127 If the innermost field can be completely contained in a mode-sized
4128 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4129
4130 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4131 otherwise it is not changed.
4132
4133 *PUNSIGNEDP is set to the signedness of the field.
4134
4135 *PMASK is set to the mask used. This is either contained in a
4136 BIT_AND_EXPR or derived from the width of the field.
4137
4138 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4139
4140 Return 0 if this is not a component reference or is one that we can't
4141 do anything with. */
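
/* For example (illustrative, with a hypothetical bit-field B): for
       EXP = x.b & 3
   the BIT_AND_EXPR is peeled off, 3 is recorded in *PAND_MASK and
   merged into *PMASK, the reference x.b is decoded into *PBITSIZE and
   *PBITPOS, and the containing object is returned.  */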
4142
4143 static tree
4144 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4145 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4146 int *punsignedp, int *pvolatilep,
4147 tree *pmask, tree *pand_mask)
4148 {
4149 tree outer_type = 0;
4150 tree and_mask = 0;
4151 tree mask, inner, offset;
4152 tree unsigned_type;
4153 unsigned int precision;
4154
4155 /* All the optimizations using this function assume integer fields.
4156 There are problems with FP fields since the type_for_size call
4157 below can fail for, e.g., XFmode. */
4158 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4159 return 0;
4160
4161 /* We are interested in the bare arrangement of bits, so strip everything
4162 that doesn't affect the machine mode. However, record the type of the
4163 outermost expression if it may matter below. */
4164 if (CONVERT_EXPR_P (exp)
4165 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4166 outer_type = TREE_TYPE (exp);
4167 STRIP_NOPS (exp);
4168
4169 if (TREE_CODE (exp) == BIT_AND_EXPR)
4170 {
4171 and_mask = TREE_OPERAND (exp, 1);
4172 exp = TREE_OPERAND (exp, 0);
4173 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4174 if (TREE_CODE (and_mask) != INTEGER_CST)
4175 return 0;
4176 }
4177
4178 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4179 punsignedp, pvolatilep, false);
4180 if ((inner == exp && and_mask == 0)
4181 || *pbitsize < 0 || offset != 0
4182 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4183 return 0;
4184
4185 /* If the number of bits in the reference is the same as the bitsize of
4186 the outer type, then the outer type gives the signedness. Otherwise
4187 (in case of a small bitfield) the signedness is unchanged. */
4188 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4189 *punsignedp = TYPE_UNSIGNED (outer_type);
4190
4191 /* Compute the mask to access the bitfield. */
4192 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4193 precision = TYPE_PRECISION (unsigned_type);
4194
4195 mask = build_int_cst_type (unsigned_type, -1);
4196
4197 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4198 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4199
4200 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4201 if (and_mask != 0)
4202 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4203 fold_convert (unsigned_type, and_mask), mask);
4204
4205 *pmask = mask;
4206 *pand_mask = and_mask;
4207 return inner;
4208 }
4209
4210 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4211 bit positions. */
4212
4213 static int
4214 all_ones_mask_p (const_tree mask, int size)
4215 {
4216 tree type = TREE_TYPE (mask);
4217 unsigned int precision = TYPE_PRECISION (type);
4218 tree tmask;
4219
4220 tmask = build_int_cst_type (signed_type_for (type), -1);
4221
4222 return
4223 tree_int_cst_equal (mask,
4224 const_binop (RSHIFT_EXPR,
4225 const_binop (LSHIFT_EXPR, tmask,
4226 size_int (precision - size),
4227 0),
4228 size_int (precision - size), 0));
4229 }
4230
4231 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4232 represents the sign bit of EXP's type. If EXP represents a sign
4233 or zero extension, also test VAL against the unextended type.
4234 The return value is the (sub)expression whose sign bit is VAL,
4235 or NULL_TREE otherwise. */
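
/* E.g. if EXP has a 32-bit signed integer type, VAL matches only when
   its bit pattern, viewed as unsigned, equals 0x80000000.  */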
4236
4237 static tree
4238 sign_bit_p (tree exp, const_tree val)
4239 {
4240 unsigned HOST_WIDE_INT mask_lo, lo;
4241 HOST_WIDE_INT mask_hi, hi;
4242 int width;
4243 tree t;
4244
4245 /* Tree EXP must have an integral type. */
4246 t = TREE_TYPE (exp);
4247 if (! INTEGRAL_TYPE_P (t))
4248 return NULL_TREE;
4249
4250 /* Tree VAL must be an integer constant. */
4251 if (TREE_CODE (val) != INTEGER_CST
4252 || TREE_OVERFLOW (val))
4253 return NULL_TREE;
4254
4255 width = TYPE_PRECISION (t);
4256 if (width > HOST_BITS_PER_WIDE_INT)
4257 {
4258 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4259 lo = 0;
4260
4261 mask_hi = ((unsigned HOST_WIDE_INT) -1
4262 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4263 mask_lo = -1;
4264 }
4265 else
4266 {
4267 hi = 0;
4268 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4269
4270 mask_hi = 0;
4271 mask_lo = ((unsigned HOST_WIDE_INT) -1
4272 >> (HOST_BITS_PER_WIDE_INT - width));
4273 }
4274
4275 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4276 treat VAL as if it were unsigned. */
4277 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4278 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4279 return exp;
4280
4281 /* Handle extension from a narrower type. */
4282 if (TREE_CODE (exp) == NOP_EXPR
4283 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4284 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4285
4286 return NULL_TREE;
4287 }
4288
4289 /* Subroutine for fold_truthop: determine if an operand is simple enough
4290 to be evaluated unconditionally. */
4291
4292 static int
4293 simple_operand_p (const_tree exp)
4294 {
4295 /* Strip any conversions that don't change the machine mode. */
4296 STRIP_NOPS (exp);
4297
4298 return (CONSTANT_CLASS_P (exp)
4299 || TREE_CODE (exp) == SSA_NAME
4300 || (DECL_P (exp)
4301 && ! TREE_ADDRESSABLE (exp)
4302 && ! TREE_THIS_VOLATILE (exp)
4303 && ! DECL_NONLOCAL (exp)
4304 /* Don't regard global variables as simple. They may be
4305 allocated in ways unknown to the compiler (shared memory,
4306 #pragma weak, etc). */
4307 && ! TREE_PUBLIC (exp)
4308 && ! DECL_EXTERNAL (exp)
4309 /* Loading a static variable is unduly expensive, but global
4310 registers aren't expensive. */
4311 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4312 }
4313 \f
4314 /* The following functions are subroutines to fold_range_test and allow it to
4315 try to change a logical combination of comparisons into a range test.
4316
4317 For example, both
4318 X == 2 || X == 3 || X == 4 || X == 5
4319 and
4320 X >= 2 && X <= 5
4321 are converted to
4322 (unsigned) (X - 2) <= 3
4323
4324 We describe each set of comparisons as being either inside or outside
4325 a range, using a variable named like IN_P, and then describe the
4326 range with a lower and upper bound. If one of the bounds is omitted,
4327 it represents either the highest or lowest value of the type.
4328
4329 In the comments below, we represent a range by two numbers in brackets
4330 preceded by a "+" to designate being inside that range, or a "-" to
4331 designate being outside that range, so the condition can be inverted by
4332 flipping the prefix. An omitted bound is represented by a "-". For
4333 example, "- [-, 10]" means being outside the range starting at the lowest
4334 possible value and ending at 10, in other words, being greater than 10.
4335 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4336 always false.
4337
4338 We set up things so that the missing bounds are handled in a consistent
4339 manner so neither a missing bound nor "true" and "false" need to be
4340 handled using a special case. */
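
/* A minimal sketch of why the unsigned subtraction trick above works,
   assuming 32-bit arithmetic: for any X,
       2 <= X && X <= 5
   holds exactly when
       (unsigned) (X - 2) <= 3
   because values of X below 2 wrap around to very large unsigned
   numbers and fail the single comparison.  */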
4341
4342 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4343 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4344 and UPPER1_P are nonzero if the respective argument is an upper bound
4345 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4346 must be specified for a comparison. ARG1 will be converted to ARG0's
4347 type if both are specified. */
4348
4349 static tree
4350 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4351 tree arg1, int upper1_p)
4352 {
4353 tree tem;
4354 int result;
4355 int sgn0, sgn1;
4356
4357 /* If neither arg represents infinity, do the normal operation.
4358 Else, if not a comparison, return infinity. Else handle the special
4359 comparison rules. Note that most of the cases below won't occur, but
4360 are handled for consistency. */
4361
4362 if (arg0 != 0 && arg1 != 0)
4363 {
4364 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4365 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4366 STRIP_NOPS (tem);
4367 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4368 }
4369
4370 if (TREE_CODE_CLASS (code) != tcc_comparison)
4371 return 0;
4372
4373 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4374 for neither. In real maths, we cannot assume open ended ranges are
4375 the same. But, this is computer arithmetic, where numbers are finite.
4376 We can therefore represent any unbounded bound by a value Z,
4377 Z being greater than any representable number. This permits
4378 us to treat unbounded ranges as equal. */
4379 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4380 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4381 switch (code)
4382 {
4383 case EQ_EXPR:
4384 result = sgn0 == sgn1;
4385 break;
4386 case NE_EXPR:
4387 result = sgn0 != sgn1;
4388 break;
4389 case LT_EXPR:
4390 result = sgn0 < sgn1;
4391 break;
4392 case LE_EXPR:
4393 result = sgn0 <= sgn1;
4394 break;
4395 case GT_EXPR:
4396 result = sgn0 > sgn1;
4397 break;
4398 case GE_EXPR:
4399 result = sgn0 >= sgn1;
4400 break;
4401 default:
4402 gcc_unreachable ();
4403 }
4404
4405 return constant_boolean_node (result, type);
4406 }
4407 \f
4408 /* Given EXP, a logical expression, set the range it is testing into
4409 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4410 actually being tested. *PLOW and *PHIGH will be made of the same
4411 type as the returned expression. If EXP is not a comparison, we
4412 will most likely not be returning a useful value and range. Set
4413 *STRICT_OVERFLOW_P to true if the return value is only valid
4414 because signed overflow is undefined; otherwise, do not change
4415 *STRICT_OVERFLOW_P. */
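
/* For example (illustrative): for EXP = "x > 4" this function returns
   x, sets *PIN_P to 0 and the bounds to [-, 4], i.e. x lies outside
   the range running from the minimum value of its type through 4.  */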
4416
4417 tree
4418 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4419 bool *strict_overflow_p)
4420 {
4421 enum tree_code code;
4422 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4423 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4424 int in_p, n_in_p;
4425 tree low, high, n_low, n_high;
4426
4427 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4428 and see if we can refine the range. Some of the cases below may not
4429 happen, but it doesn't seem worth worrying about this. We "continue"
4430 the outer loop when we've changed something; otherwise we "break"
4431 the switch, which will "break" the while. */
4432
4433 in_p = 0;
4434 low = high = build_int_cst (TREE_TYPE (exp), 0);
4435
4436 while (1)
4437 {
4438 code = TREE_CODE (exp);
4439 exp_type = TREE_TYPE (exp);
4440
4441 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4442 {
4443 if (TREE_OPERAND_LENGTH (exp) > 0)
4444 arg0 = TREE_OPERAND (exp, 0);
4445 if (TREE_CODE_CLASS (code) == tcc_comparison
4446 || TREE_CODE_CLASS (code) == tcc_unary
4447 || TREE_CODE_CLASS (code) == tcc_binary)
4448 arg0_type = TREE_TYPE (arg0);
4449 if (TREE_CODE_CLASS (code) == tcc_binary
4450 || TREE_CODE_CLASS (code) == tcc_comparison
4451 || (TREE_CODE_CLASS (code) == tcc_expression
4452 && TREE_OPERAND_LENGTH (exp) > 1))
4453 arg1 = TREE_OPERAND (exp, 1);
4454 }
4455
4456 switch (code)
4457 {
4458 case TRUTH_NOT_EXPR:
4459 in_p = ! in_p, exp = arg0;
4460 continue;
4461
4462 case EQ_EXPR: case NE_EXPR:
4463 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4464 /* We can only do something if the range is testing for zero
4465 and if the second operand is an integer constant. Note that
4466 saying something is "in" the range we make is done by
4467 complementing IN_P, since it was set in the initial case of
4468 being not equal to zero; "out" is leaving it alone. */
4469 if (low == 0 || high == 0
4470 || ! integer_zerop (low) || ! integer_zerop (high)
4471 || TREE_CODE (arg1) != INTEGER_CST)
4472 break;
4473
4474 switch (code)
4475 {
4476 case NE_EXPR: /* - [c, c] */
4477 low = high = arg1;
4478 break;
4479 case EQ_EXPR: /* + [c, c] */
4480 in_p = ! in_p, low = high = arg1;
4481 break;
4482 case GT_EXPR: /* - [-, c] */
4483 low = 0, high = arg1;
4484 break;
4485 case GE_EXPR: /* + [c, -] */
4486 in_p = ! in_p, low = arg1, high = 0;
4487 break;
4488 case LT_EXPR: /* - [c, -] */
4489 low = arg1, high = 0;
4490 break;
4491 case LE_EXPR: /* + [-, c] */
4492 in_p = ! in_p, low = 0, high = arg1;
4493 break;
4494 default:
4495 gcc_unreachable ();
4496 }
4497
4498 /* If this is an unsigned comparison, we also know that EXP is
4499 greater than or equal to zero. We base the range tests we make
4500 on that fact, so we record it here so we can parse existing
4501 range tests. We test arg0_type since often the return type
4502 of, e.g. EQ_EXPR, is boolean. */
4503 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4504 {
4505 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4506 in_p, low, high, 1,
4507 build_int_cst (arg0_type, 0),
4508 NULL_TREE))
4509 break;
4510
4511 in_p = n_in_p, low = n_low, high = n_high;
4512
4513 /* If the high bound is missing, but we have a nonzero low
4514 bound, reverse the range so it goes from zero to the low bound
4515 minus 1. */
4516 if (high == 0 && low && ! integer_zerop (low))
4517 {
4518 in_p = ! in_p;
4519 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4520 integer_one_node, 0);
4521 low = build_int_cst (arg0_type, 0);
4522 }
4523 }
4524
4525 exp = arg0;
4526 continue;
4527
4528 case NEGATE_EXPR:
4529 /* (-x) IN [a,b] -> x in [-b, -a] */
4530 n_low = range_binop (MINUS_EXPR, exp_type,
4531 build_int_cst (exp_type, 0),
4532 0, high, 1);
4533 n_high = range_binop (MINUS_EXPR, exp_type,
4534 build_int_cst (exp_type, 0),
4535 0, low, 0);
4536 low = n_low, high = n_high;
4537 exp = arg0;
4538 continue;
4539
4540 case BIT_NOT_EXPR:
4541 /* ~ X -> -X - 1 */
4542 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4543 build_int_cst (exp_type, 1));
4544 continue;
4545
4546 case PLUS_EXPR: case MINUS_EXPR:
4547 if (TREE_CODE (arg1) != INTEGER_CST)
4548 break;
4549
4550 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4551 move a constant to the other side. */
4552 if (!TYPE_UNSIGNED (arg0_type)
4553 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4554 break;
4555
4556 /* If EXP is signed, any overflow in the computation is undefined,
4557 so we don't worry about it so long as our computations on
4558 the bounds don't overflow. For unsigned, overflow is defined
4559 and this is exactly the right thing. */
4560 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4561 arg0_type, low, 0, arg1, 0);
4562 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4563 arg0_type, high, 1, arg1, 0);
4564 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4565 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4566 break;
4567
4568 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4569 *strict_overflow_p = true;
4570
4571 /* Check for an unsigned range which has wrapped around the maximum
4572 value thus making n_high < n_low, and normalize it. */
4573 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4574 {
4575 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4576 integer_one_node, 0);
4577 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4578 integer_one_node, 0);
4579
4580 /* If the range is of the form +/- [ x+1, x ], we won't
4581 be able to normalize it. But then, it represents the
4582 whole range or the empty set, so make it
4583 +/- [ -, - ]. */
4584 if (tree_int_cst_equal (n_low, low)
4585 && tree_int_cst_equal (n_high, high))
4586 low = high = 0;
4587 else
4588 in_p = ! in_p;
4589 }
4590 else
4591 low = n_low, high = n_high;
4592
4593 exp = arg0;
4594 continue;
4595
4596 CASE_CONVERT: case NON_LVALUE_EXPR:
4597 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4598 break;
4599
4600 if (! INTEGRAL_TYPE_P (arg0_type)
4601 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4602 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4603 break;
4604
4605 n_low = low, n_high = high;
4606
4607 if (n_low != 0)
4608 n_low = fold_convert (arg0_type, n_low);
4609
4610 if (n_high != 0)
4611 n_high = fold_convert (arg0_type, n_high);
4612
4613
4614 /* If we're converting arg0, which has an unsigned type, to exp,
4615 which has a signed type, we will be doing the comparison as unsigned.
4616 The tests above have already verified that LOW and HIGH
4617 are both positive.
4618
4619 So we have to ensure that we will handle large unsigned
4620 values the same way that the current signed bounds treat
4621 negative values. */
4622
4623 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4624 {
4625 tree high_positive;
4626 tree equiv_type;
4627 /* For fixed-point modes, we need to pass the saturating flag
4628 as the 2nd parameter. */
4629 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4630 equiv_type = lang_hooks.types.type_for_mode
4631 (TYPE_MODE (arg0_type),
4632 TYPE_SATURATING (arg0_type));
4633 else
4634 equiv_type = lang_hooks.types.type_for_mode
4635 (TYPE_MODE (arg0_type), 1);
4636
4637 /* A range without an upper bound is, naturally, unbounded.
4638 Since convert would have cropped a very large value, use
4639 the max value for the destination type. */
4640 high_positive
4641 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4642 : TYPE_MAX_VALUE (arg0_type);
4643
4644 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4645 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4646 fold_convert (arg0_type,
4647 high_positive),
4648 build_int_cst (arg0_type, 1));
4649
4650 /* If the low bound is specified, "and" the range with the
4651 range for which the original unsigned value will be
4652 positive. */
4653 if (low != 0)
4654 {
4655 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4656 1, n_low, n_high, 1,
4657 fold_convert (arg0_type,
4658 integer_zero_node),
4659 high_positive))
4660 break;
4661
4662 in_p = (n_in_p == in_p);
4663 }
4664 else
4665 {
4666 /* Otherwise, "or" the range with the range of the input
4667 that will be interpreted as negative. */
4668 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4669 0, n_low, n_high, 1,
4670 fold_convert (arg0_type,
4671 integer_zero_node),
4672 high_positive))
4673 break;
4674
4675 in_p = (in_p != n_in_p);
4676 }
4677 }
4678
4679 exp = arg0;
4680 low = n_low, high = n_high;
4681 continue;
4682
4683 default:
4684 break;
4685 }
4686
4687 break;
4688 }
4689
4690 /* If EXP is a constant, we can evaluate whether this is true or false. */
4691 if (TREE_CODE (exp) == INTEGER_CST)
4692 {
4693 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4694 exp, 0, low, 0))
4695 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4696 exp, 1, high, 1)));
4697 low = high = 0;
4698 exp = 0;
4699 }
4700
4701 *pin_p = in_p, *plow = low, *phigh = high;
4702 return exp;
4703 }
4704 \f
4705 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4706 type, TYPE, return an expression to test if EXP is in (or out of, depending
4707 on IN_P) the range. Return 0 if the test couldn't be created. */
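
/* For example (illustrative): with IN_P nonzero, LOW = 2 and HIGH = 5,
   an int expression x yields the equivalent of
       (unsigned int) x - 2 <= 3
   via the wrap-around transformation documented below.  */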
4708
4709 tree
4710 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4711 {
4712 tree etype = TREE_TYPE (exp), value;
4713
4714 #ifdef HAVE_canonicalize_funcptr_for_compare
4715 /* Disable this optimization for function pointer expressions
4716 on targets that require function pointer canonicalization. */
4717 if (HAVE_canonicalize_funcptr_for_compare
4718 && TREE_CODE (etype) == POINTER_TYPE
4719 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4720 return NULL_TREE;
4721 #endif
4722
4723 if (! in_p)
4724 {
4725 value = build_range_check (type, exp, 1, low, high);
4726 if (value != 0)
4727 return invert_truthvalue (value);
4728
4729 return 0;
4730 }
4731
4732 if (low == 0 && high == 0)
4733 return build_int_cst (type, 1);
4734
4735 if (low == 0)
4736 return fold_build2 (LE_EXPR, type, exp,
4737 fold_convert (etype, high));
4738
4739 if (high == 0)
4740 return fold_build2 (GE_EXPR, type, exp,
4741 fold_convert (etype, low));
4742
4743 if (operand_equal_p (low, high, 0))
4744 return fold_build2 (EQ_EXPR, type, exp,
4745 fold_convert (etype, low));
4746
4747 if (integer_zerop (low))
4748 {
4749 if (! TYPE_UNSIGNED (etype))
4750 {
4751 etype = unsigned_type_for (etype);
4752 high = fold_convert (etype, high);
4753 exp = fold_convert (etype, exp);
4754 }
4755 return build_range_check (type, exp, 1, 0, high);
4756 }
4757
4758 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4759 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4760 {
4761 unsigned HOST_WIDE_INT lo;
4762 HOST_WIDE_INT hi;
4763 int prec;
4764
4765 prec = TYPE_PRECISION (etype);
4766 if (prec <= HOST_BITS_PER_WIDE_INT)
4767 {
4768 hi = 0;
4769 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4770 }
4771 else
4772 {
4773 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4774 lo = (unsigned HOST_WIDE_INT) -1;
4775 }
4776
4777 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4778 {
4779 if (TYPE_UNSIGNED (etype))
4780 {
4781 tree signed_etype = signed_type_for (etype);
4782 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4783 etype
4784 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4785 else
4786 etype = signed_etype;
4787 exp = fold_convert (etype, exp);
4788 }
4789 return fold_build2 (GT_EXPR, type, exp,
4790 build_int_cst (etype, 0));
4791 }
4792 }
4793
4794 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4795 This requires wrap-around arithmetic for the type of the expression.
4796 First make sure that arithmetic in this type is valid, then make sure
4797 that it wraps around. */
4798 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4799 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4800 TYPE_UNSIGNED (etype));
4801
4802 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4803 {
4804 tree utype, minv, maxv;
4805
4806 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4807 for the type in question, as we rely on this here. */
4808 utype = unsigned_type_for (etype);
4809 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4810 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4811 integer_one_node, 1);
4812 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4813
4814 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4815 minv, 1, maxv, 1)))
4816 etype = utype;
4817 else
4818 return 0;
4819 }
4820
4821 high = fold_convert (etype, high);
4822 low = fold_convert (etype, low);
4823 exp = fold_convert (etype, exp);
4824
4825 value = const_binop (MINUS_EXPR, high, low, 0);
4826
4827
4828 if (POINTER_TYPE_P (etype))
4829 {
4830 if (value != 0 && !TREE_OVERFLOW (value))
4831 {
4832 low = fold_convert (sizetype, low);
4833 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4834 return build_range_check (type,
4835 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4836 1, build_int_cst (etype, 0), value);
4837 }
4838 return 0;
4839 }
4840
4841 if (value != 0 && !TREE_OVERFLOW (value))
4842 return build_range_check (type,
4843 fold_build2 (MINUS_EXPR, etype, exp, low),
4844 1, build_int_cst (etype, 0), value);
4845
4846 return 0;
4847 }
4848 \f
4849 /* Return the predecessor of VAL in its type, handling the infinite case. */
4850
4851 static tree
4852 range_predecessor (tree val)
4853 {
4854 tree type = TREE_TYPE (val);
4855
4856 if (INTEGRAL_TYPE_P (type)
4857 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4858 return 0;
4859 else
4860 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4861 }
4862
4863 /* Return the successor of VAL in its type, handling the infinite case. */
4864
4865 static tree
4866 range_successor (tree val)
4867 {
4868 tree type = TREE_TYPE (val);
4869
4870 if (INTEGRAL_TYPE_P (type)
4871 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4872 return 0;
4873 else
4874 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4875 }
4876
4877 /* Given two ranges, see if we can merge them into one. Return 1 if we
4878 can, 0 if we can't. Set the output range into the specified parameters. */
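
/* For example (illustrative): merging + [2, 5] with + [4, 9] (both
   "in", as for the AND of two range tests) yields + [4, 5], their
   intersection.  */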
4879
4880 bool
4881 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4882 tree high0, int in1_p, tree low1, tree high1)
4883 {
4884 int no_overlap;
4885 int subset;
4886 int temp;
4887 tree tem;
4888 int in_p;
4889 tree low, high;
4890 int lowequal = ((low0 == 0 && low1 == 0)
4891 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4892 low0, 0, low1, 0)));
4893 int highequal = ((high0 == 0 && high1 == 0)
4894 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4895 high0, 1, high1, 1)));
4896
4897 /* Make range 0 be the range that starts first, or ends last if they
4898 start at the same value. Swap them if it isn't. */
4899 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4900 low0, 0, low1, 0))
4901 || (lowequal
4902 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4903 high1, 1, high0, 1))))
4904 {
4905 temp = in0_p, in0_p = in1_p, in1_p = temp;
4906 tem = low0, low0 = low1, low1 = tem;
4907 tem = high0, high0 = high1, high1 = tem;
4908 }
4909
4910 /* Now flag two cases, whether the ranges are disjoint or whether the
4911 second range is totally subsumed in the first. Note that the tests
4912 below are simplified by the ones above. */
4913 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4914 high0, 1, low1, 0));
4915 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4916 high1, 1, high0, 1));
4917
4918 /* We now have four cases, depending on whether we are including or
4919 excluding the two ranges. */
4920 if (in0_p && in1_p)
4921 {
4922 /* If they don't overlap, the result is false. If the second range
4923 is a subset it is the result. Otherwise, the range is from the start
4924 of the second to the end of the first. */
4925 if (no_overlap)
4926 in_p = 0, low = high = 0;
4927 else if (subset)
4928 in_p = 1, low = low1, high = high1;
4929 else
4930 in_p = 1, low = low1, high = high0;
4931 }
4932
4933 else if (in0_p && ! in1_p)
4934 {
4935 /* If they don't overlap, the result is the first range. If they are
4936 equal, the result is false. If the second range is a subset of the
4937 first, and the ranges begin at the same place, we go from just after
4938 the end of the second range to the end of the first. If the second
4939 range is not a subset of the first, or if it is a subset and both
4940 ranges end at the same place, the range starts at the start of the
4941 first range and ends just before the second range.
4942 Otherwise, we can't describe this as a single range. */
4943 if (no_overlap)
4944 in_p = 1, low = low0, high = high0;
4945 else if (lowequal && highequal)
4946 in_p = 0, low = high = 0;
4947 else if (subset && lowequal)
4948 {
4949 low = range_successor (high1);
4950 high = high0;
4951 in_p = 1;
4952 if (low == 0)
4953 {
4954 /* We are in the weird situation where high0 > high1 but
4955 high1 has no successor. Punt. */
4956 return 0;
4957 }
4958 }
4959 else if (! subset || highequal)
4960 {
4961 low = low0;
4962 high = range_predecessor (low1);
4963 in_p = 1;
4964 if (high == 0)
4965 {
4966 /* low0 < low1 but low1 has no predecessor. Punt. */
4967 return 0;
4968 }
4969 }
4970 else
4971 return 0;
4972 }
4973
4974 else if (! in0_p && in1_p)
4975 {
4976 /* If they don't overlap, the result is the second range. If the second
4977 is a subset of the first, the result is false. Otherwise,
4978 the range starts just after the first range and ends at the
4979 end of the second. */
4980 if (no_overlap)
4981 in_p = 1, low = low1, high = high1;
4982 else if (subset || highequal)
4983 in_p = 0, low = high = 0;
4984 else
4985 {
4986 low = range_successor (high0);
4987 high = high1;
4988 in_p = 1;
4989 if (low == 0)
4990 {
4991 /* high1 > high0 but high0 has no successor. Punt. */
4992 return 0;
4993 }
4994 }
4995 }
4996
4997 else
4998 {
4999 /* The case where we are excluding both ranges. Here the complex case
5000 is if they don't overlap. In that case, the only time we have a
5001 range is if they are adjacent. If the second is a subset of the
5002 first, the result is the first. Otherwise, the range to exclude
5003 starts at the beginning of the first range and ends at the end of the
5004 second. */
5005 if (no_overlap)
5006 {
5007 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5008 range_successor (high0),
5009 1, low1, 0)))
5010 in_p = 0, low = low0, high = high1;
5011 else
5012 {
5013 /* Canonicalize - [min, x] into - [-, x]. */
5014 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5015 switch (TREE_CODE (TREE_TYPE (low0)))
5016 {
5017 case ENUMERAL_TYPE:
5018 if (TYPE_PRECISION (TREE_TYPE (low0))
5019 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5020 break;
5021 /* FALLTHROUGH */
5022 case INTEGER_TYPE:
5023 if (tree_int_cst_equal (low0,
5024 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5025 low0 = 0;
5026 break;
5027 case POINTER_TYPE:
5028 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5029 && integer_zerop (low0))
5030 low0 = 0;
5031 break;
5032 default:
5033 break;
5034 }
5035
5036 /* Canonicalize - [x, max] into - [x, -]. */
5037 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5038 switch (TREE_CODE (TREE_TYPE (high1)))
5039 {
5040 case ENUMERAL_TYPE:
5041 if (TYPE_PRECISION (TREE_TYPE (high1))
5042 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5043 break;
5044 /* FALLTHROUGH */
5045 case INTEGER_TYPE:
5046 if (tree_int_cst_equal (high1,
5047 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5048 high1 = 0;
5049 break;
5050 case POINTER_TYPE:
5051 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5052 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5053 high1, 1,
5054 integer_one_node, 1)))
5055 high1 = 0;
5056 break;
5057 default:
5058 break;
5059 }
5060
5061 /* The ranges might also be adjacent between the maximum and
5062 minimum values of the given type. For
5063 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5064 return + [x + 1, y - 1]. */
5065 if (low0 == 0 && high1 == 0)
5066 {
5067 low = range_successor (high0);
5068 high = range_predecessor (low1);
5069 if (low == 0 || high == 0)
5070 return 0;
5071
5072 in_p = 1;
5073 }
5074 else
5075 return 0;
5076 }
5077 }
5078 else if (subset)
5079 in_p = 0, low = low0, high = high0;
5080 else
5081 in_p = 0, low = low0, high = high1;
5082 }
5083
5084 *pin_p = in_p, *plow = low, *phigh = high;
5085 return 1;
5086 }
5087 \f
5088
5089 /* Subroutine of fold, looking inside expressions of the form
5090 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5091 of the COND_EXPR. This function is being used also to optimize
5092 A op B ? C : A, by reversing the comparison first.
5093
5094 Return a folded expression whose code is not a COND_EXPR
5095 anymore, or NULL_TREE if no folding opportunity is found. */
5096
5097 static tree
5098 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5099 {
5100 enum tree_code comp_code = TREE_CODE (arg0);
5101 tree arg00 = TREE_OPERAND (arg0, 0);
5102 tree arg01 = TREE_OPERAND (arg0, 1);
5103 tree arg1_type = TREE_TYPE (arg1);
5104 tree tem;
5105
5106 STRIP_NOPS (arg1);
5107 STRIP_NOPS (arg2);
5108
5109 /* If we have A op 0 ? A : -A, consider applying the following
5110 transformations:
5111
5112 A == 0? A : -A same as -A
5113 A != 0? A : -A same as A
5114 A >= 0? A : -A same as abs (A)
5115 A > 0? A : -A same as abs (A)
5116 A <= 0? A : -A same as -abs (A)
5117 A < 0? A : -A same as -abs (A)
5118
5119 None of these transformations work for modes with signed
5120 zeros. If A is +/-0, the first two transformations will
5121 change the sign of the result (from +0 to -0, or vice
5122 versa). The last four will fix the sign of the result,
5123 even though the original expressions could be positive or
5124 negative, depending on the sign of A.
5125
5126 Note that all these transformations are correct if A is
5127 NaN, since the two alternatives (A and -A) are also NaNs. */
5128 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5129 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5130 ? real_zerop (arg01)
5131 : integer_zerop (arg01))
5132 && ((TREE_CODE (arg2) == NEGATE_EXPR
5133 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5134 /* In the case that A is of the form X-Y, '-A' (arg2) may
5135 have already been folded to Y-X, check for that. */
5136 || (TREE_CODE (arg1) == MINUS_EXPR
5137 && TREE_CODE (arg2) == MINUS_EXPR
5138 && operand_equal_p (TREE_OPERAND (arg1, 0),
5139 TREE_OPERAND (arg2, 1), 0)
5140 && operand_equal_p (TREE_OPERAND (arg1, 1),
5141 TREE_OPERAND (arg2, 0), 0))))
5142 switch (comp_code)
5143 {
5144 case EQ_EXPR:
5145 case UNEQ_EXPR:
5146 tem = fold_convert (arg1_type, arg1);
5147 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5148 case NE_EXPR:
5149 case LTGT_EXPR:
5150 return pedantic_non_lvalue (fold_convert (type, arg1));
5151 case UNGE_EXPR:
5152 case UNGT_EXPR:
5153 if (flag_trapping_math)
5154 break;
5155 /* Fall through. */
5156 case GE_EXPR:
5157 case GT_EXPR:
5158 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5159 arg1 = fold_convert (signed_type_for
5160 (TREE_TYPE (arg1)), arg1);
5161 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5162 return pedantic_non_lvalue (fold_convert (type, tem));
5163 case UNLE_EXPR:
5164 case UNLT_EXPR:
5165 if (flag_trapping_math)
5166 break;
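/* Fall through. */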
5167 case LE_EXPR:
5168 case LT_EXPR:
5169 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5170 arg1 = fold_convert (signed_type_for
5171 (TREE_TYPE (arg1)), arg1);
5172 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5173 return negate_expr (fold_convert (type, tem));
5174 default:
5175 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5176 break;
5177 }
5178
5179 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5180 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5181 both transformations are correct when A is NaN: A != 0
5182 is then true, and A == 0 is false. */
5183
5184 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5185 && integer_zerop (arg01) && integer_zerop (arg2))
5186 {
5187 if (comp_code == NE_EXPR)
5188 return pedantic_non_lvalue (fold_convert (type, arg1));
5189 else if (comp_code == EQ_EXPR)
5190 return build_int_cst (type, 0);
5191 }
5192
5193 /* Try some transformations of A op B ? A : B.
5194
5195 A == B? A : B same as B
5196 A != B? A : B same as A
5197 A >= B? A : B same as max (A, B)
5198 A > B? A : B same as max (B, A)
5199 A <= B? A : B same as min (A, B)
5200 A < B? A : B same as min (B, A)
5201
5202 As above, these transformations don't work in the presence
5203 of signed zeros. For example, if A and B are zeros of
5204 opposite sign, the first two transformations will change
5205 the sign of the result. In the last four, the original
5206 expressions give different results for (A=+0, B=-0) and
5207 (A=-0, B=+0), but the transformed expressions do not.
5208
5209 The first two transformations are correct if either A or B
5210 is a NaN. In the first transformation, the condition will
5211 be false, and B will indeed be chosen. In the case of the
5212 second transformation, the condition A != B will be true,
5213 and A will be chosen.
5214
5215 The conversions to max() and min() are not correct if B is
5216 a number and A is not. The conditions in the original
5217 expressions will be false, so all four give B. The min()
5218 and max() versions would give a NaN instead. */
5219 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5220 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5221 /* Avoid these transformations if the COND_EXPR may be used
5222 as an lvalue in the C++ front-end. PR c++/19199. */
5223 && (in_gimple_form
5224 || (strcmp (lang_hooks.name, "GNU C++") != 0
5225 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5226 || ! maybe_lvalue_p (arg1)
5227 || ! maybe_lvalue_p (arg2)))
5228 {
5229 tree comp_op0 = arg00;
5230 tree comp_op1 = arg01;
5231 tree comp_type = TREE_TYPE (comp_op0);
5232
5233 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5234 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5235 {
5236 comp_type = type;
5237 comp_op0 = arg1;
5238 comp_op1 = arg2;
5239 }
5240
5241 switch (comp_code)
5242 {
5243 case EQ_EXPR:
5244 return pedantic_non_lvalue (fold_convert (type, arg2));
5245 case NE_EXPR:
5246 return pedantic_non_lvalue (fold_convert (type, arg1));
5247 case LE_EXPR:
5248 case LT_EXPR:
5249 case UNLE_EXPR:
5250 case UNLT_EXPR:
5251 /* In C++ a ?: expression can be an lvalue, so put the
5252 operand which will be used if they are equal first
5253 so that we can convert this back to the
5254 corresponding COND_EXPR. */
5255 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5256 {
5257 comp_op0 = fold_convert (comp_type, comp_op0);
5258 comp_op1 = fold_convert (comp_type, comp_op1);
5259 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5260 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5261 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5262 return pedantic_non_lvalue (fold_convert (type, tem));
5263 }
5264 break;
5265 case GE_EXPR:
5266 case GT_EXPR:
5267 case UNGE_EXPR:
5268 case UNGT_EXPR:
5269 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5270 {
5271 comp_op0 = fold_convert (comp_type, comp_op0);
5272 comp_op1 = fold_convert (comp_type, comp_op1);
5273 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5274 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5275 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5276 return pedantic_non_lvalue (fold_convert (type, tem));
5277 }
5278 break;
5279 case UNEQ_EXPR:
5280 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5281 return pedantic_non_lvalue (fold_convert (type, arg2));
5282 break;
5283 case LTGT_EXPR:
5284 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5285 return pedantic_non_lvalue (fold_convert (type, arg1));
5286 break;
5287 default:
5288 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5289 break;
5290 }
5291 }
5292
5293 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5294 we might still be able to simplify this. For example,
5295 if C1 is one less or one more than C2, this might have started
5296 out as a MIN or MAX and been transformed by this function.
5297 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
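
/* E.g. (illustrative): "x < 3 ? x : 2" has C1 == C2 + 1 and is
   min (x, 2); the LT_EXPR case below rebuilds it as a MIN_EXPR.  */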
5298
5299 if (INTEGRAL_TYPE_P (type)
5300 && TREE_CODE (arg01) == INTEGER_CST
5301 && TREE_CODE (arg2) == INTEGER_CST)
5302 switch (comp_code)
5303 {
5304 case EQ_EXPR:
5305 /* We can replace A with C1 in this case. */
5306 arg1 = fold_convert (type, arg01);
5307 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5308
5309 case LT_EXPR:
5310 /* If C1 is C2 + 1, this is min(A, C2). */
5311 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5312 OEP_ONLY_CONST)
5313 && operand_equal_p (arg01,
5314 const_binop (PLUS_EXPR, arg2,
5315 build_int_cst (type, 1), 0),
5316 OEP_ONLY_CONST))
5317 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5318 type,
5319 fold_convert (type, arg1),
5320 arg2));
5321 break;
5322
5323 case LE_EXPR:
5324 /* If C1 is C2 - 1, this is min(A, C2). */
5325 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5326 OEP_ONLY_CONST)
5327 && operand_equal_p (arg01,
5328 const_binop (MINUS_EXPR, arg2,
5329 build_int_cst (type, 1), 0),
5330 OEP_ONLY_CONST))
5331 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5332 type,
5333 fold_convert (type, arg1),
5334 arg2));
5335 break;
5336
5337 case GT_EXPR:
5338 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5339 MAX_EXPR, to preserve the signedness of the comparison. */
5340 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5341 OEP_ONLY_CONST)
5342 && operand_equal_p (arg01,
5343 const_binop (MINUS_EXPR, arg2,
5344 build_int_cst (type, 1), 0),
5345 OEP_ONLY_CONST))
5346 return pedantic_non_lvalue (fold_convert (type,
5347 fold_build2 (MAX_EXPR, TREE_TYPE (arg00),
5348 arg00,
5349 fold_convert (TREE_TYPE (arg00),
5350 arg2))));
5351 break;
5352
5353 case GE_EXPR:
5354 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5355 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5356 OEP_ONLY_CONST)
5357 && operand_equal_p (arg01,
5358 const_binop (PLUS_EXPR, arg2,
5359 build_int_cst (type, 1), 0),
5360 OEP_ONLY_CONST))
5361 return pedantic_non_lvalue (fold_convert (type,
5362 fold_build2 (MAX_EXPR, TREE_TYPE (arg00),
5363 arg00,
5364 fold_convert (TREE_TYPE (arg00),
5365 arg2))));
5366 break;
5367 case NE_EXPR:
5368 break;
5369 default:
5370 gcc_unreachable ();
5371 }
5372
5373 return NULL_TREE;
5374 }
5375
5376
5377 \f
5378 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5379 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5380 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5381 false) >= 2)
5382 #endif
5383
5384 /* EXP is some logical combination of boolean tests. See if we can
5385 merge it into some range test. Return the new tree if so. */
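
/* For example (illustrative): "x == 2 || x == 3" merges into the
   single range + [2, 3] and is rewritten as the equivalent of
       (unsigned int) x - 2 <= 1.  */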
5386
5387 static tree
5388 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5389 {
5390 int or_op = (code == TRUTH_ORIF_EXPR
5391 || code == TRUTH_OR_EXPR);
5392 int in0_p, in1_p, in_p;
5393 tree low0, low1, low, high0, high1, high;
5394 bool strict_overflow_p = false;
5395 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5396 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5397 tree tem;
5398 const char * const warnmsg = G_("assuming signed overflow does not occur "
5399 "when simplifying range test");
5400
5401 /* If this is an OR operation, invert both sides; we will invert
5402 again at the end. */
5403 if (or_op)
5404 in0_p = ! in0_p, in1_p = ! in1_p;
5405
5406 /* If both expressions are the same, if we can merge the ranges, and we
5407 can build the range test, return it or it inverted. If one of the
5408 ranges is always true or always false, consider it to be the same
5409 expression as the other. */
5410 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5411 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5412 in1_p, low1, high1)
5413 && 0 != (tem = (build_range_check (type,
5414 lhs != 0 ? lhs
5415 : rhs != 0 ? rhs : integer_zero_node,
5416 in_p, low, high))))
5417 {
5418 if (strict_overflow_p)
5419 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5420 return or_op ? invert_truthvalue (tem) : tem;
5421 }
5422
5423 /* On machines where branches are expensive, if this is a
5424 short-circuited branch and the underlying object on both sides
5425 is the same, make a non-short-circuit operation. */
5426 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5427 && lhs != 0 && rhs != 0
5428 && (code == TRUTH_ANDIF_EXPR
5429 || code == TRUTH_ORIF_EXPR)
5430 && operand_equal_p (lhs, rhs, 0))
5431 {
5432 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5433 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5434 which case we can't do this. */
5435 if (simple_operand_p (lhs))
5436 return build2 (code == TRUTH_ANDIF_EXPR
5437 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5438 type, op0, op1);
5439
5440 else if (lang_hooks.decls.global_bindings_p () == 0
5441 && ! CONTAINS_PLACEHOLDER_P (lhs))
5442 {
5443 tree common = save_expr (lhs);
5444
5445 if (0 != (lhs = build_range_check (type, common,
5446 or_op ? ! in0_p : in0_p,
5447 low0, high0))
5448 && (0 != (rhs = build_range_check (type, common,
5449 or_op ? ! in1_p : in1_p,
5450 low1, high1))))
5451 {
5452 if (strict_overflow_p)
5453 fold_overflow_warning (warnmsg,
5454 WARN_STRICT_OVERFLOW_COMPARISON);
5455 return build2 (code == TRUTH_ANDIF_EXPR
5456 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5457 type, lhs, rhs);
5458 }
5459 }
5460 }
5461
5462 return 0;
5463 }
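/* Illustration (a sketch, not compiled; the function names are invented
   for the example): at the source level the merge above rewrites the
   classic two-comparison digit test into a single unsigned range check.  */
#if 0
static int
is_ascii_digit_before (int ch)
{
  return ch >= '0' && ch <= '9';	/* two compares, one branch */
}

static int
is_ascii_digit_after (int ch)
{
  /* One subtract plus one unsigned compare covers the whole range.  */
  return (unsigned int) (ch - '0') <= 9u;
}
#endif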
5464 \f
5465 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5466 bit value. Arrange things so the extra bits will be set to zero if and
5467 only if C is sign-extended to its full width. If MASK is nonzero,
5468 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5469
5470 static tree
5471 unextend (tree c, int p, int unsignedp, tree mask)
5472 {
5473 tree type = TREE_TYPE (c);
5474 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5475 tree temp;
5476
5477 if (p == modesize || unsignedp)
5478 return c;
5479
5480 /* We work by getting just the sign bit into the low-order bit, then
5481 into the high-order bit, then sign-extend. We then XOR that value
5482 with C. */
5483 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5484 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5485
5486 /* We must use a signed type in order to get an arithmetic right shift.
5487 However, we must also avoid introducing accidental overflows, so that
5488 a subsequent call to integer_zerop will work. Hence we must
5489 do the type conversion here. At this point, the constant is either
5490 zero or one, and the conversion to a signed type can never overflow.
5491 We could get an overflow if this conversion is done anywhere else. */
5492 if (TYPE_UNSIGNED (type))
5493 temp = fold_convert (signed_type_for (type), temp);
5494
5495 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5496 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5497 if (mask != 0)
5498 temp = const_binop (BIT_AND_EXPR, temp,
5499 fold_convert (TREE_TYPE (c), mask), 0);
5500 /* If necessary, convert the type back to match the type of C. */
5501 if (TYPE_UNSIGNED (type))
5502 temp = fold_convert (type, temp);
5503
5504 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5505 }
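/* The shift/XOR sequence above is the tree-level counterpart of the
   classic C idiom for sign-extending a P-bit field.  A sketch, assuming
   X already fits in P bits and the usual two's-complement conversion
   back to a signed type (the helper name is invented):  */
#if 0
static long
sign_extend_field (unsigned long x, int p)
{
  unsigned long m = 1UL << (p - 1);	/* mask with only the sign bit set */
  /* XOR flips the field's sign bit; the subtraction then propagates
     it through all the higher bits.  */
  return (long) ((x ^ m) - m);
}
#endif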
5506 \f
5507 /* Find ways of folding logical expressions of LHS and RHS:
5508 Try to merge two comparisons to the same innermost item.
5509 Look for range tests like "ch >= '0' && ch <= '9'".
5510 Look for combinations of simple terms on machines with expensive branches
5511 and evaluate the RHS unconditionally.
5512
5513 For example, if we have p->a == 2 && p->b == 4 and we can make an
5514 object large enough to span both A and B, we can do this with a comparison
5515 against the object ANDed with a mask.
5516
5517 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5518 operations to do this with one comparison.
5519
5520 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5521 function and the one above.
5522
5523 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5524 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5525
5526 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5527 two operands.
5528
5529 We return the simplified tree or 0 if no optimization is possible. */
5530
5531 static tree
5532 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5533 {
5534 /* If this is the "or" of two comparisons, we can do something if
5535 the comparisons are NE_EXPR. If this is the "and", we can do something
5536 if the comparisons are EQ_EXPR. I.e.,
5537 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5538
5539 WANTED_CODE is this operation code. For single bit fields, we can
5540 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5541 comparison for one-bit fields. */
5542
5543 enum tree_code wanted_code;
5544 enum tree_code lcode, rcode;
5545 tree ll_arg, lr_arg, rl_arg, rr_arg;
5546 tree ll_inner, lr_inner, rl_inner, rr_inner;
5547 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5548 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5549 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5550 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5551 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5552 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5553 enum machine_mode lnmode, rnmode;
5554 tree ll_mask, lr_mask, rl_mask, rr_mask;
5555 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5556 tree l_const, r_const;
5557 tree lntype, rntype, result;
5558 HOST_WIDE_INT first_bit, end_bit;
5559 int volatilep;
5560 tree orig_lhs = lhs, orig_rhs = rhs;
5561 enum tree_code orig_code = code;
5562
5563 /* Start by getting the comparison codes. Fail if anything is volatile.
5564 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5565 it were surrounded with a NE_EXPR. */
5566
5567 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5568 return 0;
5569
5570 lcode = TREE_CODE (lhs);
5571 rcode = TREE_CODE (rhs);
5572
5573 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5574 {
5575 lhs = build2 (NE_EXPR, truth_type, lhs,
5576 build_int_cst (TREE_TYPE (lhs), 0));
5577 lcode = NE_EXPR;
5578 }
5579
5580 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5581 {
5582 rhs = build2 (NE_EXPR, truth_type, rhs,
5583 build_int_cst (TREE_TYPE (rhs), 0));
5584 rcode = NE_EXPR;
5585 }
5586
5587 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5588 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5589 return 0;
5590
5591 ll_arg = TREE_OPERAND (lhs, 0);
5592 lr_arg = TREE_OPERAND (lhs, 1);
5593 rl_arg = TREE_OPERAND (rhs, 0);
5594 rr_arg = TREE_OPERAND (rhs, 1);
5595
5596 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5597 if (simple_operand_p (ll_arg)
5598 && simple_operand_p (lr_arg))
5599 {
5600 tree result;
5601 if (operand_equal_p (ll_arg, rl_arg, 0)
5602 && operand_equal_p (lr_arg, rr_arg, 0))
5603 {
5604 result = combine_comparisons (code, lcode, rcode,
5605 truth_type, ll_arg, lr_arg);
5606 if (result)
5607 return result;
5608 }
5609 else if (operand_equal_p (ll_arg, rr_arg, 0)
5610 && operand_equal_p (lr_arg, rl_arg, 0))
5611 {
5612 result = combine_comparisons (code, lcode,
5613 swap_tree_comparison (rcode),
5614 truth_type, ll_arg, lr_arg);
5615 if (result)
5616 return result;
5617 }
5618 }
5619
5620 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5621 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5622
5623 /* If the RHS can be evaluated unconditionally and its operands are
5624 simple, it wins to evaluate the RHS unconditionally on machines
5625 with expensive branches. In this case, this isn't a comparison
5626 that can be merged. Avoid doing this if the RHS is a floating-point
5627 comparison since those can trap. */
5628
5629 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5630 false) >= 2
5631 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5632 && simple_operand_p (rl_arg)
5633 && simple_operand_p (rr_arg))
5634 {
5635 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5636 if (code == TRUTH_OR_EXPR
5637 && lcode == NE_EXPR && integer_zerop (lr_arg)
5638 && rcode == NE_EXPR && integer_zerop (rr_arg)
5639 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5640 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5641 return build2 (NE_EXPR, truth_type,
5642 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5643 ll_arg, rl_arg),
5644 build_int_cst (TREE_TYPE (ll_arg), 0));
5645
5646 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5647 if (code == TRUTH_AND_EXPR
5648 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5649 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5650 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5651 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5652 return build2 (EQ_EXPR, truth_type,
5653 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5654 ll_arg, rl_arg),
5655 build_int_cst (TREE_TYPE (ll_arg), 0));
5656
5657 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5658 {
5659 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5660 return build2 (code, truth_type, lhs, rhs);
5661 return NULL_TREE;
5662 }
5663 }
5664
5665 /* See if the comparisons can be merged. Then get all the parameters for
5666 each side. */
5667
5668 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5669 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5670 return 0;
5671
5672 volatilep = 0;
5673 ll_inner = decode_field_reference (ll_arg,
5674 &ll_bitsize, &ll_bitpos, &ll_mode,
5675 &ll_unsignedp, &volatilep, &ll_mask,
5676 &ll_and_mask);
5677 lr_inner = decode_field_reference (lr_arg,
5678 &lr_bitsize, &lr_bitpos, &lr_mode,
5679 &lr_unsignedp, &volatilep, &lr_mask,
5680 &lr_and_mask);
5681 rl_inner = decode_field_reference (rl_arg,
5682 &rl_bitsize, &rl_bitpos, &rl_mode,
5683 &rl_unsignedp, &volatilep, &rl_mask,
5684 &rl_and_mask);
5685 rr_inner = decode_field_reference (rr_arg,
5686 &rr_bitsize, &rr_bitpos, &rr_mode,
5687 &rr_unsignedp, &volatilep, &rr_mask,
5688 &rr_and_mask);
5689
5690 /* The inner operation on the lhs of each comparison must be the
5691 same if we are to be able to do anything.
5692 Then see if we have constants. If not, the same must be true for
5693 the rhs's. */
5694 if (volatilep || ll_inner == 0 || rl_inner == 0
5695 || ! operand_equal_p (ll_inner, rl_inner, 0))
5696 return 0;
5697
5698 if (TREE_CODE (lr_arg) == INTEGER_CST
5699 && TREE_CODE (rr_arg) == INTEGER_CST)
5700 l_const = lr_arg, r_const = rr_arg;
5701 else if (lr_inner == 0 || rr_inner == 0
5702 || ! operand_equal_p (lr_inner, rr_inner, 0))
5703 return 0;
5704 else
5705 l_const = r_const = 0;
5706
5707 /* If either comparison code is not correct for our logical operation,
5708 fail. However, we can convert a one-bit comparison against zero into
5709 the opposite comparison against that bit being set in the field. */
5710
5711 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5712 if (lcode != wanted_code)
5713 {
5714 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5715 {
5716 /* Make the left operand unsigned, since we are only interested
5717 in the value of one bit. Otherwise we are doing the wrong
5718 thing below. */
5719 ll_unsignedp = 1;
5720 l_const = ll_mask;
5721 }
5722 else
5723 return 0;
5724 }
5725
5726 /* This is analogous to the code for l_const above. */
5727 if (rcode != wanted_code)
5728 {
5729 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5730 {
5731 rl_unsignedp = 1;
5732 r_const = rl_mask;
5733 }
5734 else
5735 return 0;
5736 }
5737
5738 /* See if we can find a mode that contains both fields being compared on
5739 the left. If we can't, fail. Otherwise, update all constants and masks
5740 to be relative to a field of that size. */
5741 first_bit = MIN (ll_bitpos, rl_bitpos);
5742 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5743 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5744 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5745 volatilep);
5746 if (lnmode == VOIDmode)
5747 return 0;
5748
5749 lnbitsize = GET_MODE_BITSIZE (lnmode);
5750 lnbitpos = first_bit & ~ (lnbitsize - 1);
5751 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5752 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5753
5754 if (BYTES_BIG_ENDIAN)
5755 {
5756 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5757 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5758 }
5759
5760 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5761 size_int (xll_bitpos), 0);
5762 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5763 size_int (xrl_bitpos), 0);
5764
5765 if (l_const)
5766 {
5767 l_const = fold_convert (lntype, l_const);
5768 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5769 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5770 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5771 fold_build1 (BIT_NOT_EXPR,
5772 lntype, ll_mask),
5773 0)))
5774 {
5775 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5776
5777 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5778 }
5779 }
5780 if (r_const)
5781 {
5782 r_const = fold_convert (lntype, r_const);
5783 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5784 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5785 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5786 fold_build1 (BIT_NOT_EXPR,
5787 lntype, rl_mask),
5788 0)))
5789 {
5790 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5791
5792 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5793 }
5794 }
5795
5796 /* If the right sides are not constant, do the same for it. Also,
5797 disallow this optimization if a size or signedness mismatch occurs
5798 between the left and right sides. */
5799 if (l_const == 0)
5800 {
5801 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5802 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5803 /* Make sure the two fields on the right
5804 correspond to the left without being swapped. */
5805 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5806 return 0;
5807
5808 first_bit = MIN (lr_bitpos, rr_bitpos);
5809 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5810 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5811 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5812 volatilep);
5813 if (rnmode == VOIDmode)
5814 return 0;
5815
5816 rnbitsize = GET_MODE_BITSIZE (rnmode);
5817 rnbitpos = first_bit & ~ (rnbitsize - 1);
5818 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5819 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5820
5821 if (BYTES_BIG_ENDIAN)
5822 {
5823 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5824 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5825 }
5826
5827 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5828 size_int (xlr_bitpos), 0);
5829 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5830 size_int (xrr_bitpos), 0);
5831
5832 /* Make a mask that corresponds to both fields being compared.
5833 Do this for both items being compared. If the operands are the
5834 same size and the bits being compared are in the same position
5835 then we can do this by masking both and comparing the masked
5836 results. */
5837 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5838 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5839 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5840 {
5841 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5842 ll_unsignedp || rl_unsignedp);
5843 if (! all_ones_mask_p (ll_mask, lnbitsize))
5844 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5845
5846 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5847 lr_unsignedp || rr_unsignedp);
5848 if (! all_ones_mask_p (lr_mask, rnbitsize))
5849 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5850
5851 return build2 (wanted_code, truth_type, lhs, rhs);
5852 }
5853
5854 /* There is still another way we can do something: If both pairs of
5855 fields being compared are adjacent, we may be able to make a wider
5856 field containing them both.
5857
5858 Note that we still must mask the lhs/rhs expressions. Furthermore,
5859 the mask must be shifted to account for the shift done by
5860 make_bit_field_ref. */
5861 if ((ll_bitsize + ll_bitpos == rl_bitpos
5862 && lr_bitsize + lr_bitpos == rr_bitpos)
5863 || (ll_bitpos == rl_bitpos + rl_bitsize
5864 && lr_bitpos == rr_bitpos + rr_bitsize))
5865 {
5866 tree type;
5867
5868 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5869 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5870 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5871 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5872
5873 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5874 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5875 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5876 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5877
5878 /* Convert to the smaller type before masking out unwanted bits. */
5879 type = lntype;
5880 if (lntype != rntype)
5881 {
5882 if (lnbitsize > rnbitsize)
5883 {
5884 lhs = fold_convert (rntype, lhs);
5885 ll_mask = fold_convert (rntype, ll_mask);
5886 type = rntype;
5887 }
5888 else if (lnbitsize < rnbitsize)
5889 {
5890 rhs = fold_convert (lntype, rhs);
5891 lr_mask = fold_convert (lntype, lr_mask);
5892 type = lntype;
5893 }
5894 }
5895
5896 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5897 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5898
5899 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5900 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5901
5902 return build2 (wanted_code, truth_type, lhs, rhs);
5903 }
5904
5905 return 0;
5906 }
5907
5908 /* Handle the case of comparisons with constants. If there is something in
5909 common between the masks, those bits of the constants must be the same.
5910 If not, the condition is always false. Test for this to avoid generating
5911 incorrect code below. */
5912 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5913 if (! integer_zerop (result)
5914 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5915 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5916 {
5917 if (wanted_code == NE_EXPR)
5918 {
5919 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5920 return constant_boolean_node (true, truth_type);
5921 }
5922 else
5923 {
5924 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5925 return constant_boolean_node (false, truth_type);
5926 }
5927 }
5928
5929 /* Construct the expression we will return. First get the component
5930 reference we will make. Unless the mask is all ones for the width of
5931 that field, perform the mask operation. Then compare with the
5932 merged constant. */
5933 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5934 ll_unsignedp || rl_unsignedp);
5935
5936 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5937 if (! all_ones_mask_p (ll_mask, lnbitsize))
5938 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5939
5940 return build2 (wanted_code, truth_type, result,
5941 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5942 }
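/* Illustration (a sketch, assuming two adjacent byte-sized fields with
   no padding between them and a little-endian target; names and
   constants are invented): the merge above turns two field compares
   into one wider compare, just as (a != 0) || (b != 0) becomes
   (a | b) != 0 earlier in the function.  */
#if 0
#include <string.h>

struct s { unsigned char a, b; };

static int
both_match (const struct s *p)
{
  /* p->a == 2 && p->b == 4, done as a single 16-bit compare; on a
     little-endian target the packed constant is 0x0402.  */
  unsigned short word;
  memcpy (&word, p, sizeof word);
  return word == 0x0402;
}
#endif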
5943 \f
5944 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5945 constant. */
5946
5947 static tree
5948 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5949 {
5950 tree arg0 = op0;
5951 enum tree_code op_code;
5952 tree comp_const;
5953 tree minmax_const;
5954 int consts_equal, consts_lt;
5955 tree inner;
5956
5957 STRIP_SIGN_NOPS (arg0);
5958
5959 op_code = TREE_CODE (arg0);
5960 minmax_const = TREE_OPERAND (arg0, 1);
5961 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5962 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5963 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5964 inner = TREE_OPERAND (arg0, 0);
5965
5966 /* If something does not permit us to optimize, return the original tree. */
5967 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5968 || TREE_CODE (comp_const) != INTEGER_CST
5969 || TREE_OVERFLOW (comp_const)
5970 || TREE_CODE (minmax_const) != INTEGER_CST
5971 || TREE_OVERFLOW (minmax_const))
5972 return NULL_TREE;
5973
5974 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5975 and GT_EXPR, doing the rest with recursive calls using logical
5976 simplifications. */
5977 switch (code)
5978 {
5979 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5980 {
5981 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5982 type, op0, op1);
5983 if (tem)
5984 return invert_truthvalue (tem);
5985 return NULL_TREE;
5986 }
5987
5988 case GE_EXPR:
5989 return
5990 fold_build2 (TRUTH_ORIF_EXPR, type,
5991 optimize_minmax_comparison
5992 (EQ_EXPR, type, arg0, comp_const),
5993 optimize_minmax_comparison
5994 (GT_EXPR, type, arg0, comp_const));
5995
5996 case EQ_EXPR:
5997 if (op_code == MAX_EXPR && consts_equal)
5998 /* MAX (X, 0) == 0 -> X <= 0 */
5999 return fold_build2 (LE_EXPR, type, inner, comp_const);
6000
6001 else if (op_code == MAX_EXPR && consts_lt)
6002 /* MAX (X, 0) == 5 -> X == 5 */
6003 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6004
6005 else if (op_code == MAX_EXPR)
6006 /* MAX (X, 0) == -1 -> false */
6007 return omit_one_operand (type, integer_zero_node, inner);
6008
6009 else if (consts_equal)
6010 /* MIN (X, 0) == 0 -> X >= 0 */
6011 return fold_build2 (GE_EXPR, type, inner, comp_const);
6012
6013 else if (consts_lt)
6014 /* MIN (X, 0) == 5 -> false */
6015 return omit_one_operand (type, integer_zero_node, inner);
6016
6017 else
6018 /* MIN (X, 0) == -1 -> X == -1 */
6019 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6020
6021 case GT_EXPR:
6022 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6023 /* MAX (X, 0) > 0 -> X > 0
6024 MAX (X, 0) > 5 -> X > 5 */
6025 return fold_build2 (GT_EXPR, type, inner, comp_const);
6026
6027 else if (op_code == MAX_EXPR)
6028 /* MAX (X, 0) > -1 -> true */
6029 return omit_one_operand (type, integer_one_node, inner);
6030
6031 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6032 /* MIN (X, 0) > 0 -> false
6033 MIN (X, 0) > 5 -> false */
6034 return omit_one_operand (type, integer_zero_node, inner);
6035
6036 else
6037 /* MIN (X, 0) > -1 -> X > -1 */
6038 return fold_build2 (GT_EXPR, type, inner, comp_const);
6039
6040 default:
6041 return NULL_TREE;
6042 }
6043 }
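/* Worked example (a sketch; MAX is spelled out as a macro since the
   folder works on trees): both functions below agree for every int x,
   which is the MAX_EXPR/GT_EXPR row of the table above.  A GE_EXPR such
   as MAX (x, 0) >= 5 is first split into MAX (x, 0) == 5 ||
   MAX (x, 0) > 5 and each half then reduces to x == 5 || x > 5.  */
#if 0
#define MAX(a, b) ((a) > (b) ? (a) : (b))

static int before (int x) { return MAX (x, 0) > 5; }
static int after  (int x) { return x > 5; }
#endif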
6044 \f
6045 /* T is an integer expression that is being multiplied by, divided by, or
6046 taken modulo a constant C (CODE says which operation and what kind of
6047 division or modulus). See if we can eliminate that operation by folding it with
6048 other operations already in T. WIDE_TYPE, if non-null, is a type that
6049 should be used for the computation if wider than our type.
6050
6051 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6052 (X * 2) + (Y * 4). We must, however, be assured that either the original
6053 expression would not overflow or that overflow is undefined for the type
6054 in the language in question.
6055
6056 If we return a non-null expression, it is an equivalent form of the
6057 original computation, but need not be in the original type.
6058
6059 We set *STRICT_OVERFLOW_P to true if the return value depends on
6060 signed overflow being undefined. Otherwise we do not change
6061 *STRICT_OVERFLOW_P. */
6062
6063 static tree
6064 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6065 bool *strict_overflow_p)
6066 {
6067 /* To avoid exponential search depth, refuse to allow recursion past
6068 three levels. Beyond that (1) it's highly unlikely that we'll find
6069 something interesting and (2) we've probably processed it before
6070 when we built the inner expression. */
6071
6072 static int depth;
6073 tree ret;
6074
6075 if (depth > 3)
6076 return NULL;
6077
6078 depth++;
6079 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6080 depth--;
6081
6082 return ret;
6083 }
6084
6085 static tree
6086 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6087 bool *strict_overflow_p)
6088 {
6089 tree type = TREE_TYPE (t);
6090 enum tree_code tcode = TREE_CODE (t);
6091 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6092 > GET_MODE_SIZE (TYPE_MODE (type)))
6093 ? wide_type : type);
6094 tree t1, t2;
6095 int same_p = tcode == code;
6096 tree op0 = NULL_TREE, op1 = NULL_TREE;
6097 bool sub_strict_overflow_p;
6098
6099 /* Don't deal with constants of zero here; they confuse the code below. */
6100 if (integer_zerop (c))
6101 return NULL_TREE;
6102
6103 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6104 op0 = TREE_OPERAND (t, 0);
6105
6106 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6107 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6108
6109 /* Note that we need not handle conditional operations here since fold
6110 already handles those cases. So just do arithmetic here. */
6111 switch (tcode)
6112 {
6113 case INTEGER_CST:
6114 /* For a constant, we can always simplify if we are a multiply
6115 or (for divide and modulus) if it is a multiple of our constant. */
6116 if (code == MULT_EXPR
6117 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6118 return const_binop (code, fold_convert (ctype, t),
6119 fold_convert (ctype, c), 0);
6120 break;
6121
6122 CASE_CONVERT: case NON_LVALUE_EXPR:
6123 /* If op0 is an expression ... */
6124 if ((COMPARISON_CLASS_P (op0)
6125 || UNARY_CLASS_P (op0)
6126 || BINARY_CLASS_P (op0)
6127 || VL_EXP_CLASS_P (op0)
6128 || EXPRESSION_CLASS_P (op0))
6129 /* ... and has wrapping overflow, and its type is smaller
6130 than ctype, then we cannot pass through as widening. */
6131 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6132 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6133 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6134 && (TYPE_PRECISION (ctype)
6135 > TYPE_PRECISION (TREE_TYPE (op0))))
6136 /* ... or this is a truncation (t is narrower than op0),
6137 then we cannot pass through this narrowing. */
6138 || (TYPE_PRECISION (type)
6139 < TYPE_PRECISION (TREE_TYPE (op0)))
6140 /* ... or signedness changes for division or modulus,
6141 then we cannot pass through this conversion. */
6142 || (code != MULT_EXPR
6143 && (TYPE_UNSIGNED (ctype)
6144 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6145 /* ... or has undefined overflow while the converted to
6146 type has not, we cannot do the operation in the inner type
6147 as that would introduce undefined overflow. */
6148 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6149 && !TYPE_OVERFLOW_UNDEFINED (type))))
6150 break;
6151
6152 /* Pass the constant down and see if we can make a simplification. If
6153 we can, replace this expression with the inner simplification for
6154 possible later conversion to our or some other type. */
6155 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6156 && TREE_CODE (t2) == INTEGER_CST
6157 && !TREE_OVERFLOW (t2)
6158 && (0 != (t1 = extract_muldiv (op0, t2, code,
6159 code == MULT_EXPR
6160 ? ctype : NULL_TREE,
6161 strict_overflow_p))))
6162 return t1;
6163 break;
6164
6165 case ABS_EXPR:
6166 /* If widening the type changes it from signed to unsigned, then we
6167 must avoid building ABS_EXPR itself as unsigned. */
6168 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6169 {
6170 tree cstype = (*signed_type_for) (ctype);
6171 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6172 != 0)
6173 {
6174 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6175 return fold_convert (ctype, t1);
6176 }
6177 break;
6178 }
6179 /* If the constant is negative, we cannot simplify this. */
6180 if (tree_int_cst_sgn (c) == -1)
6181 break;
6182 /* FALLTHROUGH */
6183 case NEGATE_EXPR:
6184 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6185 != 0)
6186 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6187 break;
6188
6189 case MIN_EXPR: case MAX_EXPR:
6190 /* If widening the type changes the signedness, then we can't perform
6191 this optimization as that changes the result. */
6192 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6193 break;
6194
6195 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6196 sub_strict_overflow_p = false;
6197 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6198 &sub_strict_overflow_p)) != 0
6199 && (t2 = extract_muldiv (op1, c, code, wide_type,
6200 &sub_strict_overflow_p)) != 0)
6201 {
6202 if (tree_int_cst_sgn (c) < 0)
6203 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6204 if (sub_strict_overflow_p)
6205 *strict_overflow_p = true;
6206 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6207 fold_convert (ctype, t2));
6208 }
6209 break;
6210
6211 case LSHIFT_EXPR: case RSHIFT_EXPR:
6212 /* If the second operand is constant, this is a multiplication
6213 or floor division by a power of two, so we can treat it that
6214 way unless the multiplier or divisor overflows. Signed
6215 left-shift overflow is implementation-defined rather than
6216 undefined in C90, so do not convert signed left shift into
6217 multiplication. */
6218 if (TREE_CODE (op1) == INTEGER_CST
6219 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6220 /* const_binop may not detect overflow correctly,
6221 so check for it explicitly here. */
6222 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6223 && TREE_INT_CST_HIGH (op1) == 0
6224 && 0 != (t1 = fold_convert (ctype,
6225 const_binop (LSHIFT_EXPR,
6226 size_one_node,
6227 op1, 0)))
6228 && !TREE_OVERFLOW (t1))
6229 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6230 ? MULT_EXPR : FLOOR_DIV_EXPR,
6231 ctype, fold_convert (ctype, op0), t1),
6232 c, code, wide_type, strict_overflow_p);
6233 break;
6234
6235 case PLUS_EXPR: case MINUS_EXPR:
6236 /* See if we can eliminate the operation on both sides. If we can, we
6237 can return a new PLUS or MINUS. If we can't, the only remaining
6238 cases where we can do anything are if the second operand is a
6239 constant. */
6240 sub_strict_overflow_p = false;
6241 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6242 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6243 if (t1 != 0 && t2 != 0
6244 && (code == MULT_EXPR
6245 /* If not multiplication, we can only do this if both operands
6246 are divisible by c. */
6247 || (multiple_of_p (ctype, op0, c)
6248 && multiple_of_p (ctype, op1, c))))
6249 {
6250 if (sub_strict_overflow_p)
6251 *strict_overflow_p = true;
6252 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6253 fold_convert (ctype, t2));
6254 }
6255
6256 /* If this was a subtraction, negate OP1 and set it to be an addition.
6257 This simplifies the logic below. */
6258 if (tcode == MINUS_EXPR)
6259 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6260
6261 if (TREE_CODE (op1) != INTEGER_CST)
6262 break;
6263
6264 /* If either OP1 or C are negative, this optimization is not safe for
6265 some of the division and remainder types while for others we need
6266 to change the code. */
6267 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6268 {
6269 if (code == CEIL_DIV_EXPR)
6270 code = FLOOR_DIV_EXPR;
6271 else if (code == FLOOR_DIV_EXPR)
6272 code = CEIL_DIV_EXPR;
6273 else if (code != MULT_EXPR
6274 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6275 break;
6276 }
6277
6278 /* If it's a multiply or a division/modulus operation of a multiple
6279 of our constant, do the operation and verify it doesn't overflow. */
6280 if (code == MULT_EXPR
6281 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6282 {
6283 op1 = const_binop (code, fold_convert (ctype, op1),
6284 fold_convert (ctype, c), 0);
6285 /* We allow the constant to overflow with wrapping semantics. */
6286 if (op1 == 0
6287 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6288 break;
6289 }
6290 else
6291 break;
6292
6293 /* If we have an unsigned type that is not a sizetype, we cannot widen
6294 the operation since it will change the result if the original
6295 computation overflowed. */
6296 if (TYPE_UNSIGNED (ctype)
6297 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6298 && ctype != type)
6299 break;
6300
6301 /* If we were able to eliminate our operation from the first side,
6302 apply our operation to the second side and reform the PLUS. */
6303 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6304 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6305
6306 /* The last case is if we are a multiply. In that case, we can
6307 apply the distributive law to commute the multiply and addition
6308 if the multiplication of the constants doesn't overflow. */
6309 if (code == MULT_EXPR)
6310 return fold_build2 (tcode, ctype,
6311 fold_build2 (code, ctype,
6312 fold_convert (ctype, op0),
6313 fold_convert (ctype, c)),
6314 op1);
6315
6316 break;
6317
6318 case MULT_EXPR:
6319 /* We have a special case here if we are doing something like
6320 (C * 8) % 4 since we know that's zero. */
6321 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6322 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6323 /* If the multiplication can overflow we cannot optimize this.
6324 ??? Until we can properly mark individual operations as
6325 not overflowing we need to treat sizetype special here as
6326 stor-layout relies on this optimization to make
6327 DECL_FIELD_BIT_OFFSET always a constant. */
6328 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6329 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6330 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6331 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6332 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6333 {
6334 *strict_overflow_p = true;
6335 return omit_one_operand (type, integer_zero_node, op0);
6336 }
6337
6338 /* ... fall through ... */
6339
6340 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6341 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6342 /* If we can extract our operation from the LHS, do so and return a
6343 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6344 do something only if the second operand is a constant. */
6345 if (same_p
6346 && (t1 = extract_muldiv (op0, c, code, wide_type,
6347 strict_overflow_p)) != 0)
6348 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6349 fold_convert (ctype, op1));
6350 else if (tcode == MULT_EXPR && code == MULT_EXPR
6351 && (t1 = extract_muldiv (op1, c, code, wide_type,
6352 strict_overflow_p)) != 0)
6353 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6354 fold_convert (ctype, t1));
6355 else if (TREE_CODE (op1) != INTEGER_CST)
6356 return 0;
6357
6358 /* If these are the same operation types, we can associate them
6359 assuming no overflow. */
6360 if (tcode == code
6361 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6362 fold_convert (ctype, c), 1))
6363 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6364 TREE_INT_CST_HIGH (t1),
6365 (TYPE_UNSIGNED (ctype)
6366 && tcode != MULT_EXPR) ? -1 : 1,
6367 TREE_OVERFLOW (t1)))
6368 && !TREE_OVERFLOW (t1))
6369 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6370
6371 /* If these operations "cancel" each other, we have the main
6372 optimizations of this pass, which occur when either constant is a
6373 multiple of the other, in which case we replace this with an
6374 operation of either CODE or TCODE.
6375
6376 If we have an unsigned type that is not a sizetype, we cannot do
6377 this since it will change the result if the original computation
6378 overflowed. */
6379 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6380 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6381 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6382 || (tcode == MULT_EXPR
6383 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6384 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6385 && code != MULT_EXPR)))
6386 {
6387 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6388 {
6389 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6390 *strict_overflow_p = true;
6391 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6392 fold_convert (ctype,
6393 const_binop (TRUNC_DIV_EXPR,
6394 op1, c, 0)));
6395 }
6396 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6397 {
6398 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6399 *strict_overflow_p = true;
6400 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6401 fold_convert (ctype,
6402 const_binop (TRUNC_DIV_EXPR,
6403 c, op1, 0)));
6404 }
6405 }
6406 break;
6407
6408 default:
6409 break;
6410 }
6411
6412 return 0;
6413 }
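/* Worked example from the comment before extract_muldiv (a sketch,
   valid when the intermediate arithmetic cannot overflow or signed
   overflow is undefined, as that comment requires):  */
#if 0
static int before (int x, int y) { return (x * 8 + y * 16) / 4; }
static int after  (int x, int y) { return x * 2 + y * 4; }	/* no divide */
#endif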
6414 \f
6415 /* Return a node which has the indicated constant VALUE (either 0 or
6416 1), and is of the indicated TYPE. */
6417
6418 tree
6419 constant_boolean_node (int value, tree type)
6420 {
6421 if (type == integer_type_node)
6422 return value ? integer_one_node : integer_zero_node;
6423 else if (type == boolean_type_node)
6424 return value ? boolean_true_node : boolean_false_node;
6425 else
6426 return build_int_cst (type, value);
6427 }
6428
6429
6430 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6431 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6432 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6433 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6434 COND is the first argument to CODE; otherwise (as in the example
6435 given here), it is the second argument. TYPE is the type of the
6436 original expression. Return NULL_TREE if no simplification is
6437 possible. */
6438
6439 static tree
6440 fold_binary_op_with_conditional_arg (enum tree_code code,
6441 tree type, tree op0, tree op1,
6442 tree cond, tree arg, int cond_first_p)
6443 {
6444 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6445 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6446 tree test, true_value, false_value;
6447 tree lhs = NULL_TREE;
6448 tree rhs = NULL_TREE;
6449
6450 /* This transformation is only worthwhile if we don't have to wrap
6451 arg in a SAVE_EXPR, and the operation can be simplified on at least
6452 one of the branches once it is pushed inside the COND_EXPR.
6453 if (!TREE_CONSTANT (arg))
6454 return NULL_TREE;
6455
6456 if (TREE_CODE (cond) == COND_EXPR)
6457 {
6458 test = TREE_OPERAND (cond, 0);
6459 true_value = TREE_OPERAND (cond, 1);
6460 false_value = TREE_OPERAND (cond, 2);
6461 /* If this operand throws an exception, then it does not make
6462 sense to try to perform a logical or arithmetic operation
6463 involving it. */
6464 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6465 lhs = true_value;
6466 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6467 rhs = false_value;
6468 }
6469 else
6470 {
6471 tree testtype = TREE_TYPE (cond);
6472 test = cond;
6473 true_value = constant_boolean_node (true, testtype);
6474 false_value = constant_boolean_node (false, testtype);
6475 }
6476
6477 arg = fold_convert (arg_type, arg);
6478 if (lhs == 0)
6479 {
6480 true_value = fold_convert (cond_type, true_value);
6481 if (cond_first_p)
6482 lhs = fold_build2 (code, type, true_value, arg);
6483 else
6484 lhs = fold_build2 (code, type, arg, true_value);
6485 }
6486 if (rhs == 0)
6487 {
6488 false_value = fold_convert (cond_type, false_value);
6489 if (cond_first_p)
6490 rhs = fold_build2 (code, type, false_value, arg);
6491 else
6492 rhs = fold_build2 (code, type, arg, false_value);
6493 }
6494
6495 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6496 return fold_convert (type, test);
6497 }
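/* Illustration (a sketch): once the constant operand is pushed into
   both arms, each arm folds to a constant and the addition disappears
   entirely.  */
#if 0
static int before (int b) { return 1 + (b ? 2 : 3); }
static int after  (int b) { return b ? 3 : 4; }
#endif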
6498
6499 \f
6500 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6501
6502 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6503 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6504 ADDEND is the same as X.
6505
6506 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6507 and finite. The problematic cases are when X is zero, and its mode
6508 has signed zeros. In the case of rounding towards -infinity,
6509 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6510 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6511
6512 bool
6513 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6514 {
6515 if (!real_zerop (addend))
6516 return false;
6517
6518 /* Don't allow the fold with -fsignaling-nans. */
6519 if (HONOR_SNANS (TYPE_MODE (type)))
6520 return false;
6521
6522 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6523 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6524 return true;
6525
6526 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6527 if (TREE_CODE (addend) == REAL_CST
6528 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6529 negate = !negate;
6530
6531 /* The mode has signed zeros, and we have to honor their sign.
6532 In this situation, there is only one case we can return true for.
6533 X - 0 is the same as X unless rounding towards -infinity is
6534 supported. */
6535 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6536 }
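/* Demonstration of the signed-zero hazard (a sketch, assuming IEEE 754
   arithmetic in the default rounding mode): X + 0.0 is not an identity
   because it rewrites -0.0 to +0.0, while X - 0.0 preserves the sign of
   zero and is therefore the one fold that remains safe.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = -0.0;
  printf ("%d %d\n",
	  signbit (x + 0.0) != 0,	/* prints 0: the sign was lost */
	  signbit (x - 0.0) != 0);	/* prints 1: the sign survives */
  return 0;
}
#endif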
6537
6538 /* Subroutine of fold() that checks comparisons of built-in math
6539 functions against real constants.
6540
6541 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6542 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6543 is the type of the result and ARG0 and ARG1 are the operands of the
6544 comparison. ARG1 must be a TREE_REAL_CST.
6545
6546 The function returns the constant folded tree if a simplification
6547 can be made, and NULL_TREE otherwise. */
6548
6549 static tree
6550 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6551 tree type, tree arg0, tree arg1)
6552 {
6553 REAL_VALUE_TYPE c;
6554
6555 if (BUILTIN_SQRT_P (fcode))
6556 {
6557 tree arg = CALL_EXPR_ARG (arg0, 0);
6558 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6559
6560 c = TREE_REAL_CST (arg1);
6561 if (REAL_VALUE_NEGATIVE (c))
6562 {
6563 /* sqrt(x) < y is always false, if y is negative. */
6564 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6565 return omit_one_operand (type, integer_zero_node, arg);
6566
6567 /* sqrt(x) > y is always true, if y is negative and we
6568 don't care about NaNs, i.e. negative values of x. */
6569 if (code == NE_EXPR || !HONOR_NANS (mode))
6570 return omit_one_operand (type, integer_one_node, arg);
6571
6572 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6573 return fold_build2 (GE_EXPR, type, arg,
6574 build_real (TREE_TYPE (arg), dconst0));
6575 }
6576 else if (code == GT_EXPR || code == GE_EXPR)
6577 {
6578 REAL_VALUE_TYPE c2;
6579
6580 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6581 real_convert (&c2, mode, &c2);
6582
6583 if (REAL_VALUE_ISINF (c2))
6584 {
6585 /* sqrt(x) > y is x == +Inf, when y is very large. */
6586 if (HONOR_INFINITIES (mode))
6587 return fold_build2 (EQ_EXPR, type, arg,
6588 build_real (TREE_TYPE (arg), c2));
6589
6590 /* sqrt(x) > y is always false, when y is very large
6591 and we don't care about infinities. */
6592 return omit_one_operand (type, integer_zero_node, arg);
6593 }
6594
6595 /* sqrt(x) > c is the same as x > c*c. */
6596 return fold_build2 (code, type, arg,
6597 build_real (TREE_TYPE (arg), c2));
6598 }
6599 else if (code == LT_EXPR || code == LE_EXPR)
6600 {
6601 REAL_VALUE_TYPE c2;
6602
6603 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6604 real_convert (&c2, mode, &c2);
6605
6606 if (REAL_VALUE_ISINF (c2))
6607 {
6608 /* sqrt(x) < y is always true, when y is a very large
6609 value and we don't care about NaNs or Infinities. */
6610 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6611 return omit_one_operand (type, integer_one_node, arg);
6612
6613 /* sqrt(x) < y is x != +Inf when y is very large and we
6614 don't care about NaNs. */
6615 if (! HONOR_NANS (mode))
6616 return fold_build2 (NE_EXPR, type, arg,
6617 build_real (TREE_TYPE (arg), c2));
6618
6619 /* sqrt(x) < y is x >= 0 when y is very large and we
6620 don't care about Infinities. */
6621 if (! HONOR_INFINITIES (mode))
6622 return fold_build2 (GE_EXPR, type, arg,
6623 build_real (TREE_TYPE (arg), dconst0));
6624
6625 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6626 if (lang_hooks.decls.global_bindings_p () != 0
6627 || CONTAINS_PLACEHOLDER_P (arg))
6628 return NULL_TREE;
6629
6630 arg = save_expr (arg);
6631 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6632 fold_build2 (GE_EXPR, type, arg,
6633 build_real (TREE_TYPE (arg),
6634 dconst0)),
6635 fold_build2 (NE_EXPR, type, arg,
6636 build_real (TREE_TYPE (arg),
6637 c2)));
6638 }
6639
6640 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6641 if (! HONOR_NANS (mode))
6642 return fold_build2 (code, type, arg,
6643 build_real (TREE_TYPE (arg), c2));
6644
6645 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6646 if (lang_hooks.decls.global_bindings_p () == 0
6647 && ! CONTAINS_PLACEHOLDER_P (arg))
6648 {
6649 arg = save_expr (arg);
6650 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6651 fold_build2 (GE_EXPR, type, arg,
6652 build_real (TREE_TYPE (arg),
6653 dconst0)),
6654 fold_build2 (code, type, arg,
6655 build_real (TREE_TYPE (arg),
6656 c2)));
6657 }
6658 }
6659 }
6660
6661 return NULL_TREE;
6662 }
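/* Illustration of the sqrt case above (a sketch; 3.0 * 3.0 = 9.0 is
   exact, so no rounding caveat applies here): squaring both sides of
   the comparison removes the sqrt call.  */
#if 0
#include <math.h>

static int before (double x) { return sqrt (x) > 3.0; }
static int after  (double x) { return x > 9.0; }
#endif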
6663
6664 /* Subroutine of fold() that optimizes comparisons against Infinities,
6665 either +Inf or -Inf.
6666
6667 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6668 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6669 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6670
6671 The function returns the constant folded tree if a simplification
6672 can be made, and NULL_TREE otherwise. */
6673
6674 static tree
6675 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6676 {
6677 enum machine_mode mode;
6678 REAL_VALUE_TYPE max;
6679 tree temp;
6680 bool neg;
6681
6682 mode = TYPE_MODE (TREE_TYPE (arg0));
6683
6684 /* For negative infinity swap the sense of the comparison. */
6685 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6686 if (neg)
6687 code = swap_tree_comparison (code);
6688
6689 switch (code)
6690 {
6691 case GT_EXPR:
6692 /* x > +Inf is always false, if we ignore sNaNs. */
6693 if (HONOR_SNANS (mode))
6694 return NULL_TREE;
6695 return omit_one_operand (type, integer_zero_node, arg0);
6696
6697 case LE_EXPR:
6698 /* x <= +Inf is always true, if we don't care about NaNs. */
6699 if (! HONOR_NANS (mode))
6700 return omit_one_operand (type, integer_one_node, arg0);
6701
6702 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6703 if (lang_hooks.decls.global_bindings_p () == 0
6704 && ! CONTAINS_PLACEHOLDER_P (arg0))
6705 {
6706 arg0 = save_expr (arg0);
6707 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6708 }
6709 break;
6710
6711 case EQ_EXPR:
6712 case GE_EXPR:
6713 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6714 real_maxval (&max, neg, mode);
6715 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6716 arg0, build_real (TREE_TYPE (arg0), max));
6717
6718 case LT_EXPR:
6719 /* x < +Inf is always equal to x <= DBL_MAX. */
6720 real_maxval (&max, neg, mode);
6721 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6722 arg0, build_real (TREE_TYPE (arg0), max));
6723
6724 case NE_EXPR:
6725 /* x != +Inf is always equal to !(x > DBL_MAX). */
6726 real_maxval (&max, neg, mode);
6727 if (! HONOR_NANS (mode))
6728 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6729 arg0, build_real (TREE_TYPE (arg0), max));
6730
6731 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6732 arg0, build_real (TREE_TYPE (arg0), max));
6733 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6734
6735 default:
6736 break;
6737 }
6738
6739 return NULL_TREE;
6740 }
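/* Illustration of the LT_EXPR case (a sketch, assuming IEEE infinities,
   so HUGE_VAL is +Inf): since no finite double exceeds DBL_MAX,
   x < +Inf holds exactly when x <= DBL_MAX; a NaN x fails both forms.  */
#if 0
#include <float.h>
#include <math.h>

static int before (double x) { return x < HUGE_VAL; }
static int after  (double x) { return x <= DBL_MAX; }
#endif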
6741
6742 /* Subroutine of fold() that optimizes comparisons of a division by
6743 a nonzero integer constant against an integer constant, i.e.
6744 X/C1 op C2.
6745
6746 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6747 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6748 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6749
6750 The function returns the constant folded tree if a simplification
6751 can be made, and NULL_TREE otherwise. */
6752
6753 static tree
6754 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6755 {
6756 tree prod, tmp, hi, lo;
6757 tree arg00 = TREE_OPERAND (arg0, 0);
6758 tree arg01 = TREE_OPERAND (arg0, 1);
6759 unsigned HOST_WIDE_INT lpart;
6760 HOST_WIDE_INT hpart;
6761 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6762 bool neg_overflow;
6763 int overflow;
6764
6765 /* We have to do this the hard way to detect unsigned overflow.
6766 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6767 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6768 TREE_INT_CST_HIGH (arg01),
6769 TREE_INT_CST_LOW (arg1),
6770 TREE_INT_CST_HIGH (arg1),
6771 &lpart, &hpart, unsigned_p);
6772 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6773 -1, overflow);
6774 neg_overflow = false;
6775
6776 if (unsigned_p)
6777 {
6778 tmp = int_const_binop (MINUS_EXPR, arg01,
6779 build_int_cst (TREE_TYPE (arg01), 1), 0);
6780 lo = prod;
6781
6782 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6783 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6784 TREE_INT_CST_HIGH (prod),
6785 TREE_INT_CST_LOW (tmp),
6786 TREE_INT_CST_HIGH (tmp),
6787 &lpart, &hpart, unsigned_p);
6788 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6789 -1, overflow | TREE_OVERFLOW (prod));
6790 }
6791 else if (tree_int_cst_sgn (arg01) >= 0)
6792 {
6793 tmp = int_const_binop (MINUS_EXPR, arg01,
6794 build_int_cst (TREE_TYPE (arg01), 1), 0);
6795 switch (tree_int_cst_sgn (arg1))
6796 {
6797 case -1:
6798 neg_overflow = true;
6799 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6800 hi = prod;
6801 break;
6802
6803 case 0:
6804 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6805 hi = tmp;
6806 break;
6807
6808 case 1:
6809 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6810 lo = prod;
6811 break;
6812
6813 default:
6814 gcc_unreachable ();
6815 }
6816 }
6817 else
6818 {
6819 /* A negative divisor reverses the relational operators. */
6820 code = swap_tree_comparison (code);
6821
6822 tmp = int_const_binop (PLUS_EXPR, arg01,
6823 build_int_cst (TREE_TYPE (arg01), 1), 0);
6824 switch (tree_int_cst_sgn (arg1))
6825 {
6826 case -1:
6827 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6828 lo = prod;
6829 break;
6830
6831 case 0:
6832 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6833 lo = tmp;
6834 break;
6835
6836 case 1:
6837 neg_overflow = true;
6838 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6839 hi = prod;
6840 break;
6841
6842 default:
6843 gcc_unreachable ();
6844 }
6845 }
6846
6847 switch (code)
6848 {
6849 case EQ_EXPR:
6850 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6851 return omit_one_operand (type, integer_zero_node, arg00);
6852 if (TREE_OVERFLOW (hi))
6853 return fold_build2 (GE_EXPR, type, arg00, lo);
6854 if (TREE_OVERFLOW (lo))
6855 return fold_build2 (LE_EXPR, type, arg00, hi);
6856 return build_range_check (type, arg00, 1, lo, hi);
6857
6858 case NE_EXPR:
6859 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6860 return omit_one_operand (type, integer_one_node, arg00);
6861 if (TREE_OVERFLOW (hi))
6862 return fold_build2 (LT_EXPR, type, arg00, lo);
6863 if (TREE_OVERFLOW (lo))
6864 return fold_build2 (GT_EXPR, type, arg00, hi);
6865 return build_range_check (type, arg00, 0, lo, hi);
6866
6867 case LT_EXPR:
6868 if (TREE_OVERFLOW (lo))
6869 {
6870 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6871 return omit_one_operand (type, tmp, arg00);
6872 }
6873 return fold_build2 (LT_EXPR, type, arg00, lo);
6874
6875 case LE_EXPR:
6876 if (TREE_OVERFLOW (hi))
6877 {
6878 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6879 return omit_one_operand (type, tmp, arg00);
6880 }
6881 return fold_build2 (LE_EXPR, type, arg00, hi);
6882
6883 case GT_EXPR:
6884 if (TREE_OVERFLOW (hi))
6885 {
6886 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6887 return omit_one_operand (type, tmp, arg00);
6888 }
6889 return fold_build2 (GT_EXPR, type, arg00, hi);
6890
6891 case GE_EXPR:
6892 if (TREE_OVERFLOW (lo))
6893 {
6894 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6895 return omit_one_operand (type, tmp, arg00);
6896 }
6897 return fold_build2 (GE_EXPR, type, arg00, lo);
6898
6899 default:
6900 break;
6901 }
6902
6903 return NULL_TREE;
6904 }
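/* Worked example (a sketch): for unsigned x, the quotient x / 3 equals
   2 exactly on the range lo = 3*2 = 6 through hi = lo + (3 - 1) = 8, so
   x / 3 == 2 folds to the range check below, and x / 3 != 2 to its
   inverse.  */
#if 0
static int before (unsigned int x) { return x / 3 == 2; }
static int after  (unsigned int x) { return x - 6 <= 2; }  /* 6 <= x && x <= 8 */
#endif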
6905
6906
6907 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6908 equality/inequality test, then return a simplified form of the test
6909 using a sign test. Otherwise return NULL. TYPE is the desired
6910 result type. */
6911
6912 static tree
6913 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6914 tree result_type)
6915 {
6916 /* If this is testing a single bit, we can optimize the test. */
6917 if ((code == NE_EXPR || code == EQ_EXPR)
6918 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6919 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6920 {
6921 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6922 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6923 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6924
6925 if (arg00 != NULL_TREE
6926 /* This is only a win if casting to a signed type is cheap,
6927 i.e. when arg00's type is not a partial mode. */
6928 && TYPE_PRECISION (TREE_TYPE (arg00))
6929 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6930 {
6931 tree stype = signed_type_for (TREE_TYPE (arg00));
6932 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6933 result_type, fold_convert (stype, arg00),
6934 build_int_cst (stype, 0));
6935 }
6936 }
6937
6938 return NULL_TREE;
6939 }
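/* Illustration (a sketch, assuming a 32-bit int and the usual
   two's-complement conversion): testing the sign bit with AND becomes
   an ordinary signed comparison against zero.  */
#if 0
static int before (unsigned int x) { return (x & 0x80000000u) != 0; }
static int after  (unsigned int x) { return (int) x < 0; }
#endif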
6940
6941 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6942 equality/inequality test, then return a simplified form of
6943 the test using shifts and logical operations. Otherwise return
6944 NULL. TYPE is the desired result type. */
6945
6946 tree
6947 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6948 tree result_type)
6949 {
6950 /* If this is testing a single bit, we can optimize the test. */
6951 if ((code == NE_EXPR || code == EQ_EXPR)
6952 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6953 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6954 {
6955 tree inner = TREE_OPERAND (arg0, 0);
6956 tree type = TREE_TYPE (arg0);
6957 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6958 enum machine_mode operand_mode = TYPE_MODE (type);
6959 int ops_unsigned;
6960 tree signed_type, unsigned_type, intermediate_type;
6961 tree tem, one;
6962
6963 /* First, see if we can fold the single bit test into a sign-bit
6964 test. */
6965 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6966 result_type);
6967 if (tem)
6968 return tem;
6969
6970 /* Otherwise we have (A & C) != 0 where C is a single bit,
6971 convert that into ((A >> C2) & 1), where C2 = log2(C).
6972 Similarly for (A & C) == 0. */
6973
6974 /* If INNER is a right shift by a constant and it plus BITNUM does
6975 not overflow, adjust BITNUM and INNER. */
6976 if (TREE_CODE (inner) == RSHIFT_EXPR
6977 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6978 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6979 && bitnum < TYPE_PRECISION (type)
6980 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6981 bitnum - TYPE_PRECISION (type)))
6982 {
6983 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6984 inner = TREE_OPERAND (inner, 0);
6985 }
6986
6987 /* If we are going to be able to omit the AND below, we must do our
6988 operations as unsigned. If we must use the AND, we have a choice.
6989 Normally unsigned is faster, but for some machines signed is. */
6990 #ifdef LOAD_EXTEND_OP
6991 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6992 && !flag_syntax_only) ? 0 : 1;
6993 #else
6994 ops_unsigned = 1;
6995 #endif
6996
6997 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6998 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6999 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7000 inner = fold_convert (intermediate_type, inner);
7001
7002 if (bitnum != 0)
7003 inner = build2 (RSHIFT_EXPR, intermediate_type,
7004 inner, size_int (bitnum));
7005
7006 one = build_int_cst (intermediate_type, 1);
7007
7008 if (code == EQ_EXPR)
7009 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
7010
7011 /* Put the AND last so it can combine with more things. */
7012 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7013
7014 /* Make sure to return the proper type. */
7015 inner = fold_convert (result_type, inner);
7016
7017 return inner;
7018 }
7019 return NULL_TREE;
7020 }
7021
7022 /* Check whether we are allowed to reorder operands arg0 and arg1,
7023 such that the evaluation of arg1 occurs before arg0. */
7024
7025 static bool
7026 reorder_operands_p (const_tree arg0, const_tree arg1)
7027 {
7028 if (! flag_evaluation_order)
7029 return true;
7030 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7031 return true;
7032 return ! TREE_SIDE_EFFECTS (arg0)
7033 && ! TREE_SIDE_EFFECTS (arg1);
7034 }
7035
7036 /* Test whether it is preferable to swap two operands, ARG0 and
7037 ARG1, for example because ARG0 is an integer constant and ARG1
7038 isn't. If REORDER is true, only recommend swapping if we can
7039 evaluate the operands in reverse order. */
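/* For example, for 1 + a this returns 1 because ARG0 is an
INTEGER_CST and ARG1 is not, so fold canonicalizes the sum
to a + 1. */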
7040
7041 bool
7042 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7043 {
7044 STRIP_SIGN_NOPS (arg0);
7045 STRIP_SIGN_NOPS (arg1);
7046
7047 if (TREE_CODE (arg1) == INTEGER_CST)
7048 return 0;
7049 if (TREE_CODE (arg0) == INTEGER_CST)
7050 return 1;
7051
7052 if (TREE_CODE (arg1) == REAL_CST)
7053 return 0;
7054 if (TREE_CODE (arg0) == REAL_CST)
7055 return 1;
7056
7057 if (TREE_CODE (arg1) == FIXED_CST)
7058 return 0;
7059 if (TREE_CODE (arg0) == FIXED_CST)
7060 return 1;
7061
7062 if (TREE_CODE (arg1) == COMPLEX_CST)
7063 return 0;
7064 if (TREE_CODE (arg0) == COMPLEX_CST)
7065 return 1;
7066
7067 if (TREE_CONSTANT (arg1))
7068 return 0;
7069 if (TREE_CONSTANT (arg0))
7070 return 1;
7071
7072 if (optimize_function_for_size_p (cfun))
7073 return 0;
7074
7075 if (reorder && flag_evaluation_order
7076 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7077 return 0;
7078
7079 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7080 for commutative and comparison operators. Ensuring a canonical
7081 form allows the optimizers to find additional redundancies without
7082 having to explicitly check for both orderings. */
7083 if (TREE_CODE (arg0) == SSA_NAME
7084 && TREE_CODE (arg1) == SSA_NAME
7085 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7086 return 1;
7087
7088 /* Put SSA_NAMEs last. */
7089 if (TREE_CODE (arg1) == SSA_NAME)
7090 return 0;
7091 if (TREE_CODE (arg0) == SSA_NAME)
7092 return 1;
7093
7094 /* Put variables last. */
7095 if (DECL_P (arg1))
7096 return 0;
7097 if (DECL_P (arg0))
7098 return 1;
7099
7100 return 0;
7101 }
7102
7103 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7104 ARG0 is extended to a wider type. */
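/* For example, with a 16-bit signed short S, (int) S == 100 is
expressed in the shorter type as S == (short) 100 below, since
100 fits in short. */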
7105
7106 static tree
7107 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7108 {
7109 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7110 tree arg1_unw;
7111 tree shorter_type, outer_type;
7112 tree min, max;
7113 bool above, below;
7114
7115 if (arg0_unw == arg0)
7116 return NULL_TREE;
7117 shorter_type = TREE_TYPE (arg0_unw);
7118
7119 #ifdef HAVE_canonicalize_funcptr_for_compare
7120 /* Disable this optimization if we're casting a function pointer
7121 type on targets that require function pointer canonicalization. */
7122 if (HAVE_canonicalize_funcptr_for_compare
7123 && TREE_CODE (shorter_type) == POINTER_TYPE
7124 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7125 return NULL_TREE;
7126 #endif
7127
7128 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7129 return NULL_TREE;
7130
7131 arg1_unw = get_unwidened (arg1, NULL_TREE);
7132
7133 /* If possible, express the comparison in the shorter mode. */
7134 if ((code == EQ_EXPR || code == NE_EXPR
7135 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7136 && (TREE_TYPE (arg1_unw) == shorter_type
7137 || ((TYPE_PRECISION (shorter_type)
7138 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7139 && (TYPE_UNSIGNED (shorter_type)
7140 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7141 || (TREE_CODE (arg1_unw) == INTEGER_CST
7142 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7143 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7144 && int_fits_type_p (arg1_unw, shorter_type))))
7145 return fold_build2 (code, type, arg0_unw,
7146 fold_convert (shorter_type, arg1_unw));
7147
7148 if (TREE_CODE (arg1_unw) != INTEGER_CST
7149 || TREE_CODE (shorter_type) != INTEGER_TYPE
7150 || !int_fits_type_p (arg1_unw, shorter_type))
7151 return NULL_TREE;
7152
7153 /* If we are comparing with an integer that does not fit into the range
7154 of the shorter type, the result is known. */
7155 outer_type = TREE_TYPE (arg1_unw);
7156 min = lower_bound_in_type (outer_type, shorter_type);
7157 max = upper_bound_in_type (outer_type, shorter_type);
7158
7159 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7160 max, arg1_unw));
7161 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7162 arg1_unw, min));
7163
7164 switch (code)
7165 {
7166 case EQ_EXPR:
7167 if (above || below)
7168 return omit_one_operand (type, integer_zero_node, arg0);
7169 break;
7170
7171 case NE_EXPR:
7172 if (above || below)
7173 return omit_one_operand (type, integer_one_node, arg0);
7174 break;
7175
7176 case LT_EXPR:
7177 case LE_EXPR:
7178 if (above)
7179 return omit_one_operand (type, integer_one_node, arg0);
7180 else if (below)
7181 return omit_one_operand (type, integer_zero_node, arg0);
7182
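/* Fall through: if neither bound applies, the tests below
fail as well. */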
7183 case GT_EXPR:
7184 case GE_EXPR:
7185 if (above)
7186 return omit_one_operand (type, integer_zero_node, arg0);
7187 else if (below)
7188 return omit_one_operand (type, integer_one_node, arg0);
7189
7190 default:
7191 break;
7192 }
7193
7194 return NULL_TREE;
7195 }
7196
7197 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7198 ARG0 just the signedness is changed. */
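/* For example, for a 32-bit int X, (unsigned int) X == 5 folds to
X == 5: the precision is unchanged and equality does not depend
on signedness. */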
7199
7200 static tree
7201 fold_sign_changed_comparison (enum tree_code code, tree type,
7202 tree arg0, tree arg1)
7203 {
7204 tree arg0_inner;
7205 tree inner_type, outer_type;
7206
7207 if (!CONVERT_EXPR_P (arg0))
7208 return NULL_TREE;
7209
7210 outer_type = TREE_TYPE (arg0);
7211 arg0_inner = TREE_OPERAND (arg0, 0);
7212 inner_type = TREE_TYPE (arg0_inner);
7213
7214 #ifdef HAVE_canonicalize_funcptr_for_compare
7215 /* Disable this optimization if we're casting a function pointer
7216 type on targets that require function pointer canonicalization. */
7217 if (HAVE_canonicalize_funcptr_for_compare
7218 && TREE_CODE (inner_type) == POINTER_TYPE
7219 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7220 return NULL_TREE;
7221 #endif
7222
7223 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7224 return NULL_TREE;
7225
7226 if (TREE_CODE (arg1) != INTEGER_CST
7227 && !(CONVERT_EXPR_P (arg1)
7228 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7229 return NULL_TREE;
7230
7231 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7232 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7233 && code != NE_EXPR
7234 && code != EQ_EXPR)
7235 return NULL_TREE;
7236
7237 if (TREE_CODE (arg1) == INTEGER_CST)
7238 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7239 TREE_INT_CST_HIGH (arg1), 0,
7240 TREE_OVERFLOW (arg1));
7241 else
7242 arg1 = fold_convert (inner_type, arg1);
7243
7244 return fold_build2 (code, type, arg0_inner, arg1);
7245 }
7246
7247 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7248 the step of the array. Reconstructs s and delta in the case of s * delta
7249 being an integer constant (and thus already folded).
7250 ADDR is the address. OP1 is the multiplicative expression.
7251 If the function succeeds, the new address expression is returned. Otherwise
7252 NULL_TREE is returned. */
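/* For example, for int A[10] with 4-byte elements (the element size
is of course target-dependent), &A[1] p+ 4 * D becomes &A[1 + D],
and the already-folded constant form &A[1] p+ 8 becomes &A[3]. */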
7253
7254 static tree
7255 try_move_mult_to_index (tree addr, tree op1)
7256 {
7257 tree s, delta, step;
7258 tree ref = TREE_OPERAND (addr, 0), pref;
7259 tree ret, pos;
7260 tree itype;
7261 bool mdim = false;
7262
7263 /* Strip the nops that might be added when converting op1 to sizetype. */
7264 STRIP_NOPS (op1);
7265
7266 /* Canonicalize op1 into a possibly non-constant delta
7267 and an INTEGER_CST s. */
7268 if (TREE_CODE (op1) == MULT_EXPR)
7269 {
7270 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7271
7272 STRIP_NOPS (arg0);
7273 STRIP_NOPS (arg1);
7274
7275 if (TREE_CODE (arg0) == INTEGER_CST)
7276 {
7277 s = arg0;
7278 delta = arg1;
7279 }
7280 else if (TREE_CODE (arg1) == INTEGER_CST)
7281 {
7282 s = arg1;
7283 delta = arg0;
7284 }
7285 else
7286 return NULL_TREE;
7287 }
7288 else if (TREE_CODE (op1) == INTEGER_CST)
7289 {
7290 delta = op1;
7291 s = NULL_TREE;
7292 }
7293 else
7294 {
7295 /* Treat OP1 as delta * 1. */
7296 delta = op1;
7297 s = integer_one_node;
7298 }
7299
7300 for (;; ref = TREE_OPERAND (ref, 0))
7301 {
7302 if (TREE_CODE (ref) == ARRAY_REF)
7303 {
7304 /* Remember if this was a multi-dimensional array. */
7305 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7306 mdim = true;
7307
7308 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7309 if (! itype)
7310 continue;
7311
7312 step = array_ref_element_size (ref);
7313 if (TREE_CODE (step) != INTEGER_CST)
7314 continue;
7315
7316 if (s)
7317 {
7318 if (! tree_int_cst_equal (step, s))
7319 continue;
7320 }
7321 else
7322 {
7323 /* Check whether delta is a multiple of the step. */
7324 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7325 if (! tmp)
7326 continue;
7327 delta = tmp;
7328 }
7329
7330 /* Only fold here if we can verify we do not overflow one
7331 dimension of a multi-dimensional array. */
7332 if (mdim)
7333 {
7334 tree tmp;
7335
7336 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7337 || !INTEGRAL_TYPE_P (itype)
7338 || !TYPE_MAX_VALUE (itype)
7339 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7340 continue;
7341
7342 tmp = fold_binary (PLUS_EXPR, itype,
7343 fold_convert (itype,
7344 TREE_OPERAND (ref, 1)),
7345 fold_convert (itype, delta));
7346 if (!tmp
7347 || TREE_CODE (tmp) != INTEGER_CST
7348 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7349 continue;
7350 }
7351
7352 break;
7353 }
7354 else
7355 mdim = false;
7356
7357 if (!handled_component_p (ref))
7358 return NULL_TREE;
7359 }
7360
7361 /* We found a suitable array reference. Copy everything up to it,
7362 and replace the index. */
7363
7364 pref = TREE_OPERAND (addr, 0);
7365 ret = copy_node (pref);
7366 pos = ret;
7367
7368 while (pref != ref)
7369 {
7370 pref = TREE_OPERAND (pref, 0);
7371 TREE_OPERAND (pos, 0) = copy_node (pref);
7372 pos = TREE_OPERAND (pos, 0);
7373 }
7374
7375 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7376 fold_convert (itype,
7377 TREE_OPERAND (pos, 1)),
7378 fold_convert (itype, delta));
7379
7380 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7381 }
7382
7383
7384 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7385 means A >= Y && A != MAX, but in this case we know that
7386 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
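/* For example, with BOUND = A < X and INEQ = Y < A + 1, the
difference (A + 1) - A folds to 1, so the result is A >= Y. */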
7387
7388 static tree
7389 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7390 {
7391 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7392
7393 if (TREE_CODE (bound) == LT_EXPR)
7394 a = TREE_OPERAND (bound, 0);
7395 else if (TREE_CODE (bound) == GT_EXPR)
7396 a = TREE_OPERAND (bound, 1);
7397 else
7398 return NULL_TREE;
7399
7400 typea = TREE_TYPE (a);
7401 if (!INTEGRAL_TYPE_P (typea)
7402 && !POINTER_TYPE_P (typea))
7403 return NULL_TREE;
7404
7405 if (TREE_CODE (ineq) == LT_EXPR)
7406 {
7407 a1 = TREE_OPERAND (ineq, 1);
7408 y = TREE_OPERAND (ineq, 0);
7409 }
7410 else if (TREE_CODE (ineq) == GT_EXPR)
7411 {
7412 a1 = TREE_OPERAND (ineq, 0);
7413 y = TREE_OPERAND (ineq, 1);
7414 }
7415 else
7416 return NULL_TREE;
7417
7418 if (TREE_TYPE (a1) != typea)
7419 return NULL_TREE;
7420
7421 if (POINTER_TYPE_P (typea))
7422 {
7423 /* Convert the pointers to signed integers before taking the difference. */
7424 tree ta = fold_convert (ssizetype, a);
7425 tree ta1 = fold_convert (ssizetype, a1);
7426 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7427 }
7428 else
7429 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7430
7431 if (!diff || !integer_onep (diff))
7432 return NULL_TREE;
7433
7434 return fold_build2 (GE_EXPR, type, a, y);
7435 }
7436
7437 /* Fold a sum or difference of at least one multiplication.
7438 Returns the folded tree or NULL if no simplification could be made. */
7439
7440 static tree
7441 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7442 {
7443 tree arg00, arg01, arg10, arg11;
7444 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7445
7446 /* (A * C) +- (B * C) -> (A+-B) * C.
7447 (A * C) +- A -> A * (C+-1).
7448 We are most concerned about the case where C is a constant,
7449 but other combinations show up during loop reduction. Since
7450 it is not difficult, try all four possibilities. */
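/* For example, X * 3 + X * 5 becomes (3 + 5) * X here, and
I * 4 + J * 2 becomes (I * 2 + J) * 2 via the power-of-two
case further below. */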
7451
7452 if (TREE_CODE (arg0) == MULT_EXPR)
7453 {
7454 arg00 = TREE_OPERAND (arg0, 0);
7455 arg01 = TREE_OPERAND (arg0, 1);
7456 }
7457 else if (TREE_CODE (arg0) == INTEGER_CST)
7458 {
7459 arg00 = build_one_cst (type);
7460 arg01 = arg0;
7461 }
7462 else
7463 {
7464 /* We cannot generate constant 1 for fractional (fixed-point) types. */
7465 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7466 return NULL_TREE;
7467 arg00 = arg0;
7468 arg01 = build_one_cst (type);
7469 }
7470 if (TREE_CODE (arg1) == MULT_EXPR)
7471 {
7472 arg10 = TREE_OPERAND (arg1, 0);
7473 arg11 = TREE_OPERAND (arg1, 1);
7474 }
7475 else if (TREE_CODE (arg1) == INTEGER_CST)
7476 {
7477 arg10 = build_one_cst (type);
7478 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7479 the purpose of this canonicalization. */
7480 if (TREE_INT_CST_HIGH (arg1) == -1
7481 && negate_expr_p (arg1)
7482 && code == PLUS_EXPR)
7483 {
7484 arg11 = negate_expr (arg1);
7485 code = MINUS_EXPR;
7486 }
7487 else
7488 arg11 = arg1;
7489 }
7490 else
7491 {
7492 /* We cannot generate constant 1 for fractional (fixed-point) types. */
7493 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7494 return NULL_TREE;
7495 arg10 = arg1;
7496 arg11 = build_one_cst (type);
7497 }
7498 same = NULL_TREE;
7499
7500 if (operand_equal_p (arg01, arg11, 0))
7501 same = arg01, alt0 = arg00, alt1 = arg10;
7502 else if (operand_equal_p (arg00, arg10, 0))
7503 same = arg00, alt0 = arg01, alt1 = arg11;
7504 else if (operand_equal_p (arg00, arg11, 0))
7505 same = arg00, alt0 = arg01, alt1 = arg10;
7506 else if (operand_equal_p (arg01, arg10, 0))
7507 same = arg01, alt0 = arg00, alt1 = arg11;
7508
7509 /* No identical multiplicands; see if we can find a common
7510 power-of-two factor in non-power-of-two multiplies. This
7511 can help in multi-dimensional array access. */
7512 else if (host_integerp (arg01, 0)
7513 && host_integerp (arg11, 0))
7514 {
7515 HOST_WIDE_INT int01, int11, tmp;
7516 bool swap = false;
7517 tree maybe_same;
7518 int01 = TREE_INT_CST_LOW (arg01);
7519 int11 = TREE_INT_CST_LOW (arg11);
7520
7521 /* Move min of absolute values to int11. */
7522 if ((int01 >= 0 ? int01 : -int01)
7523 < (int11 >= 0 ? int11 : -int11))
7524 {
7525 tmp = int01, int01 = int11, int11 = tmp;
7526 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7527 maybe_same = arg01;
7528 swap = true;
7529 }
7530 else
7531 maybe_same = arg11;
7532
7533 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7534 /* The remainder should not be a constant, otherwise we
7535 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7536 would increase the number of multiplications necessary. */
7537 && TREE_CODE (arg10) != INTEGER_CST)
7538 {
7539 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7540 build_int_cst (TREE_TYPE (arg00),
7541 int01 / int11));
7542 alt1 = arg10;
7543 same = maybe_same;
7544 if (swap)
7545 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7546 }
7547 }
7548
7549 if (same)
7550 return fold_build2 (MULT_EXPR, type,
7551 fold_build2 (code, type,
7552 fold_convert (type, alt0),
7553 fold_convert (type, alt1)),
7554 fold_convert (type, same));
7555
7556 return NULL_TREE;
7557 }
7558
7559 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7560 specified by EXPR into the buffer PTR of length LEN bytes.
7561 Return the number of bytes placed in the buffer, or zero
7562 upon failure. */
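/* For example (assuming 8-bit bytes), the 16-bit constant 0x0102 is
encoded as the bytes { 0x02, 0x01 } on a little-endian target and
{ 0x01, 0x02 } on a big-endian one. */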
7563
7564 static int
7565 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7566 {
7567 tree type = TREE_TYPE (expr);
7568 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7569 int byte, offset, word, words;
7570 unsigned char value;
7571
7572 if (total_bytes > len)
7573 return 0;
7574 words = total_bytes / UNITS_PER_WORD;
7575
7576 for (byte = 0; byte < total_bytes; byte++)
7577 {
7578 int bitpos = byte * BITS_PER_UNIT;
7579 if (bitpos < HOST_BITS_PER_WIDE_INT)
7580 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7581 else
7582 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7583 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7584
7585 if (total_bytes > UNITS_PER_WORD)
7586 {
7587 word = byte / UNITS_PER_WORD;
7588 if (WORDS_BIG_ENDIAN)
7589 word = (words - 1) - word;
7590 offset = word * UNITS_PER_WORD;
7591 if (BYTES_BIG_ENDIAN)
7592 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7593 else
7594 offset += byte % UNITS_PER_WORD;
7595 }
7596 else
7597 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7598 ptr[offset] = value;
7599 }
7600 return total_bytes;
7601 }
7602
7603
7604 /* Subroutine of native_encode_expr. Encode the REAL_CST
7605 specified by EXPR into the buffer PTR of length LEN bytes.
7606 Return the number of bytes placed in the buffer, or zero
7607 upon failure. */
7608
7609 static int
7610 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7611 {
7612 tree type = TREE_TYPE (expr);
7613 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7614 int byte, offset, word, words, bitpos;
7615 unsigned char value;
7616
7617 /* There are always 32 bits in each long, no matter the size of
7618 the host's long. We handle floating point representations with
7619 up to 192 bits. */
7620 long tmp[6];
7621
7622 if (total_bytes > len)
7623 return 0;
7624 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7625
7626 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7627
7628 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7629 bitpos += BITS_PER_UNIT)
7630 {
7631 byte = (bitpos / BITS_PER_UNIT) & 3;
7632 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7633
7634 if (UNITS_PER_WORD < 4)
7635 {
7636 word = byte / UNITS_PER_WORD;
7637 if (WORDS_BIG_ENDIAN)
7638 word = (words - 1) - word;
7639 offset = word * UNITS_PER_WORD;
7640 if (BYTES_BIG_ENDIAN)
7641 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7642 else
7643 offset += byte % UNITS_PER_WORD;
7644 }
7645 else
7646 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7647 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7648 }
7649 return total_bytes;
7650 }
7651
7652 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7653 specified by EXPR into the buffer PTR of length LEN bytes.
7654 Return the number of bytes placed in the buffer, or zero
7655 upon failure. */
7656
7657 static int
7658 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7659 {
7660 int rsize, isize;
7661 tree part;
7662
7663 part = TREE_REALPART (expr);
7664 rsize = native_encode_expr (part, ptr, len);
7665 if (rsize == 0)
7666 return 0;
7667 part = TREE_IMAGPART (expr);
7668 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7669 if (isize != rsize)
7670 return 0;
7671 return rsize + isize;
7672 }
7673
7674
7675 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7676 specified by EXPR into the buffer PTR of length LEN bytes.
7677 Return the number of bytes placed in the buffer, or zero
7678 upon failure. */
7679
7680 static int
7681 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7682 {
7683 int i, size, offset, count;
7684 tree itype, elem, elements;
7685
7686 offset = 0;
7687 elements = TREE_VECTOR_CST_ELTS (expr);
7688 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7689 itype = TREE_TYPE (TREE_TYPE (expr));
7690 size = GET_MODE_SIZE (TYPE_MODE (itype));
7691 for (i = 0; i < count; i++)
7692 {
7693 if (elements)
7694 {
7695 elem = TREE_VALUE (elements);
7696 elements = TREE_CHAIN (elements);
7697 }
7698 else
7699 elem = NULL_TREE;
7700
7701 if (elem)
7702 {
7703 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7704 return 0;
7705 }
7706 else
7707 {
7708 if (offset + size > len)
7709 return 0;
7710 memset (ptr+offset, 0, size);
7711 }
7712 offset += size;
7713 }
7714 return offset;
7715 }
7716
7717
7718 /* Subroutine of native_encode_expr. Encode the STRING_CST
7719 specified by EXPR into the buffer PTR of length LEN bytes.
7720 Return the number of bytes placed in the buffer, or zero
7721 upon failure. */
7722
7723 static int
7724 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7725 {
7726 tree type = TREE_TYPE (expr);
7727 HOST_WIDE_INT total_bytes;
7728
7729 if (TREE_CODE (type) != ARRAY_TYPE
7730 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7731 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7732 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7733 return 0;
7734 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7735 if (total_bytes > len)
7736 return 0;
7737 if (TREE_STRING_LENGTH (expr) < total_bytes)
7738 {
7739 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7740 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7741 total_bytes - TREE_STRING_LENGTH (expr));
7742 }
7743 else
7744 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7745 return total_bytes;
7746 }
7747
7748
7749 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7750 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7751 buffer PTR of length LEN bytes. Return the number of bytes
7752 placed in the buffer, or zero upon failure. */
7753
7754 int
7755 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7756 {
7757 switch (TREE_CODE (expr))
7758 {
7759 case INTEGER_CST:
7760 return native_encode_int (expr, ptr, len);
7761
7762 case REAL_CST:
7763 return native_encode_real (expr, ptr, len);
7764
7765 case COMPLEX_CST:
7766 return native_encode_complex (expr, ptr, len);
7767
7768 case VECTOR_CST:
7769 return native_encode_vector (expr, ptr, len);
7770
7771 case STRING_CST:
7772 return native_encode_string (expr, ptr, len);
7773
7774 default:
7775 return 0;
7776 }
7777 }
7778
7779
7780 /* Subroutine of native_interpret_expr. Interpret the contents of
7781 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7782 If the buffer cannot be interpreted, return NULL_TREE. */
7783
7784 static tree
7785 native_interpret_int (tree type, const unsigned char *ptr, int len)
7786 {
7787 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7788 int byte, offset, word, words;
7789 unsigned char value;
7790 unsigned HOST_WIDE_INT lo = 0;
7791 HOST_WIDE_INT hi = 0;
7792
7793 if (total_bytes > len)
7794 return NULL_TREE;
7795 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7796 return NULL_TREE;
7797 words = total_bytes / UNITS_PER_WORD;
7798
7799 for (byte = 0; byte < total_bytes; byte++)
7800 {
7801 int bitpos = byte * BITS_PER_UNIT;
7802 if (total_bytes > UNITS_PER_WORD)
7803 {
7804 word = byte / UNITS_PER_WORD;
7805 if (WORDS_BIG_ENDIAN)
7806 word = (words - 1) - word;
7807 offset = word * UNITS_PER_WORD;
7808 if (BYTES_BIG_ENDIAN)
7809 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7810 else
7811 offset += byte % UNITS_PER_WORD;
7812 }
7813 else
7814 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7815 value = ptr[offset];
7816
7817 if (bitpos < HOST_BITS_PER_WIDE_INT)
7818 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7819 else
7820 hi |= (unsigned HOST_WIDE_INT) value
7821 << (bitpos - HOST_BITS_PER_WIDE_INT);
7822 }
7823
7824 return build_int_cst_wide_type (type, lo, hi);
7825 }
7826
7827
7828 /* Subroutine of native_interpret_expr. Interpret the contents of
7829 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7830 If the buffer cannot be interpreted, return NULL_TREE. */
7831
7832 static tree
7833 native_interpret_real (tree type, const unsigned char *ptr, int len)
7834 {
7835 enum machine_mode mode = TYPE_MODE (type);
7836 int total_bytes = GET_MODE_SIZE (mode);
7837 int byte, offset, word, words, bitpos;
7838 unsigned char value;
7839 /* There are always 32 bits in each long, no matter the size of
7840 the host's long. We handle floating point representations with
7841 up to 192 bits. */
7842 REAL_VALUE_TYPE r;
7843 long tmp[6];
7844
7845 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7846 if (total_bytes > len || total_bytes > 24)
7847 return NULL_TREE;
7848 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7849
7850 memset (tmp, 0, sizeof (tmp));
7851 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7852 bitpos += BITS_PER_UNIT)
7853 {
7854 byte = (bitpos / BITS_PER_UNIT) & 3;
7855 if (UNITS_PER_WORD < 4)
7856 {
7857 word = byte / UNITS_PER_WORD;
7858 if (WORDS_BIG_ENDIAN)
7859 word = (words - 1) - word;
7860 offset = word * UNITS_PER_WORD;
7861 if (BYTES_BIG_ENDIAN)
7862 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7863 else
7864 offset += byte % UNITS_PER_WORD;
7865 }
7866 else
7867 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7868 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7869
7870 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7871 }
7872
7873 real_from_target (&r, tmp, mode);
7874 return build_real (type, r);
7875 }
7876
7877
7878 /* Subroutine of native_interpret_expr. Interpret the contents of
7879 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7880 If the buffer cannot be interpreted, return NULL_TREE. */
7881
7882 static tree
7883 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7884 {
7885 tree etype, rpart, ipart;
7886 int size;
7887
7888 etype = TREE_TYPE (type);
7889 size = GET_MODE_SIZE (TYPE_MODE (etype));
7890 if (size * 2 > len)
7891 return NULL_TREE;
7892 rpart = native_interpret_expr (etype, ptr, size);
7893 if (!rpart)
7894 return NULL_TREE;
7895 ipart = native_interpret_expr (etype, ptr+size, size);
7896 if (!ipart)
7897 return NULL_TREE;
7898 return build_complex (type, rpart, ipart);
7899 }
7900
7901
7902 /* Subroutine of native_interpret_expr. Interpret the contents of
7903 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7904 If the buffer cannot be interpreted, return NULL_TREE. */
7905
7906 static tree
7907 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7908 {
7909 tree etype, elem, elements;
7910 int i, size, count;
7911
7912 etype = TREE_TYPE (type);
7913 size = GET_MODE_SIZE (TYPE_MODE (etype));
7914 count = TYPE_VECTOR_SUBPARTS (type);
7915 if (size * count > len)
7916 return NULL_TREE;
7917
7918 elements = NULL_TREE;
7919 for (i = count - 1; i >= 0; i--)
7920 {
7921 elem = native_interpret_expr (etype, ptr+(i*size), size);
7922 if (!elem)
7923 return NULL_TREE;
7924 elements = tree_cons (NULL_TREE, elem, elements);
7925 }
7926 return build_vector (type, elements);
7927 }
7928
7929
7930 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7931 the buffer PTR of length LEN as a constant of type TYPE. For
7932 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7933 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7934 return NULL_TREE. */
7935
7936 tree
7937 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7938 {
7939 switch (TREE_CODE (type))
7940 {
7941 case INTEGER_TYPE:
7942 case ENUMERAL_TYPE:
7943 case BOOLEAN_TYPE:
7944 return native_interpret_int (type, ptr, len);
7945
7946 case REAL_TYPE:
7947 return native_interpret_real (type, ptr, len);
7948
7949 case COMPLEX_TYPE:
7950 return native_interpret_complex (type, ptr, len);
7951
7952 case VECTOR_TYPE:
7953 return native_interpret_vector (type, ptr, len);
7954
7955 default:
7956 return NULL_TREE;
7957 }
7958 }
7959
7960
7961 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7962 TYPE at compile-time. If we're unable to perform the conversion
7963 return NULL_TREE. */
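/* For example, on a target using IEEE single precision, a
VIEW_CONVERT_EXPR of the float constant 1.0f to a 32-bit integer
type folds to 0x3f800000: the REAL_CST is encoded into the buffer
and the bytes are reinterpreted as an INTEGER_CST. */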
7964
7965 static tree
7966 fold_view_convert_expr (tree type, tree expr)
7967 {
7968 /* We support up to 512-bit values (for V8DFmode). */
7969 unsigned char buffer[64];
7970 int len;
7971
7972 /* Check that the host and target are sane. */
7973 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7974 return NULL_TREE;
7975
7976 len = native_encode_expr (expr, buffer, sizeof (buffer));
7977 if (len == 0)
7978 return NULL_TREE;
7979
7980 return native_interpret_expr (type, buffer, len);
7981 }
7982
7983 /* Build an expression for the address of T. Folds away INDIRECT_REF
7984 to avoid confusing the gimplify process. */
7985
7986 tree
7987 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7988 {
7989 /* The size of the object is not relevant when talking about its address. */
7990 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7991 t = TREE_OPERAND (t, 0);
7992
7993 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
7994 if (TREE_CODE (t) == INDIRECT_REF
7995 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7996 {
7997 t = TREE_OPERAND (t, 0);
7998
7999 if (TREE_TYPE (t) != ptrtype)
8000 t = build1 (NOP_EXPR, ptrtype, t);
8001 }
8002 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8003 {
8004 t = build_fold_addr_expr (TREE_OPERAND (t, 0));
8005
8006 if (TREE_TYPE (t) != ptrtype)
8007 t = fold_convert (ptrtype, t);
8008 }
8009 else
8010 t = build1 (ADDR_EXPR, ptrtype, t);
8011
8012 return t;
8013 }
8014
8015 /* Build an expression for the address of T. */
8016
8017 tree
8018 build_fold_addr_expr (tree t)
8019 {
8020 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8021
8022 return build_fold_addr_expr_with_type (t, ptrtype);
8023 }
8024
8025 /* Fold a unary expression of code CODE and type TYPE with operand
8026 OP0. Return the folded expression if folding is successful.
8027 Otherwise, return NULL_TREE. */
8028
8029 tree
8030 fold_unary (enum tree_code code, tree type, tree op0)
8031 {
8032 tree tem;
8033 tree arg0;
8034 enum tree_code_class kind = TREE_CODE_CLASS (code);
8035
8036 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8037 && TREE_CODE_LENGTH (code) == 1);
8038
8039 arg0 = op0;
8040 if (arg0)
8041 {
8042 if (CONVERT_EXPR_CODE_P (code)
8043 || code == FLOAT_EXPR || code == ABS_EXPR)
8044 {
8045 /* Don't use STRIP_NOPS, because signedness of argument type
8046 matters. */
8047 STRIP_SIGN_NOPS (arg0);
8048 }
8049 else
8050 {
8051 /* Strip any conversions that don't change the mode. This
8052 is safe for every expression, except for a comparison
8053 expression because its signedness is derived from its
8054 operands.
8055
8056 Note that this is done as an internal manipulation within
8057 the constant folder, in order to find the simplest
8058 representation of the arguments so that their form can be
8059 studied. In any case, the appropriate type conversions
8060 should be put back in the tree that will get out of the
8061 constant folder. */
8062 STRIP_NOPS (arg0);
8063 }
8064 }
8065
8066 if (TREE_CODE_CLASS (code) == tcc_unary)
8067 {
8068 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8069 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8070 fold_build1 (code, type,
8071 fold_convert (TREE_TYPE (op0),
8072 TREE_OPERAND (arg0, 1))));
8073 else if (TREE_CODE (arg0) == COND_EXPR)
8074 {
8075 tree arg01 = TREE_OPERAND (arg0, 1);
8076 tree arg02 = TREE_OPERAND (arg0, 2);
8077 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8078 arg01 = fold_build1 (code, type,
8079 fold_convert (TREE_TYPE (op0), arg01));
8080 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8081 arg02 = fold_build1 (code, type,
8082 fold_convert (TREE_TYPE (op0), arg02));
8083 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8084 arg01, arg02);
8085
8086 /* If this was a conversion, and all we did was to move it
8087 inside the COND_EXPR, bring it back out. But leave it if
8088 it is a conversion from integer to integer and the
8089 result precision is no wider than a word since such a
8090 conversion is cheap and may be optimized away by combine,
8091 while it couldn't if it were outside the COND_EXPR. Then return
8092 so we don't get into an infinite recursion loop taking the
8093 conversion out and then back in. */
8094
8095 if ((CONVERT_EXPR_CODE_P (code)
8096 || code == NON_LVALUE_EXPR)
8097 && TREE_CODE (tem) == COND_EXPR
8098 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8099 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8100 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8101 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8102 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8103 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8104 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8105 && (INTEGRAL_TYPE_P
8106 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8107 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8108 || flag_syntax_only))
8109 tem = build1 (code, type,
8110 build3 (COND_EXPR,
8111 TREE_TYPE (TREE_OPERAND
8112 (TREE_OPERAND (tem, 1), 0)),
8113 TREE_OPERAND (tem, 0),
8114 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8115 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8116 return tem;
8117 }
8118 else if (COMPARISON_CLASS_P (arg0))
8119 {
8120 if (TREE_CODE (type) == BOOLEAN_TYPE)
8121 {
8122 arg0 = copy_node (arg0);
8123 TREE_TYPE (arg0) = type;
8124 return arg0;
8125 }
8126 else if (TREE_CODE (type) != INTEGER_TYPE)
8127 return fold_build3 (COND_EXPR, type, arg0,
8128 fold_build1 (code, type,
8129 integer_one_node),
8130 fold_build1 (code, type,
8131 integer_zero_node));
8132 }
8133 }
8134
8135 switch (code)
8136 {
8137 case PAREN_EXPR:
8138 /* Re-association barriers around constants and other re-association
8139 barriers can be removed. */
8140 if (CONSTANT_CLASS_P (op0)
8141 || TREE_CODE (op0) == PAREN_EXPR)
8142 return fold_convert (type, op0);
8143 return NULL_TREE;
8144
8145 CASE_CONVERT:
8146 case FLOAT_EXPR:
8147 case FIX_TRUNC_EXPR:
8148 if (TREE_TYPE (op0) == type)
8149 return op0;
8150
8151 /* If we have (type) (a CMP b) and type is an integral type, return
8152 new expression involving the new type. */
8153 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8154 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8155 TREE_OPERAND (op0, 1));
8156
8157 /* Handle cases of two conversions in a row. */
8158 if (CONVERT_EXPR_P (op0))
8159 {
8160 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8161 tree inter_type = TREE_TYPE (op0);
8162 int inside_int = INTEGRAL_TYPE_P (inside_type);
8163 int inside_ptr = POINTER_TYPE_P (inside_type);
8164 int inside_float = FLOAT_TYPE_P (inside_type);
8165 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8166 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8167 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8168 int inter_int = INTEGRAL_TYPE_P (inter_type);
8169 int inter_ptr = POINTER_TYPE_P (inter_type);
8170 int inter_float = FLOAT_TYPE_P (inter_type);
8171 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8172 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8173 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8174 int final_int = INTEGRAL_TYPE_P (type);
8175 int final_ptr = POINTER_TYPE_P (type);
8176 int final_float = FLOAT_TYPE_P (type);
8177 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8178 unsigned int final_prec = TYPE_PRECISION (type);
8179 int final_unsignedp = TYPE_UNSIGNED (type);
8180
8181 /* In addition to the cases of two conversions in a row
8182 handled below, if we are converting something to its own
8183 type via an object of identical or wider precision, neither
8184 conversion is needed. */
8185 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8186 && (((inter_int || inter_ptr) && final_int)
8187 || (inter_float && final_float))
8188 && inter_prec >= final_prec)
8189 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8190
8191 /* Likewise, if the intermediate and initial types are either both
8192 float or both integer, we don't need the middle conversion if the
8193 former is wider than the latter and doesn't change the signedness
8194 (for integers). Avoid this if the final type is a pointer since
8195 then we sometimes need the middle conversion. Likewise if the
8196 final type has a precision not equal to the size of its mode. */
8197 if (((inter_int && inside_int)
8198 || (inter_float && inside_float)
8199 || (inter_vec && inside_vec))
8200 && inter_prec >= inside_prec
8201 && (inter_float || inter_vec
8202 || inter_unsignedp == inside_unsignedp)
8203 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8204 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8205 && ! final_ptr
8206 && (! final_vec || inter_prec == inside_prec))
8207 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8208
8209 /* If we have a sign-extension of a zero-extended value, we can
8210 replace that by a single zero-extension. */
8211 if (inside_int && inter_int && final_int
8212 && inside_prec < inter_prec && inter_prec < final_prec
8213 && inside_unsignedp && !inter_unsignedp)
8214 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
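/* E.g. (int) (short) (unsigned char) X: the inner conversion already
zero-extends, so this folds to the single zero-extension
(int) (unsigned char) X (assuming the usual 8/16/32-bit widths). */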
8215
8216 /* Two conversions in a row are not needed unless:
8217 - some conversion is floating-point (overstrict for now), or
8218 - some conversion is a vector (overstrict for now), or
8219 - the intermediate type is narrower than both initial and
8220 final, or
8221 - the intermediate type and innermost type differ in signedness,
8222 and the outermost type is wider than the intermediate, or
8223 - the initial type is a pointer type and the precisions of the
8224 intermediate and final types differ, or
8225 - the final type is a pointer type and the precisions of the
8226 initial and intermediate types differ. */
8227 if (! inside_float && ! inter_float && ! final_float
8228 && ! inside_vec && ! inter_vec && ! final_vec
8229 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8230 && ! (inside_int && inter_int
8231 && inter_unsignedp != inside_unsignedp
8232 && inter_prec < final_prec)
8233 && ((inter_unsignedp && inter_prec > inside_prec)
8234 == (final_unsignedp && final_prec > inter_prec))
8235 && ! (inside_ptr && inter_prec != final_prec)
8236 && ! (final_ptr && inside_prec != inter_prec)
8237 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8238 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8239 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8240 }
8241
8242 /* Handle (T *)&A.B.C for A being of type T and B and C
8243 living at offset zero. This occurs frequently in
8244 C++ upcasting and then accessing the base. */
8245 if (TREE_CODE (op0) == ADDR_EXPR
8246 && POINTER_TYPE_P (type)
8247 && handled_component_p (TREE_OPERAND (op0, 0)))
8248 {
8249 HOST_WIDE_INT bitsize, bitpos;
8250 tree offset;
8251 enum machine_mode mode;
8252 int unsignedp, volatilep;
8253 tree base = TREE_OPERAND (op0, 0);
8254 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8255 &mode, &unsignedp, &volatilep, false);
8256 /* If the reference was to a (constant) zero offset, we can use
8257 the address of the base if it has the same base type
8258 as the result type. */
8259 if (! offset && bitpos == 0
8260 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8261 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8262 return fold_convert (type, build_fold_addr_expr (base));
8263 }
8264
8265 if (TREE_CODE (op0) == MODIFY_EXPR
8266 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8267 /* Detect assigning a bitfield. */
8268 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8269 && DECL_BIT_FIELD
8270 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8271 {
8272 /* Don't leave an assignment inside a conversion
8273 unless assigning a bitfield. */
8274 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8275 /* First do the assignment, then return converted constant. */
8276 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8277 TREE_NO_WARNING (tem) = 1;
8278 TREE_USED (tem) = 1;
8279 return tem;
8280 }
8281
8282 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8283 constant (if x has signed type, the sign bit cannot be set
8284 in c). This folds extension into the BIT_AND_EXPR.
8285 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8286 very likely don't have maximal range for their precision and this
8287 transformation effectively doesn't preserve non-maximal ranges. */
8288 if (TREE_CODE (type) == INTEGER_TYPE
8289 && TREE_CODE (op0) == BIT_AND_EXPR
8290 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8291 {
8292 tree and = op0;
8293 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8294 int change = 0;
8295
8296 if (TYPE_UNSIGNED (TREE_TYPE (and))
8297 || (TYPE_PRECISION (type)
8298 <= TYPE_PRECISION (TREE_TYPE (and))))
8299 change = 1;
8300 else if (TYPE_PRECISION (TREE_TYPE (and1))
8301 <= HOST_BITS_PER_WIDE_INT
8302 && host_integerp (and1, 1))
8303 {
8304 unsigned HOST_WIDE_INT cst;
8305
8306 cst = tree_low_cst (and1, 1);
8307 cst &= (HOST_WIDE_INT) -1
8308 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8309 change = (cst == 0);
8310 #ifdef LOAD_EXTEND_OP
8311 if (change
8312 && !flag_syntax_only
8313 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8314 == ZERO_EXTEND))
8315 {
8316 tree uns = unsigned_type_for (TREE_TYPE (and0));
8317 and0 = fold_convert (uns, and0);
8318 and1 = fold_convert (uns, and1);
8319 }
8320 #endif
8321 }
8322 if (change)
8323 {
8324 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8325 TREE_INT_CST_HIGH (and1), 0,
8326 TREE_OVERFLOW (and1));
8327 return fold_build2 (BIT_AND_EXPR, type,
8328 fold_convert (type, and0), tem);
8329 }
8330 }
8331
8332 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8333 when one of the new casts will fold away. Conservatively we assume
8334 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8335 if (POINTER_TYPE_P (type)
8336 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8337 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8338 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8339 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8340 {
8341 tree arg00 = TREE_OPERAND (arg0, 0);
8342 tree arg01 = TREE_OPERAND (arg0, 1);
8343
8344 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8345 fold_convert (sizetype, arg01));
8346 }
8347
8348 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8349 of the same precision, and X is an integer type not narrower than
8350 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8351 if (INTEGRAL_TYPE_P (type)
8352 && TREE_CODE (op0) == BIT_NOT_EXPR
8353 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8354 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8355 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8356 {
8357 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8358 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8359 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8360 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8361 }
8362
8363 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8364 type of X and Y (integer types only). */
8365 if (INTEGRAL_TYPE_P (type)
8366 && TREE_CODE (op0) == MULT_EXPR
8367 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8368 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8369 {
8370 /* Be careful not to introduce new overflows. */
8371 tree mult_type;
8372 if (TYPE_OVERFLOW_WRAPS (type))
8373 mult_type = type;
8374 else
8375 mult_type = unsigned_type_for (type);
8376
8377 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8378 {
8379 tem = fold_build2 (MULT_EXPR, mult_type,
8380 fold_convert (mult_type,
8381 TREE_OPERAND (op0, 0)),
8382 fold_convert (mult_type,
8383 TREE_OPERAND (op0, 1)));
8384 return fold_convert (type, tem);
8385 }
8386 }
8387
8388 tem = fold_convert_const (code, type, op0);
8389 return tem ? tem : NULL_TREE;
8390
8391 case FIXED_CONVERT_EXPR:
8392 tem = fold_convert_const (code, type, arg0);
8393 return tem ? tem : NULL_TREE;
8394
8395 case VIEW_CONVERT_EXPR:
8396 if (TREE_TYPE (op0) == type)
8397 return op0;
8398 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8399 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8400
8401 /* For integral conversions with the same precision or pointer
8402 conversions use a NOP_EXPR instead. */
8403 if ((INTEGRAL_TYPE_P (type)
8404 || POINTER_TYPE_P (type))
8405 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8406 || POINTER_TYPE_P (TREE_TYPE (op0)))
8407 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8408 return fold_convert (type, op0);
8409
8410 /* Strip inner integral conversions that do not change the precision. */
8411 if (CONVERT_EXPR_P (op0)
8412 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8413 || POINTER_TYPE_P (TREE_TYPE (op0)))
8414 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8415 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8416 && (TYPE_PRECISION (TREE_TYPE (op0))
8417 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8418 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8419
8420 return fold_view_convert_expr (type, op0);
8421
8422 case NEGATE_EXPR:
8423 tem = fold_negate_expr (arg0);
8424 if (tem)
8425 return fold_convert (type, tem);
8426 return NULL_TREE;
8427
8428 case ABS_EXPR:
8429 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8430 return fold_abs_const (arg0, type);
8431 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8432 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8433 /* Convert fabs((double)float) into (double)fabsf(float). */
8434 else if (TREE_CODE (arg0) == NOP_EXPR
8435 && TREE_CODE (type) == REAL_TYPE)
8436 {
8437 tree targ0 = strip_float_extensions (arg0);
8438 if (targ0 != arg0)
8439 return fold_convert (type, fold_build1 (ABS_EXPR,
8440 TREE_TYPE (targ0),
8441 targ0));
8442 }
8443 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8444 else if (TREE_CODE (arg0) == ABS_EXPR)
8445 return arg0;
8446 else if (tree_expr_nonnegative_p (arg0))
8447 return arg0;
8448
8449 /* Strip sign ops from argument. */
8450 if (TREE_CODE (type) == REAL_TYPE)
8451 {
8452 tem = fold_strip_sign_ops (arg0);
8453 if (tem)
8454 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8455 }
8456 return NULL_TREE;
8457
8458 case CONJ_EXPR:
8459 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8460 return fold_convert (type, arg0);
8461 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8462 {
8463 tree itype = TREE_TYPE (type);
8464 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8465 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8466 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8467 }
8468 if (TREE_CODE (arg0) == COMPLEX_CST)
8469 {
8470 tree itype = TREE_TYPE (type);
8471 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8472 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8473 return build_complex (type, rpart, negate_expr (ipart));
8474 }
8475 if (TREE_CODE (arg0) == CONJ_EXPR)
8476 return fold_convert (type, TREE_OPERAND (arg0, 0));
8477 return NULL_TREE;
8478
8479 case BIT_NOT_EXPR:
8480 if (TREE_CODE (arg0) == INTEGER_CST)
8481 return fold_not_const (arg0, type);
8482 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8483 return fold_convert (type, TREE_OPERAND (arg0, 0));
8484 /* Convert ~ (-A) to A - 1. */
8485 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8486 return fold_build2 (MINUS_EXPR, type,
8487 fold_convert (type, TREE_OPERAND (arg0, 0)),
8488 build_int_cst (type, 1));
8489 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8490 else if (INTEGRAL_TYPE_P (type)
8491 && ((TREE_CODE (arg0) == MINUS_EXPR
8492 && integer_onep (TREE_OPERAND (arg0, 1)))
8493 || (TREE_CODE (arg0) == PLUS_EXPR
8494 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8495 return fold_build1 (NEGATE_EXPR, type,
8496 fold_convert (type, TREE_OPERAND (arg0, 0)));
8497 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8498 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8499 && (tem = fold_unary (BIT_NOT_EXPR, type,
8500 fold_convert (type,
8501 TREE_OPERAND (arg0, 0)))))
8502 return fold_build2 (BIT_XOR_EXPR, type, tem,
8503 fold_convert (type, TREE_OPERAND (arg0, 1)));
8504 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8505 && (tem = fold_unary (BIT_NOT_EXPR, type,
8506 fold_convert (type,
8507 TREE_OPERAND (arg0, 1)))))
8508 return fold_build2 (BIT_XOR_EXPR, type,
8509 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8510 /* Perform BIT_NOT_EXPR on each element individually. */
8511 else if (TREE_CODE (arg0) == VECTOR_CST)
8512 {
8513 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8514 int count = TYPE_VECTOR_SUBPARTS (type), i;
8515
8516 for (i = 0; i < count; i++)
8517 {
8518 if (elements)
8519 {
8520 elem = TREE_VALUE (elements);
8521 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8522 if (elem == NULL_TREE)
8523 break;
8524 elements = TREE_CHAIN (elements);
8525 }
8526 else
8527 elem = build_int_cst (TREE_TYPE (type), -1);
8528 list = tree_cons (NULL_TREE, elem, list);
8529 }
8530 if (i == count)
8531 return build_vector (type, nreverse (list));
8532 }
8533
8534 return NULL_TREE;
8535
8536 case TRUTH_NOT_EXPR:
8537 /* The argument to invert_truthvalue must have Boolean type. */
8538 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8539 arg0 = fold_convert (boolean_type_node, arg0);
8540
8541 /* Note that the operand of this must be an int
8542 and its values must be 0 or 1.
8543 ("true" is a fixed value perhaps depending on the language,
8544 but we don't handle values other than 1 correctly yet.) */
8545 tem = fold_truth_not_expr (arg0);
8546 if (!tem)
8547 return NULL_TREE;
8548 return fold_convert (type, tem);
8549
8550 case REALPART_EXPR:
8551 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8552 return fold_convert (type, arg0);
8553 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8554 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8555 TREE_OPERAND (arg0, 1));
8556 if (TREE_CODE (arg0) == COMPLEX_CST)
8557 return fold_convert (type, TREE_REALPART (arg0));
8558 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8559 {
8560 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8561 tem = fold_build2 (TREE_CODE (arg0), itype,
8562 fold_build1 (REALPART_EXPR, itype,
8563 TREE_OPERAND (arg0, 0)),
8564 fold_build1 (REALPART_EXPR, itype,
8565 TREE_OPERAND (arg0, 1)));
8566 return fold_convert (type, tem);
8567 }
8568 if (TREE_CODE (arg0) == CONJ_EXPR)
8569 {
8570 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8571 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8572 return fold_convert (type, tem);
8573 }
8574 if (TREE_CODE (arg0) == CALL_EXPR)
8575 {
8576 tree fn = get_callee_fndecl (arg0);
8577 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8578 switch (DECL_FUNCTION_CODE (fn))
8579 {
8580 CASE_FLT_FN (BUILT_IN_CEXPI):
8581 fn = mathfn_built_in (type, BUILT_IN_COS);
8582 if (fn)
8583 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8584 break;
8585
8586 default:
8587 break;
8588 }
8589 }
8590 return NULL_TREE;
8591
8592 case IMAGPART_EXPR:
8593 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8594 return fold_convert (type, integer_zero_node);
8595 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8596 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8597 TREE_OPERAND (arg0, 0));
8598 if (TREE_CODE (arg0) == COMPLEX_CST)
8599 return fold_convert (type, TREE_IMAGPART (arg0));
8600 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8601 {
8602 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8603 tem = fold_build2 (TREE_CODE (arg0), itype,
8604 fold_build1 (IMAGPART_EXPR, itype,
8605 TREE_OPERAND (arg0, 0)),
8606 fold_build1 (IMAGPART_EXPR, itype,
8607 TREE_OPERAND (arg0, 1)));
8608 return fold_convert (type, tem);
8609 }
8610 if (TREE_CODE (arg0) == CONJ_EXPR)
8611 {
8612 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8613 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8614 return fold_convert (type, negate_expr (tem));
8615 }
8616 if (TREE_CODE (arg0) == CALL_EXPR)
8617 {
8618 tree fn = get_callee_fndecl (arg0);
8619 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8620 switch (DECL_FUNCTION_CODE (fn))
8621 {
8622 CASE_FLT_FN (BUILT_IN_CEXPI):
8623 fn = mathfn_built_in (type, BUILT_IN_SIN);
8624 if (fn)
8625 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8626 break;
8627
8628 default:
8629 break;
8630 }
8631 }
8632 return NULL_TREE;
8633
8634 default:
8635 return NULL_TREE;
8636 } /* switch (code) */
8637 }
8638
8639
8640 /* If the operation was a conversion, do _not_ mark a resulting constant
8641 with TREE_OVERFLOW if the original constant was not. These conversions
8642 have implementation defined behavior and retaining the TREE_OVERFLOW
8643 flag here would confuse later passes such as VRP. */
8644 tree
8645 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8646 {
8647 tree res = fold_unary (code, type, op0);
8648 if (res
8649 && TREE_CODE (res) == INTEGER_CST
8650 && TREE_CODE (op0) == INTEGER_CST
8651 && CONVERT_EXPR_CODE_P (code))
8652 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8653
8654 return res;
8655 }
8656
8657 /* Fold a binary expression of code CODE and type TYPE with operands
8658 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8659 Return the folded expression if folding is successful. Otherwise,
8660 return NULL_TREE. */
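/* For example, MIN_EXPR <MAX_EXPR <a, b>, b> folds to b (keeping any
side effects of a), and MIN_EXPR <a, MAX_EXPR <a, b>> folds to a. */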
8661
8662 static tree
8663 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8664 {
8665 enum tree_code compl_code;
8666
8667 if (code == MIN_EXPR)
8668 compl_code = MAX_EXPR;
8669 else if (code == MAX_EXPR)
8670 compl_code = MIN_EXPR;
8671 else
8672 gcc_unreachable ();
8673
8674 /* MIN (MAX (a, b), b) == b. */
8675 if (TREE_CODE (op0) == compl_code
8676 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8677 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8678
8679 /* MIN (MAX (b, a), b) == b. */
8680 if (TREE_CODE (op0) == compl_code
8681 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8682 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8683 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8684
8685 /* MIN (a, MAX (a, b)) == a. */
8686 if (TREE_CODE (op1) == compl_code
8687 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8688 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8689 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8690
8691 /* MIN (a, MAX (b, a)) == a. */
8692 if (TREE_CODE (op1) == compl_code
8693 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8694 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8695 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8696
8697 return NULL_TREE;
8698 }
8699
8700 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8701 by changing CODE to reduce the magnitude of constants involved in
8702 ARG0 of the comparison.
8703 Returns a canonicalized comparison tree if a simplification was
8704 possible, otherwise returns NULL_TREE.
8705 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8706 valid if signed overflow is undefined. */
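/* For example, 4 <= X becomes X > 3, and, when signed overflow is
undefined, X + 2 > Y becomes X + 1 >= Y. */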
8707
8708 static tree
8709 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8710 tree arg0, tree arg1,
8711 bool *strict_overflow_p)
8712 {
8713 enum tree_code code0 = TREE_CODE (arg0);
8714 tree t, cst0 = NULL_TREE;
8715 int sgn0;
8716 bool swap = false;
8717
8718 /* Match A +- CST code arg1 and CST code arg1. We can change the
8719 first form only if overflow is undefined. */
8720 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8721 /* In principle pointers also have undefined overflow behavior,
8722 but that causes problems elsewhere. */
8723 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8724 && (code0 == MINUS_EXPR
8725 || code0 == PLUS_EXPR)
8726 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8727 || code0 == INTEGER_CST))
8728 return NULL_TREE;
8729
8730 /* Identify the constant in arg0 and its sign. */
8731 if (code0 == INTEGER_CST)
8732 cst0 = arg0;
8733 else
8734 cst0 = TREE_OPERAND (arg0, 1);
8735 sgn0 = tree_int_cst_sgn (cst0);
8736
8737 /* Overflowed constants and zero will cause problems. */
8738 if (integer_zerop (cst0)
8739 || TREE_OVERFLOW (cst0))
8740 return NULL_TREE;
8741
8742 /* See if we can reduce the magnitude of the constant in
8743 arg0 by changing the comparison code. */
8744 if (code0 == INTEGER_CST)
8745 {
8746 /* CST <= arg1 -> CST-1 < arg1. */
8747 if (code == LE_EXPR && sgn0 == 1)
8748 code = LT_EXPR;
8749 /* -CST < arg1 -> -CST-1 <= arg1. */
8750 else if (code == LT_EXPR && sgn0 == -1)
8751 code = LE_EXPR;
8752 /* CST > arg1 -> CST-1 >= arg1. */
8753 else if (code == GT_EXPR && sgn0 == 1)
8754 code = GE_EXPR;
8755 /* -CST >= arg1 -> -CST-1 > arg1. */
8756 else if (code == GE_EXPR && sgn0 == -1)
8757 code = GT_EXPR;
8758 else
8759 return NULL_TREE;
8760 /* arg1 code' CST' might be more canonical. */
8761 swap = true;
8762 }
8763 else
8764 {
8765 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8766 if (code == LT_EXPR
8767 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8768 code = LE_EXPR;
8769 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8770 else if (code == GT_EXPR
8771 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8772 code = GE_EXPR;
8773 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8774 else if (code == LE_EXPR
8775 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8776 code = LT_EXPR;
8777 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8778 else if (code == GE_EXPR
8779 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8780 code = GT_EXPR;
8781 else
8782 return NULL_TREE;
8783 *strict_overflow_p = true;
8784 }
8785
8786 /* Now build the constant reduced in magnitude. But not if that
8787 would produce one outside of its type's range. */
8788 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8789 && ((sgn0 == 1
8790 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8791 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8792 || (sgn0 == -1
8793 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8794 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8795 /* We cannot swap the comparison here as that would cause us to
8796 endlessly recurse. */
8797 return NULL_TREE;
8798
8799 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8800 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8801 if (code0 != INTEGER_CST)
8802 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8803
8804 /* If swapping might yield a more canonical form, do so. */
8805 if (swap)
8806 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8807 else
8808 return fold_build2 (code, type, t, arg1);
8809 }
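
/* For illustration, with signed int x and y and undefined overflow:

     x - 2 < y   becomes  x - 1 <= y   (constant magnitude 2 -> 1)
     3 <= x      becomes  2 < x, which is then swapped to  x > 2  */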
8810
8811 /* Further canonicalize the comparison ARG0 CODE ARG1 of type TYPE with
8812 undefined overflow. Try to decrease the magnitude of constants involved
8813 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8814 and put sole constants at the second argument position.
8815 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8816
8817 static tree
8818 maybe_canonicalize_comparison (enum tree_code code, tree type,
8819 tree arg0, tree arg1)
8820 {
8821 tree t;
8822 bool strict_overflow_p;
8823 const char * const warnmsg = G_("assuming signed overflow does not occur "
8824 "when reducing constant in comparison");
8825
8826 /* Try canonicalization by simplifying arg0. */
8827 strict_overflow_p = false;
8828 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8829 &strict_overflow_p);
8830 if (t)
8831 {
8832 if (strict_overflow_p)
8833 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8834 return t;
8835 }
8836
8837 /* Try canonicalization by simplifying arg1 using the swapped
8838 comparison. */
8839 code = swap_tree_comparison (code);
8840 strict_overflow_p = false;
8841 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8842 &strict_overflow_p);
8843 if (t && strict_overflow_p)
8844 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8845 return t;
8846 }
8847
8848 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8849 space. This is used to avoid issuing overflow warnings for
8850 expressions like &p->x which cannot wrap. */
8851
8852 static bool
8853 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8854 {
8855 unsigned HOST_WIDE_INT offset_low, total_low;
8856 HOST_WIDE_INT size, offset_high, total_high;
8857
8858 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8859 return true;
8860
8861 if (bitpos < 0)
8862 return true;
8863
8864 if (offset == NULL_TREE)
8865 {
8866 offset_low = 0;
8867 offset_high = 0;
8868 }
8869 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8870 return true;
8871 else
8872 {
8873 offset_low = TREE_INT_CST_LOW (offset);
8874 offset_high = TREE_INT_CST_HIGH (offset);
8875 }
8876
8877 if (add_double_with_sign (offset_low, offset_high,
8878 bitpos / BITS_PER_UNIT, 0,
8879 &total_low, &total_high,
8880 true))
8881 return true;
8882
8883 if (total_high != 0)
8884 return true;
8885
8886 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8887 if (size <= 0)
8888 return true;
8889
8890 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8891 array. */
8892 if (TREE_CODE (base) == ADDR_EXPR)
8893 {
8894 HOST_WIDE_INT base_size;
8895
8896 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8897 if (base_size > 0 && size < base_size)
8898 size = base_size;
8899 }
8900
8901 return total_low > (unsigned HOST_WIDE_INT) size;
8902 }
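
/* For illustration, assuming "struct S { int a; int b; } *p" with
   4-byte ints: for &p->b we get bitpos 32 and no variable offset, so
   the total byte offset 4 does not exceed sizeof (struct S) == 8 and
   the function returns false -- the address cannot wrap. */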
8903
8904 /* Subroutine of fold_binary. This routine performs all of the
8905 transformations that are common to the equality/inequality
8906 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8907 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8908 fold_binary should call fold_binary instead of calling this
8909 routine directly. Fold a comparison with tree code CODE and
8910 type TYPE with operands OP0 and OP1. Return the folded comparison or NULL_TREE. */
8911
8912 static tree
8913 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8914 {
8915 tree arg0, arg1, tem;
8916
8917 arg0 = op0;
8918 arg1 = op1;
8919
8920 STRIP_SIGN_NOPS (arg0);
8921 STRIP_SIGN_NOPS (arg1);
8922
8923 tem = fold_relational_const (code, type, arg0, arg1);
8924 if (tem != NULL_TREE)
8925 return tem;
8926
8927 /* If one arg is a real or integer constant, put it last. */
8928 if (tree_swap_operands_p (arg0, arg1, true))
8929 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8930
8931 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8932 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8933 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8934 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8935 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8936 && (TREE_CODE (arg1) == INTEGER_CST
8937 && !TREE_OVERFLOW (arg1)))
8938 {
8939 tree const1 = TREE_OPERAND (arg0, 1);
8940 tree const2 = arg1;
8941 tree variable = TREE_OPERAND (arg0, 0);
8942 tree lhs;
8943 int lhs_add;
8944 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8945
8946 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8947 TREE_TYPE (arg1), const2, const1);
8948
8949 /* If the constant operation overflowed, this can be
8950 simplified as a comparison against INT_MAX/INT_MIN. */
8951 if (TREE_CODE (lhs) == INTEGER_CST
8952 && TREE_OVERFLOW (lhs))
8953 {
8954 int const1_sgn = tree_int_cst_sgn (const1);
8955 enum tree_code code2 = code;
8956
8957 /* Get the sign of the constant on the lhs if the
8958 operation were VARIABLE + CONST1. */
8959 if (TREE_CODE (arg0) == MINUS_EXPR)
8960 const1_sgn = -const1_sgn;
8961
8962 /* The sign of the constant determines if we overflowed
8963 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8964 Canonicalize to the INT_MIN overflow by swapping the comparison
8965 if necessary. */
8966 if (const1_sgn == -1)
8967 code2 = swap_tree_comparison (code);
8968
8969 /* We can now look at the canonicalized case
8970 VARIABLE + 1 CODE2 INT_MIN
8971 and decide on the result. */
8972 if (code2 == LT_EXPR
8973 || code2 == LE_EXPR
8974 || code2 == EQ_EXPR)
8975 return omit_one_operand (type, boolean_false_node, variable);
8976 else if (code2 == NE_EXPR
8977 || code2 == GE_EXPR
8978 || code2 == GT_EXPR)
8979 return omit_one_operand (type, boolean_true_node, variable);
8980 }
8981
8982 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8983 && (TREE_CODE (lhs) != INTEGER_CST
8984 || !TREE_OVERFLOW (lhs)))
8985 {
8986 fold_overflow_warning (("assuming signed overflow does not occur "
8987 "when changing X +- C1 cmp C2 to "
8988 "X cmp C1 +- C2"),
8989 WARN_STRICT_OVERFLOW_COMPARISON);
8990 return fold_build2 (code, type, variable, lhs);
8991 }
8992 }
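
/* For illustration, with signed int x and undefined overflow:

     x + 10 < 5        becomes  x < -5   (5 - 10 does not overflow)
     x + 1 <= INT_MIN  becomes  false    (INT_MIN - 1 overflows, and
                                          x + 1 can never be below
                                          INT_MIN)  */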
8993
8994 /* For comparisons of pointers we can decompose them into a compile
8995 time comparison of the base objects and the offsets into the object.
8996 This requires at least one operand being an ADDR_EXPR or a
8997 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8998 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8999 && (TREE_CODE (arg0) == ADDR_EXPR
9000 || TREE_CODE (arg1) == ADDR_EXPR
9001 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9002 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9003 {
9004 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9005 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9006 enum machine_mode mode;
9007 int volatilep, unsignedp;
9008 bool indirect_base0 = false, indirect_base1 = false;
9009
9010 /* Get base and offset for the access. Strip ADDR_EXPR for
9011 get_inner_reference, but put it back by stripping INDIRECT_REF
9012 off the base object if possible. indirect_baseN will be true
9013 if baseN is not an address but refers to the object itself. */
9014 base0 = arg0;
9015 if (TREE_CODE (arg0) == ADDR_EXPR)
9016 {
9017 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9018 &bitsize, &bitpos0, &offset0, &mode,
9019 &unsignedp, &volatilep, false);
9020 if (TREE_CODE (base0) == INDIRECT_REF)
9021 base0 = TREE_OPERAND (base0, 0);
9022 else
9023 indirect_base0 = true;
9024 }
9025 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9026 {
9027 base0 = TREE_OPERAND (arg0, 0);
9028 offset0 = TREE_OPERAND (arg0, 1);
9029 }
9030
9031 base1 = arg1;
9032 if (TREE_CODE (arg1) == ADDR_EXPR)
9033 {
9034 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9035 &bitsize, &bitpos1, &offset1, &mode,
9036 &unsignedp, &volatilep, false);
9037 if (TREE_CODE (base1) == INDIRECT_REF)
9038 base1 = TREE_OPERAND (base1, 0);
9039 else
9040 indirect_base1 = true;
9041 }
9042 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9043 {
9044 base1 = TREE_OPERAND (arg1, 0);
9045 offset1 = TREE_OPERAND (arg1, 1);
9046 }
9047
9048 /* If we have equivalent bases we might be able to simplify. */
9049 if (indirect_base0 == indirect_base1
9050 && operand_equal_p (base0, base1, 0))
9051 {
9052 /* We can fold this expression to a constant if the non-constant
9053 offset parts are equal. */
9054 if ((offset0 == offset1
9055 || (offset0 && offset1
9056 && operand_equal_p (offset0, offset1, 0)))
9057 && (code == EQ_EXPR
9058 || code == NE_EXPR
9059 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9061 {
9062 if (code != EQ_EXPR
9063 && code != NE_EXPR
9064 && bitpos0 != bitpos1
9065 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9066 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9067 fold_overflow_warning (("assuming pointer wraparound does not "
9068 "occur when comparing P +- C1 with "
9069 "P +- C2"),
9070 WARN_STRICT_OVERFLOW_CONDITIONAL);
9071
9072 switch (code)
9073 {
9074 case EQ_EXPR:
9075 return constant_boolean_node (bitpos0 == bitpos1, type);
9076 case NE_EXPR:
9077 return constant_boolean_node (bitpos0 != bitpos1, type);
9078 case LT_EXPR:
9079 return constant_boolean_node (bitpos0 < bitpos1, type);
9080 case LE_EXPR:
9081 return constant_boolean_node (bitpos0 <= bitpos1, type);
9082 case GE_EXPR:
9083 return constant_boolean_node (bitpos0 >= bitpos1, type);
9084 case GT_EXPR:
9085 return constant_boolean_node (bitpos0 > bitpos1, type);
9086 default:;
9087 }
9088 }
9089 /* We can simplify the comparison to a comparison of the variable
9090 offset parts if the constant offset parts are equal.
9091 Be careful to use signed size type here because otherwise we
9092 mess with array offsets in the wrong way. This is possible
9093 because pointer arithmetic is restricted to remain within an
9094 object and overflow on pointer differences is undefined as of
9095 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9096 else if (bitpos0 == bitpos1
9097 && ((code == EQ_EXPR || code == NE_EXPR)
9098 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9099 {
9100 tree signed_size_type_node;
9101 signed_size_type_node = signed_type_for (size_type_node);
9102
9103 /* By converting to signed size type we cover middle-end pointer
9104 arithmetic, which operates on unsigned pointer types of size
9105 type size, and ARRAY_REF offsets, which are properly sign or
9106 zero extended from their type in case it is narrower than
9107 size type. */
9108 if (offset0 == NULL_TREE)
9109 offset0 = build_int_cst (signed_size_type_node, 0);
9110 else
9111 offset0 = fold_convert (signed_size_type_node, offset0);
9112 if (offset1 == NULL_TREE)
9113 offset1 = build_int_cst (signed_size_type_node, 0);
9114 else
9115 offset1 = fold_convert (signed_size_type_node, offset1);
9116
9117 if (code != EQ_EXPR
9118 && code != NE_EXPR
9119 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9120 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9121 fold_overflow_warning (("assuming pointer wraparound does not "
9122 "occur when comparing P +- C1 with "
9123 "P +- C2"),
9124 WARN_STRICT_OVERFLOW_COMPARISON);
9125
9126 return fold_build2 (code, type, offset0, offset1);
9127 }
9128 }
9129 /* For non-equal bases we can simplify if they are addresses
9130 of local binding decls or constants. */
9131 else if (indirect_base0 && indirect_base1
9132 /* We know that !operand_equal_p (base0, base1, 0)
9133 because the if condition was false. But make
9134 sure the two decls are not the same. */
9135 && base0 != base1
9136 && TREE_CODE (arg0) == ADDR_EXPR
9137 && TREE_CODE (arg1) == ADDR_EXPR
9138 && (((TREE_CODE (base0) == VAR_DECL
9139 || TREE_CODE (base0) == PARM_DECL)
9140 && (targetm.binds_local_p (base0)
9141 || CONSTANT_CLASS_P (base1)))
9142 || CONSTANT_CLASS_P (base0))
9143 && (((TREE_CODE (base1) == VAR_DECL
9144 || TREE_CODE (base1) == PARM_DECL)
9145 && (targetm.binds_local_p (base1)
9146 || CONSTANT_CLASS_P (base0)))
9147 || CONSTANT_CLASS_P (base1)))
9148 {
9149 if (code == EQ_EXPR)
9150 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9151 else if (code == NE_EXPR)
9152 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9153 }
9154 /* For equal offsets we can simplify to a comparison of the
9155 base addresses. */
9156 else if (bitpos0 == bitpos1
9157 && (indirect_base0
9158 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9159 && (indirect_base1
9160 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9161 && ((offset0 == offset1)
9162 || (offset0 && offset1
9163 && operand_equal_p (offset0, offset1, 0))))
9164 {
9165 if (indirect_base0)
9166 base0 = build_fold_addr_expr (base0);
9167 if (indirect_base1)
9168 base1 = build_fold_addr_expr (base1);
9169 return fold_build2 (code, type, base0, base1);
9170 }
9171 }
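
/* For illustration of the pointer decomposition above, with "int a[10]"
   and 4-byte ints: &a[3] < &a[5] has equal bases and constant bit
   positions 96 and 160, so it folds to true; &a[i] == &a[j] has equal
   bases and equal bit positions, so it reduces to a comparison of the
   variable offsets derived from i and j. */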
9172
9173 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9174 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9175 the resulting offset is smaller in absolute value than the
9176 original one. */
9177 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9178 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9179 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9180 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9181 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9182 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9183 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9184 {
9185 tree const1 = TREE_OPERAND (arg0, 1);
9186 tree const2 = TREE_OPERAND (arg1, 1);
9187 tree variable1 = TREE_OPERAND (arg0, 0);
9188 tree variable2 = TREE_OPERAND (arg1, 0);
9189 tree cst;
9190 const char * const warnmsg = G_("assuming signed overflow does not "
9191 "occur when combining constants around "
9192 "a comparison");
9193
9194 /* Put the constant on the side where it doesn't overflow and is
9195 of lower absolute value than before. */
9196 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9197 ? MINUS_EXPR : PLUS_EXPR,
9198 const2, const1, 0);
9199 if (!TREE_OVERFLOW (cst)
9200 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9201 {
9202 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9203 return fold_build2 (code, type,
9204 variable1,
9205 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9206 variable2, cst));
9207 }
9208
9209 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9210 ? MINUS_EXPR : PLUS_EXPR,
9211 const1, const2, 0);
9212 if (!TREE_OVERFLOW (cst)
9213 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9214 {
9215 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9216 return fold_build2 (code, type,
9217 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9218 variable1, cst),
9219 variable2);
9220 }
9221 }
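
/* For illustration, with signed int x and y and undefined overflow:
   x + 2 < y + 5 becomes x < y + 3; the combined constant 3 is smaller
   in magnitude than 5, so the transformation cannot itself overflow. */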
9222
9223 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9224 signed arithmetic case. That form is created by the compiler
9225 often enough for folding it to be of value. One example is in
9226 computing loop trip counts after Operator Strength Reduction. */
9227 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9228 && TREE_CODE (arg0) == MULT_EXPR
9229 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9230 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9231 && integer_zerop (arg1))
9232 {
9233 tree const1 = TREE_OPERAND (arg0, 1);
9234 tree const2 = arg1; /* zero */
9235 tree variable1 = TREE_OPERAND (arg0, 0);
9236 enum tree_code cmp_code = code;
9237
9238 gcc_assert (!integer_zerop (const1));
9239
9240 fold_overflow_warning (("assuming signed overflow does not occur when "
9241 "eliminating multiplication in comparison "
9242 "with zero"),
9243 WARN_STRICT_OVERFLOW_COMPARISON);
9244
9245 /* If const1 is negative we swap the sense of the comparison. */
9246 if (tree_int_cst_sgn (const1) < 0)
9247 cmp_code = swap_tree_comparison (cmp_code);
9248
9249 return fold_build2 (cmp_code, type, variable1, const2);
9250 }
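
/* For illustration, with signed int x and undefined overflow:

     x * 4 > 0    becomes  x > 0
     x * -4 > 0   becomes  x < 0   (the negative factor swaps the sense)  */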
9251
9252 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9253 if (tem)
9254 return tem;
9255
9256 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9257 {
9258 tree targ0 = strip_float_extensions (arg0);
9259 tree targ1 = strip_float_extensions (arg1);
9260 tree newtype = TREE_TYPE (targ0);
9261
9262 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9263 newtype = TREE_TYPE (targ1);
9264
9265 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9266 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9267 return fold_build2 (code, type, fold_convert (newtype, targ0),
9268 fold_convert (newtype, targ1));
9269
9270 /* (-a) CMP (-b) -> b CMP a */
9271 if (TREE_CODE (arg0) == NEGATE_EXPR
9272 && TREE_CODE (arg1) == NEGATE_EXPR)
9273 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9274 TREE_OPERAND (arg0, 0));
9275
9276 if (TREE_CODE (arg1) == REAL_CST)
9277 {
9278 REAL_VALUE_TYPE cst;
9279 cst = TREE_REAL_CST (arg1);
9280
9281 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9282 if (TREE_CODE (arg0) == NEGATE_EXPR)
9283 return fold_build2 (swap_tree_comparison (code), type,
9284 TREE_OPERAND (arg0, 0),
9285 build_real (TREE_TYPE (arg1),
9286 REAL_VALUE_NEGATE (cst)));
9287
9288 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9289 /* a CMP (-0) -> a CMP 0 */
9290 if (REAL_VALUE_MINUS_ZERO (cst))
9291 return fold_build2 (code, type, arg0,
9292 build_real (TREE_TYPE (arg1), dconst0));
9293
9294 /* x != NaN is always true, other ops are always false. */
9295 if (REAL_VALUE_ISNAN (cst)
9296 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9297 {
9298 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9299 return omit_one_operand (type, tem, arg0);
9300 }
9301
9302 /* Fold comparisons against infinity. */
9303 if (REAL_VALUE_ISINF (cst)
9304 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9305 {
9306 tem = fold_inf_compare (code, type, arg0, arg1);
9307 if (tem != NULL_TREE)
9308 return tem;
9309 }
9310 }
9311
9312 /* If this is a comparison of a real constant with a PLUS_EXPR
9313 or a MINUS_EXPR of a real constant, we can convert it into a
9314 comparison with a revised real constant, provided that unsafe
9315 math optimizations are enabled and no overflow occurs. */
9316 if (flag_unsafe_math_optimizations
9317 && TREE_CODE (arg1) == REAL_CST
9318 && (TREE_CODE (arg0) == PLUS_EXPR
9319 || TREE_CODE (arg0) == MINUS_EXPR)
9320 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9321 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9322 ? MINUS_EXPR : PLUS_EXPR,
9323 arg1, TREE_OPERAND (arg0, 1), 0))
9324 && !TREE_OVERFLOW (tem))
9325 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9326
9327 /* Likewise, we can simplify a comparison of a real constant with
9328 a MINUS_EXPR whose first operand is also a real constant, i.e.
9329 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9330 floating-point types only if -fassociative-math is set. */
9331 if (flag_associative_math
9332 && TREE_CODE (arg1) == REAL_CST
9333 && TREE_CODE (arg0) == MINUS_EXPR
9334 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9335 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9336 arg1, 0))
9337 && !TREE_OVERFLOW (tem))
9338 return fold_build2 (swap_tree_comparison (code), type,
9339 TREE_OPERAND (arg0, 1), tem);
9340
9341 /* Fold comparisons against built-in math functions. */
9342 if (TREE_CODE (arg1) == REAL_CST
9343 && flag_unsafe_math_optimizations
9344 && ! flag_errno_math)
9345 {
9346 enum built_in_function fcode = builtin_mathfn_code (arg0);
9347
9348 if (fcode != END_BUILTINS)
9349 {
9350 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9351 if (tem != NULL_TREE)
9352 return tem;
9353 }
9354 }
9355 }
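
/* For illustration of the floating-point cases above, with float f1, f2:

     (double) f1 < (double) f2  becomes  f1 < f2
     -f1 < 2.0                  becomes  f1 > -2.0
     f1 != NaN                  becomes  true, and f1 < NaN becomes false
                                (when SNaNs need not be honored)  */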
9356
9357 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9358 && CONVERT_EXPR_P (arg0))
9359 {
9360 /* If we are widening one operand of an integer comparison,
9361 see if the other operand is similarly being widened. Perhaps we
9362 can do the comparison in the narrower type. */
9363 tem = fold_widened_comparison (code, type, arg0, arg1);
9364 if (tem)
9365 return tem;
9366
9367 /* Or if we are changing signedness. */
9368 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9369 if (tem)
9370 return tem;
9371 }
9372
9373 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9374 constant, we can simplify it. */
9375 if (TREE_CODE (arg1) == INTEGER_CST
9376 && (TREE_CODE (arg0) == MIN_EXPR
9377 || TREE_CODE (arg0) == MAX_EXPR)
9378 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9379 {
9380 tem = optimize_minmax_comparison (code, type, op0, op1);
9381 if (tem)
9382 return tem;
9383 }
9384
9385 /* Simplify comparison of something with itself. (For IEEE
9386 floating-point, we can only do some of these simplifications.) */
9387 if (operand_equal_p (arg0, arg1, 0))
9388 {
9389 switch (code)
9390 {
9391 case EQ_EXPR:
9392 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9393 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9394 return constant_boolean_node (1, type);
9395 break;
9396
9397 case GE_EXPR:
9398 case LE_EXPR:
9399 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9400 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9401 return constant_boolean_node (1, type);
9402 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9403
9404 case NE_EXPR:
9405 /* For NE, we can only do this simplification for integer types
9406 or when we don't honor IEEE floating point NaNs. */
9407 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9408 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9409 break;
9410 /* ... fall through ... */
9411 case GT_EXPR:
9412 case LT_EXPR:
9413 return constant_boolean_node (0, type);
9414 default:
9415 gcc_unreachable ();
9416 }
9417 }
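
/* For illustration of the self-comparison cases: for int x, "x >= x"
   folds to 1 and "x < x" folds to 0; for IEEE float f with NaNs
   honored, "f == f" is left alone (it is false when f is a NaN),
   "f >= f" becomes "f == f", but "f < f" still folds to 0. */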
9418
9419 /* If we are comparing an expression that just has comparisons
9420 of two integer values, arithmetic expressions of those comparisons,
9421 and constants, we can simplify it. There are only three cases
9422 to check: the two values can either be equal, the first can be
9423 greater, or the second can be greater. Fold the expression for
9424 those three values. Since each value must be 0 or 1, we have
9425 eight possibilities, each of which corresponds to the constant 0
9426 or 1 or one of the six possible comparisons.
9427
9428 This handles common cases like (a > b) == 0 but also handles
9429 expressions like ((x > y) - (y > x)) > 0, which supposedly
9430 occur in macroized code. */
9431
9432 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9433 {
9434 tree cval1 = 0, cval2 = 0;
9435 int save_p = 0;
9436
9437 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9438 /* Don't handle degenerate cases here; they should already
9439 have been handled anyway. */
9440 && cval1 != 0 && cval2 != 0
9441 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9442 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9443 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9444 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9445 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9446 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9447 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9448 {
9449 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9450 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9451
9452 /* We can't just pass T to eval_subst in case cval1 or cval2
9453 was the same as ARG1. */
9454
9455 tree high_result
9456 = fold_build2 (code, type,
9457 eval_subst (arg0, cval1, maxval,
9458 cval2, minval),
9459 arg1);
9460 tree equal_result
9461 = fold_build2 (code, type,
9462 eval_subst (arg0, cval1, maxval,
9463 cval2, maxval),
9464 arg1);
9465 tree low_result
9466 = fold_build2 (code, type,
9467 eval_subst (arg0, cval1, minval,
9468 cval2, maxval),
9469 arg1);
9470
9471 /* All three of these results should be 0 or 1. Confirm they are.
9472 Then use those values to select the proper code to use. */
9473
9474 if (TREE_CODE (high_result) == INTEGER_CST
9475 && TREE_CODE (equal_result) == INTEGER_CST
9476 && TREE_CODE (low_result) == INTEGER_CST)
9477 {
9478 /* Make a 3-bit mask with the high-order bit being the
9479 value for `>', the next for `=', and the low for `<'. */
9480 switch ((integer_onep (high_result) * 4)
9481 + (integer_onep (equal_result) * 2)
9482 + integer_onep (low_result))
9483 {
9484 case 0:
9485 /* Always false. */
9486 return omit_one_operand (type, integer_zero_node, arg0);
9487 case 1:
9488 code = LT_EXPR;
9489 break;
9490 case 2:
9491 code = EQ_EXPR;
9492 break;
9493 case 3:
9494 code = LE_EXPR;
9495 break;
9496 case 4:
9497 code = GT_EXPR;
9498 break;
9499 case 5:
9500 code = NE_EXPR;
9501 break;
9502 case 6:
9503 code = GE_EXPR;
9504 break;
9505 case 7:
9506 /* Always true. */
9507 return omit_one_operand (type, integer_one_node, arg0);
9508 }
9509
9510 if (save_p)
9511 return save_expr (build2 (code, type, cval1, cval2));
9512 return fold_build2 (code, type, cval1, cval2);
9513 }
9514 }
9515 }
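
/* For illustration: for ((x > y) - (y > x)) > 0 the three substitutions
   yield high_result == 1, equal_result == 0 and low_result == 0, i.e.
   mask 4, so the whole expression folds to x > y. */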
9516
9517 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9518 into a single range test. */
9519 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9520 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9521 && TREE_CODE (arg1) == INTEGER_CST
9522 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9523 && !integer_zerop (TREE_OPERAND (arg0, 1))
9524 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9525 && !TREE_OVERFLOW (arg1))
9526 {
9527 tem = fold_div_compare (code, type, arg0, arg1);
9528 if (tem != NULL_TREE)
9529 return tem;
9530 }
9531
9532 /* Fold ~X op ~Y as Y op X. */
9533 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9534 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9535 {
9536 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9537 return fold_build2 (code, type,
9538 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9539 TREE_OPERAND (arg0, 0));
9540 }
9541
9542 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9543 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9544 && TREE_CODE (arg1) == INTEGER_CST)
9545 {
9546 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9547 return fold_build2 (swap_tree_comparison (code), type,
9548 TREE_OPERAND (arg0, 0),
9549 fold_build1 (BIT_NOT_EXPR, cmp_type,
9550 fold_convert (cmp_type, arg1)));
9551 }
9552
9553 return NULL_TREE;
9554 }
9555
9556
9557 /* Subroutine of fold_binary. Optimize complex multiplications of the
9558 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9559 argument EXPR represents the expression "z" of type TYPE. */
9560
9561 static tree
9562 fold_mult_zconjz (tree type, tree expr)
9563 {
9564 tree itype = TREE_TYPE (type);
9565 tree rpart, ipart, tem;
9566
9567 if (TREE_CODE (expr) == COMPLEX_EXPR)
9568 {
9569 rpart = TREE_OPERAND (expr, 0);
9570 ipart = TREE_OPERAND (expr, 1);
9571 }
9572 else if (TREE_CODE (expr) == COMPLEX_CST)
9573 {
9574 rpart = TREE_REALPART (expr);
9575 ipart = TREE_IMAGPART (expr);
9576 }
9577 else
9578 {
9579 expr = save_expr (expr);
9580 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9581 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9582 }
9583
9584 rpart = save_expr (rpart);
9585 ipart = save_expr (ipart);
9586 tem = fold_build2 (PLUS_EXPR, itype,
9587 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9588 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9589 return fold_build2 (COMPLEX_EXPR, type, tem,
9590 fold_convert (itype, integer_zero_node));
9591 }
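
/* For illustration: with z == a + b*i we have
   z * conj(z) == (a + b*i) * (a - b*i) == a*a + b*b + 0*i,
   which is exactly the COMPLEX_EXPR built above. */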
9592
9593
9594 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9595 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9596 guarantees that P and N have the same least significant log2(M) bits.
9597 N is not otherwise constrained. In particular, N is not normalized to
9598 0 <= N < M as is common. In general, the precise value of P is unknown.
9599 M is chosen as large as possible such that constant N can be determined.
9600
9601 Returns M and sets *RESIDUE to N.
9602
9603 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9604 account. This is not always possible due to PR 35705.
9605 */
9606
9607 static unsigned HOST_WIDE_INT
9608 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9609 bool allow_func_align)
9610 {
9611 enum tree_code code;
9612
9613 *residue = 0;
9614
9615 code = TREE_CODE (expr);
9616 if (code == ADDR_EXPR)
9617 {
9618 expr = TREE_OPERAND (expr, 0);
9619 if (handled_component_p (expr))
9620 {
9621 HOST_WIDE_INT bitsize, bitpos;
9622 tree offset;
9623 enum machine_mode mode;
9624 int unsignedp, volatilep;
9625
9626 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9627 &mode, &unsignedp, &volatilep, false);
9628 *residue = bitpos / BITS_PER_UNIT;
9629 if (offset)
9630 {
9631 if (TREE_CODE (offset) == INTEGER_CST)
9632 *residue += TREE_INT_CST_LOW (offset);
9633 else
9634 /* We don't handle more complicated offset expressions. */
9635 return 1;
9636 }
9637 }
9638
9639 if (DECL_P (expr)
9640 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9641 return DECL_ALIGN_UNIT (expr);
9642 }
9643 else if (code == POINTER_PLUS_EXPR)
9644 {
9645 tree op0, op1;
9646 unsigned HOST_WIDE_INT modulus;
9647 enum tree_code inner_code;
9648
9649 op0 = TREE_OPERAND (expr, 0);
9650 STRIP_NOPS (op0);
9651 modulus = get_pointer_modulus_and_residue (op0, residue,
9652 allow_func_align);
9653
9654 op1 = TREE_OPERAND (expr, 1);
9655 STRIP_NOPS (op1);
9656 inner_code = TREE_CODE (op1);
9657 if (inner_code == INTEGER_CST)
9658 {
9659 *residue += TREE_INT_CST_LOW (op1);
9660 return modulus;
9661 }
9662 else if (inner_code == MULT_EXPR)
9663 {
9664 op1 = TREE_OPERAND (op1, 1);
9665 if (TREE_CODE (op1) == INTEGER_CST)
9666 {
9667 unsigned HOST_WIDE_INT align;
9668
9669 /* Compute the greatest power-of-2 divisor of op1. */
9670 align = TREE_INT_CST_LOW (op1);
9671 align &= -align;
9672
9673 /* If align is non-zero and less than modulus, replace
9674 modulus with align. If align is 0, then either op1 is 0
9675 or the greatest power-of-2 divisor of op1 doesn't fit in an
9676 unsigned HOST_WIDE_INT. In either case, no additional
9677 constraint is imposed. */
9678 if (align)
9679 modulus = MIN (modulus, align);
9680
9681 return modulus;
9682 }
9683 }
9684 }
9685
9686 /* If we get here, we were unable to determine anything useful about the
9687 expression. */
9688 return 1;
9689 }
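
/* For illustration, assuming "int a[4]" with 4-byte alignment: for the
   expression &a p+ n * 8 the recursion yields modulus 4 and residue 0
   from &a, and the MULT_EXPR branch keeps modulus MIN (4, 8) == 4, so
   the value of the expression is known to be 0 modulo 4. */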
9690
9691
9692 /* Fold a binary expression of code CODE and type TYPE with operands
9693 OP0 and OP1. Return the folded expression if folding is
9694 successful. Otherwise, return NULL_TREE. */
9695
9696 tree
9697 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9698 {
9699 enum tree_code_class kind = TREE_CODE_CLASS (code);
9700 tree arg0, arg1, tem;
9701 tree t1 = NULL_TREE;
9702 bool strict_overflow_p;
9703
9704 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9705 && TREE_CODE_LENGTH (code) == 2
9706 && op0 != NULL_TREE
9707 && op1 != NULL_TREE);
9708
9709 arg0 = op0;
9710 arg1 = op1;
9711
9712 /* Strip any conversions that don't change the mode. This is
9713 safe for every expression, except for a comparison expression
9714 because its signedness is derived from its operands. So, in
9715 the latter case, only strip conversions that don't change the
9716 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9717 preserved.
9718
9719 Note that this is done as an internal manipulation within the
9720 constant folder, in order to find the simplest representation
9721 of the arguments so that their form can be studied. In any
9722 case, the appropriate type conversions should be put back in
9723 the tree that will get out of the constant folder. */
9724
9725 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9726 {
9727 STRIP_SIGN_NOPS (arg0);
9728 STRIP_SIGN_NOPS (arg1);
9729 }
9730 else
9731 {
9732 STRIP_NOPS (arg0);
9733 STRIP_NOPS (arg1);
9734 }
9735
9736 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9737 constant but we can't do arithmetic on them. */
9738 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9739 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9740 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9741 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9742 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9743 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9744 {
9745 if (kind == tcc_binary)
9746 {
9747 /* Make sure type and arg0 have the same saturating flag. */
9748 gcc_assert (TYPE_SATURATING (type)
9749 == TYPE_SATURATING (TREE_TYPE (arg0)));
9750 tem = const_binop (code, arg0, arg1, 0);
9751 }
9752 else if (kind == tcc_comparison)
9753 tem = fold_relational_const (code, type, arg0, arg1);
9754 else
9755 tem = NULL_TREE;
9756
9757 if (tem != NULL_TREE)
9758 {
9759 if (TREE_TYPE (tem) != type)
9760 tem = fold_convert (type, tem);
9761 return tem;
9762 }
9763 }
9764
9765 /* If this is a commutative operation, and ARG0 is a constant, move it
9766 to ARG1 to reduce the number of tests below. */
9767 if (commutative_tree_code (code)
9768 && tree_swap_operands_p (arg0, arg1, true))
9769 return fold_build2 (code, type, op1, op0);
9770
9771 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9772
9773 First check for cases where an arithmetic operation is applied to a
9774 compound, conditional, or comparison operation. Push the arithmetic
9775 operation inside the compound or conditional to see if any folding
9776 can then be done. Convert comparison to conditional for this purpose.
9777 This also optimizes non-constant cases that used to be done in
9778 expand_expr.
9779
9780 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9781 where one of the operands is a truth value and the other is a truth
9782 value or a BIT_AND_EXPR with the constant 1. In that case, the
9783 code below would make the expression more complex. Change it to a
9784 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9785 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9786
9787 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9788 || code == EQ_EXPR || code == NE_EXPR)
9789 && ((truth_value_p (TREE_CODE (arg0))
9790 && (truth_value_p (TREE_CODE (arg1))
9791 || (TREE_CODE (arg1) == BIT_AND_EXPR
9792 && integer_onep (TREE_OPERAND (arg1, 1)))))
9793 || (truth_value_p (TREE_CODE (arg1))
9794 && (truth_value_p (TREE_CODE (arg0))
9795 || (TREE_CODE (arg0) == BIT_AND_EXPR
9796 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9797 {
9798 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9799 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9800 : TRUTH_XOR_EXPR,
9801 boolean_type_node,
9802 fold_convert (boolean_type_node, arg0),
9803 fold_convert (boolean_type_node, arg1));
9804
9805 if (code == EQ_EXPR)
9806 tem = invert_truthvalue (tem);
9807
9808 return fold_convert (type, tem);
9809 }
9810
9811 if (TREE_CODE_CLASS (code) == tcc_binary
9812 || TREE_CODE_CLASS (code) == tcc_comparison)
9813 {
9814 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9815 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9816 fold_build2 (code, type,
9817 fold_convert (TREE_TYPE (op0),
9818 TREE_OPERAND (arg0, 1)),
9819 op1));
9820 if (TREE_CODE (arg1) == COMPOUND_EXPR
9821 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9822 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9823 fold_build2 (code, type, op0,
9824 fold_convert (TREE_TYPE (op1),
9825 TREE_OPERAND (arg1, 1))));
9826
9827 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9828 {
9829 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9830 arg0, arg1,
9831 /*cond_first_p=*/1);
9832 if (tem != NULL_TREE)
9833 return tem;
9834 }
9835
9836 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9837 {
9838 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9839 arg1, arg0,
9840 /*cond_first_p=*/0);
9841 if (tem != NULL_TREE)
9842 return tem;
9843 }
9844 }
9845
9846 switch (code)
9847 {
9848 case POINTER_PLUS_EXPR:
9849 /* 0 +p index -> (type)index */
9850 if (integer_zerop (arg0))
9851 return non_lvalue (fold_convert (type, arg1));
9852
9853 /* PTR +p 0 -> PTR */
9854 if (integer_zerop (arg1))
9855 return non_lvalue (fold_convert (type, arg0));
9856
9857 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9858 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9859 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9860 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9861 fold_convert (sizetype, arg1),
9862 fold_convert (sizetype, arg0)));
9863
9864 /* index +p PTR -> PTR +p index */
9865 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9866 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9867 return fold_build2 (POINTER_PLUS_EXPR, type,
9868 fold_convert (type, arg1),
9869 fold_convert (sizetype, arg0));
9870
9871 /* (PTR +p B) +p A -> PTR +p (B + A) */
9872 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9873 {
9874 tree inner;
9875 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9876 tree arg00 = TREE_OPERAND (arg0, 0);
9877 inner = fold_build2 (PLUS_EXPR, sizetype,
9878 arg01, fold_convert (sizetype, arg1));
9879 return fold_convert (type,
9880 fold_build2 (POINTER_PLUS_EXPR,
9881 TREE_TYPE (arg00), arg00, inner));
9882 }
9883
9884 /* PTR_CST +p CST -> CST1 */
9885 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9886 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9887
9888 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9889 of the array. The loop optimizer sometimes produces this type of
9890 expression. */
9891 if (TREE_CODE (arg0) == ADDR_EXPR)
9892 {
9893 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9894 if (tem)
9895 return fold_convert (type, tem);
9896 }
9897
9898 return NULL_TREE;
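
/* For illustration of the POINTER_PLUS_EXPR folds above:

     (char *) 0 p+ n   becomes  (char *) n
     p p+ 0            becomes  p
     (p p+ 4) p+ 8     becomes  p p+ 12  */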
9899
9900 case PLUS_EXPR:
9901 /* A + (-B) -> A - B */
9902 if (TREE_CODE (arg1) == NEGATE_EXPR)
9903 return fold_build2 (MINUS_EXPR, type,
9904 fold_convert (type, arg0),
9905 fold_convert (type, TREE_OPERAND (arg1, 0)));
9906 /* (-A) + B -> B - A */
9907 if (TREE_CODE (arg0) == NEGATE_EXPR
9908 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9909 return fold_build2 (MINUS_EXPR, type,
9910 fold_convert (type, arg1),
9911 fold_convert (type, TREE_OPERAND (arg0, 0)));
9912
9913 if (INTEGRAL_TYPE_P (type))
9914 {
9915 /* Convert ~A + 1 to -A. */
9916 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9917 && integer_onep (arg1))
9918 return fold_build1 (NEGATE_EXPR, type,
9919 fold_convert (type, TREE_OPERAND (arg0, 0)));
9920
9921 /* ~X + X is -1. */
9922 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9923 && !TYPE_OVERFLOW_TRAPS (type))
9924 {
9925 tree tem = TREE_OPERAND (arg0, 0);
9926
9927 STRIP_NOPS (tem);
9928 if (operand_equal_p (tem, arg1, 0))
9929 {
9930 t1 = build_int_cst_type (type, -1);
9931 return omit_one_operand (type, t1, arg1);
9932 }
9933 }
9934
9935 /* X + ~X is -1. */
9936 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9937 && !TYPE_OVERFLOW_TRAPS (type))
9938 {
9939 tree tem = TREE_OPERAND (arg1, 0);
9940
9941 STRIP_NOPS (tem);
9942 if (operand_equal_p (arg0, tem, 0))
9943 {
9944 t1 = build_int_cst_type (type, -1);
9945 return omit_one_operand (type, t1, arg0);
9946 }
9947 }
9948
9949 /* X + (X / CST) * -CST is X % CST. */
9950 if (TREE_CODE (arg1) == MULT_EXPR
9951 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9952 && operand_equal_p (arg0,
9953 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9954 {
9955 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9956 tree cst1 = TREE_OPERAND (arg1, 1);
9957 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9958 if (sum && integer_zerop (sum))
9959 return fold_convert (type,
9960 fold_build2 (TRUNC_MOD_EXPR,
9961 TREE_TYPE (arg0), arg0, cst0));
9962 }
9963 }
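
/* For illustration of the integral PLUS_EXPR folds above:

     ~a + 1              becomes  -a
     ~x + x              becomes  -1   (when overflow does not trap)
     x + (x / 16) * -16  becomes  x % 16   (the constants sum to zero)  */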
9964
9965 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9966 same or one. Make sure type is not saturating.
9967 fold_plusminus_mult_expr will re-associate. */
9968 if ((TREE_CODE (arg0) == MULT_EXPR
9969 || TREE_CODE (arg1) == MULT_EXPR)
9970 && !TYPE_SATURATING (type)
9971 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9972 {
9973 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9974 if (tem)
9975 return tem;
9976 }
9977
9978 if (! FLOAT_TYPE_P (type))
9979 {
9980 if (integer_zerop (arg1))
9981 return non_lvalue (fold_convert (type, arg0));
9982
9983 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9984 with a constant, and the two constants have no bits in common,
9985 we should treat this as a BIT_IOR_EXPR since this may produce more
9986 simplifications. */
9987 if (TREE_CODE (arg0) == BIT_AND_EXPR
9988 && TREE_CODE (arg1) == BIT_AND_EXPR
9989 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9990 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9991 && integer_zerop (const_binop (BIT_AND_EXPR,
9992 TREE_OPERAND (arg0, 1),
9993 TREE_OPERAND (arg1, 1), 0)))
9994 {
9995 code = BIT_IOR_EXPR;
9996 goto bit_ior;
9997 }
9998
9999 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10000 (plus (plus (mult) (mult)) (foo)) so that we can
10001 take advantage of the factoring cases below. */
10002 if (((TREE_CODE (arg0) == PLUS_EXPR
10003 || TREE_CODE (arg0) == MINUS_EXPR)
10004 && TREE_CODE (arg1) == MULT_EXPR)
10005 || ((TREE_CODE (arg1) == PLUS_EXPR
10006 || TREE_CODE (arg1) == MINUS_EXPR)
10007 && TREE_CODE (arg0) == MULT_EXPR))
10008 {
10009 tree parg0, parg1, parg, marg;
10010 enum tree_code pcode;
10011
10012 if (TREE_CODE (arg1) == MULT_EXPR)
10013 parg = arg0, marg = arg1;
10014 else
10015 parg = arg1, marg = arg0;
10016 pcode = TREE_CODE (parg);
10017 parg0 = TREE_OPERAND (parg, 0);
10018 parg1 = TREE_OPERAND (parg, 1);
10019 STRIP_NOPS (parg0);
10020 STRIP_NOPS (parg1);
10021
10022 if (TREE_CODE (parg0) == MULT_EXPR
10023 && TREE_CODE (parg1) != MULT_EXPR)
10024 return fold_build2 (pcode, type,
10025 fold_build2 (PLUS_EXPR, type,
10026 fold_convert (type, parg0),
10027 fold_convert (type, marg)),
10028 fold_convert (type, parg1));
10029 if (TREE_CODE (parg0) != MULT_EXPR
10030 && TREE_CODE (parg1) == MULT_EXPR)
10031 return fold_build2 (PLUS_EXPR, type,
10032 fold_convert (type, parg0),
10033 fold_build2 (pcode, type,
10034 fold_convert (type, marg),
10035 fold_convert (type,
10036 parg1)));
10037 }
10038 }
10039 else
10040 {
10041 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10042 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10043 return non_lvalue (fold_convert (type, arg0));
10044
10045 /* Likewise if the operands are reversed. */
10046 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10047 return non_lvalue (fold_convert (type, arg1));
10048
10049 /* Convert X + -C into X - C. */
10050 if (TREE_CODE (arg1) == REAL_CST
10051 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10052 {
10053 tem = fold_negate_const (arg1, type);
10054 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10055 return fold_build2 (MINUS_EXPR, type,
10056 fold_convert (type, arg0),
10057 fold_convert (type, tem));
10058 }
10059
10060 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10061 to __complex__ ( x, y ). This is not the same for SNaNs or
10062 if signed zeros are involved. */
10063 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10064 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10065 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10066 {
10067 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10068 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10069 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10070 bool arg0rz = false, arg0iz = false;
10071 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10072 || (arg0i && (arg0iz = real_zerop (arg0i))))
10073 {
10074 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10075 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10076 if (arg0rz && arg1i && real_zerop (arg1i))
10077 {
10078 tree rp = arg1r ? arg1r
10079 : build1 (REALPART_EXPR, rtype, arg1);
10080 tree ip = arg0i ? arg0i
10081 : build1 (IMAGPART_EXPR, rtype, arg0);
10082 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10083 }
10084 else if (arg0iz && arg1r && real_zerop (arg1r))
10085 {
10086 tree rp = arg0r ? arg0r
10087 : build1 (REALPART_EXPR, rtype, arg0);
10088 tree ip = arg1i ? arg1i
10089 : build1 (IMAGPART_EXPR, rtype, arg1);
10090 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10091 }
10092 }
10093 }
10094
10095 if (flag_unsafe_math_optimizations
10096 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10097 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10098 && (tem = distribute_real_division (code, type, arg0, arg1)))
10099 return tem;
10100
10101 /* Convert x+x into x*2.0. */
10102 if (operand_equal_p (arg0, arg1, 0)
10103 && SCALAR_FLOAT_TYPE_P (type))
10104 return fold_build2 (MULT_EXPR, type, arg0,
10105 build_real (type, dconst2));
10106
10107 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10108 We associate floats only if the user has specified
10109 -fassociative-math. */
10110 if (flag_associative_math
10111 && TREE_CODE (arg1) == PLUS_EXPR
10112 && TREE_CODE (arg0) != MULT_EXPR)
10113 {
10114 tree tree10 = TREE_OPERAND (arg1, 0);
10115 tree tree11 = TREE_OPERAND (arg1, 1);
10116 if (TREE_CODE (tree11) == MULT_EXPR
10117 && TREE_CODE (tree10) == MULT_EXPR)
10118 {
10119 tree tree0;
10120 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10121 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10122 }
10123 }
10124 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10125 We associate floats only if the user has specified
10126 -fassociative-math. */
10127 if (flag_associative_math
10128 && TREE_CODE (arg0) == PLUS_EXPR
10129 && TREE_CODE (arg1) != MULT_EXPR)
10130 {
10131 tree tree00 = TREE_OPERAND (arg0, 0);
10132 tree tree01 = TREE_OPERAND (arg0, 1);
10133 if (TREE_CODE (tree01) == MULT_EXPR
10134 && TREE_CODE (tree00) == MULT_EXPR)
10135 {
10136 tree tree0;
10137 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10138 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10139 }
10140 }
10141 }
10142
10143 bit_rotate:
10144 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10145 is a rotate of A by C1 bits. */
10146 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10147 is a rotate of A by B bits. */
10148 {
10149 enum tree_code code0, code1;
10150 tree rtype;
10151 code0 = TREE_CODE (arg0);
10152 code1 = TREE_CODE (arg1);
10153 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10154 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10155 && operand_equal_p (TREE_OPERAND (arg0, 0),
10156 TREE_OPERAND (arg1, 0), 0)
10157 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10158 TYPE_UNSIGNED (rtype))
10159 /* Only create rotates in complete modes. Other cases are not
10160 expanded properly. */
10161 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10162 {
10163 tree tree01, tree11;
10164 enum tree_code code01, code11;
10165
10166 tree01 = TREE_OPERAND (arg0, 1);
10167 tree11 = TREE_OPERAND (arg1, 1);
10168 STRIP_NOPS (tree01);
10169 STRIP_NOPS (tree11);
10170 code01 = TREE_CODE (tree01);
10171 code11 = TREE_CODE (tree11);
10172 if (code01 == INTEGER_CST
10173 && code11 == INTEGER_CST
10174 && TREE_INT_CST_HIGH (tree01) == 0
10175 && TREE_INT_CST_HIGH (tree11) == 0
10176 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10177 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10178 return fold_convert (type,
10179 build2 (LROTATE_EXPR,
10180 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10181 TREE_OPERAND (arg0, 0),
10182 code0 == LSHIFT_EXPR
10183 ? tree01 : tree11));
10184 else if (code11 == MINUS_EXPR)
10185 {
10186 tree tree110, tree111;
10187 tree110 = TREE_OPERAND (tree11, 0);
10188 tree111 = TREE_OPERAND (tree11, 1);
10189 STRIP_NOPS (tree110);
10190 STRIP_NOPS (tree111);
10191 if (TREE_CODE (tree110) == INTEGER_CST
10192 && 0 == compare_tree_int (tree110,
10193 TYPE_PRECISION
10194 (TREE_TYPE (TREE_OPERAND
10195 (arg0, 0))))
10196 && operand_equal_p (tree01, tree111, 0))
10197 return fold_convert (type,
10198 build2 ((code0 == LSHIFT_EXPR
10199 ? LROTATE_EXPR
10200 : RROTATE_EXPR),
10201 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10202 TREE_OPERAND (arg0, 0), tree01));
10203 }
10204 else if (code01 == MINUS_EXPR)
10205 {
10206 tree tree010, tree011;
10207 tree010 = TREE_OPERAND (tree01, 0);
10208 tree011 = TREE_OPERAND (tree01, 1);
10209 STRIP_NOPS (tree010);
10210 STRIP_NOPS (tree011);
10211 if (TREE_CODE (tree010) == INTEGER_CST
10212 && 0 == compare_tree_int (tree010,
10213 TYPE_PRECISION
10214 (TREE_TYPE (TREE_OPERAND
10215 (arg0, 0))))
10216 && operand_equal_p (tree11, tree011, 0))
10217 return fold_convert (type,
10218 build2 ((code0 != LSHIFT_EXPR
10219 ? LROTATE_EXPR
10220 : RROTATE_EXPR),
10221 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10222 TREE_OPERAND (arg0, 0), tree11));
10223 }
10224 }
10225 }
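
/* For illustration, with unsigned 32-bit x:

     (x << 3) + (x >> 29)        becomes  x rotated left by 3
     (x << n) + (x >> (32 - n))  becomes  x rotated left by n  */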
10226
10227 associate:
10228 /* In most languages, we can't associate operations on floats through
10229 parentheses. Rather than remember where the parentheses were, we
10230 don't associate floats at all, unless the user has specified
10231 -fassociative-math.
10232 And we need to make sure the type is not saturating. */
10233
10234 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10235 && !TYPE_SATURATING (type))
10236 {
10237 tree var0, con0, lit0, minus_lit0;
10238 tree var1, con1, lit1, minus_lit1;
10239 bool ok = true;
10240
10241 /* Split both trees into variables, constants, and literals. Then
10242 associate each group together, the constants with literals,
10243 then the result with variables. This increases the chances of
10244 literals being recombined later and of generating relocatable
10245 expressions for the sum of a constant and literal. */
10246 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10247 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10248 code == MINUS_EXPR);
10249
10250 /* With undefined overflow we can only associate constants
10251 with one variable. */
10252 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10253 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10254 && var0 && var1)
10255 {
10256 tree tmp0 = var0;
10257 tree tmp1 = var1;
10258
10259 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10260 tmp0 = TREE_OPERAND (tmp0, 0);
10261 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10262 tmp1 = TREE_OPERAND (tmp1, 0);
10263 /* The only case we can still associate with two variables
10264 is if they are the same, modulo negation. */
10265 if (!operand_equal_p (tmp0, tmp1, 0))
10266 ok = false;
10267 }
10268
10269 /* Only do something if we found more than two objects. Otherwise,
10270 nothing has changed and we risk infinite recursion. */
10271 if (ok
10272 && (2 < ((var0 != 0) + (var1 != 0)
10273 + (con0 != 0) + (con1 != 0)
10274 + (lit0 != 0) + (lit1 != 0)
10275 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10276 {
10277 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10278 if (code == MINUS_EXPR)
10279 code = PLUS_EXPR;
10280
10281 var0 = associate_trees (var0, var1, code, type);
10282 con0 = associate_trees (con0, con1, code, type);
10283 lit0 = associate_trees (lit0, lit1, code, type);
10284 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10285
10286 /* Preserve the MINUS_EXPR if the negative part of the literal is
10287 greater than the positive part. Otherwise, the multiplicative
10288 folding code (i.e. extract_muldiv) may be fooled when
10289 unsigned constants are subtracted, as in the following
10290 example: ((X*2 + 4) - 8U)/2. */
10291 if (minus_lit0 && lit0)
10292 {
10293 if (TREE_CODE (lit0) == INTEGER_CST
10294 && TREE_CODE (minus_lit0) == INTEGER_CST
10295 && tree_int_cst_lt (lit0, minus_lit0))
10296 {
10297 minus_lit0 = associate_trees (minus_lit0, lit0,
10298 MINUS_EXPR, type);
10299 lit0 = 0;
10300 }
10301 else
10302 {
10303 lit0 = associate_trees (lit0, minus_lit0,
10304 MINUS_EXPR, type);
10305 minus_lit0 = 0;
10306 }
10307 }
10308 if (minus_lit0)
10309 {
10310 if (con0 == 0)
10311 return fold_convert (type,
10312 associate_trees (var0, minus_lit0,
10313 MINUS_EXPR, type));
10314 else
10315 {
10316 con0 = associate_trees (con0, minus_lit0,
10317 MINUS_EXPR, type);
10318 return fold_convert (type,
10319 associate_trees (var0, con0,
10320 PLUS_EXPR, type));
10321 }
10322 }
10323
10324 con0 = associate_trees (con0, lit0, code, type);
10325 return fold_convert (type, associate_trees (var0, con0,
10326 code, type));
10327 }
10328 }
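
/* For illustration of the reassociation above: (x + 4) + (y + 5) splits
   into variables x, y and literals 4, 5, and regroups as (x + y) + 9;
   in ((X*2 + 4) - 8U)/2 the literals combine to the MINUS_EXPR form
   (X*2 - 4U)/2 so that extract_muldiv is not fooled. */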
10329
10330 return NULL_TREE;
10331
10332 case MINUS_EXPR:
10333 /* Pointer simplifications for subtraction, simple reassociations. */
10334 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10335 {
10336 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10337 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10338 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10339 {
10340 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10341 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10342 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10343 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10344 return fold_build2 (PLUS_EXPR, type,
10345 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10346 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10347 }
10348 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10349 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10350 {
10351 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10352 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10353 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10354 if (tmp)
10355 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10356 }
10357 }
10358 /* A - (-B) -> A + B */
10359 if (TREE_CODE (arg1) == NEGATE_EXPR)
10360 return fold_build2 (PLUS_EXPR, type, op0,
10361 fold_convert (type, TREE_OPERAND (arg1, 0)));
10362 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10363 if (TREE_CODE (arg0) == NEGATE_EXPR
10364 && (FLOAT_TYPE_P (type)
10365 || INTEGRAL_TYPE_P (type))
10366 && negate_expr_p (arg1)
10367 && reorder_operands_p (arg0, arg1))
10368 return fold_build2 (MINUS_EXPR, type,
10369 fold_convert (type, negate_expr (arg1)),
10370 fold_convert (type, TREE_OPERAND (arg0, 0)));
10371 /* Convert -A - 1 to ~A. */
10372 if (INTEGRAL_TYPE_P (type)
10373 && TREE_CODE (arg0) == NEGATE_EXPR
10374 && integer_onep (arg1)
10375 && !TYPE_OVERFLOW_TRAPS (type))
10376 return fold_build1 (BIT_NOT_EXPR, type,
10377 fold_convert (type, TREE_OPERAND (arg0, 0)));
10378
10379 /* Convert -1 - A to ~A. */
10380 if (INTEGRAL_TYPE_P (type)
10381 && integer_all_onesp (arg0))
10382 return fold_build1 (BIT_NOT_EXPR, type, op1);
10383
10384
10385 /* X - (X / CST) * CST is X % CST. */
10386 if (INTEGRAL_TYPE_P (type)
10387 && TREE_CODE (arg1) == MULT_EXPR
10388 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10389 && operand_equal_p (arg0,
10390 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10391 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10392 TREE_OPERAND (arg1, 1), 0))
10393 return fold_convert (type,
10394 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10395 arg0, TREE_OPERAND (arg1, 1)));
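/* Annotation (not in the original source): e.g. with CST == 16,
   X - (X / 16) * 16 folds to X % 16; the truncating division has
   already discarded exactly the bits the multiply re-creates. */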
10396
10397 if (! FLOAT_TYPE_P (type))
10398 {
10399 if (integer_zerop (arg0))
10400 return negate_expr (fold_convert (type, arg1));
10401 if (integer_zerop (arg1))
10402 return non_lvalue (fold_convert (type, arg0));
10403
10404 /* Fold A - (A & B) into ~B & A. */
10405 if (!TREE_SIDE_EFFECTS (arg0)
10406 && TREE_CODE (arg1) == BIT_AND_EXPR)
10407 {
10408 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10409 {
10410 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10411 return fold_build2 (BIT_AND_EXPR, type,
10412 fold_build1 (BIT_NOT_EXPR, type, arg10),
10413 fold_convert (type, arg0));
10414 }
10415 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10416 {
10417 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10418 return fold_build2 (BIT_AND_EXPR, type,
10419 fold_build1 (BIT_NOT_EXPR, type, arg11),
10420 fold_convert (type, arg0));
10421 }
10422 }
10423
10424 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10425 any power of 2 minus 1. */
10426 if (TREE_CODE (arg0) == BIT_AND_EXPR
10427 && TREE_CODE (arg1) == BIT_AND_EXPR
10428 && operand_equal_p (TREE_OPERAND (arg0, 0),
10429 TREE_OPERAND (arg1, 0), 0))
10430 {
10431 tree mask0 = TREE_OPERAND (arg0, 1);
10432 tree mask1 = TREE_OPERAND (arg1, 1);
10433 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10434
10435 if (operand_equal_p (tem, mask1, 0))
10436 {
10437 tem = fold_build2 (BIT_XOR_EXPR, type,
10438 TREE_OPERAND (arg0, 0), mask1);
10439 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10440 }
10441 }
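/* Annotation (not in the original source): sanity check of the fold
   above with B == 7, A == 13: (13 & ~7) - (13 & 7) == 8 - 5 == 3,
   and (13 ^ 7) - 7 == 10 - 7 == 3. */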
10442 }
10443
10444 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10445 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10446 return non_lvalue (fold_convert (type, arg0));
10447
10448 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10449 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10450 (-ARG1 + ARG0) reduces to -ARG1. */
10451 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10452 return negate_expr (fold_convert (type, arg1));
10453
10454 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10455 __complex__ ( x, -y ). This is not the same for SNaNs or if
10456 signed zeros are involved. */
10457 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10458 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10459 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10460 {
10461 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10462 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10463 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10464 bool arg0rz = false, arg0iz = false;
10465 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10466 || (arg0i && (arg0iz = real_zerop (arg0i))))
10467 {
10468 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10469 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10470 if (arg0rz && arg1i && real_zerop (arg1i))
10471 {
10472 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10473 arg1r ? arg1r
10474 : build1 (REALPART_EXPR, rtype, arg1));
10475 tree ip = arg0i ? arg0i
10476 : build1 (IMAGPART_EXPR, rtype, arg0);
10477 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10478 }
10479 else if (arg0iz && arg1r && real_zerop (arg1r))
10480 {
10481 tree rp = arg0r ? arg0r
10482 : build1 (REALPART_EXPR, rtype, arg0);
10483 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10484 arg1i ? arg1i
10485 : build1 (IMAGPART_EXPR, rtype, arg1));
10486 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10487 }
10488 }
10489 }
10490
10491 /* Fold &x - &x. This can happen from &x.foo - &x.
10492 This is unsafe for certain floats even in non-IEEE formats.
10493 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10494 Also note that operand_equal_p is always false if an operand
10495 is volatile. */
10496
10497 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10498 && operand_equal_p (arg0, arg1, 0))
10499 return fold_convert (type, integer_zero_node);
10500
10501 /* A - B -> A + (-B) if B is easily negatable. */
10502 if (negate_expr_p (arg1)
10503 && ((FLOAT_TYPE_P (type)
10504 /* Avoid this transformation if B is a positive REAL_CST. */
10505 && (TREE_CODE (arg1) != REAL_CST
10506 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10507 || INTEGRAL_TYPE_P (type)))
10508 return fold_build2 (PLUS_EXPR, type,
10509 fold_convert (type, arg0),
10510 fold_convert (type, negate_expr (arg1)));
10511
10512 /* Try folding difference of addresses. */
10513 {
10514 HOST_WIDE_INT diff;
10515
10516 if ((TREE_CODE (arg0) == ADDR_EXPR
10517 || TREE_CODE (arg1) == ADDR_EXPR)
10518 && ptr_difference_const (arg0, arg1, &diff))
10519 return build_int_cst_type (type, diff);
10520 }
10521
10522 /* Fold &a[i] - &a[j] to i-j. */
10523 if (TREE_CODE (arg0) == ADDR_EXPR
10524 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10525 && TREE_CODE (arg1) == ADDR_EXPR
10526 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10527 {
10528 tree aref0 = TREE_OPERAND (arg0, 0);
10529 tree aref1 = TREE_OPERAND (arg1, 0);
10530 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10531 TREE_OPERAND (aref1, 0), 0))
10532 {
10533 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10534 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10535 tree esz = array_ref_element_size (aref0);
10536 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10537 return fold_build2 (MULT_EXPR, type, diff,
10538 fold_convert (type, esz));
10540 }
10541 }
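/* Annotation (not in the original source): for int a[10], the fold
   above turns &a[5] - &a[2] into (5 - 2) * sizeof (int), i.e. the
   byte distance; the division by the element size that yields the
   C-level result 3 is performed by the enclosing expression. */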
10542
10543 if (FLOAT_TYPE_P (type)
10544 && flag_unsafe_math_optimizations
10545 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10546 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10547 && (tem = distribute_real_division (code, type, arg0, arg1)))
10548 return tem;
10549
10550 /* Handle (A1 * C1) - (A2 * C2) where A1 and A2, or C1 and C2, are
10551 either equal or equal to one. Make sure the type is not saturating;
10552 fold_plusminus_mult_expr will re-associate. */
10553 if ((TREE_CODE (arg0) == MULT_EXPR
10554 || TREE_CODE (arg1) == MULT_EXPR)
10555 && !TYPE_SATURATING (type)
10556 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10557 {
10558 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10559 if (tem)
10560 return tem;
10561 }
10562
10563 goto associate;
10564
10565 case MULT_EXPR:
10566 /* (-A) * (-B) -> A * B */
10567 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10568 return fold_build2 (MULT_EXPR, type,
10569 fold_convert (type, TREE_OPERAND (arg0, 0)),
10570 fold_convert (type, negate_expr (arg1)));
10571 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10572 return fold_build2 (MULT_EXPR, type,
10573 fold_convert (type, negate_expr (arg0)),
10574 fold_convert (type, TREE_OPERAND (arg1, 0)));
10575
10576 if (! FLOAT_TYPE_P (type))
10577 {
10578 if (integer_zerop (arg1))
10579 return omit_one_operand (type, arg1, arg0);
10580 if (integer_onep (arg1))
10581 return non_lvalue (fold_convert (type, arg0));
10582 /* Transform x * -1 into -x. Make sure to do the negation
10583 on the original operand with conversions not stripped
10584 because we can only strip non-sign-changing conversions. */
10585 if (integer_all_onesp (arg1))
10586 return fold_convert (type, negate_expr (op0));
10587 /* Transform x * -C into -x * C if x is easily negatable. */
10588 if (TREE_CODE (arg1) == INTEGER_CST
10589 && tree_int_cst_sgn (arg1) == -1
10590 && negate_expr_p (arg0)
10591 && (tem = negate_expr (arg1)) != arg1
10592 && !TREE_OVERFLOW (tem))
10593 return fold_build2 (MULT_EXPR, type,
10594 fold_convert (type, negate_expr (arg0)), tem);
10595
10596 /* (a * (1 << b)) is (a << b) */
10597 if (TREE_CODE (arg1) == LSHIFT_EXPR
10598 && integer_onep (TREE_OPERAND (arg1, 0)))
10599 return fold_build2 (LSHIFT_EXPR, type, op0,
10600 TREE_OPERAND (arg1, 1));
10601 if (TREE_CODE (arg0) == LSHIFT_EXPR
10602 && integer_onep (TREE_OPERAND (arg0, 0)))
10603 return fold_build2 (LSHIFT_EXPR, type, op1,
10604 TREE_OPERAND (arg0, 1));
10605
10606 /* (A + A) * C -> A * 2 * C */
10607 if (TREE_CODE (arg0) == PLUS_EXPR
10608 && TREE_CODE (arg1) == INTEGER_CST
10609 && operand_equal_p (TREE_OPERAND (arg0, 0),
10610 TREE_OPERAND (arg0, 1), 0))
10611 return fold_build2 (MULT_EXPR, type,
10612 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10613 TREE_OPERAND (arg0, 1)),
10614 fold_build2 (MULT_EXPR, type,
10615 build_int_cst (type, 2), arg1));
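/* Annotation (not in the original source): e.g. (x + x) * 3 is
   rewritten as x * (2 * 3), which constant-folds to x * 6. */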
10616
10617 strict_overflow_p = false;
10618 if (TREE_CODE (arg1) == INTEGER_CST
10619 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10620 &strict_overflow_p)))
10621 {
10622 if (strict_overflow_p)
10623 fold_overflow_warning (("assuming signed overflow does not "
10624 "occur when simplifying "
10625 "multiplication"),
10626 WARN_STRICT_OVERFLOW_MISC);
10627 return fold_convert (type, tem);
10628 }
10629
10630 /* Optimize z * conj(z) for integer complex numbers. */
10631 if (TREE_CODE (arg0) == CONJ_EXPR
10632 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10633 return fold_mult_zconjz (type, arg1);
10634 if (TREE_CODE (arg1) == CONJ_EXPR
10635 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10636 return fold_mult_zconjz (type, arg0);
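/* Annotation (not in the original source): for complex integers,
   z * conj(z) folds to __complex__ (re*re + im*im, 0), e.g.
   (3 + 4i) * (3 - 4i) == 25 + 0i. */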
10637 }
10638 else
10639 {
10640 /* Maybe fold x * 0 to 0. The expressions aren't the same
10641 when x is NaN, since x * 0 is also NaN. Nor are they the
10642 same in modes with signed zeros, since multiplying a
10643 negative value by 0 gives -0, not +0. */
10644 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10645 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10646 && real_zerop (arg1))
10647 return omit_one_operand (type, arg1, arg0);
10648 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10649 Likewise for complex arithmetic with signed zeros. */
10650 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10651 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10652 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10653 && real_onep (arg1))
10654 return non_lvalue (fold_convert (type, arg0));
10655
10656 /* Transform x * -1.0 into -x. */
10657 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10658 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10659 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10660 && real_minus_onep (arg1))
10661 return fold_convert (type, negate_expr (arg0));
10662
10663 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10664 the result for floating-point types due to rounding, so it is applied
10665 only if -fassociative-math was specified. */
10666 if (flag_associative_math
10667 && TREE_CODE (arg0) == RDIV_EXPR
10668 && TREE_CODE (arg1) == REAL_CST
10669 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10670 {
10671 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10672 arg1, 0);
10673 if (tem)
10674 return fold_build2 (RDIV_EXPR, type, tem,
10675 TREE_OPERAND (arg0, 1));
10676 }
10677
10678 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10679 if (operand_equal_p (arg0, arg1, 0))
10680 {
10681 tree tem = fold_strip_sign_ops (arg0);
10682 if (tem != NULL_TREE)
10683 {
10684 tem = fold_convert (type, tem);
10685 return fold_build2 (MULT_EXPR, type, tem, tem);
10686 }
10687 }
10688
10689 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10690 This is not the same for NaNs or if signed zeros are
10691 involved. */
10692 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10693 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10694 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10695 && TREE_CODE (arg1) == COMPLEX_CST
10696 && real_zerop (TREE_REALPART (arg1)))
10697 {
10698 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10699 if (real_onep (TREE_IMAGPART (arg1)))
10700 return fold_build2 (COMPLEX_EXPR, type,
10701 negate_expr (fold_build1 (IMAGPART_EXPR,
10702 rtype, arg0)),
10703 fold_build1 (REALPART_EXPR, rtype, arg0));
10704 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10705 return fold_build2 (COMPLEX_EXPR, type,
10706 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10707 negate_expr (fold_build1 (REALPART_EXPR,
10708 rtype, arg0)));
10709 }
10710
10711 /* Optimize z * conj(z) for floating point complex numbers.
10712 Guarded by flag_unsafe_math_optimizations as non-finite
10713 imaginary components don't produce scalar results. */
10714 if (flag_unsafe_math_optimizations
10715 && TREE_CODE (arg0) == CONJ_EXPR
10716 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10717 return fold_mult_zconjz (type, arg1);
10718 if (flag_unsafe_math_optimizations
10719 && TREE_CODE (arg1) == CONJ_EXPR
10720 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10721 return fold_mult_zconjz (type, arg0);
10722
10723 if (flag_unsafe_math_optimizations)
10724 {
10725 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10726 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10727
10728 /* Optimizations of root(...)*root(...). */
10729 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10730 {
10731 tree rootfn, arg;
10732 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10733 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10734
10735 /* Optimize sqrt(x)*sqrt(x) as x. */
10736 if (BUILTIN_SQRT_P (fcode0)
10737 && operand_equal_p (arg00, arg10, 0)
10738 && ! HONOR_SNANS (TYPE_MODE (type)))
10739 return arg00;
10740
10741 /* Optimize root(x)*root(y) as root(x*y). */
10742 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10743 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10744 return build_call_expr (rootfn, 1, arg);
10745 }
10746
10747 /* Optimize expN(x)*expN(y) as expN(x+y). */
10748 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10749 {
10750 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10751 tree arg = fold_build2 (PLUS_EXPR, type,
10752 CALL_EXPR_ARG (arg0, 0),
10753 CALL_EXPR_ARG (arg1, 0));
10754 return build_call_expr (expfn, 1, arg);
10755 }
10756
10757 /* Optimizations of pow(...)*pow(...). */
10758 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10759 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10760 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10761 {
10762 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10763 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10764 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10765 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10766
10767 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10768 if (operand_equal_p (arg01, arg11, 0))
10769 {
10770 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10771 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10772 return build_call_expr (powfn, 2, arg, arg01);
10773 }
10774
10775 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10776 if (operand_equal_p (arg00, arg10, 0))
10777 {
10778 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10779 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10780 return build_call_expr (powfn, 2, arg00, arg);
10781 }
10782 }
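/* Annotation (not in the original source): both pow folds above are
   safe only under -funsafe-math-optimizations; e.g.
   pow (x, 2.5) * pow (x, 0.5) becomes pow (x, 3.0) even though the
   rounding of the intermediate results may differ. */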
10783
10784 /* Optimize tan(x)*cos(x) as sin(x). */
10785 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10786 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10787 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10788 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10789 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10790 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10791 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10792 CALL_EXPR_ARG (arg1, 0), 0))
10793 {
10794 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10795
10796 if (sinfn != NULL_TREE)
10797 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10798 }
10799
10800 /* Optimize x*pow(x,c) as pow(x,c+1). */
10801 if (fcode1 == BUILT_IN_POW
10802 || fcode1 == BUILT_IN_POWF
10803 || fcode1 == BUILT_IN_POWL)
10804 {
10805 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10806 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10807 if (TREE_CODE (arg11) == REAL_CST
10808 && !TREE_OVERFLOW (arg11)
10809 && operand_equal_p (arg0, arg10, 0))
10810 {
10811 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10812 REAL_VALUE_TYPE c;
10813 tree arg;
10814
10815 c = TREE_REAL_CST (arg11);
10816 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10817 arg = build_real (type, c);
10818 return build_call_expr (powfn, 2, arg0, arg);
10819 }
10820 }
10821
10822 /* Optimize pow(x,c)*x as pow(x,c+1). */
10823 if (fcode0 == BUILT_IN_POW
10824 || fcode0 == BUILT_IN_POWF
10825 || fcode0 == BUILT_IN_POWL)
10826 {
10827 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10828 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10829 if (TREE_CODE (arg01) == REAL_CST
10830 && !TREE_OVERFLOW (arg01)
10831 && operand_equal_p (arg1, arg00, 0))
10832 {
10833 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10834 REAL_VALUE_TYPE c;
10835 tree arg;
10836
10837 c = TREE_REAL_CST (arg01);
10838 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10839 arg = build_real (type, c);
10840 return build_call_expr (powfn, 2, arg1, arg);
10841 }
10842 }
10843
10844 /* Canonicalize x*x as pow(x,2.0); expansion turns it back into x*x. */
10845 if (optimize_function_for_speed_p (cfun)
10846 && operand_equal_p (arg0, arg1, 0))
10847 {
10848 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10849
10850 if (powfn)
10851 {
10852 tree arg = build_real (type, dconst2);
10853 return build_call_expr (powfn, 2, arg0, arg);
10854 }
10855 }
10856 }
10857 }
10858 goto associate;
10859
10860 case BIT_IOR_EXPR:
10861 bit_ior:
10862 if (integer_all_onesp (arg1))
10863 return omit_one_operand (type, arg1, arg0);
10864 if (integer_zerop (arg1))
10865 return non_lvalue (fold_convert (type, arg0));
10866 if (operand_equal_p (arg0, arg1, 0))
10867 return non_lvalue (fold_convert (type, arg0));
10868
10869 /* ~X | X is -1. */
10870 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10871 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10872 {
10873 t1 = fold_convert (type, integer_zero_node);
10874 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10875 return omit_one_operand (type, t1, arg1);
10876 }
10877
10878 /* X | ~X is -1. */
10879 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10880 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10881 {
10882 t1 = fold_convert (type, integer_zero_node);
10883 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10884 return omit_one_operand (type, t1, arg0);
10885 }
10886
10887 /* Canonicalize (X & C1) | C2. */
10888 if (TREE_CODE (arg0) == BIT_AND_EXPR
10889 && TREE_CODE (arg1) == INTEGER_CST
10890 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10891 {
10892 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10893 int width = TYPE_PRECISION (type), w;
10894 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10895 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10896 hi2 = TREE_INT_CST_HIGH (arg1);
10897 lo2 = TREE_INT_CST_LOW (arg1);
10898
10899 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10900 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10901 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10902
10903 if (width > HOST_BITS_PER_WIDE_INT)
10904 {
10905 mhi = (unsigned HOST_WIDE_INT) -1
10906 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10907 mlo = -1;
10908 }
10909 else
10910 {
10911 mhi = 0;
10912 mlo = (unsigned HOST_WIDE_INT) -1
10913 >> (HOST_BITS_PER_WIDE_INT - width);
10914 }
10915
10916 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10917 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10918 return fold_build2 (BIT_IOR_EXPR, type,
10919 TREE_OPERAND (arg0, 0), arg1);
10920
10921 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10922 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10923 mode which allows further optimizations. */
10924 hi1 &= mhi;
10925 lo1 &= mlo;
10926 hi2 &= mhi;
10927 lo2 &= mlo;
10928 hi3 = hi1 & ~hi2;
10929 lo3 = lo1 & ~lo2;
10930 for (w = BITS_PER_UNIT;
10931 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10932 w <<= 1)
10933 {
10934 unsigned HOST_WIDE_INT mask
10935 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10936 if (((lo1 | lo2) & mask) == mask
10937 && (lo1 & ~mask) == 0 && hi1 == 0)
10938 {
10939 hi3 = 0;
10940 lo3 = mask;
10941 break;
10942 }
10943 }
10944 if (hi3 != hi1 || lo3 != lo1)
10945 return fold_build2 (BIT_IOR_EXPR, type,
10946 fold_build2 (BIT_AND_EXPR, type,
10947 TREE_OPERAND (arg0, 0),
10948 build_int_cst_wide (type,
10949 lo3, hi3)),
10950 arg1);
10951 }
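/* Annotation (not in the original source): e.g. (X & 0x3f) | 0x1c
   becomes (X & 0x23) | 0x1c, because the bits of C1 that C2 forces
   to one (0x1c) are redundant in the AND mask. */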
10952
10953 /* (X & Y) | Y is (X, Y). */
10954 if (TREE_CODE (arg0) == BIT_AND_EXPR
10955 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10956 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10957 /* (X & Y) | X is (Y, X). */
10958 if (TREE_CODE (arg0) == BIT_AND_EXPR
10959 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10960 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10961 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10962 /* X | (X & Y) is (Y, X). */
10963 if (TREE_CODE (arg1) == BIT_AND_EXPR
10964 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10965 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10966 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10967 /* X | (Y & X) is (Y, X). */
10968 if (TREE_CODE (arg1) == BIT_AND_EXPR
10969 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10970 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10971 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10972
10973 t1 = distribute_bit_expr (code, type, arg0, arg1);
10974 if (t1 != NULL_TREE)
10975 return t1;
10976
10977 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10978
10979 This results in more efficient code for machines without a NAND
10980 instruction. Combine will canonicalize to the first form
10981 which will allow use of NAND instructions provided by the
10982 backend if they exist. */
10983 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10984 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10985 {
10986 return fold_build1 (BIT_NOT_EXPR, type,
10987 build2 (BIT_AND_EXPR, type,
10988 fold_convert (type,
10989 TREE_OPERAND (arg0, 0)),
10990 fold_convert (type,
10991 TREE_OPERAND (arg1, 0))));
10992 }
10993
10994 /* See if this can be simplified into a rotate first. If that
10995 is unsuccessful continue in the association code. */
10996 goto bit_rotate;
10997
10998 case BIT_XOR_EXPR:
10999 if (integer_zerop (arg1))
11000 return non_lvalue (fold_convert (type, arg0));
11001 if (integer_all_onesp (arg1))
11002 return fold_build1 (BIT_NOT_EXPR, type, op0);
11003 if (operand_equal_p (arg0, arg1, 0))
11004 return omit_one_operand (type, integer_zero_node, arg0);
11005
11006 /* ~X ^ X is -1. */
11007 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11008 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11009 {
11010 t1 = fold_convert (type, integer_zero_node);
11011 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11012 return omit_one_operand (type, t1, arg1);
11013 }
11014
11015 /* X ^ ~X is -1. */
11016 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11018 {
11019 t1 = fold_convert (type, integer_zero_node);
11020 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11021 return omit_one_operand (type, t1, arg0);
11022 }
11023
11024 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11025 with a constant, and the two constants have no bits in common,
11026 we should treat this as a BIT_IOR_EXPR since this may produce more
11027 simplifications. */
11028 if (TREE_CODE (arg0) == BIT_AND_EXPR
11029 && TREE_CODE (arg1) == BIT_AND_EXPR
11030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11031 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11032 && integer_zerop (const_binop (BIT_AND_EXPR,
11033 TREE_OPERAND (arg0, 1),
11034 TREE_OPERAND (arg1, 1), 0)))
11035 {
11036 code = BIT_IOR_EXPR;
11037 goto bit_ior;
11038 }
11039
11040 /* (X | Y) ^ X -> Y & ~X. */
11041 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11042 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11043 {
11044 tree t2 = TREE_OPERAND (arg0, 1);
11045 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11046 arg1);
11047 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11048 fold_convert (type, t1));
11049 return t1;
11050 }
11051
11052 /* (Y | X) ^ X -> Y & ~X. */
11053 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11054 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11055 {
11056 tree t2 = TREE_OPERAND (arg0, 0);
11057 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11058 arg1);
11059 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11060 fold_convert (type, t1));
11061 return t1;
11062 }
11063
11064 /* X ^ (X | Y) -> Y & ~X. */
11065 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11066 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11067 {
11068 tree t2 = TREE_OPERAND (arg1, 1);
11069 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11070 arg0);
11071 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11072 fold_convert (type, t1));
11073 return t1;
11074 }
11075
11076 /* X ^ (Y | X) -> Y & ~X. */
11077 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11078 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11079 {
11080 tree t2 = TREE_OPERAND (arg1, 0);
11081 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11082 arg0);
11083 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11084 fold_convert (type, t1));
11085 return t1;
11086 }
11087
11088 /* Convert ~X ^ ~Y to X ^ Y. */
11089 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11090 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11091 return fold_build2 (code, type,
11092 fold_convert (type, TREE_OPERAND (arg0, 0)),
11093 fold_convert (type, TREE_OPERAND (arg1, 0)));
11094
11095 /* Convert ~X ^ C to X ^ ~C. */
11096 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11097 && TREE_CODE (arg1) == INTEGER_CST)
11098 return fold_build2 (code, type,
11099 fold_convert (type, TREE_OPERAND (arg0, 0)),
11100 fold_build1 (BIT_NOT_EXPR, type, arg1));
11101
11102 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11103 if (TREE_CODE (arg0) == BIT_AND_EXPR
11104 && integer_onep (TREE_OPERAND (arg0, 1))
11105 && integer_onep (arg1))
11106 return fold_build2 (EQ_EXPR, type, arg0,
11107 build_int_cst (TREE_TYPE (arg0), 0));
11108
11109 /* Fold (X & Y) ^ Y as ~X & Y. */
11110 if (TREE_CODE (arg0) == BIT_AND_EXPR
11111 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11112 {
11113 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11114 return fold_build2 (BIT_AND_EXPR, type,
11115 fold_build1 (BIT_NOT_EXPR, type, tem),
11116 fold_convert (type, arg1));
11117 }
11118 /* Fold (X & Y) ^ X as ~Y & X. */
11119 if (TREE_CODE (arg0) == BIT_AND_EXPR
11120 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11121 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11122 {
11123 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11124 return fold_build2 (BIT_AND_EXPR, type,
11125 fold_build1 (BIT_NOT_EXPR, type, tem),
11126 fold_convert (type, arg1));
11127 }
11128 /* Fold X ^ (X & Y) as X & ~Y. */
11129 if (TREE_CODE (arg1) == BIT_AND_EXPR
11130 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11131 {
11132 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11133 return fold_build2 (BIT_AND_EXPR, type,
11134 fold_convert (type, arg0),
11135 fold_build1 (BIT_NOT_EXPR, type, tem));
11136 }
11137 /* Fold X ^ (Y & X) as ~Y & X. */
11138 if (TREE_CODE (arg1) == BIT_AND_EXPR
11139 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11140 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11141 {
11142 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11143 return fold_build2 (BIT_AND_EXPR, type,
11144 fold_build1 (BIT_NOT_EXPR, type, tem),
11145 fold_convert (type, arg0));
11146 }
11147
11148 /* See if this can be simplified into a rotate first. If that
11149 is unsuccessful continue in the association code. */
11150 goto bit_rotate;
11151
11152 case BIT_AND_EXPR:
11153 if (integer_all_onesp (arg1))
11154 return non_lvalue (fold_convert (type, arg0));
11155 if (integer_zerop (arg1))
11156 return omit_one_operand (type, arg1, arg0);
11157 if (operand_equal_p (arg0, arg1, 0))
11158 return non_lvalue (fold_convert (type, arg0));
11159
11160 /* ~X & X is always zero. */
11161 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11162 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11163 return omit_one_operand (type, integer_zero_node, arg1);
11164
11165 /* X & ~X is always zero. */
11166 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11167 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11168 return omit_one_operand (type, integer_zero_node, arg0);
11169
11170 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11171 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11172 && TREE_CODE (arg1) == INTEGER_CST
11173 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11174 {
11175 tree tmp1 = fold_convert (type, arg1);
11176 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11177 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11178 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11179 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11180 return fold_convert (type,
11181 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11182 }
11183
11184 /* (X | Y) & Y is (X, Y). */
11185 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11186 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11187 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11188 /* (X | Y) & X is (Y, X). */
11189 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11190 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11191 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11192 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11193 /* X & (X | Y) is (Y, X). */
11194 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11195 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11196 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11197 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11198 /* X & (Y | X) is (Y, X). */
11199 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11200 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11201 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11202 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11203
11204 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11205 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11206 && integer_onep (TREE_OPERAND (arg0, 1))
11207 && integer_onep (arg1))
11208 {
11209 tem = TREE_OPERAND (arg0, 0);
11210 return fold_build2 (EQ_EXPR, type,
11211 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11212 build_int_cst (TREE_TYPE (tem), 1)),
11213 build_int_cst (TREE_TYPE (tem), 0));
11214 }
11215 /* Fold ~X & 1 as (X & 1) == 0. */
11216 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11217 && integer_onep (arg1))
11218 {
11219 tem = TREE_OPERAND (arg0, 0);
11220 return fold_build2 (EQ_EXPR, type,
11221 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11222 build_int_cst (TREE_TYPE (tem), 1)),
11223 build_int_cst (TREE_TYPE (tem), 0));
11224 }
11225
11226 /* Fold (X ^ Y) & Y as ~X & Y. */
11227 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11228 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11229 {
11230 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11231 return fold_build2 (BIT_AND_EXPR, type,
11232 fold_build1 (BIT_NOT_EXPR, type, tem),
11233 fold_convert (type, arg1));
11234 }
11235 /* Fold (X ^ Y) & X as ~Y & X. */
11236 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11237 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11238 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11239 {
11240 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11241 return fold_build2 (BIT_AND_EXPR, type,
11242 fold_build1 (BIT_NOT_EXPR, type, tem),
11243 fold_convert (type, arg1));
11244 }
11245 /* Fold X & (X ^ Y) as X & ~Y. */
11246 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11247 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11248 {
11249 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11250 return fold_build2 (BIT_AND_EXPR, type,
11251 fold_convert (type, arg0),
11252 fold_build1 (BIT_NOT_EXPR, type, tem));
11253 }
11254 /* Fold X & (Y ^ X) as ~Y & X. */
11255 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11256 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11257 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11258 {
11259 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11260 return fold_build2 (BIT_AND_EXPR, type,
11261 fold_build1 (BIT_NOT_EXPR, type, tem),
11262 fold_convert (type, arg0));
11263 }
11264
11265 t1 = distribute_bit_expr (code, type, arg0, arg1);
11266 if (t1 != NULL_TREE)
11267 return t1;
11268 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11269 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11270 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11271 {
11272 unsigned int prec
11273 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11274
11275 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11276 && (~TREE_INT_CST_LOW (arg1)
11277 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11278 return fold_convert (type, TREE_OPERAND (arg0, 0));
11279 }
11280
11281 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11282
11283 This results in more efficient code for machines without a NOR
11284 instruction. Combine will canonicalize to the first form
11285 which will allow use of NOR instructions provided by the
11286 backend if they exist. */
11287 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11288 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11289 {
11290 return fold_build1 (BIT_NOT_EXPR, type,
11291 build2 (BIT_IOR_EXPR, type,
11292 fold_convert (type,
11293 TREE_OPERAND (arg0, 0)),
11294 fold_convert (type,
11295 TREE_OPERAND (arg1, 0))));
11296 }
11297
11298 /* If arg0 is derived from the address of an object or function, we may
11299 be able to fold this expression using the object or function's
11300 alignment. */
11301 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11302 {
11303 unsigned HOST_WIDE_INT modulus, residue;
11304 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11305
11306 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11307 integer_onep (arg1));
11308
11309 /* This works because modulus is a power of 2. If this weren't the
11310 case, we'd have to replace it by its greatest power-of-2
11311 divisor: modulus & -modulus. */
11312 if (low < modulus)
11313 return build_int_cst (type, residue & low);
11314 }
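/* Annotation (not in the original source): e.g. if arg0 is the
   address of a variable known to have 8-byte alignment, modulus is 8
   and residue 0, so ((uintptr_t) &var) & 7 folds to the constant 0. */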
11315
11316 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11317 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11318 if the new mask might be further optimized. */
11319 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11320 || TREE_CODE (arg0) == RSHIFT_EXPR)
11321 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11322 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11323 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11324 < TYPE_PRECISION (TREE_TYPE (arg0))
11325 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11326 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11327 {
11328 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11329 unsigned HOST_WIDE_INT mask
11330 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11331 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11332 tree shift_type = TREE_TYPE (arg0);
11333
11334 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11335 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11336 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11337 && TYPE_PRECISION (TREE_TYPE (arg0))
11338 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11339 {
11340 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11341 tree arg00 = TREE_OPERAND (arg0, 0);
11342 /* See if more bits can be proven as zero because of
11343 zero extension. */
11344 if (TREE_CODE (arg00) == NOP_EXPR
11345 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11346 {
11347 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11348 if (TYPE_PRECISION (inner_type)
11349 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11350 && TYPE_PRECISION (inner_type) < prec)
11351 {
11352 prec = TYPE_PRECISION (inner_type);
11353 /* See if we can shorten the right shift. */
11354 if (shiftc < prec)
11355 shift_type = inner_type;
11356 }
11357 }
11358 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11359 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11360 zerobits <<= prec - shiftc;
11361 /* For an arithmetic shift, if the sign bit could be set, zerobits
11362 can actually contain sign bits, so no transformation is
11363 possible, unless MASK masks them all away. In that
11364 case the shift needs to be converted into a logical shift. */
11365 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11366 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11367 {
11368 if ((mask & zerobits) == 0)
11369 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11370 else
11371 zerobits = 0;
11372 }
11373 }
11374
11375 /* ((X << 16) & 0xff00) is (X, 0). */
11376 if ((mask & zerobits) == mask)
11377 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11378
11379 newmask = mask | zerobits;
11380 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11381 {
11382 unsigned int prec;
11383
11384 /* Only do the transformation if NEWMASK is some integer
11385 mode's mask. */
11386 for (prec = BITS_PER_UNIT;
11387 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11388 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11389 break;
11390 if (prec < HOST_BITS_PER_WIDE_INT
11391 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11392 {
11393 tree newmaskt;
11394
11395 if (shift_type != TREE_TYPE (arg0))
11396 {
11397 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11398 fold_convert (shift_type,
11399 TREE_OPERAND (arg0, 0)),
11400 TREE_OPERAND (arg0, 1));
11401 tem = fold_convert (type, tem);
11402 }
11403 else
11404 tem = op0;
11405 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11406 if (!tree_int_cst_equal (newmaskt, arg1))
11407 return fold_build2 (BIT_AND_EXPR, type, tem, newmaskt);
11408 }
11409 }
11410 }
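/* Annotation (not in the original source): e.g. for a 32-bit
   unsigned X, (X >> 24) & 0xff widens the mask to the all-ones
   0xffffffff, at which point the now-redundant BIT_AND folds away,
   leaving just X >> 24. */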
11411
11412 goto associate;
11413
11414 case RDIV_EXPR:
11415 /* Don't touch a floating-point divide by zero unless the mode
11416 of the constant can represent infinity. */
11417 if (TREE_CODE (arg1) == REAL_CST
11418 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11419 && real_zerop (arg1))
11420 return NULL_TREE;
11421
11422 /* Optimize A / A to 1.0 if we don't care about
11423 NaNs or Infinities. Skip the transformation
11424 for non-real operands. */
11425 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11426 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11427 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11428 && operand_equal_p (arg0, arg1, 0))
11429 {
11430 tree r = build_real (TREE_TYPE (arg0), dconst1);
11431
11432 return omit_two_operands (type, r, arg0, arg1);
11433 }
11434
11435 /* The complex version of the above A / A optimization. */
11436 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11437 && operand_equal_p (arg0, arg1, 0))
11438 {
11439 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11440 if (! HONOR_NANS (TYPE_MODE (elem_type))
11441 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11442 {
11443 tree r = build_real (elem_type, dconst1);
11444 /* omit_two_operands will call fold_convert for us. */
11445 return omit_two_operands (type, r, arg0, arg1);
11446 }
11447 }
11448
11449 /* (-A) / (-B) -> A / B */
11450 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11451 return fold_build2 (RDIV_EXPR, type,
11452 TREE_OPERAND (arg0, 0),
11453 negate_expr (arg1));
11454 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11455 return fold_build2 (RDIV_EXPR, type,
11456 negate_expr (arg0),
11457 TREE_OPERAND (arg1, 0));
11458
11459 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11460 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11461 && real_onep (arg1))
11462 return non_lvalue (fold_convert (type, arg0));
11463
11464 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11465 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11466 && real_minus_onep (arg1))
11467 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11468
11469 /* If ARG1 is a constant, we can convert this to a multiply by the
11470 reciprocal. This does not have the same rounding properties,
11471 so only do this if -freciprocal-math. We can actually
11472 always safely do it if ARG1 is a power of two, but it's hard to
11473 tell if it is or not in a portable manner. */
11474 if (TREE_CODE (arg1) == REAL_CST)
11475 {
11476 if (flag_reciprocal_math
11477 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11478 arg1, 0)))
11479 return fold_build2 (MULT_EXPR, type, arg0, tem);
11480 /* Find the reciprocal if optimizing and the result is exact. */
11481 if (optimize)
11482 {
11483 REAL_VALUE_TYPE r;
11484 r = TREE_REAL_CST (arg1);
11485 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11486 {
11487 tem = build_real (type, r);
11488 return fold_build2 (MULT_EXPR, type,
11489 fold_convert (type, arg0), tem);
11490 }
11491 }
11492 }
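/* Annotation (not in the original source): when optimizing, x / 8.0
   becomes x * 0.125 unconditionally, since 8.0 has an exact
   reciprocal; x / 3.0 becomes x * (1.0/3.0) only under
   -freciprocal-math, which permits the rounding difference. */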
11493 /* Convert A/B/C to A/(B*C). */
11494 if (flag_reciprocal_math
11495 && TREE_CODE (arg0) == RDIV_EXPR)
11496 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11497 fold_build2 (MULT_EXPR, type,
11498 TREE_OPERAND (arg0, 1), arg1));
11499
11500 /* Convert A/(B/C) to (A/B)*C. */
11501 if (flag_reciprocal_math
11502 && TREE_CODE (arg1) == RDIV_EXPR)
11503 return fold_build2 (MULT_EXPR, type,
11504 fold_build2 (RDIV_EXPR, type, arg0,
11505 TREE_OPERAND (arg1, 0)),
11506 TREE_OPERAND (arg1, 1));
11507
11508 /* Convert C1/(X*C2) into (C1/C2)/X. */
11509 if (flag_reciprocal_math
11510 && TREE_CODE (arg1) == MULT_EXPR
11511 && TREE_CODE (arg0) == REAL_CST
11512 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11513 {
11514 tree tem = const_binop (RDIV_EXPR, arg0,
11515 TREE_OPERAND (arg1, 1), 0);
11516 if (tem)
11517 return fold_build2 (RDIV_EXPR, type, tem,
11518 TREE_OPERAND (arg1, 0));
11519 }
11520
11521 if (flag_unsafe_math_optimizations)
11522 {
11523 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11524 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11525
11526 /* Optimize sin(x)/cos(x) as tan(x). */
11527 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11528 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11529 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11530 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11531 CALL_EXPR_ARG (arg1, 0), 0))
11532 {
11533 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11534
11535 if (tanfn != NULL_TREE)
11536 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11537 }
11538
11539 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11540 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11541 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11542 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11543 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11544 CALL_EXPR_ARG (arg1, 0), 0))
11545 {
11546 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11547
11548 if (tanfn != NULL_TREE)
11549 {
11550 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11551 return fold_build2 (RDIV_EXPR, type,
11552 build_real (type, dconst1), tmp);
11553 }
11554 }
11555
11556 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11557 NaNs or Infinities. */
11558 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11559 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11560 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11561 {
11562 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11563 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11564
11565 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11566 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11567 && operand_equal_p (arg00, arg01, 0))
11568 {
11569 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11570
11571 if (cosfn != NULL_TREE)
11572 return build_call_expr (cosfn, 1, arg00);
11573 }
11574 }
11575
11576 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11577 NaNs or Infinities. */
11578 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11579 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11580 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11581 {
11582 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11583 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11584
11585 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11586 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11587 && operand_equal_p (arg00, arg01, 0))
11588 {
11589 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11590
11591 if (cosfn != NULL_TREE)
11592 {
11593 tree tmp = build_call_expr (cosfn, 1, arg00);
11594 return fold_build2 (RDIV_EXPR, type,
11595 build_real (type, dconst1),
11596 tmp);
11597 }
11598 }
11599 }
11600
11601 /* Optimize pow(x,c)/x as pow(x,c-1). */
11602 if (fcode0 == BUILT_IN_POW
11603 || fcode0 == BUILT_IN_POWF
11604 || fcode0 == BUILT_IN_POWL)
11605 {
11606 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11607 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11608 if (TREE_CODE (arg01) == REAL_CST
11609 && !TREE_OVERFLOW (arg01)
11610 && operand_equal_p (arg1, arg00, 0))
11611 {
11612 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11613 REAL_VALUE_TYPE c;
11614 tree arg;
11615
11616 c = TREE_REAL_CST (arg01);
11617 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11618 arg = build_real (type, c);
11619 return build_call_expr (powfn, 2, arg1, arg);
11620 }
11621 }
11622
11623 /* Optimize a/root(b/c) into a*root(c/b). */
11624 if (BUILTIN_ROOT_P (fcode1))
11625 {
11626 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11627
11628 if (TREE_CODE (rootarg) == RDIV_EXPR)
11629 {
11630 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11631 tree b = TREE_OPERAND (rootarg, 0);
11632 tree c = TREE_OPERAND (rootarg, 1);
11633
11634 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11635
11636 tmp = build_call_expr (rootfn, 1, tmp);
11637 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11638 }
11639 }
11640
11641 /* Optimize x/expN(y) into x*expN(-y). */
11642 if (BUILTIN_EXPONENT_P (fcode1))
11643 {
11644 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11645 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11646 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11647 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11648 }
11649
11650 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11651 if (fcode1 == BUILT_IN_POW
11652 || fcode1 == BUILT_IN_POWF
11653 || fcode1 == BUILT_IN_POWL)
11654 {
11655 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11656 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11657 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11658 tree neg11 = fold_convert (type, negate_expr (arg11));
11659 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11660 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11661 }
11662 }
11663 return NULL_TREE;
11664
11665 case TRUNC_DIV_EXPR:
11666 case FLOOR_DIV_EXPR:
11667 /* Simplify A / (B << N) where A and B are positive and B is
11668 a power of 2, to A >> (N + log2(B)). */
11669 strict_overflow_p = false;
11670 if (TREE_CODE (arg1) == LSHIFT_EXPR
11671 && (TYPE_UNSIGNED (type)
11672 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11673 {
11674 tree sval = TREE_OPERAND (arg1, 0);
11675 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11676 {
11677 tree sh_cnt = TREE_OPERAND (arg1, 1);
11678 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11679
11680 if (strict_overflow_p)
11681 fold_overflow_warning (("assuming signed overflow does not "
11682 "occur when simplifying A / (B << N)"),
11683 WARN_STRICT_OVERFLOW_MISC);
11684
11685 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11686 sh_cnt, build_int_cst (NULL_TREE, pow2));
11687 return fold_build2 (RSHIFT_EXPR, type,
11688 fold_convert (type, arg0), sh_cnt);
11689 }
11690 }
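/* Annotation (not in the original source): e.g. for unsigned A,
   A / (4 << n) folds to A >> (n + 2), since log2 (4) == 2. */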
11691
11692 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11693 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11694 if (INTEGRAL_TYPE_P (type)
11695 && TYPE_UNSIGNED (type)
11696 && code == FLOOR_DIV_EXPR)
11697 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11698
11699 /* Fall through. */
11700
11701 case ROUND_DIV_EXPR:
11702 case CEIL_DIV_EXPR:
11703 case EXACT_DIV_EXPR:
11704 if (integer_onep (arg1))
11705 return non_lvalue (fold_convert (type, arg0));
11706 if (integer_zerop (arg1))
11707 return NULL_TREE;
11708 /* X / -1 is -X. */
11709 if (!TYPE_UNSIGNED (type)
11710 && TREE_CODE (arg1) == INTEGER_CST
11711 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11712 && TREE_INT_CST_HIGH (arg1) == -1)
11713 return fold_convert (type, negate_expr (arg0));
11714
11715 /* Convert -A / -B to A / B when the type is signed and overflow is
11716 undefined. */
11717 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11718 && TREE_CODE (arg0) == NEGATE_EXPR
11719 && negate_expr_p (arg1))
11720 {
11721 if (INTEGRAL_TYPE_P (type))
11722 fold_overflow_warning (("assuming signed overflow does not occur "
11723 "when distributing negation across "
11724 "division"),
11725 WARN_STRICT_OVERFLOW_MISC);
11726 return fold_build2 (code, type,
11727 fold_convert (type, TREE_OPERAND (arg0, 0)),
11728 fold_convert (type, negate_expr (arg1)));
11729 }
11730 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11731 && TREE_CODE (arg1) == NEGATE_EXPR
11732 && negate_expr_p (arg0))
11733 {
11734 if (INTEGRAL_TYPE_P (type))
11735 fold_overflow_warning (("assuming signed overflow does not occur "
11736 "when distributing negation across "
11737 "division"),
11738 WARN_STRICT_OVERFLOW_MISC);
11739 return fold_build2 (code, type,
11740 fold_convert (type, negate_expr (arg0)),
11741 fold_convert (type, TREE_OPERAND (arg1, 0)));
11742 }
11743
11744 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11745 operation, EXACT_DIV_EXPR.
11746
11747 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11748 At one time others generated faster code; it's not clear whether they do
11749 after the last round of changes to the DIV code in expmed.c. */
11750 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11751 && multiple_of_p (type, arg0, arg1))
11752 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11753
11754 strict_overflow_p = false;
11755 if (TREE_CODE (arg1) == INTEGER_CST
11756 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11757 &strict_overflow_p)))
11758 {
11759 if (strict_overflow_p)
11760 fold_overflow_warning (("assuming signed overflow does not occur "
11761 "when simplifying division"),
11762 WARN_STRICT_OVERFLOW_MISC);
11763 return fold_convert (type, tem);
11764 }
11765
11766 return NULL_TREE;
11767
11768 case CEIL_MOD_EXPR:
11769 case FLOOR_MOD_EXPR:
11770 case ROUND_MOD_EXPR:
11771 case TRUNC_MOD_EXPR:
11772 /* X % 1 is always zero, but be sure to preserve any side
11773 effects in X. */
11774 if (integer_onep (arg1))
11775 return omit_one_operand (type, integer_zero_node, arg0);
11776
11777 /* For X % 0, return the expression unchanged so that we can get the
11778 proper warnings and errors. */
11779 if (integer_zerop (arg1))
11780 return NULL_TREE;
11781
11782 /* 0 % X is always zero, but be sure to preserve any side
11783 effects in X. Place this after checking for X == 0. */
11784 if (integer_zerop (arg0))
11785 return omit_one_operand (type, integer_zero_node, arg1);
11786
11787 /* X % -1 is zero. */
11788 if (!TYPE_UNSIGNED (type)
11789 && TREE_CODE (arg1) == INTEGER_CST
11790 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11791 && TREE_INT_CST_HIGH (arg1) == -1)
11792 return omit_one_operand (type, integer_zero_node, arg0);
11793
11794 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11795 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11796 strict_overflow_p = false;
11797 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11798 && (TYPE_UNSIGNED (type)
11799 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11800 {
11801 tree c = arg1;
11802 /* Also optimize A % (C << N) where C is a power of 2,
11803 to A & ((C << N) - 1). */
11804 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11805 c = TREE_OPERAND (arg1, 0);
11806
11807 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11808 {
11809 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11810 build_int_cst (TREE_TYPE (arg1), 1));
11811 if (strict_overflow_p)
11812 fold_overflow_warning (("assuming signed overflow does not "
11813 "occur when simplifying "
11814 "X % (power of two)"),
11815 WARN_STRICT_OVERFLOW_MISC);
11816 return fold_build2 (BIT_AND_EXPR, type,
11817 fold_convert (type, arg0),
11818 fold_convert (type, mask));
11819 }
11820 }
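/* Annotation (not in the original source): e.g. for unsigned X,
   X % 16 folds to X & 15, and X % (2 << n) to X & ((2 << n) - 1). */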
11821
11822 /* X % -C is the same as X % C. */
11823 if (code == TRUNC_MOD_EXPR
11824 && !TYPE_UNSIGNED (type)
11825 && TREE_CODE (arg1) == INTEGER_CST
11826 && !TREE_OVERFLOW (arg1)
11827 && TREE_INT_CST_HIGH (arg1) < 0
11828 && !TYPE_OVERFLOW_TRAPS (type)
11829 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11830 && !sign_bit_p (arg1, arg1))
11831 return fold_build2 (code, type, fold_convert (type, arg0),
11832 fold_convert (type, negate_expr (arg1)));
11833
11834 /* X % -Y is the same as X % Y. */
11835 if (code == TRUNC_MOD_EXPR
11836 && !TYPE_UNSIGNED (type)
11837 && TREE_CODE (arg1) == NEGATE_EXPR
11838 && !TYPE_OVERFLOW_TRAPS (type))
11839 return fold_build2 (code, type, fold_convert (type, arg0),
11840 fold_convert (type, TREE_OPERAND (arg1, 0)));
11841
11842 if (TREE_CODE (arg1) == INTEGER_CST
11843 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11844 &strict_overflow_p)))
11845 {
11846 if (strict_overflow_p)
11847 fold_overflow_warning (("assuming signed overflow does not occur "
11848 "when simplifying modulus"),
11849 WARN_STRICT_OVERFLOW_MISC);
11850 return fold_convert (type, tem);
11851 }
11852
11853 return NULL_TREE;
11854
11855 case LROTATE_EXPR:
11856 case RROTATE_EXPR:
11857 if (integer_all_onesp (arg0))
11858 return omit_one_operand (type, arg0, arg1);
11859 goto shift;
11860
11861 case RSHIFT_EXPR:
11862 /* Optimize -1 >> x for arithmetic right shifts. */
11863 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11864 && tree_expr_nonnegative_p (arg1))
11865 return omit_one_operand (type, arg0, arg1);
11866 /* ... fall through ... */
11867
11868 case LSHIFT_EXPR:
11869 shift:
11870 if (integer_zerop (arg1))
11871 return non_lvalue (fold_convert (type, arg0));
11872 if (integer_zerop (arg0))
11873 return omit_one_operand (type, arg0, arg1);
11874
11875 /* Since a negative shift count is not well-defined,
11876 don't try to compute it in the compiler. */
11877 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11878 return NULL_TREE;
11879
11880 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11881 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11882 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11883 && host_integerp (TREE_OPERAND (arg0, 1), false)
11884 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11885 {
11886 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11887 + TREE_INT_CST_LOW (arg1));
11888
11889 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11890 being well defined. */
11891 if (low >= TYPE_PRECISION (type))
11892 {
11893 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11894 low = low % TYPE_PRECISION (type);
11895 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11896 return omit_one_operand (type, build_int_cst (type, 0),
11897 TREE_OPERAND (arg0, 0));
11898 else
11899 low = TYPE_PRECISION (type) - 1;
11900 }
11901
11902 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11903 build_int_cst (type, low));
11904 }
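/* Annotation (not in the original source): e.g. (x >> 2) >> 3
   becomes x >> 5; for a 32-bit unsigned x, (x << 30) << 4 exceeds
   the precision and folds to the constant 0. */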
11905
11906 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11907 into x & ((unsigned)-1 >> c) for unsigned types. */
11908 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11909 || (TYPE_UNSIGNED (type)
11910 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11911 && host_integerp (arg1, false)
11912 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11913 && host_integerp (TREE_OPERAND (arg0, 1), false)
11914 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11915 {
11916 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11917 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11918 tree lshift;
11919 tree arg00;
11920
11921 if (low0 == low1)
11922 {
11923 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11924
11925 lshift = build_int_cst (type, -1);
11926 lshift = int_const_binop (code, lshift, arg1, 0);
11927
11928 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11929 }
11930 }
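/* Annotation (not in the original source): e.g. for a 32-bit
   unsigned x, (x << 8) >> 8 folds to x & 0x00ffffff, i.e. the
   all-ones constant shifted right by the same amount. */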
11931
11932 /* Rewrite an LROTATE_EXPR by a constant into an
11933 RROTATE_EXPR by a new constant. */
11934 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11935 {
11936 tree tem = build_int_cst (TREE_TYPE (arg1),
11937 TYPE_PRECISION (type));
11938 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11939 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11940 }
11941
11942 /* If we have a rotate of a bit operation with the rotate count and
11943 the second operand of the bit operation both constant,
11944 permute the two operations. */
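      /* For instance, in a 32-bit type, (x & 0xff00) r>> 8 becomes
	 (x r>> 8) & (0xff00 r>> 8), i.e. (x r>> 8) & 0xff: the
	 constant operand is rotated by the same amount.  */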
11945 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11946 && (TREE_CODE (arg0) == BIT_AND_EXPR
11947 || TREE_CODE (arg0) == BIT_IOR_EXPR
11948 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11949 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11950 return fold_build2 (TREE_CODE (arg0), type,
11951 fold_build2 (code, type,
11952 TREE_OPERAND (arg0, 0), arg1),
11953 fold_build2 (code, type,
11954 TREE_OPERAND (arg0, 1), arg1));
11955
11956 /* Two consecutive rotates adding up to the precision of the
11957 type can be ignored. */
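      /* E.g., for a 32-bit type, (x r>> 12) r>> 20 rotates by a total
	 of 32 bits and is therefore just x.  */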
11958 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11959 && TREE_CODE (arg0) == RROTATE_EXPR
11960 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11961 && TREE_INT_CST_HIGH (arg1) == 0
11962 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11963 && ((TREE_INT_CST_LOW (arg1)
11964 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11965 == (unsigned int) TYPE_PRECISION (type)))
11966 return TREE_OPERAND (arg0, 0);
11967
11968 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11969 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11970 if the latter can be further optimized. */
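      /* For example, (x & 0xff) << 8 can become (x << 8) & 0xff00;
	 the transformation is only kept when fold_binary manages to
	 simplify the resulting BIT_AND_EXPR.  */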
11971 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11972 && TREE_CODE (arg0) == BIT_AND_EXPR
11973 && TREE_CODE (arg1) == INTEGER_CST
11974 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11975 {
11976 tree mask = fold_build2 (code, type,
11977 fold_convert (type, TREE_OPERAND (arg0, 1)),
11978 arg1);
11979 tree shift = fold_build2 (code, type,
11980 fold_convert (type, TREE_OPERAND (arg0, 0)),
11981 arg1);
11982 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11983 if (tem)
11984 return tem;
11985 }
11986
11987 return NULL_TREE;
11988
11989 case MIN_EXPR:
11990 if (operand_equal_p (arg0, arg1, 0))
11991 return omit_one_operand (type, arg0, arg1);
11992 if (INTEGRAL_TYPE_P (type)
11993 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11994 return omit_one_operand (type, arg1, arg0);
11995 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11996 if (tem)
11997 return tem;
11998 goto associate;
11999
12000 case MAX_EXPR:
12001 if (operand_equal_p (arg0, arg1, 0))
12002 return omit_one_operand (type, arg0, arg1);
12003 if (INTEGRAL_TYPE_P (type)
12004 && TYPE_MAX_VALUE (type)
12005 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12006 return omit_one_operand (type, arg1, arg0);
12007 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
12008 if (tem)
12009 return tem;
12010 goto associate;
12011
12012 case TRUTH_ANDIF_EXPR:
12013 /* Note that the operands of this must be ints
12014 and their values must be 0 or 1.
12015 ("true" is a fixed value perhaps depending on the language.) */
12016 /* If first arg is constant zero, return it. */
12017 if (integer_zerop (arg0))
12018 return fold_convert (type, arg0);
12019 case TRUTH_AND_EXPR:
12020 /* If either arg is constant true, drop it. */
12021 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12022 return non_lvalue (fold_convert (type, arg1));
12023 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12024 /* Preserve sequence points. */
12025 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12026 return non_lvalue (fold_convert (type, arg0));
12027 /* If second arg is constant zero, result is zero, but first arg
12028 must be evaluated. */
12029 if (integer_zerop (arg1))
12030 return omit_one_operand (type, arg1, arg0);
12031 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12032 case will be handled here. */
12033 if (integer_zerop (arg0))
12034 return omit_one_operand (type, arg0, arg1);
12035
12036 /* !X && X is always false. */
12037 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12038 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12039 return omit_one_operand (type, integer_zero_node, arg1);
12040 /* X && !X is always false. */
12041 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12042 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12043 return omit_one_operand (type, integer_zero_node, arg0);
12044
12045 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12046 means A >= Y && A != MAX, but in this case we know that
12047 A < X <= MAX. */
12048
12049 if (!TREE_SIDE_EFFECTS (arg0)
12050 && !TREE_SIDE_EFFECTS (arg1))
12051 {
12052 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12053 if (tem && !operand_equal_p (tem, arg0, 0))
12054 return fold_build2 (code, type, tem, arg1);
12055
12056 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12057 if (tem && !operand_equal_p (tem, arg1, 0))
12058 return fold_build2 (code, type, arg0, tem);
12059 }
12060
12061 truth_andor:
12062 /* We only do these simplifications if we are optimizing. */
12063 if (!optimize)
12064 return NULL_TREE;
12065
12066 /* Check for things like (A || B) && (A || C). We can convert this
12067 to A || (B && C). Note that either operator can be any of the four
12068 truth and/or operations and the transformation will still be
12069 valid. Also note that we only care about order for the
12070 ANDIF and ORIF operators. If B contains side effects, this
12071 might change the truth-value of A. */
12072 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12073 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12074 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12075 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12076 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12077 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12078 {
12079 tree a00 = TREE_OPERAND (arg0, 0);
12080 tree a01 = TREE_OPERAND (arg0, 1);
12081 tree a10 = TREE_OPERAND (arg1, 0);
12082 tree a11 = TREE_OPERAND (arg1, 1);
12083 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12084 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12085 && (code == TRUTH_AND_EXPR
12086 || code == TRUTH_OR_EXPR));
12087
12088 if (operand_equal_p (a00, a10, 0))
12089 return fold_build2 (TREE_CODE (arg0), type, a00,
12090 fold_build2 (code, type, a01, a11));
12091 else if (commutative && operand_equal_p (a00, a11, 0))
12092 return fold_build2 (TREE_CODE (arg0), type, a00,
12093 fold_build2 (code, type, a01, a10));
12094 else if (commutative && operand_equal_p (a01, a10, 0))
12095 return fold_build2 (TREE_CODE (arg0), type, a01,
12096 fold_build2 (code, type, a00, a11));
12097
12098 	      /* This case is tricky because we must either have commutative
12099 operators or else A10 must not have side-effects. */
12100
12101 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12102 && operand_equal_p (a01, a11, 0))
12103 return fold_build2 (TREE_CODE (arg0), type,
12104 fold_build2 (code, type, a00, a10),
12105 a01);
12106 }
12107
12108 /* See if we can build a range comparison. */
12109 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12110 return tem;
12111
12112 /* Check for the possibility of merging component references. If our
12113 lhs is another similar operation, try to merge its rhs with our
12114 rhs. Then try to merge our lhs and rhs. */
12115 if (TREE_CODE (arg0) == code
12116 && 0 != (tem = fold_truthop (code, type,
12117 TREE_OPERAND (arg0, 1), arg1)))
12118 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12119
12120 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12121 return tem;
12122
12123 return NULL_TREE;
12124
12125 case TRUTH_ORIF_EXPR:
12126 /* Note that the operands of this must be ints
12127 and their values must be 0 or true.
12128 ("true" is a fixed value perhaps depending on the language.) */
12129 /* If first arg is constant true, return it. */
12130 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12131 return fold_convert (type, arg0);
12132 case TRUTH_OR_EXPR:
12133 /* If either arg is constant zero, drop it. */
12134 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12135 return non_lvalue (fold_convert (type, arg1));
12136 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12137 /* Preserve sequence points. */
12138 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12139 return non_lvalue (fold_convert (type, arg0));
12140 /* If second arg is constant true, result is true, but we must
12141 evaluate first arg. */
12142 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12143 return omit_one_operand (type, arg1, arg0);
12144 /* Likewise for first arg, but note this only occurs here for
12145 TRUTH_OR_EXPR. */
12146 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12147 return omit_one_operand (type, arg0, arg1);
12148
12149 /* !X || X is always true. */
12150 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12151 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12152 return omit_one_operand (type, integer_one_node, arg1);
12153 /* X || !X is always true. */
12154 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12155 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12156 return omit_one_operand (type, integer_one_node, arg0);
12157
12158 goto truth_andor;
12159
12160 case TRUTH_XOR_EXPR:
12161 /* If the second arg is constant zero, drop it. */
12162 if (integer_zerop (arg1))
12163 return non_lvalue (fold_convert (type, arg0));
12164 /* If the second arg is constant true, this is a logical inversion. */
12165 if (integer_onep (arg1))
12166 {
12167 /* Only call invert_truthvalue if operand is a truth value. */
12168 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12169 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12170 else
12171 tem = invert_truthvalue (arg0);
12172 return non_lvalue (fold_convert (type, tem));
12173 }
12174 /* Identical arguments cancel to zero. */
12175 if (operand_equal_p (arg0, arg1, 0))
12176 return omit_one_operand (type, integer_zero_node, arg0);
12177
12178 /* !X ^ X is always true. */
12179 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12180 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12181 return omit_one_operand (type, integer_one_node, arg1);
12182
12183 /* X ^ !X is always true. */
12184 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12185 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12186 return omit_one_operand (type, integer_one_node, arg0);
12187
12188 return NULL_TREE;
12189
12190 case EQ_EXPR:
12191 case NE_EXPR:
12192 tem = fold_comparison (code, type, op0, op1);
12193 if (tem != NULL_TREE)
12194 return tem;
12195
12196 /* bool_var != 0 becomes bool_var. */
12197 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12198 && code == NE_EXPR)
12199 return non_lvalue (fold_convert (type, arg0));
12200
12201 /* bool_var == 1 becomes bool_var. */
12202 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12203 && code == EQ_EXPR)
12204 return non_lvalue (fold_convert (type, arg0));
12205
12206 /* bool_var != 1 becomes !bool_var. */
12207 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12208 && code == NE_EXPR)
12209 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12210
12211 /* bool_var == 0 becomes !bool_var. */
12212 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12213 && code == EQ_EXPR)
12214 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12215
12216 /* If this is an equality comparison of the address of two non-weak,
12217 unaliased symbols neither of which are extern (since we do not
12218 have access to attributes for externs), then we know the result. */
12219 if (TREE_CODE (arg0) == ADDR_EXPR
12220 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12221 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12222 && ! lookup_attribute ("alias",
12223 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12224 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12225 && TREE_CODE (arg1) == ADDR_EXPR
12226 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12227 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12228 && ! lookup_attribute ("alias",
12229 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12230 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12231 {
12232 /* We know that we're looking at the address of two
12233 non-weak, unaliased, static _DECL nodes.
12234
12235 It is both wasteful and incorrect to call operand_equal_p
12236 to compare the two ADDR_EXPR nodes. It is wasteful in that
12237 all we need to do is test pointer equality for the arguments
12238 to the two ADDR_EXPR nodes. It is incorrect to use
12239 operand_equal_p as that function is NOT equivalent to a
12240 C equality test. It can in fact return false for two
12241 objects which would test as equal using the C equality
12242 operator. */
12243 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12244 return constant_boolean_node (equal
12245 ? code == EQ_EXPR : code != EQ_EXPR,
12246 type);
12247 }
12248
12249 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12250 a MINUS_EXPR of a constant, we can convert it into a comparison with
12251 a revised constant as long as no overflow occurs. */
12252 if (TREE_CODE (arg1) == INTEGER_CST
12253 && (TREE_CODE (arg0) == PLUS_EXPR
12254 || TREE_CODE (arg0) == MINUS_EXPR)
12255 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12256 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12257 ? MINUS_EXPR : PLUS_EXPR,
12258 fold_convert (TREE_TYPE (arg0), arg1),
12259 TREE_OPERAND (arg0, 1), 0))
12260 && !TREE_OVERFLOW (tem))
12261 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12262
12263 /* Similarly for a NEGATE_EXPR. */
12264 if (TREE_CODE (arg0) == NEGATE_EXPR
12265 && TREE_CODE (arg1) == INTEGER_CST
12266 && 0 != (tem = negate_expr (arg1))
12267 && TREE_CODE (tem) == INTEGER_CST
12268 && !TREE_OVERFLOW (tem))
12269 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12270
12271 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12272 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12273 && TREE_CODE (arg1) == INTEGER_CST
12274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12275 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12276 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12277 fold_convert (TREE_TYPE (arg0), arg1),
12278 TREE_OPERAND (arg0, 1)));
12279
12280 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
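      /* E.g., x + y == x becomes y == 0 (and x - y != x becomes
	 y != 0); this is safe for EQ/NE because equality is
	 unaffected by wrap-around in these types.  */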
12281 if ((TREE_CODE (arg0) == PLUS_EXPR
12282 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12283 || TREE_CODE (arg0) == MINUS_EXPR)
12284 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12285 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12286 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12287 {
12288 tree val = TREE_OPERAND (arg0, 1);
12289 return omit_two_operands (type,
12290 fold_build2 (code, type,
12291 val,
12292 build_int_cst (TREE_TYPE (val),
12293 0)),
12294 TREE_OPERAND (arg0, 0), arg1);
12295 }
12296
12297 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
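      /* When C is odd, C - X == X would require 2*X == C, which has
	 no solution even modulo 2^N, so the comparison folds to
	 false for EQ_EXPR and true for NE_EXPR.  */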
12298 if (TREE_CODE (arg0) == MINUS_EXPR
12299 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12300 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12301 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12302 {
12303 return omit_two_operands (type,
12304 code == NE_EXPR
12305 ? boolean_true_node : boolean_false_node,
12306 TREE_OPERAND (arg0, 1), arg1);
12307 }
12308
12309 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12310 for !=. Don't do this for ordered comparisons due to overflow. */
12311 if (TREE_CODE (arg0) == MINUS_EXPR
12312 && integer_zerop (arg1))
12313 return fold_build2 (code, type,
12314 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12315
12316 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12317 if (TREE_CODE (arg0) == ABS_EXPR
12318 && (integer_zerop (arg1) || real_zerop (arg1)))
12319 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12320
12321 /* If this is an EQ or NE comparison with zero and ARG0 is
12322 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12323 two operations, but the latter can be done in one less insn
12324 on machines that have only two-operand insns or on which a
12325 constant cannot be the first operand. */
12326 if (TREE_CODE (arg0) == BIT_AND_EXPR
12327 && integer_zerop (arg1))
12328 {
12329 tree arg00 = TREE_OPERAND (arg0, 0);
12330 tree arg01 = TREE_OPERAND (arg0, 1);
12331 if (TREE_CODE (arg00) == LSHIFT_EXPR
12332 && integer_onep (TREE_OPERAND (arg00, 0)))
12333 {
12334 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12335 arg01, TREE_OPERAND (arg00, 1));
12336 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12337 build_int_cst (TREE_TYPE (arg0), 1));
12338 return fold_build2 (code, type,
12339 fold_convert (TREE_TYPE (arg1), tem), arg1);
12340 }
12341 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12342 && integer_onep (TREE_OPERAND (arg01, 0)))
12343 {
12344 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12345 arg00, TREE_OPERAND (arg01, 1));
12346 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12347 build_int_cst (TREE_TYPE (arg0), 1));
12348 return fold_build2 (code, type,
12349 fold_convert (TREE_TYPE (arg1), tem), arg1);
12350 }
12351 }
12352
12353 /* If this is an NE or EQ comparison of zero against the result of a
12354 signed MOD operation whose second operand is a power of 2, make
12355 the MOD operation unsigned since it is simpler and equivalent. */
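      /* E.g., for signed x, x % 4 == 0 becomes (unsigned) x % 4U == 0;
	 against zero the two tests agree, since a truncating remainder
	 by a power of two is zero exactly when the low bits are.  */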
12356 if (integer_zerop (arg1)
12357 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12358 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12359 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12360 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12361 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12362 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12363 {
12364 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12365 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12366 fold_convert (newtype,
12367 TREE_OPERAND (arg0, 0)),
12368 fold_convert (newtype,
12369 TREE_OPERAND (arg0, 1)));
12370
12371 return fold_build2 (code, type, newmod,
12372 fold_convert (newtype, arg1));
12373 }
12374
12375 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12376 C1 is a valid shift constant, and C2 is a power of two, i.e.
12377 a single bit. */
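      /* For instance, ((x >> 3) & 4) != 0 tests bit 5 of x and becomes
	 (x & 32) != 0 when the shifted mask still fits in the type;
	 when it does not and x is signed, it is a sign-bit test and
	 becomes x < 0 instead.  */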
12378 if (TREE_CODE (arg0) == BIT_AND_EXPR
12379 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12380 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12381 == INTEGER_CST
12382 && integer_pow2p (TREE_OPERAND (arg0, 1))
12383 && integer_zerop (arg1))
12384 {
12385 tree itype = TREE_TYPE (arg0);
12386 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12387 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12388
12389 /* Check for a valid shift count. */
12390 if (TREE_INT_CST_HIGH (arg001) == 0
12391 && TREE_INT_CST_LOW (arg001) < prec)
12392 {
12393 tree arg01 = TREE_OPERAND (arg0, 1);
12394 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12395 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12396 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12397 can be rewritten as (X & (C2 << C1)) != 0. */
12398 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12399 {
12400 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12401 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12402 return fold_build2 (code, type, tem, arg1);
12403 }
12404 /* Otherwise, for signed (arithmetic) shifts,
12405 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12406 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12407 else if (!TYPE_UNSIGNED (itype))
12408 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12409 arg000, build_int_cst (itype, 0));
12410 	      /* Otherwise, for unsigned (logical) shifts,
12411 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12412 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12413 else
12414 return omit_one_operand (type,
12415 code == EQ_EXPR ? integer_one_node
12416 : integer_zero_node,
12417 arg000);
12418 }
12419 }
12420
12421 /* If this is an NE comparison of zero with an AND of one, remove the
12422 comparison since the AND will give the correct value. */
12423 if (code == NE_EXPR
12424 && integer_zerop (arg1)
12425 && TREE_CODE (arg0) == BIT_AND_EXPR
12426 && integer_onep (TREE_OPERAND (arg0, 1)))
12427 return fold_convert (type, arg0);
12428
12429 /* If we have (A & C) == C where C is a power of 2, convert this into
12430 (A & C) != 0. Similarly for NE_EXPR. */
12431 if (TREE_CODE (arg0) == BIT_AND_EXPR
12432 && integer_pow2p (TREE_OPERAND (arg0, 1))
12433 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12434 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12435 arg0, fold_convert (TREE_TYPE (arg0),
12436 integer_zero_node));
12437
12438 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12439 bit, then fold the expression into A < 0 or A >= 0. */
12440 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12441 if (tem)
12442 return tem;
12443
12444 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12445 Similarly for NE_EXPR. */
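      /* E.g., (x & 6) == 9 can never hold because 9 has bits set
	 outside of 6, so it folds to 0 (and (x & 6) != 9 to 1).  */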
12446 if (TREE_CODE (arg0) == BIT_AND_EXPR
12447 && TREE_CODE (arg1) == INTEGER_CST
12448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12449 {
12450 tree notc = fold_build1 (BIT_NOT_EXPR,
12451 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12452 TREE_OPERAND (arg0, 1));
12453 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12454 arg1, notc);
12455 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12456 if (integer_nonzerop (dandnotc))
12457 return omit_one_operand (type, rslt, arg0);
12458 }
12459
12460 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12461 Similarly for NE_EXPR. */
12462 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12463 && TREE_CODE (arg1) == INTEGER_CST
12464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12465 {
12466 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12467 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12468 TREE_OPERAND (arg0, 1), notd);
12469 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12470 if (integer_nonzerop (candnotd))
12471 return omit_one_operand (type, rslt, arg0);
12472 }
12473
12474 /* If this is a comparison of a field, we may be able to simplify it. */
12475 if ((TREE_CODE (arg0) == COMPONENT_REF
12476 || TREE_CODE (arg0) == BIT_FIELD_REF)
12477 /* Handle the constant case even without -O
12478 to make sure the warnings are given. */
12479 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12480 {
12481 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12482 if (t1)
12483 return t1;
12484 }
12485
12486 /* Optimize comparisons of strlen vs zero to a compare of the
12487 first character of the string vs zero. To wit,
12488 strlen(ptr) == 0 => *ptr == 0
12489 strlen(ptr) != 0 => *ptr != 0
12490 Other cases should reduce to one of these two (or a constant)
12491 due to the return value of strlen being unsigned. */
12492 if (TREE_CODE (arg0) == CALL_EXPR
12493 && integer_zerop (arg1))
12494 {
12495 tree fndecl = get_callee_fndecl (arg0);
12496
12497 if (fndecl
12498 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12499 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12500 && call_expr_nargs (arg0) == 1
12501 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12502 {
12503 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12504 return fold_build2 (code, type, iref,
12505 build_int_cst (TREE_TYPE (iref), 0));
12506 }
12507 }
12508
12509 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12510 of X. Similarly fold (X >> C) == 0 into X >= 0. */
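      /* E.g., for a 32-bit signed x, (x >> 31) != 0 becomes x < 0:
	 after the arithmetic shift only the sign bit remains.  An
	 unsigned x is converted to the corresponding signed type
	 first.  */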
12511 if (TREE_CODE (arg0) == RSHIFT_EXPR
12512 && integer_zerop (arg1)
12513 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12514 {
12515 tree arg00 = TREE_OPERAND (arg0, 0);
12516 tree arg01 = TREE_OPERAND (arg0, 1);
12517 tree itype = TREE_TYPE (arg00);
12518 if (TREE_INT_CST_HIGH (arg01) == 0
12519 && TREE_INT_CST_LOW (arg01)
12520 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12521 {
12522 if (TYPE_UNSIGNED (itype))
12523 {
12524 itype = signed_type_for (itype);
12525 arg00 = fold_convert (itype, arg00);
12526 }
12527 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12528 type, arg00, build_int_cst (itype, 0));
12529 }
12530 }
12531
12532 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12533 if (integer_zerop (arg1)
12534 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12535 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12536 TREE_OPERAND (arg0, 1));
12537
12538 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12539 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12540 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12541 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12542 build_int_cst (TREE_TYPE (arg1), 0));
12543 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12544 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12545 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12546 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12547 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12548 build_int_cst (TREE_TYPE (arg1), 0));
12549
12550 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
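      /* E.g., (x ^ 5) == 3 becomes x == 6, since XOR by a constant
	 is its own inverse.  */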
12551 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12552 && TREE_CODE (arg1) == INTEGER_CST
12553 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12554 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12555 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12556 TREE_OPERAND (arg0, 1), arg1));
12557
12558 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12559 (X & C) == 0 when C is a single bit. */
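      /* E.g., (~x & 8) == 0 says bit 3 of x is set, i.e.
	 (x & 8) != 0; the single-bit restriction on C is what makes
	 the "!= 0" form sufficient.  */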
12560 if (TREE_CODE (arg0) == BIT_AND_EXPR
12561 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12562 && integer_zerop (arg1)
12563 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12564 {
12565 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12566 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12567 TREE_OPERAND (arg0, 1));
12568 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12569 type, tem, arg1);
12570 }
12571
12572 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12573 constant C is a power of two, i.e. a single bit. */
12574 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12575 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12576 && integer_zerop (arg1)
12577 && integer_pow2p (TREE_OPERAND (arg0, 1))
12578 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12579 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12580 {
12581 tree arg00 = TREE_OPERAND (arg0, 0);
12582 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12583 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12584 }
12585
12586 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12587 	 when C is a power of two, i.e. a single bit.  */
12588 if (TREE_CODE (arg0) == BIT_AND_EXPR
12589 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12590 && integer_zerop (arg1)
12591 && integer_pow2p (TREE_OPERAND (arg0, 1))
12592 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12593 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12594 {
12595 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12596 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12597 arg000, TREE_OPERAND (arg0, 1));
12598 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12599 tem, build_int_cst (TREE_TYPE (tem), 0));
12600 }
12601
12602 if (integer_zerop (arg1)
12603 && tree_expr_nonzero_p (arg0))
12604 {
12605 tree res = constant_boolean_node (code==NE_EXPR, type);
12606 return omit_one_operand (type, res, arg0);
12607 }
12608
12609 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12610 if (TREE_CODE (arg0) == NEGATE_EXPR
12611 && TREE_CODE (arg1) == NEGATE_EXPR)
12612 return fold_build2 (code, type,
12613 TREE_OPERAND (arg0, 0),
12614 TREE_OPERAND (arg1, 0));
12615
12616 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
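      /* E.g., (x & m) == (y & m) becomes ((x ^ y) & m) == 0: the two
	 sides agree on the masked bits exactly when their XOR has no
	 bit in common with the mask.  */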
12617 if (TREE_CODE (arg0) == BIT_AND_EXPR
12618 && TREE_CODE (arg1) == BIT_AND_EXPR)
12619 {
12620 tree arg00 = TREE_OPERAND (arg0, 0);
12621 tree arg01 = TREE_OPERAND (arg0, 1);
12622 tree arg10 = TREE_OPERAND (arg1, 0);
12623 tree arg11 = TREE_OPERAND (arg1, 1);
12624 tree itype = TREE_TYPE (arg0);
12625
12626 if (operand_equal_p (arg01, arg11, 0))
12627 return fold_build2 (code, type,
12628 fold_build2 (BIT_AND_EXPR, itype,
12629 fold_build2 (BIT_XOR_EXPR, itype,
12630 arg00, arg10),
12631 arg01),
12632 build_int_cst (itype, 0));
12633
12634 if (operand_equal_p (arg01, arg10, 0))
12635 return fold_build2 (code, type,
12636 fold_build2 (BIT_AND_EXPR, itype,
12637 fold_build2 (BIT_XOR_EXPR, itype,
12638 arg00, arg11),
12639 arg01),
12640 build_int_cst (itype, 0));
12641
12642 if (operand_equal_p (arg00, arg11, 0))
12643 return fold_build2 (code, type,
12644 fold_build2 (BIT_AND_EXPR, itype,
12645 fold_build2 (BIT_XOR_EXPR, itype,
12646 arg01, arg10),
12647 arg00),
12648 build_int_cst (itype, 0));
12649
12650 if (operand_equal_p (arg00, arg10, 0))
12651 return fold_build2 (code, type,
12652 fold_build2 (BIT_AND_EXPR, itype,
12653 fold_build2 (BIT_XOR_EXPR, itype,
12654 arg01, arg11),
12655 arg00),
12656 build_int_cst (itype, 0));
12657 }
12658
12659 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12660 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12661 {
12662 tree arg00 = TREE_OPERAND (arg0, 0);
12663 tree arg01 = TREE_OPERAND (arg0, 1);
12664 tree arg10 = TREE_OPERAND (arg1, 0);
12665 tree arg11 = TREE_OPERAND (arg1, 1);
12666 tree itype = TREE_TYPE (arg0);
12667
12668 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12669 operand_equal_p guarantees no side-effects so we don't need
12670 to use omit_one_operand on Z. */
12671 if (operand_equal_p (arg01, arg11, 0))
12672 return fold_build2 (code, type, arg00, arg10);
12673 if (operand_equal_p (arg01, arg10, 0))
12674 return fold_build2 (code, type, arg00, arg11);
12675 if (operand_equal_p (arg00, arg11, 0))
12676 return fold_build2 (code, type, arg01, arg10);
12677 if (operand_equal_p (arg00, arg10, 0))
12678 return fold_build2 (code, type, arg01, arg11);
12679
12680 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12681 if (TREE_CODE (arg01) == INTEGER_CST
12682 && TREE_CODE (arg11) == INTEGER_CST)
12683 return fold_build2 (code, type,
12684 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12685 fold_build2 (BIT_XOR_EXPR, itype,
12686 arg01, arg11)),
12687 arg10);
12688 }
12689
12690 /* Attempt to simplify equality/inequality comparisons of complex
12691 values. Only lower the comparison if the result is known or
12692 can be simplified to a single scalar comparison. */
12693 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12694 || TREE_CODE (arg0) == COMPLEX_CST)
12695 && (TREE_CODE (arg1) == COMPLEX_EXPR
12696 || TREE_CODE (arg1) == COMPLEX_CST))
12697 {
12698 tree real0, imag0, real1, imag1;
12699 tree rcond, icond;
12700
12701 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12702 {
12703 real0 = TREE_OPERAND (arg0, 0);
12704 imag0 = TREE_OPERAND (arg0, 1);
12705 }
12706 else
12707 {
12708 real0 = TREE_REALPART (arg0);
12709 imag0 = TREE_IMAGPART (arg0);
12710 }
12711
12712 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12713 {
12714 real1 = TREE_OPERAND (arg1, 0);
12715 imag1 = TREE_OPERAND (arg1, 1);
12716 }
12717 else
12718 {
12719 real1 = TREE_REALPART (arg1);
12720 imag1 = TREE_IMAGPART (arg1);
12721 }
12722
12723 rcond = fold_binary (code, type, real0, real1);
12724 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12725 {
12726 if (integer_zerop (rcond))
12727 {
12728 if (code == EQ_EXPR)
12729 return omit_two_operands (type, boolean_false_node,
12730 imag0, imag1);
12731 return fold_build2 (NE_EXPR, type, imag0, imag1);
12732 }
12733 else
12734 {
12735 if (code == NE_EXPR)
12736 return omit_two_operands (type, boolean_true_node,
12737 imag0, imag1);
12738 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12739 }
12740 }
12741
12742 icond = fold_binary (code, type, imag0, imag1);
12743 if (icond && TREE_CODE (icond) == INTEGER_CST)
12744 {
12745 if (integer_zerop (icond))
12746 {
12747 if (code == EQ_EXPR)
12748 return omit_two_operands (type, boolean_false_node,
12749 real0, real1);
12750 return fold_build2 (NE_EXPR, type, real0, real1);
12751 }
12752 else
12753 {
12754 if (code == NE_EXPR)
12755 return omit_two_operands (type, boolean_true_node,
12756 real0, real1);
12757 return fold_build2 (EQ_EXPR, type, real0, real1);
12758 }
12759 }
12760 }
12761
12762 return NULL_TREE;
12763
12764 case LT_EXPR:
12765 case GT_EXPR:
12766 case LE_EXPR:
12767 case GE_EXPR:
12768 tem = fold_comparison (code, type, op0, op1);
12769 if (tem != NULL_TREE)
12770 return tem;
12771
12772 /* Transform comparisons of the form X +- C CMP X. */
12773 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12774 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12775 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12776 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12777 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12778 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12779 {
12780 tree arg01 = TREE_OPERAND (arg0, 1);
12781 enum tree_code code0 = TREE_CODE (arg0);
12782 int is_positive;
12783
12784 if (TREE_CODE (arg01) == REAL_CST)
12785 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12786 else
12787 is_positive = tree_int_cst_sgn (arg01);
12788
12789 /* (X - c) > X becomes false. */
12790 if (code == GT_EXPR
12791 && ((code0 == MINUS_EXPR && is_positive >= 0)
12792 || (code0 == PLUS_EXPR && is_positive <= 0)))
12793 {
12794 if (TREE_CODE (arg01) == INTEGER_CST
12795 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12796 fold_overflow_warning (("assuming signed overflow does not "
12797 "occur when assuming that (X - c) > X "
12798 "is always false"),
12799 WARN_STRICT_OVERFLOW_ALL);
12800 return constant_boolean_node (0, type);
12801 }
12802
12803 /* Likewise (X + c) < X becomes false. */
12804 if (code == LT_EXPR
12805 && ((code0 == PLUS_EXPR && is_positive >= 0)
12806 || (code0 == MINUS_EXPR && is_positive <= 0)))
12807 {
12808 if (TREE_CODE (arg01) == INTEGER_CST
12809 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12810 fold_overflow_warning (("assuming signed overflow does not "
12811 "occur when assuming that "
12812 "(X + c) < X is always false"),
12813 WARN_STRICT_OVERFLOW_ALL);
12814 return constant_boolean_node (0, type);
12815 }
12816
12817 /* Convert (X - c) <= X to true. */
12818 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12819 && code == LE_EXPR
12820 && ((code0 == MINUS_EXPR && is_positive >= 0)
12821 || (code0 == PLUS_EXPR && is_positive <= 0)))
12822 {
12823 if (TREE_CODE (arg01) == INTEGER_CST
12824 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12825 fold_overflow_warning (("assuming signed overflow does not "
12826 "occur when assuming that "
12827 "(X - c) <= X is always true"),
12828 WARN_STRICT_OVERFLOW_ALL);
12829 return constant_boolean_node (1, type);
12830 }
12831
12832 /* Convert (X + c) >= X to true. */
12833 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12834 && code == GE_EXPR
12835 && ((code0 == PLUS_EXPR && is_positive >= 0)
12836 || (code0 == MINUS_EXPR && is_positive <= 0)))
12837 {
12838 if (TREE_CODE (arg01) == INTEGER_CST
12839 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12840 fold_overflow_warning (("assuming signed overflow does not "
12841 "occur when assuming that "
12842 "(X + c) >= X is always true"),
12843 WARN_STRICT_OVERFLOW_ALL);
12844 return constant_boolean_node (1, type);
12845 }
12846
12847 if (TREE_CODE (arg01) == INTEGER_CST)
12848 {
12849 /* Convert X + c > X and X - c < X to true for integers. */
12850 if (code == GT_EXPR
12851 && ((code0 == PLUS_EXPR && is_positive > 0)
12852 || (code0 == MINUS_EXPR && is_positive < 0)))
12853 {
12854 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12855 fold_overflow_warning (("assuming signed overflow does "
12856 "not occur when assuming that "
12857 "(X + c) > X is always true"),
12858 WARN_STRICT_OVERFLOW_ALL);
12859 return constant_boolean_node (1, type);
12860 }
12861
12862 if (code == LT_EXPR
12863 && ((code0 == MINUS_EXPR && is_positive > 0)
12864 || (code0 == PLUS_EXPR && is_positive < 0)))
12865 {
12866 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12867 fold_overflow_warning (("assuming signed overflow does "
12868 "not occur when assuming that "
12869 "(X - c) < X is always true"),
12870 WARN_STRICT_OVERFLOW_ALL);
12871 return constant_boolean_node (1, type);
12872 }
12873
12874 /* Convert X + c <= X and X - c >= X to false for integers. */
12875 if (code == LE_EXPR
12876 && ((code0 == PLUS_EXPR && is_positive > 0)
12877 || (code0 == MINUS_EXPR && is_positive < 0)))
12878 {
12879 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12880 fold_overflow_warning (("assuming signed overflow does "
12881 "not occur when assuming that "
12882 "(X + c) <= X is always false"),
12883 WARN_STRICT_OVERFLOW_ALL);
12884 return constant_boolean_node (0, type);
12885 }
12886
12887 if (code == GE_EXPR
12888 && ((code0 == MINUS_EXPR && is_positive > 0)
12889 || (code0 == PLUS_EXPR && is_positive < 0)))
12890 {
12891 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12892 fold_overflow_warning (("assuming signed overflow does "
12893 "not occur when assuming that "
12894 "(X - c) >= X is always false"),
12895 WARN_STRICT_OVERFLOW_ALL);
12896 return constant_boolean_node (0, type);
12897 }
12898 }
12899 }
12900
12901 /* Comparisons with the highest or lowest possible integer of
12902 the specified precision will have known values. */
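      /* E.g., for a 32-bit unsigned x, x > 0xffffffffU folds to false
	 and x <= 0xffffffffU to true; x >= 0xffffffffU degenerates to
	 x == 0xffffffffU, and comparisons against the minimum (or
	 against max - 1 / min + 1) are handled analogously below.  */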
12903 {
12904 tree arg1_type = TREE_TYPE (arg1);
12905 unsigned int width = TYPE_PRECISION (arg1_type);
12906
12907 if (TREE_CODE (arg1) == INTEGER_CST
12908 && width <= 2 * HOST_BITS_PER_WIDE_INT
12909 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12910 {
12911 HOST_WIDE_INT signed_max_hi;
12912 unsigned HOST_WIDE_INT signed_max_lo;
12913 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12914
12915 if (width <= HOST_BITS_PER_WIDE_INT)
12916 {
12917 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12918 - 1;
12919 signed_max_hi = 0;
12920 max_hi = 0;
12921
12922 if (TYPE_UNSIGNED (arg1_type))
12923 {
12924 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12925 min_lo = 0;
12926 min_hi = 0;
12927 }
12928 else
12929 {
12930 max_lo = signed_max_lo;
12931 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12932 min_hi = -1;
12933 }
12934 }
12935 else
12936 {
12937 width -= HOST_BITS_PER_WIDE_INT;
12938 signed_max_lo = -1;
12939 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12940 - 1;
12941 max_lo = -1;
12942 min_lo = 0;
12943
12944 if (TYPE_UNSIGNED (arg1_type))
12945 {
12946 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12947 min_hi = 0;
12948 }
12949 else
12950 {
12951 max_hi = signed_max_hi;
12952 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12953 }
12954 }
12955
12956 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12957 && TREE_INT_CST_LOW (arg1) == max_lo)
12958 switch (code)
12959 {
12960 case GT_EXPR:
12961 return omit_one_operand (type, integer_zero_node, arg0);
12962
12963 case GE_EXPR:
12964 return fold_build2 (EQ_EXPR, type, op0, op1);
12965
12966 case LE_EXPR:
12967 return omit_one_operand (type, integer_one_node, arg0);
12968
12969 case LT_EXPR:
12970 return fold_build2 (NE_EXPR, type, op0, op1);
12971
12972 /* The GE_EXPR and LT_EXPR cases above are not normally
12973 reached because of previous transformations. */
12974
12975 default:
12976 break;
12977 }
12978 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12979 == max_hi
12980 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12981 switch (code)
12982 {
12983 case GT_EXPR:
12984 arg1 = const_binop (PLUS_EXPR, arg1,
12985 build_int_cst (TREE_TYPE (arg1), 1), 0);
12986 return fold_build2 (EQ_EXPR, type,
12987 fold_convert (TREE_TYPE (arg1), arg0),
12988 arg1);
12989 case LE_EXPR:
12990 arg1 = const_binop (PLUS_EXPR, arg1,
12991 build_int_cst (TREE_TYPE (arg1), 1), 0);
12992 return fold_build2 (NE_EXPR, type,
12993 fold_convert (TREE_TYPE (arg1), arg0),
12994 arg1);
12995 default:
12996 break;
12997 }
12998 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12999 == min_hi
13000 && TREE_INT_CST_LOW (arg1) == min_lo)
13001 switch (code)
13002 {
13003 case LT_EXPR:
13004 return omit_one_operand (type, integer_zero_node, arg0);
13005
13006 case LE_EXPR:
13007 return fold_build2 (EQ_EXPR, type, op0, op1);
13008
13009 case GE_EXPR:
13010 return omit_one_operand (type, integer_one_node, arg0);
13011
13012 case GT_EXPR:
13013 return fold_build2 (NE_EXPR, type, op0, op1);
13014
13015 default:
13016 break;
13017 }
13018 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13019 == min_hi
13020 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13021 switch (code)
13022 {
13023 case GE_EXPR:
13024 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13025 return fold_build2 (NE_EXPR, type,
13026 fold_convert (TREE_TYPE (arg1), arg0),
13027 arg1);
13028 case LT_EXPR:
13029 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13030 return fold_build2 (EQ_EXPR, type,
13031 fold_convert (TREE_TYPE (arg1), arg0),
13032 arg1);
13033 default:
13034 break;
13035 }
13036
13037 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13038 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13039 && TYPE_UNSIGNED (arg1_type)
13040 /* We will flip the signedness of the comparison operator
13041 associated with the mode of arg1, so the sign bit is
13042 specified by this mode. Check that arg1 is the signed
13043 max associated with this sign bit. */
13044 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13045 /* signed_type does not work on pointer types. */
13046 && INTEGRAL_TYPE_P (arg1_type))
13047 {
13048 /* The following case also applies to X < signed_max+1
13049 		 and X >= signed_max+1 because of previous transformations.  */
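	      /* E.g., for a 32-bit unsigned x, x > 0x7fffffffU becomes
		 (int) x < 0 and x <= 0x7fffffffU becomes (int) x >= 0,
		 turning a compare against the signed maximum into a
		 sign-bit test.  */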
13050 if (code == LE_EXPR || code == GT_EXPR)
13051 {
13052 tree st;
13053 st = signed_type_for (TREE_TYPE (arg1));
13054 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13055 type, fold_convert (st, arg0),
13056 build_int_cst (st, 0));
13057 }
13058 }
13059 }
13060 }
13061
13062 /* If we are comparing an ABS_EXPR with a constant, we can
13063 convert all the cases into explicit comparisons, but they may
13064 well not be faster than doing the ABS and one comparison.
13065 But ABS (X) <= C is a range comparison, which becomes a subtraction
13066 and a comparison, and is probably faster. */
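      /* E.g., abs (x) <= 5 becomes x >= -5 && x <= 5 (provided the
	 negated bound does not overflow).  */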
13067 if (code == LE_EXPR
13068 && TREE_CODE (arg1) == INTEGER_CST
13069 && TREE_CODE (arg0) == ABS_EXPR
13070 && ! TREE_SIDE_EFFECTS (arg0)
13071 && (0 != (tem = negate_expr (arg1)))
13072 && TREE_CODE (tem) == INTEGER_CST
13073 && !TREE_OVERFLOW (tem))
13074 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13075 build2 (GE_EXPR, type,
13076 TREE_OPERAND (arg0, 0), tem),
13077 build2 (LE_EXPR, type,
13078 TREE_OPERAND (arg0, 0), arg1));
13079
13080 /* Convert ABS_EXPR<x> >= 0 to true. */
13081 strict_overflow_p = false;
13082 if (code == GE_EXPR
13083 && (integer_zerop (arg1)
13084 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13085 && real_zerop (arg1)))
13086 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13087 {
13088 if (strict_overflow_p)
13089 fold_overflow_warning (("assuming signed overflow does not occur "
13090 "when simplifying comparison of "
13091 "absolute value and zero"),
13092 WARN_STRICT_OVERFLOW_CONDITIONAL);
13093 return omit_one_operand (type, integer_one_node, arg0);
13094 }
13095
13096 /* Convert ABS_EXPR<x> < 0 to false. */
13097 strict_overflow_p = false;
13098 if (code == LT_EXPR
13099 && (integer_zerop (arg1) || real_zerop (arg1))
13100 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13101 {
13102 if (strict_overflow_p)
13103 fold_overflow_warning (("assuming signed overflow does not occur "
13104 "when simplifying comparison of "
13105 "absolute value and zero"),
13106 WARN_STRICT_OVERFLOW_CONDITIONAL);
13107 return omit_one_operand (type, integer_zero_node, arg0);
13108 }
13109
13110 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13111 and similarly for >= into !=. */
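      /* For an unsigned x (and a shift count within the type's width),
	 x < (1 << y) holds exactly when no bit at position y or above
	 is set, i.e. when x >> y == 0; likewise x >= (1 << y) becomes
	 x >> y != 0.  */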
13112 if ((code == LT_EXPR || code == GE_EXPR)
13113 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13114 && TREE_CODE (arg1) == LSHIFT_EXPR
13115 && integer_onep (TREE_OPERAND (arg1, 0)))
13116 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13117 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13118 TREE_OPERAND (arg1, 1)),
13119 build_int_cst (TREE_TYPE (arg0), 0));
13120
13121 if ((code == LT_EXPR || code == GE_EXPR)
13122 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13123 && CONVERT_EXPR_P (arg1)
13124 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13125 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13126 return
13127 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13128 fold_convert (TREE_TYPE (arg0),
13129 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13130 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13131 1))),
13132 build_int_cst (TREE_TYPE (arg0), 0));
13133
13134 return NULL_TREE;
13135
13136 case UNORDERED_EXPR:
13137 case ORDERED_EXPR:
13138 case UNLT_EXPR:
13139 case UNLE_EXPR:
13140 case UNGT_EXPR:
13141 case UNGE_EXPR:
13142 case UNEQ_EXPR:
13143 case LTGT_EXPR:
13144 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13145 {
13146 t1 = fold_relational_const (code, type, arg0, arg1);
13147 if (t1 != NULL_TREE)
13148 return t1;
13149 }
13150
13151 /* If the first operand is NaN, the result is constant. */
13152 if (TREE_CODE (arg0) == REAL_CST
13153 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13154 && (code != LTGT_EXPR || ! flag_trapping_math))
13155 {
13156 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13157 ? integer_zero_node
13158 : integer_one_node;
13159 return omit_one_operand (type, t1, arg1);
13160 }
13161
13162 /* If the second operand is NaN, the result is constant. */
13163 if (TREE_CODE (arg1) == REAL_CST
13164 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13165 && (code != LTGT_EXPR || ! flag_trapping_math))
13166 {
13167 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13168 ? integer_zero_node
13169 : integer_one_node;
13170 return omit_one_operand (type, t1, arg0);
13171 }
13172
13173 /* Simplify unordered comparison of something with itself. */
13174 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13175 && operand_equal_p (arg0, arg1, 0))
13176 return constant_boolean_node (1, type);
13177
13178 if (code == LTGT_EXPR
13179 && !flag_trapping_math
13180 && operand_equal_p (arg0, arg1, 0))
13181 return constant_boolean_node (0, type);
13182
13183 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13184 {
13185 tree targ0 = strip_float_extensions (arg0);
13186 tree targ1 = strip_float_extensions (arg1);
13187 tree newtype = TREE_TYPE (targ0);
13188
13189 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13190 newtype = TREE_TYPE (targ1);
13191
13192 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13193 return fold_build2 (code, type, fold_convert (newtype, targ0),
13194 fold_convert (newtype, targ1));
13195 }
13196
13197 return NULL_TREE;
13198
13199 case COMPOUND_EXPR:
13200 /* When pedantic, a compound expression can be neither an lvalue
13201 nor an integer constant expression. */
13202 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13203 return NULL_TREE;
13204       /* Don't let (0, 0) be a null pointer constant.  */
13205 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13206 : fold_convert (type, arg1);
13207 return pedantic_non_lvalue (tem);
13208
13209 case COMPLEX_EXPR:
13210 if ((TREE_CODE (arg0) == REAL_CST
13211 && TREE_CODE (arg1) == REAL_CST)
13212 || (TREE_CODE (arg0) == INTEGER_CST
13213 && TREE_CODE (arg1) == INTEGER_CST))
13214 return build_complex (type, arg0, arg1);
13215 return NULL_TREE;
13216
13217 case ASSERT_EXPR:
13218 /* An ASSERT_EXPR should never be passed to fold_binary. */
13219 gcc_unreachable ();
13220
13221 default:
13222 return NULL_TREE;
13223 } /* switch (code) */
13224 }
13225
13226 /* Callback for walk_tree, looking for LABEL_EXPR.
13227    Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
13228 Do not check the sub-tree of GOTO_EXPR. */
13229
13230 static tree
13231 contains_label_1 (tree *tp,
13232 int *walk_subtrees,
13233 void *data ATTRIBUTE_UNUSED)
13234 {
13235 switch (TREE_CODE (*tp))
13236 {
13237 case LABEL_EXPR:
13238 return *tp;
13239 case GOTO_EXPR:
13240 *walk_subtrees = 0;
13241 /* no break */
13242 default:
13243 return NULL_TREE;
13244 }
13245 }
13246
13247 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
13248    accessible from outside the sub-tree.  Returns true if such a
13249    label is found, false otherwise.  */
13250
13251 static bool
13252 contains_label_p (tree st)
13253 {
13254 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13255 }
13256
13257 /* Fold a ternary expression of code CODE and type TYPE with operands
13258 OP0, OP1, and OP2. Return the folded expression if folding is
13259 successful. Otherwise, return NULL_TREE. */
13260
13261 tree
13262 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13263 {
13264 tree tem;
13265 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13266 enum tree_code_class kind = TREE_CODE_CLASS (code);
13267
13268 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13269 && TREE_CODE_LENGTH (code) == 3);
13270
13271 /* Strip any conversions that don't change the mode. This is safe
13272 for every expression, except for a comparison expression because
13273 its signedness is derived from its operands. So, in the latter
13274 case, only strip conversions that don't change the signedness.
13275
13276 Note that this is done as an internal manipulation within the
13277 constant folder, in order to find the simplest representation of
13278 the arguments so that their form can be studied. In any cases,
13279 the appropriate type conversions should be put back in the tree
13280 that will get out of the constant folder. */
13281 if (op0)
13282 {
13283 arg0 = op0;
13284 STRIP_NOPS (arg0);
13285 }
13286
13287 if (op1)
13288 {
13289 arg1 = op1;
13290 STRIP_NOPS (arg1);
13291 }
13292
13293 switch (code)
13294 {
13295 case COMPONENT_REF:
13296 if (TREE_CODE (arg0) == CONSTRUCTOR
13297 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13298 {
13299 unsigned HOST_WIDE_INT idx;
13300 tree field, value;
13301 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13302 if (field == arg1)
13303 return value;
13304 }
13305 return NULL_TREE;
13306
13307 case COND_EXPR:
13308 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13309 so all simple results must be passed through pedantic_non_lvalue. */
13310 if (TREE_CODE (arg0) == INTEGER_CST)
13311 {
13312 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13313 tem = integer_zerop (arg0) ? op2 : op1;
13314 /* Only optimize constant conditions when the selected branch
13315 has the same type as the COND_EXPR. This avoids optimizing
13316 away "c ? x : throw", where the throw has a void type.
13317 	     Avoid throwing away the operand that contains a label.  */
13318 if ((!TREE_SIDE_EFFECTS (unused_op)
13319 || !contains_label_p (unused_op))
13320 && (! VOID_TYPE_P (TREE_TYPE (tem))
13321 || VOID_TYPE_P (type)))
13322 return pedantic_non_lvalue (tem);
13323 return NULL_TREE;
13324 }
13325 if (operand_equal_p (arg1, op2, 0))
13326 return pedantic_omit_one_operand (type, arg1, arg0);
13327
13328 /* If we have A op B ? A : C, we may be able to convert this to a
13329 simpler expression, depending on the operation and the values
13330 of B and C. Signed zeros prevent all of these transformations,
13331 for reasons given above each one.
13332
13333 Also try swapping the arguments and inverting the conditional. */
13334 if (COMPARISON_CLASS_P (arg0)
13335 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13336 arg1, TREE_OPERAND (arg0, 1))
13337 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13338 {
13339 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13340 if (tem)
13341 return tem;
13342 }
13343
13344 if (COMPARISON_CLASS_P (arg0)
13345 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13346 op2,
13347 TREE_OPERAND (arg0, 1))
13348 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13349 {
13350 tem = fold_truth_not_expr (arg0);
13351 if (tem && COMPARISON_CLASS_P (tem))
13352 {
13353 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13354 if (tem)
13355 return tem;
13356 }
13357 }
13358
13359 /* If the second operand is simpler than the third, swap them
13360 since that produces better jump optimization results. */
13361 if (truth_value_p (TREE_CODE (arg0))
13362 && tree_swap_operands_p (op1, op2, false))
13363 {
13364 /* See if this can be inverted. If it can't, possibly because
13365 it was a floating-point inequality comparison, don't do
13366 anything. */
13367 tem = fold_truth_not_expr (arg0);
13368 if (tem)
13369 return fold_build3 (code, type, tem, op2, op1);
13370 }
13371
13372 /* Convert A ? 1 : 0 to simply A. */
13373 if (integer_onep (op1)
13374 && integer_zerop (op2)
13375 /* If we try to convert OP0 to our type, the
13376 call to fold will try to move the conversion inside
13377 a COND, which will recurse. In that case, the COND_EXPR
13378 is probably the best choice, so leave it alone. */
13379 && type == TREE_TYPE (arg0))
13380 return pedantic_non_lvalue (arg0);
13381
13382 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13383 over COND_EXPR in cases such as floating point comparisons. */
13384 if (integer_zerop (op1)
13385 && integer_onep (op2)
13386 && truth_value_p (TREE_CODE (arg0)))
13387 return pedantic_non_lvalue (fold_convert (type,
13388 invert_truthvalue (arg0)));
13389
13390 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
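      /* E.g., for a 32-bit int a, a < 0 ? 0x80000000 : 0 selects
	 exactly the sign bit of a and is therefore a & 0x80000000.  */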
13391 if (TREE_CODE (arg0) == LT_EXPR
13392 && integer_zerop (TREE_OPERAND (arg0, 1))
13393 && integer_zerop (op2)
13394 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13395 {
13396 /* sign_bit_p only checks ARG1 bits within A's precision.
13397 If <sign bit of A> has wider type than A, bits outside
13398 of A's precision in <sign bit of A> need to be checked.
13399 	     If they are all 0, this optimization needs to be done
13400 	     in unsigned A's type; if they are all 1, in signed A's type;
13401 	     otherwise this can't be done.  */
13402 if (TYPE_PRECISION (TREE_TYPE (tem))
13403 < TYPE_PRECISION (TREE_TYPE (arg1))
13404 && TYPE_PRECISION (TREE_TYPE (tem))
13405 < TYPE_PRECISION (type))
13406 {
13407 unsigned HOST_WIDE_INT mask_lo;
13408 HOST_WIDE_INT mask_hi;
13409 int inner_width, outer_width;
13410 tree tem_type;
13411
13412 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13413 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13414 if (outer_width > TYPE_PRECISION (type))
13415 outer_width = TYPE_PRECISION (type);
13416
13417 if (outer_width > HOST_BITS_PER_WIDE_INT)
13418 {
13419 mask_hi = ((unsigned HOST_WIDE_INT) -1
13420 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13421 mask_lo = -1;
13422 }
13423 else
13424 {
13425 mask_hi = 0;
13426 mask_lo = ((unsigned HOST_WIDE_INT) -1
13427 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13428 }
13429 if (inner_width > HOST_BITS_PER_WIDE_INT)
13430 {
13431 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13432 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13433 mask_lo = 0;
13434 }
13435 else
13436 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13437 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13438
13439 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13440 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13441 {
13442 tem_type = signed_type_for (TREE_TYPE (tem));
13443 tem = fold_convert (tem_type, tem);
13444 }
13445 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13446 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13447 {
13448 tem_type = unsigned_type_for (TREE_TYPE (tem));
13449 tem = fold_convert (tem_type, tem);
13450 }
13451 else
13452 tem = NULL;
13453 }
13454
13455 if (tem)
13456 return fold_convert (type,
13457 fold_build2 (BIT_AND_EXPR,
13458 TREE_TYPE (tem), tem,
13459 fold_convert (TREE_TYPE (tem),
13460 arg1)));
13461 }
13462
13463 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13464 already handled above. */
13465 if (TREE_CODE (arg0) == BIT_AND_EXPR
13466 && integer_onep (TREE_OPERAND (arg0, 1))
13467 && integer_zerop (op2)
13468 && integer_pow2p (arg1))
13469 {
13470 tree tem = TREE_OPERAND (arg0, 0);
13471 STRIP_NOPS (tem);
13472 if (TREE_CODE (tem) == RSHIFT_EXPR
13473 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13474 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13475 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13476 return fold_build2 (BIT_AND_EXPR, type,
13477 TREE_OPERAND (tem, 0), arg1);
13478 }
13479
13480 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13481 is probably obsolete because the first operand should be a
13482 truth value (that's why we have the two cases above), but let's
13483 leave it in until we can confirm this for all front-ends. */
13484 if (integer_zerop (op2)
13485 && TREE_CODE (arg0) == NE_EXPR
13486 && integer_zerop (TREE_OPERAND (arg0, 1))
13487 && integer_pow2p (arg1)
13488 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13489 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13490 arg1, OEP_ONLY_CONST))
13491 return pedantic_non_lvalue (fold_convert (type,
13492 TREE_OPERAND (arg0, 0)));
13493
13494 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13495 if (integer_zerop (op2)
13496 && truth_value_p (TREE_CODE (arg0))
13497 && truth_value_p (TREE_CODE (arg1)))
13498 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13499 fold_convert (type, arg0),
13500 arg1);
13501
13502 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13503 if (integer_onep (op2)
13504 && truth_value_p (TREE_CODE (arg0))
13505 && truth_value_p (TREE_CODE (arg1)))
13506 {
13507 /* Only perform transformation if ARG0 is easily inverted. */
13508 tem = fold_truth_not_expr (arg0);
13509 if (tem)
13510 return fold_build2 (TRUTH_ORIF_EXPR, type,
13511 fold_convert (type, tem),
13512 arg1);
13513 }
13514
13515 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13516 if (integer_zerop (arg1)
13517 && truth_value_p (TREE_CODE (arg0))
13518 && truth_value_p (TREE_CODE (op2)))
13519 {
13520 /* Only perform transformation if ARG0 is easily inverted. */
13521 tem = fold_truth_not_expr (arg0);
13522 if (tem)
13523 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13524 fold_convert (type, tem),
13525 op2);
13526 }
13527
13528 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13529 if (integer_onep (arg1)
13530 && truth_value_p (TREE_CODE (arg0))
13531 && truth_value_p (TREE_CODE (op2)))
13532 return fold_build2 (TRUTH_ORIF_EXPR, type,
13533 fold_convert (type, arg0),
13534 op2);
13535
13536 return NULL_TREE;
13537
13538 case CALL_EXPR:
13539 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13540 of fold_ternary on them. */
13541 gcc_unreachable ();
13542
13543 case BIT_FIELD_REF:
13544 if ((TREE_CODE (arg0) == VECTOR_CST
13545 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13546 && type == TREE_TYPE (TREE_TYPE (arg0)))
13547 {
13548 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13549 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13550
13551 if (width != 0
13552 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13553 && (idx % width) == 0
13554 && (idx = idx / width)
13555 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13556 {
13557 tree elements = NULL_TREE;
13558
13559 if (TREE_CODE (arg0) == VECTOR_CST)
13560 elements = TREE_VECTOR_CST_ELTS (arg0);
13561 else
13562 {
13563 unsigned HOST_WIDE_INT idx;
13564 tree value;
13565
13566 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13567 elements = tree_cons (NULL_TREE, value, elements);
13568 }
13569 while (idx-- > 0 && elements)
13570 elements = TREE_CHAIN (elements);
13571 if (elements)
13572 return TREE_VALUE (elements);
13573 else
13574 return fold_convert (type, integer_zero_node);
13575 }
13576 }
13577
13578 /* A BIT_FIELD_REF that references the full argument can be stripped. */
13579 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13580 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13581 && integer_zerop (op2))
13582 return fold_convert (type, arg0);
13583
13584 return NULL_TREE;
13585
13586 default:
13587 return NULL_TREE;
13588 } /* switch (code) */
13589 }
13590
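/* Illustrative sketch (editorial addition, not part of GCC): a standalone
   program checking, at the C source level, several of the COND_EXPR
   identities fold_ternary applies above.  It assumes two's complement
   and the usual arithmetic right shift of negative values.  Guarded by
   #if 0 so it is not compiled into this file; extract it to build it
   on its own.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int vals[] = { INT_MIN, -7, -1, 0, 1, 42, INT_MAX };
  unsigned i, n;

  for (i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      int a = vals[i];

      /* A ? 0 : 1  is  !A.  */
      assert ((a ? 0 : 1) == !a);

      /* A < 0 ? <sign bit of A> : 0  is  A & <sign bit of A>.  */
      assert ((a < 0 ? INT_MIN : 0) == (a & INT_MIN));

      /* (A >> N) & 1 ? (1 << N) : 0  is  A & (1 << N).  */
      for (n = 0; n < 8; n++)
        assert ((((a >> n) & 1) ? (1 << n) : 0) == (a & (1 << n)));
    }
  return 0;
}
#endif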
13591 /* Perform constant folding and related simplification of EXPR.
13592 The related simplifications include x*1 => x, x*0 => 0, etc.,
13593 and application of the associative law.
13594 NOP_EXPR conversions may be removed freely (as long as we
13595 are careful not to change the type of the overall expression).
13596 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13597 but we can constant-fold them if they have constant operands. */
13598
13599 #ifdef ENABLE_FOLD_CHECKING
13600 # define fold(x) fold_1 (x)
13601 static tree fold_1 (tree);
13602 static
13603 #endif
13604 tree
13605 fold (tree expr)
13606 {
13607 const tree t = expr;
13608 enum tree_code code = TREE_CODE (t);
13609 enum tree_code_class kind = TREE_CODE_CLASS (code);
13610 tree tem;
13611
13612 /* Return right away if a constant. */
13613 if (kind == tcc_constant)
13614 return t;
13615
13616 /* CALL_EXPR-like objects with variable numbers of operands are
13617 treated specially. */
13618 if (kind == tcc_vl_exp)
13619 {
13620 if (code == CALL_EXPR)
13621 {
13622 tem = fold_call_expr (expr, false);
13623 return tem ? tem : expr;
13624 }
13625 return expr;
13626 }
13627
13628 if (IS_EXPR_CODE_CLASS (kind))
13629 {
13630 tree type = TREE_TYPE (t);
13631 tree op0, op1, op2;
13632
13633 switch (TREE_CODE_LENGTH (code))
13634 {
13635 case 1:
13636 op0 = TREE_OPERAND (t, 0);
13637 tem = fold_unary (code, type, op0);
13638 return tem ? tem : expr;
13639 case 2:
13640 op0 = TREE_OPERAND (t, 0);
13641 op1 = TREE_OPERAND (t, 1);
13642 tem = fold_binary (code, type, op0, op1);
13643 return tem ? tem : expr;
13644 case 3:
13645 op0 = TREE_OPERAND (t, 0);
13646 op1 = TREE_OPERAND (t, 1);
13647 op2 = TREE_OPERAND (t, 2);
13648 tem = fold_ternary (code, type, op0, op1, op2);
13649 return tem ? tem : expr;
13650 default:
13651 break;
13652 }
13653 }
13654
13655 switch (code)
13656 {
13657 case ARRAY_REF:
13658 {
13659 tree op0 = TREE_OPERAND (t, 0);
13660 tree op1 = TREE_OPERAND (t, 1);
13661
13662 if (TREE_CODE (op1) == INTEGER_CST
13663 && TREE_CODE (op0) == CONSTRUCTOR
13664 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13665 {
13666 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13667 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13668 unsigned HOST_WIDE_INT begin = 0;
13669
13670 /* Find a matching index by means of a binary search. */
13671 while (begin != end)
13672 {
13673 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13674 tree index = VEC_index (constructor_elt, elts, middle)->index;
13675
13676 if (TREE_CODE (index) == INTEGER_CST
13677 && tree_int_cst_lt (index, op1))
13678 begin = middle + 1;
13679 else if (TREE_CODE (index) == INTEGER_CST
13680 && tree_int_cst_lt (op1, index))
13681 end = middle;
13682 else if (TREE_CODE (index) == RANGE_EXPR
13683 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13684 begin = middle + 1;
13685 else if (TREE_CODE (index) == RANGE_EXPR
13686 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13687 end = middle;
13688 else
13689 return VEC_index (constructor_elt, elts, middle)->value;
13690 }
13691 }
13692
13693 return t;
13694 }
13695
13696 case CONST_DECL:
13697 return fold (DECL_INITIAL (t));
13698
13699 default:
13700 return t;
13701 } /* switch (code) */
13702 }
13703
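/* Illustrative sketch (editorial addition, not part of GCC): the binary
   search over sorted constructor indices used by the ARRAY_REF case
   above, restated over a plain array of index ranges.  A single index
   is a range with lo == hi.  All names here are hypothetical.  Guarded
   by #if 0; extract to compile.  */
#if 0
#include <assert.h>

struct elt { long lo, hi; int value; };

static int
find_elt (const struct elt *elts, unsigned nelts, long index, int dflt)
{
  unsigned begin = 0, end = nelts;

  while (begin != end)
    {
      unsigned middle = (begin + end) / 2;

      if (elts[middle].hi < index)
        begin = middle + 1;          /* Whole range below INDEX.  */
      else if (index < elts[middle].lo)
        end = middle;                /* Whole range above INDEX.  */
      else
        return elts[middle].value;   /* INDEX falls inside the range.  */
    }
  return dflt;
}

int
main (void)
{
  struct elt elts[] = { { 0, 0, 10 }, { 1, 4, 20 }, { 7, 7, 30 } };

  assert (find_elt (elts, 3, 0, -1) == 10);
  assert (find_elt (elts, 3, 3, -1) == 20);
  assert (find_elt (elts, 3, 5, -1) == -1);
  assert (find_elt (elts, 3, 7, -1) == 30);
  return 0;
}
#endif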
13704 #ifdef ENABLE_FOLD_CHECKING
13705 #undef fold
13706
13707 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13708 static void fold_check_failed (const_tree, const_tree);
13709 void print_fold_checksum (const_tree);
13710
13711 /* When --enable-checking=fold is used, compute a digest of EXPR
13712 before and after the actual fold call to verify that fold did not
13713 accidentally change the original expression. */
13714
13715 tree
13716 fold (tree expr)
13717 {
13718 tree ret;
13719 struct md5_ctx ctx;
13720 unsigned char checksum_before[16], checksum_after[16];
13721 htab_t ht;
13722
13723 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13724 md5_init_ctx (&ctx);
13725 fold_checksum_tree (expr, &ctx, ht);
13726 md5_finish_ctx (&ctx, checksum_before);
13727 htab_empty (ht);
13728
13729 ret = fold_1 (expr);
13730
13731 md5_init_ctx (&ctx);
13732 fold_checksum_tree (expr, &ctx, ht);
13733 md5_finish_ctx (&ctx, checksum_after);
13734 htab_delete (ht);
13735
13736 if (memcmp (checksum_before, checksum_after, 16))
13737 fold_check_failed (expr, ret);
13738
13739 return ret;
13740 }
13741
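/* Illustrative sketch (editorial addition, not part of GCC): the
   before/after digest pattern used by the checking fold above, restated
   over a plain byte buffer, with FNV-1a standing in for MD5 to keep the
   sketch self-contained.  Guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stddef.h>
#include <stdint.h>

static uint64_t
fnv1a (const unsigned char *p, size_t len)
{
  uint64_t h = 14695981039346656037ull;
  size_t i;

  for (i = 0; i < len; i++)
    h = (h ^ p[i]) * 1099511628211ull;
  return h;
}

/* A transform that is supposed to treat BUF as read-only.  */
static int
transform (const unsigned char *buf, size_t len)
{
  return len ? buf[0] : 0;
}

int
main (void)
{
  unsigned char buf[] = { 1, 2, 3, 4 };
  uint64_t before = fnv1a (buf, sizeof buf);
  int ret = transform (buf, sizeof buf);
  uint64_t after = fnv1a (buf, sizeof buf);

  /* The analogue of fold_check_failed: the input must be unchanged.  */
  assert (before == after);
  return ret == 1 ? 0 : 1;
}
#endif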
13742 void
13743 print_fold_checksum (const_tree expr)
13744 {
13745 struct md5_ctx ctx;
13746 unsigned char checksum[16], cnt;
13747 htab_t ht;
13748
13749 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13750 md5_init_ctx (&ctx);
13751 fold_checksum_tree (expr, &ctx, ht);
13752 md5_finish_ctx (&ctx, checksum);
13753 htab_delete (ht);
13754 for (cnt = 0; cnt < 16; ++cnt)
13755 fprintf (stderr, "%02x", checksum[cnt]);
13756 putc ('\n', stderr);
13757 }
13758
13759 static void
13760 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13761 {
13762 internal_error ("fold check: original tree changed by fold");
13763 }
13764
13765 static void
13766 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13767 {
13768 const void **slot;
13769 enum tree_code code;
13770 union tree_node buf;
13771 int i, len;
13772
13773 recursive_label:
13774
13775 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13776 <= sizeof (struct tree_function_decl))
13777 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13778 if (expr == NULL)
13779 return;
13780 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13781 if (*slot != NULL)
13782 return;
13783 *slot = expr;
13784 code = TREE_CODE (expr);
13785 if (TREE_CODE_CLASS (code) == tcc_declaration
13786 && DECL_ASSEMBLER_NAME_SET_P (expr))
13787 {
13788 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13789 memcpy ((char *) &buf, expr, tree_size (expr));
13790 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13791 expr = (tree) &buf;
13792 }
13793 else if (TREE_CODE_CLASS (code) == tcc_type
13794 && (TYPE_POINTER_TO (expr)
13795 || TYPE_REFERENCE_TO (expr)
13796 || TYPE_CACHED_VALUES_P (expr)
13797 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13798 || TYPE_NEXT_VARIANT (expr)))
13799 {
13800 /* Allow these fields to be modified. */
13801 tree tmp;
13802 memcpy ((char *) &buf, expr, tree_size (expr));
13803 expr = tmp = (tree) &buf;
13804 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13805 TYPE_POINTER_TO (tmp) = NULL;
13806 TYPE_REFERENCE_TO (tmp) = NULL;
13807 TYPE_NEXT_VARIANT (tmp) = NULL;
13808 if (TYPE_CACHED_VALUES_P (tmp))
13809 {
13810 TYPE_CACHED_VALUES_P (tmp) = 0;
13811 TYPE_CACHED_VALUES (tmp) = NULL;
13812 }
13813 }
13814 md5_process_bytes (expr, tree_size (expr), ctx);
13815 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13816 if (TREE_CODE_CLASS (code) != tcc_type
13817 && TREE_CODE_CLASS (code) != tcc_declaration
13818 && code != TREE_LIST
13819 && code != SSA_NAME)
13820 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13821 switch (TREE_CODE_CLASS (code))
13822 {
13823 case tcc_constant:
13824 switch (code)
13825 {
13826 case STRING_CST:
13827 md5_process_bytes (TREE_STRING_POINTER (expr),
13828 TREE_STRING_LENGTH (expr), ctx);
13829 break;
13830 case COMPLEX_CST:
13831 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13832 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13833 break;
13834 case VECTOR_CST:
13835 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13836 break;
13837 default:
13838 break;
13839 }
13840 break;
13841 case tcc_exceptional:
13842 switch (code)
13843 {
13844 case TREE_LIST:
13845 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13846 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13847 expr = TREE_CHAIN (expr);
13848 goto recursive_label;
13849 break;
13850 case TREE_VEC:
13851 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13852 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13853 break;
13854 default:
13855 break;
13856 }
13857 break;
13858 case tcc_expression:
13859 case tcc_reference:
13860 case tcc_comparison:
13861 case tcc_unary:
13862 case tcc_binary:
13863 case tcc_statement:
13864 case tcc_vl_exp:
13865 len = TREE_OPERAND_LENGTH (expr);
13866 for (i = 0; i < len; ++i)
13867 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13868 break;
13869 case tcc_declaration:
13870 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13871 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13872 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13873 {
13874 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13875 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13876 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13877 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13878 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13879 }
13880 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13881 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13882
13883 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13884 {
13885 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13886 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13887 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13888 }
13889 break;
13890 case tcc_type:
13891 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13892 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13893 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13894 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13895 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13896 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13897 if (INTEGRAL_TYPE_P (expr)
13898 || SCALAR_FLOAT_TYPE_P (expr))
13899 {
13900 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13901 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13902 }
13903 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13904 if (TREE_CODE (expr) == RECORD_TYPE
13905 || TREE_CODE (expr) == UNION_TYPE
13906 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13907 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13908 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13909 break;
13910 default:
13911 break;
13912 }
13913 }
13914
13915 /* Helper function for outputting the checksum of a tree T. When
13916 debugging with gdb, you can "define mynext" to be "next" followed
13917 by "call debug_fold_checksum (op0)", then simply step until the
13918 outputs differ. */
13919
13920 void
13921 debug_fold_checksum (const_tree t)
13922 {
13923 int i;
13924 unsigned char checksum[16];
13925 struct md5_ctx ctx;
13926 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13927
13928 md5_init_ctx (&ctx);
13929 fold_checksum_tree (t, &ctx, ht);
13930 md5_finish_ctx (&ctx, checksum);
13931 htab_empty (ht);
13932
13933 for (i = 0; i < 16; i++)
13934 fprintf (stderr, "%d ", checksum[i]);
13935
13936 fprintf (stderr, "\n");
13937 }
13938
13939 #endif
13940
13941 /* Fold a unary tree expression with code CODE of type TYPE with an
13942 operand OP0. Return a folded expression if successful. Otherwise,
13943 return a tree expression with code CODE of type TYPE with an
13944 operand OP0. */
13945
13946 tree
13947 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13948 {
13949 tree tem;
13950 #ifdef ENABLE_FOLD_CHECKING
13951 unsigned char checksum_before[16], checksum_after[16];
13952 struct md5_ctx ctx;
13953 htab_t ht;
13954
13955 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13956 md5_init_ctx (&ctx);
13957 fold_checksum_tree (op0, &ctx, ht);
13958 md5_finish_ctx (&ctx, checksum_before);
13959 htab_empty (ht);
13960 #endif
13961
13962 tem = fold_unary (code, type, op0);
13963 if (!tem)
13964 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13965
13966 #ifdef ENABLE_FOLD_CHECKING
13967 md5_init_ctx (&ctx);
13968 fold_checksum_tree (op0, &ctx, ht);
13969 md5_finish_ctx (&ctx, checksum_after);
13970 htab_delete (ht);
13971
13972 if (memcmp (checksum_before, checksum_after, 16))
13973 fold_check_failed (op0, tem);
13974 #endif
13975 return tem;
13976 }
13977
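/* Illustrative sketch (editorial addition, not part of GCC): the
   "fold first, build a node only if folding fails" pattern shared by
   the fold_buildN routines, restated over a toy integer-expression
   type.  All names here are hypothetical.  Guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdlib.h>

enum toy_code { TOY_CONST, TOY_NEG };
struct toy_expr { enum toy_code code; long val; struct toy_expr *op0; };

/* Try to simplify (CODE OP0) to a constant; return NULL on failure.  */
static struct toy_expr *
toy_fold_unary (enum toy_code code, struct toy_expr *op0)
{
  if (code == TOY_NEG && op0->code == TOY_CONST)
    {
      struct toy_expr *t = calloc (1, sizeof *t);
      t->code = TOY_CONST;
      t->val = -op0->val;
      return t;
    }
  return NULL;
}

static struct toy_expr *
toy_fold_build1 (enum toy_code code, struct toy_expr *op0)
{
  struct toy_expr *tem = toy_fold_unary (code, op0);
  if (!tem)
    {
      /* Folding failed: build the expression node as written.  */
      tem = calloc (1, sizeof *tem);
      tem->code = code;
      tem->op0 = op0;
    }
  return tem;
}

int
main (void)
{
  struct toy_expr five = { TOY_CONST, 5, 0 };
  struct toy_expr *e = toy_fold_build1 (TOY_NEG, &five);
  assert (e->code == TOY_CONST && e->val == -5);
  return 0;
}
#endif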
13978 /* Fold a binary tree expression with code CODE of type TYPE with
13979 operands OP0 and OP1. Return a folded expression if successful.
13980 Otherwise, return a tree expression with code CODE of type TYPE
13981 with operands OP0 and OP1. */
13982
13983 tree
13984 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13985 MEM_STAT_DECL)
13986 {
13987 tree tem;
13988 #ifdef ENABLE_FOLD_CHECKING
13989 unsigned char checksum_before_op0[16],
13990 checksum_before_op1[16],
13991 checksum_after_op0[16],
13992 checksum_after_op1[16];
13993 struct md5_ctx ctx;
13994 htab_t ht;
13995
13996 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13997 md5_init_ctx (&ctx);
13998 fold_checksum_tree (op0, &ctx, ht);
13999 md5_finish_ctx (&ctx, checksum_before_op0);
14000 htab_empty (ht);
14001
14002 md5_init_ctx (&ctx);
14003 fold_checksum_tree (op1, &ctx, ht);
14004 md5_finish_ctx (&ctx, checksum_before_op1);
14005 htab_empty (ht);
14006 #endif
14007
14008 tem = fold_binary (code, type, op0, op1);
14009 if (!tem)
14010 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14011
14012 #ifdef ENABLE_FOLD_CHECKING
14013 md5_init_ctx (&ctx);
14014 fold_checksum_tree (op0, &ctx, ht);
14015 md5_finish_ctx (&ctx, checksum_after_op0);
14016 htab_empty (ht);
14017
14018 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14019 fold_check_failed (op0, tem);
14020
14021 md5_init_ctx (&ctx);
14022 fold_checksum_tree (op1, &ctx, ht);
14023 md5_finish_ctx (&ctx, checksum_after_op1);
14024 htab_delete (ht);
14025
14026 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14027 fold_check_failed (op1, tem);
14028 #endif
14029 return tem;
14030 }
14031
14032 /* Fold a ternary tree expression with code CODE of type TYPE with
14033 operands OP0, OP1, and OP2. Return a folded expression if
14034 successful. Otherwise, return a tree expression with code CODE of
14035 type TYPE with operands OP0, OP1, and OP2. */
14036
14037 tree
14038 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
14039 MEM_STAT_DECL)
14040 {
14041 tree tem;
14042 #ifdef ENABLE_FOLD_CHECKING
14043 unsigned char checksum_before_op0[16],
14044 checksum_before_op1[16],
14045 checksum_before_op2[16],
14046 checksum_after_op0[16],
14047 checksum_after_op1[16],
14048 checksum_after_op2[16];
14049 struct md5_ctx ctx;
14050 htab_t ht;
14051
14052 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14053 md5_init_ctx (&ctx);
14054 fold_checksum_tree (op0, &ctx, ht);
14055 md5_finish_ctx (&ctx, checksum_before_op0);
14056 htab_empty (ht);
14057
14058 md5_init_ctx (&ctx);
14059 fold_checksum_tree (op1, &ctx, ht);
14060 md5_finish_ctx (&ctx, checksum_before_op1);
14061 htab_empty (ht);
14062
14063 md5_init_ctx (&ctx);
14064 fold_checksum_tree (op2, &ctx, ht);
14065 md5_finish_ctx (&ctx, checksum_before_op2);
14066 htab_empty (ht);
14067 #endif
14068
14069 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14070 tem = fold_ternary (code, type, op0, op1, op2);
14071 if (!tem)
14072 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14073
14074 #ifdef ENABLE_FOLD_CHECKING
14075 md5_init_ctx (&ctx);
14076 fold_checksum_tree (op0, &ctx, ht);
14077 md5_finish_ctx (&ctx, checksum_after_op0);
14078 htab_empty (ht);
14079
14080 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14081 fold_check_failed (op0, tem);
14082
14083 md5_init_ctx (&ctx);
14084 fold_checksum_tree (op1, &ctx, ht);
14085 md5_finish_ctx (&ctx, checksum_after_op1);
14086 htab_empty (ht);
14087
14088 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14089 fold_check_failed (op1, tem);
14090
14091 md5_init_ctx (&ctx);
14092 fold_checksum_tree (op2, &ctx, ht);
14093 md5_finish_ctx (&ctx, checksum_after_op2);
14094 htab_delete (ht);
14095
14096 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14097 fold_check_failed (op2, tem);
14098 #endif
14099 return tem;
14100 }
14101
14102 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14103 arguments in ARGARRAY, and a null static chain.
14104 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14105 of type TYPE from the given operands as constructed by build_call_array. */
14106
14107 tree
14108 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14109 {
14110 tree tem;
14111 #ifdef ENABLE_FOLD_CHECKING
14112 unsigned char checksum_before_fn[16],
14113 checksum_before_arglist[16],
14114 checksum_after_fn[16],
14115 checksum_after_arglist[16];
14116 struct md5_ctx ctx;
14117 htab_t ht;
14118 int i;
14119
14120 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14121 md5_init_ctx (&ctx);
14122 fold_checksum_tree (fn, &ctx, ht);
14123 md5_finish_ctx (&ctx, checksum_before_fn);
14124 htab_empty (ht);
14125
14126 md5_init_ctx (&ctx);
14127 for (i = 0; i < nargs; i++)
14128 fold_checksum_tree (argarray[i], &ctx, ht);
14129 md5_finish_ctx (&ctx, checksum_before_arglist);
14130 htab_empty (ht);
14131 #endif
14132
14133 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14134
14135 #ifdef ENABLE_FOLD_CHECKING
14136 md5_init_ctx (&ctx);
14137 fold_checksum_tree (fn, &ctx, ht);
14138 md5_finish_ctx (&ctx, checksum_after_fn);
14139 htab_empty (ht);
14140
14141 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14142 fold_check_failed (fn, tem);
14143
14144 md5_init_ctx (&ctx);
14145 for (i = 0; i < nargs; i++)
14146 fold_checksum_tree (argarray[i], &ctx, ht);
14147 md5_finish_ctx (&ctx, checksum_after_arglist);
14148 htab_delete (ht);
14149
14150 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14151 fold_check_failed (NULL_TREE, tem);
14152 #endif
14153 return tem;
14154 }
14155
14156 /* Perform constant folding and related simplification of initializer
14157 expressions. The functions below behave identically to "fold_buildN"
14158 but ignore potential run-time traps and exceptions that fold must preserve. */
14159
14160 #define START_FOLD_INIT \
14161 int saved_signaling_nans = flag_signaling_nans;\
14162 int saved_trapping_math = flag_trapping_math;\
14163 int saved_rounding_math = flag_rounding_math;\
14164 int saved_trapv = flag_trapv;\
14165 int saved_folding_initializer = folding_initializer;\
14166 flag_signaling_nans = 0;\
14167 flag_trapping_math = 0;\
14168 flag_rounding_math = 0;\
14169 flag_trapv = 0;\
14170 folding_initializer = 1;
14171
14172 #define END_FOLD_INIT \
14173 flag_signaling_nans = saved_signaling_nans;\
14174 flag_trapping_math = saved_trapping_math;\
14175 flag_rounding_math = saved_rounding_math;\
14176 flag_trapv = saved_trapv;\
14177 folding_initializer = saved_folding_initializer;
14178
14179 tree
14180 fold_build1_initializer (enum tree_code code, tree type, tree op)
14181 {
14182 tree result;
14183 START_FOLD_INIT;
14184
14185 result = fold_build1 (code, type, op);
14186
14187 END_FOLD_INIT;
14188 return result;
14189 }
14190
14191 tree
14192 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14193 {
14194 tree result;
14195 START_FOLD_INIT;
14196
14197 result = fold_build2 (code, type, op0, op1);
14198
14199 END_FOLD_INIT;
14200 return result;
14201 }
14202
14203 tree
14204 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14205 tree op2)
14206 {
14207 tree result;
14208 START_FOLD_INIT;
14209
14210 result = fold_build3 (code, type, op0, op1, op2);
14211
14212 END_FOLD_INIT;
14213 return result;
14214 }
14215
14216 tree
14217 fold_build_call_array_initializer (tree type, tree fn,
14218 int nargs, tree *argarray)
14219 {
14220 tree result;
14221 START_FOLD_INIT;
14222
14223 result = fold_build_call_array (type, fn, nargs, argarray);
14224
14225 END_FOLD_INIT;
14226 return result;
14227 }
14228
14229 #undef START_FOLD_INIT
14230 #undef END_FOLD_INIT
14231
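/* Illustrative sketch (editorial addition, not part of GCC): the
   save/clear/restore pattern that START_FOLD_INIT and END_FOLD_INIT
   implement, restated with a single stand-in flag.  All names here are
   hypothetical.  Guarded by #if 0.  */
#if 0
#include <assert.h>

static int flag_trapping = 1;   /* Stands in for flag_trapping_math.  */

static int
fold_something (void)
{
  /* Inside an initializer fold, traps are ignored.  */
  return flag_trapping ? -1 : 42;
}

static int
fold_something_initializer (void)
{
  int saved_trapping = flag_trapping;
  int result;

  flag_trapping = 0;
  result = fold_something ();
  flag_trapping = saved_trapping;
  return result;
}

int
main (void)
{
  assert (fold_something_initializer () == 42);
  assert (flag_trapping == 1);  /* The global is restored.  */
  return 0;
}
#endif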
14232 /* Determine whether the first argument is a multiple of the second.
14233 Return 0 if it is not, or if we cannot easily determine that it is.
14234
14235 An example of the sort of thing we care about (at this point; this routine
14236 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14237 fold cases do now) is discovering that
14238
14239 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14240
14241 is a multiple of
14242
14243 SAVE_EXPR (J * 8)
14244
14245 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14246
14247 This code also handles discovering that
14248
14249 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14250
14251 is a multiple of 8 so we don't have to worry about dealing with a
14252 possible remainder.
14253
14254 Note that we *look* inside a SAVE_EXPR only to determine how it was
14255 calculated; it is not safe for fold to do much of anything else with the
14256 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14257 at run time. For example, the latter example above *cannot* be implemented
14258 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14259 evaluation time of the original SAVE_EXPR is not necessarily the same at
14260 the time the new expression is evaluated. The only optimization of this
14261 sort that would be valid is changing
14262
14263 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14264
14265 divided by 8 to
14266
14267 SAVE_EXPR (I) * SAVE_EXPR (J)
14268
14269 (where the same SAVE_EXPR (J) is used in the original and the
14270 transformed version). */
14271
14272 int
14273 multiple_of_p (tree type, const_tree top, const_tree bottom)
14274 {
14275 if (operand_equal_p (top, bottom, 0))
14276 return 1;
14277
14278 if (TREE_CODE (type) != INTEGER_TYPE)
14279 return 0;
14280
14281 switch (TREE_CODE (top))
14282 {
14283 case BIT_AND_EXPR:
14284 /* A bitwise AND can only clear bits, so if either operand is a
14285 multiple of a power-of-two BOTTOM, then TOP is as well. */
14286 if (!integer_pow2p (bottom))
14287 return 0;
14288 /* FALLTHRU */
14289
14290 case MULT_EXPR:
14291 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14292 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14293
14294 case PLUS_EXPR:
14295 case MINUS_EXPR:
14296 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14297 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14298
14299 case LSHIFT_EXPR:
14300 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14301 {
14302 tree op1, t1;
14303
14304 op1 = TREE_OPERAND (top, 1);
14305 /* const_binop may not detect overflow correctly,
14306 so check for it explicitly here. */
14307 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14308 > TREE_INT_CST_LOW (op1)
14309 && TREE_INT_CST_HIGH (op1) == 0
14310 && 0 != (t1 = fold_convert (type,
14311 const_binop (LSHIFT_EXPR,
14312 size_one_node,
14313 op1, 0)))
14314 && !TREE_OVERFLOW (t1))
14315 return multiple_of_p (type, t1, bottom);
14316 }
14317 return 0;
14318
14319 case NOP_EXPR:
14320 /* Can't handle conversions from non-integral or wider integral types. */
14321 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14322 || (TYPE_PRECISION (type)
14323 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14324 return 0;
14325
14326 /* ... fall through ... */
14327
14328 case SAVE_EXPR:
14329 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14330
14331 case INTEGER_CST:
14332 if (TREE_CODE (bottom) != INTEGER_CST
14333 || integer_zerop (bottom)
14334 || (TYPE_UNSIGNED (type)
14335 && (tree_int_cst_sgn (top) < 0
14336 || tree_int_cst_sgn (bottom) < 0)))
14337 return 0;
14338 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14339 top, bottom, 0));
14340
14341 default:
14342 return 0;
14343 }
14344 }
14345
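/* Illustrative sketch (editorial addition, not part of GCC): the
   structural rules multiple_of_p relies on, checked numerically for a
   few small cases.  Guarded by #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  long i, j, b = 8;

  for (i = -3; i <= 3; i++)
    for (j = -3; j <= 3; j++)
      {
        /* MULT: if either factor is a multiple of B, the product is.  */
        assert ((i * b) * j % b == 0);

        /* PLUS/MINUS: both operands must be multiples of B.  */
        assert ((i * b + j * b) % b == 0);
        assert ((i * b - j * b) % b == 0);

        /* BIT_AND with a power-of-two B: a multiple of B has its low
           bits clear, and AND can only clear more of them.  */
        assert (((i * b) & j) % b == 0);
      }

  /* LSHIFT: 1 << n is a multiple of B when n >= log2 (B).  */
  assert ((1L << 5) % b == 0);
  return 0;
}
#endif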
14346 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
14347
14348 static bool
14349 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14350 {
14351 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14352 && truth_value_p (code))
14353 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14354 have a signed:1 type (where the values are -1 and 0). */
14355 return true;
14356 return false;
14357 }
14358
14359 /* Return true if (CODE OP0) is known to be non-negative. If the return
14360 value is based on the assumption that signed overflow is undefined,
14361 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14362 *STRICT_OVERFLOW_P. */
14363
14364 bool
14365 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14366 bool *strict_overflow_p)
14367 {
14368 if (TYPE_UNSIGNED (type))
14369 return true;
14370
14371 switch (code)
14372 {
14373 case ABS_EXPR:
14374 /* We can't return true if flag_wrapv is set, because
14375 ABS_EXPR<INT_MIN> == INT_MIN. */
14376 if (!INTEGRAL_TYPE_P (type))
14377 return true;
14378 if (TYPE_OVERFLOW_UNDEFINED (type))
14379 {
14380 *strict_overflow_p = true;
14381 return true;
14382 }
14383 break;
14384
14385 case NON_LVALUE_EXPR:
14386 case FLOAT_EXPR:
14387 case FIX_TRUNC_EXPR:
14388 return tree_expr_nonnegative_warnv_p (op0,
14389 strict_overflow_p);
14390
14391 case NOP_EXPR:
14392 {
14393 tree inner_type = TREE_TYPE (op0);
14394 tree outer_type = type;
14395
14396 if (TREE_CODE (outer_type) == REAL_TYPE)
14397 {
14398 if (TREE_CODE (inner_type) == REAL_TYPE)
14399 return tree_expr_nonnegative_warnv_p (op0,
14400 strict_overflow_p);
14401 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14402 {
14403 if (TYPE_UNSIGNED (inner_type))
14404 return true;
14405 return tree_expr_nonnegative_warnv_p (op0,
14406 strict_overflow_p);
14407 }
14408 }
14409 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14410 {
14411 if (TREE_CODE (inner_type) == REAL_TYPE)
14412 return tree_expr_nonnegative_warnv_p (op0,
14413 strict_overflow_p);
14414 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14415 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14416 && TYPE_UNSIGNED (inner_type);
14417 }
14418 }
14419 break;
14420
14421 default:
14422 return tree_simple_nonnegative_warnv_p (code, type);
14423 }
14424
14425 /* We don't know sign of `t', so be conservative and return false. */
14426 return false;
14427 }
14428
14429 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14430 value is based on the assumption that signed overflow is undefined,
14431 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14432 *STRICT_OVERFLOW_P. */
14433
14434 bool
14435 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14436 tree op1, bool *strict_overflow_p)
14437 {
14438 if (TYPE_UNSIGNED (type))
14439 return true;
14440
14441 switch (code)
14442 {
14443 case POINTER_PLUS_EXPR:
14444 case PLUS_EXPR:
14445 if (FLOAT_TYPE_P (type))
14446 return (tree_expr_nonnegative_warnv_p (op0,
14447 strict_overflow_p)
14448 && tree_expr_nonnegative_warnv_p (op1,
14449 strict_overflow_p));
14450
14451 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14452 both unsigned and at least 2 bits shorter than the result. */
14453 if (TREE_CODE (type) == INTEGER_TYPE
14454 && TREE_CODE (op0) == NOP_EXPR
14455 && TREE_CODE (op1) == NOP_EXPR)
14456 {
14457 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14458 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14459 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14460 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14461 {
14462 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14463 TYPE_PRECISION (inner2)) + 1;
14464 return prec < TYPE_PRECISION (type);
14465 }
14466 }
14467 break;
14468
14469 case MULT_EXPR:
14470 if (FLOAT_TYPE_P (type))
14471 {
14472 /* x * x for floating point x is always non-negative. */
14473 if (operand_equal_p (op0, op1, 0))
14474 return true;
14475 return (tree_expr_nonnegative_warnv_p (op0,
14476 strict_overflow_p)
14477 && tree_expr_nonnegative_warnv_p (op1,
14478 strict_overflow_p));
14479 }
14480
14481 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
14482 unsigned and their combined precision is less than the result's. */
14483 if (TREE_CODE (type) == INTEGER_TYPE
14484 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14485 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14486 {
14487 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14488 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14489 : TREE_TYPE (op0);
14490 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14491 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14492 : TREE_TYPE (op1);
14493
14494 bool unsigned0 = TYPE_UNSIGNED (inner0);
14495 bool unsigned1 = TYPE_UNSIGNED (inner1);
14496
14497 if (TREE_CODE (op0) == INTEGER_CST)
14498 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14499
14500 if (TREE_CODE (op1) == INTEGER_CST)
14501 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14502
14503 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14504 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14505 {
14506 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14507 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14508 : TYPE_PRECISION (inner0);
14509
14510 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14511 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14512 : TYPE_PRECISION (inner1);
14513
14514 return precision0 + precision1 < TYPE_PRECISION (type);
14515 }
14516 }
14517 return false;
14518
14519 case BIT_AND_EXPR:
14520 case MAX_EXPR:
14521 return (tree_expr_nonnegative_warnv_p (op0,
14522 strict_overflow_p)
14523 || tree_expr_nonnegative_warnv_p (op1,
14524 strict_overflow_p));
14525
14526 case BIT_IOR_EXPR:
14527 case BIT_XOR_EXPR:
14528 case MIN_EXPR:
14529 case RDIV_EXPR:
14530 case TRUNC_DIV_EXPR:
14531 case CEIL_DIV_EXPR:
14532 case FLOOR_DIV_EXPR:
14533 case ROUND_DIV_EXPR:
14534 return (tree_expr_nonnegative_warnv_p (op0,
14535 strict_overflow_p)
14536 && tree_expr_nonnegative_warnv_p (op1,
14537 strict_overflow_p));
14538
14539 case TRUNC_MOD_EXPR:
14540 case CEIL_MOD_EXPR:
14541 case FLOOR_MOD_EXPR:
14542 case ROUND_MOD_EXPR:
14543 return tree_expr_nonnegative_warnv_p (op0,
14544 strict_overflow_p);
14545 default:
14546 return tree_simple_nonnegative_warnv_p (code, type);
14547 }
14548
14549 /* We don't know sign of `t', so be conservative and return false. */
14550 return false;
14551 }
14552
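/* Illustrative sketch (editorial addition, not part of GCC): the
   precision arguments used above for PLUS_EXPR and MULT_EXPR, checked
   exhaustively for 8-bit operands widened to a 32-bit int.  Guarded by
   #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned x, y;

  for (x = 0; x < 256; x++)
    for (y = 0; y < 256; y++)
      {
        unsigned char a = (unsigned char) x, b = (unsigned char) y;

        /* PLUS: max (8, 8) + 1 = 9 bits < 32, so the sum cannot
           reach the sign bit of int.  */
        assert ((int) a + (int) b >= 0);

        /* MULT: 8 + 8 = 16 bits < 32, so the product cannot either.  */
        assert ((int) a * (int) b >= 0);
      }
  return 0;
}
#endif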
14553 /* Return true if T is known to be non-negative. If the return
14554 value is based on the assumption that signed overflow is undefined,
14555 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14556 *STRICT_OVERFLOW_P. */
14557
14558 bool
14559 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14560 {
14561 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14562 return true;
14563
14564 switch (TREE_CODE (t))
14565 {
14566 case INTEGER_CST:
14567 return tree_int_cst_sgn (t) >= 0;
14568
14569 case REAL_CST:
14570 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14571
14572 case FIXED_CST:
14573 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14574
14575 case COND_EXPR:
14576 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14577 strict_overflow_p)
14578 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14579 strict_overflow_p));
14580 default:
14581 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14582 TREE_TYPE (t));
14583 }
14584 /* We don't know sign of `t', so be conservative and return false. */
14585 return false;
14586 }
14587
14588 /* Return true if T is known to be non-negative. If the return
14589 value is based on the assumption that signed overflow is undefined,
14590 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14591 *STRICT_OVERFLOW_P. */
14592
14593 bool
14594 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14595 tree arg0, tree arg1, bool *strict_overflow_p)
14596 {
14597 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14598 switch (DECL_FUNCTION_CODE (fndecl))
14599 {
14600 CASE_FLT_FN (BUILT_IN_ACOS):
14601 CASE_FLT_FN (BUILT_IN_ACOSH):
14602 CASE_FLT_FN (BUILT_IN_CABS):
14603 CASE_FLT_FN (BUILT_IN_COSH):
14604 CASE_FLT_FN (BUILT_IN_ERFC):
14605 CASE_FLT_FN (BUILT_IN_EXP):
14606 CASE_FLT_FN (BUILT_IN_EXP10):
14607 CASE_FLT_FN (BUILT_IN_EXP2):
14608 CASE_FLT_FN (BUILT_IN_FABS):
14609 CASE_FLT_FN (BUILT_IN_FDIM):
14610 CASE_FLT_FN (BUILT_IN_HYPOT):
14611 CASE_FLT_FN (BUILT_IN_POW10):
14612 CASE_INT_FN (BUILT_IN_FFS):
14613 CASE_INT_FN (BUILT_IN_PARITY):
14614 CASE_INT_FN (BUILT_IN_POPCOUNT):
14615 case BUILT_IN_BSWAP32:
14616 case BUILT_IN_BSWAP64:
14617 /* Always true. */
14618 return true;
14619
14620 CASE_FLT_FN (BUILT_IN_SQRT):
14621 /* sqrt(-0.0) is -0.0. */
14622 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14623 return true;
14624 return tree_expr_nonnegative_warnv_p (arg0,
14625 strict_overflow_p);
14626
14627 CASE_FLT_FN (BUILT_IN_ASINH):
14628 CASE_FLT_FN (BUILT_IN_ATAN):
14629 CASE_FLT_FN (BUILT_IN_ATANH):
14630 CASE_FLT_FN (BUILT_IN_CBRT):
14631 CASE_FLT_FN (BUILT_IN_CEIL):
14632 CASE_FLT_FN (BUILT_IN_ERF):
14633 CASE_FLT_FN (BUILT_IN_EXPM1):
14634 CASE_FLT_FN (BUILT_IN_FLOOR):
14635 CASE_FLT_FN (BUILT_IN_FMOD):
14636 CASE_FLT_FN (BUILT_IN_FREXP):
14637 CASE_FLT_FN (BUILT_IN_LCEIL):
14638 CASE_FLT_FN (BUILT_IN_LDEXP):
14639 CASE_FLT_FN (BUILT_IN_LFLOOR):
14640 CASE_FLT_FN (BUILT_IN_LLCEIL):
14641 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14642 CASE_FLT_FN (BUILT_IN_LLRINT):
14643 CASE_FLT_FN (BUILT_IN_LLROUND):
14644 CASE_FLT_FN (BUILT_IN_LRINT):
14645 CASE_FLT_FN (BUILT_IN_LROUND):
14646 CASE_FLT_FN (BUILT_IN_MODF):
14647 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14648 CASE_FLT_FN (BUILT_IN_RINT):
14649 CASE_FLT_FN (BUILT_IN_ROUND):
14650 CASE_FLT_FN (BUILT_IN_SCALB):
14651 CASE_FLT_FN (BUILT_IN_SCALBLN):
14652 CASE_FLT_FN (BUILT_IN_SCALBN):
14653 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14654 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14655 CASE_FLT_FN (BUILT_IN_SINH):
14656 CASE_FLT_FN (BUILT_IN_TANH):
14657 CASE_FLT_FN (BUILT_IN_TRUNC):
14658 /* True if the 1st argument is nonnegative. */
14659 return tree_expr_nonnegative_warnv_p (arg0,
14660 strict_overflow_p);
14661
14662 CASE_FLT_FN (BUILT_IN_FMAX):
14663 /* True if the 1st OR 2nd arguments are nonnegative. */
14664 return (tree_expr_nonnegative_warnv_p (arg0,
14665 strict_overflow_p)
14666 || (tree_expr_nonnegative_warnv_p (arg1,
14667 strict_overflow_p)));
14668
14669 CASE_FLT_FN (BUILT_IN_FMIN):
14670 /* True if the 1st AND 2nd arguments are nonnegative. */
14671 return (tree_expr_nonnegative_warnv_p (arg0,
14672 strict_overflow_p)
14673 && (tree_expr_nonnegative_warnv_p (arg1,
14674 strict_overflow_p)));
14675
14676 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14677 /* True if the 2nd argument is nonnegative. */
14678 return tree_expr_nonnegative_warnv_p (arg1,
14679 strict_overflow_p);
14680
14681 CASE_FLT_FN (BUILT_IN_POWI):
14682 /* True if the 1st argument is nonnegative or the second
14683 argument is an even integer. */
14684 if (TREE_CODE (arg1) == INTEGER_CST
14685 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14686 return true;
14687 return tree_expr_nonnegative_warnv_p (arg0,
14688 strict_overflow_p);
14689
14690 CASE_FLT_FN (BUILT_IN_POW):
14691 /* True if the 1st argument is nonnegative or the second
14692 argument is an even integer valued real. */
14693 if (TREE_CODE (arg1) == REAL_CST)
14694 {
14695 REAL_VALUE_TYPE c;
14696 HOST_WIDE_INT n;
14697
14698 c = TREE_REAL_CST (arg1);
14699 n = real_to_integer (&c);
14700 if ((n & 1) == 0)
14701 {
14702 REAL_VALUE_TYPE cint;
14703 real_from_integer (&cint, VOIDmode, n,
14704 n < 0 ? -1 : 0, 0);
14705 if (real_identical (&c, &cint))
14706 return true;
14707 }
14708 }
14709 return tree_expr_nonnegative_warnv_p (arg0,
14710 strict_overflow_p);
14711
14712 default:
14713 break;
14714 }
14715 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14716 type);
14717 }
14718
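/* Illustrative sketch (editorial addition, not part of GCC): the
   "even integer valued real" test used for pow above, restated for a
   plain double.  It assumes C is within the range of long long, as the
   real_to_integer / real_identical round trip assumes a representable
   integer.  Guarded by #if 0; link with -lm.  */
#if 0
#include <assert.h>
#include <math.h>

/* Return 1 if C is an even integer.  */
static int
even_integer_valued_p (double c)
{
  long long n = (long long) c;
  return (n & 1) == 0 && (double) n == c;
}

int
main (void)
{
  assert (even_integer_valued_p (2.0));
  assert (even_integer_valued_p (-4.0));
  assert (!even_integer_valued_p (3.0));
  assert (!even_integer_valued_p (2.5));

  /* pow (x, even integer) is non-negative even for negative x.  */
  assert (pow (-3.0, 2.0) >= 0.0);
  return 0;
}
#endif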
14719 /* Return true if T is known to be non-negative. If the return
14720 value is based on the assumption that signed overflow is undefined,
14721 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14722 *STRICT_OVERFLOW_P. */
14723
14724 bool
14725 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14726 {
14727 enum tree_code code = TREE_CODE (t);
14728 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14729 return true;
14730
14731 switch (code)
14732 {
14733 case TARGET_EXPR:
14734 {
14735 tree temp = TARGET_EXPR_SLOT (t);
14736 t = TARGET_EXPR_INITIAL (t);
14737
14738 /* If the initializer is non-void, then it's a normal expression
14739 that will be assigned to the slot. */
14740 if (!VOID_TYPE_P (t))
14741 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14742
14743 /* Otherwise, the initializer sets the slot in some way. One common
14744 way is an assignment statement at the end of the initializer. */
14745 while (1)
14746 {
14747 if (TREE_CODE (t) == BIND_EXPR)
14748 t = expr_last (BIND_EXPR_BODY (t));
14749 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14750 || TREE_CODE (t) == TRY_CATCH_EXPR)
14751 t = expr_last (TREE_OPERAND (t, 0));
14752 else if (TREE_CODE (t) == STATEMENT_LIST)
14753 t = expr_last (t);
14754 else
14755 break;
14756 }
14757 if (TREE_CODE (t) == MODIFY_EXPR
14758 && TREE_OPERAND (t, 0) == temp)
14759 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14760 strict_overflow_p);
14761
14762 return false;
14763 }
14764
14765 case CALL_EXPR:
14766 {
14767 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14768 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14769
14770 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14771 get_callee_fndecl (t),
14772 arg0,
14773 arg1,
14774 strict_overflow_p);
14775 }
14776 case COMPOUND_EXPR:
14777 case MODIFY_EXPR:
14778 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14779 strict_overflow_p);
14780 case BIND_EXPR:
14781 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14782 strict_overflow_p);
14783 case SAVE_EXPR:
14784 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14785 strict_overflow_p);
14786
14787 default:
14788 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14789 TREE_TYPE (t));
14790 }
14791
14792 /* We don't know sign of `t', so be conservative and return false. */
14793 return false;
14794 }
14795
14796 /* Return true if T is known to be non-negative. If the return
14797 value is based on the assumption that signed overflow is undefined,
14798 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14799 *STRICT_OVERFLOW_P. */
14800
14801 bool
14802 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14803 {
14804 enum tree_code code;
14805 if (t == error_mark_node)
14806 return false;
14807
14808 code = TREE_CODE (t);
14809 switch (TREE_CODE_CLASS (code))
14810 {
14811 case tcc_binary:
14812 case tcc_comparison:
14813 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14814 TREE_TYPE (t),
14815 TREE_OPERAND (t, 0),
14816 TREE_OPERAND (t, 1),
14817 strict_overflow_p);
14818
14819 case tcc_unary:
14820 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14821 TREE_TYPE (t),
14822 TREE_OPERAND (t, 0),
14823 strict_overflow_p);
14824
14825 case tcc_constant:
14826 case tcc_declaration:
14827 case tcc_reference:
14828 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14829
14830 default:
14831 break;
14832 }
14833
14834 switch (code)
14835 {
14836 case TRUTH_AND_EXPR:
14837 case TRUTH_OR_EXPR:
14838 case TRUTH_XOR_EXPR:
14839 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14840 TREE_TYPE (t),
14841 TREE_OPERAND (t, 0),
14842 TREE_OPERAND (t, 1),
14843 strict_overflow_p);
14844 case TRUTH_NOT_EXPR:
14845 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14846 TREE_TYPE (t),
14847 TREE_OPERAND (t, 0),
14848 strict_overflow_p);
14849
14850 case COND_EXPR:
14851 case CONSTRUCTOR:
14852 case OBJ_TYPE_REF:
14853 case ASSERT_EXPR:
14854 case ADDR_EXPR:
14855 case WITH_SIZE_EXPR:
14856 case EXC_PTR_EXPR:
14857 case SSA_NAME:
14858 case FILTER_EXPR:
14859 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14860
14861 default:
14862 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14863 }
14864 }
14865
14866 /* Return true if `t' is known to be non-negative. Handle warnings
14867 about undefined signed overflow. */
14868
14869 bool
14870 tree_expr_nonnegative_p (tree t)
14871 {
14872 bool ret, strict_overflow_p;
14873
14874 strict_overflow_p = false;
14875 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14876 if (strict_overflow_p)
14877 fold_overflow_warning (("assuming signed overflow does not occur when "
14878 "determining that expression is always "
14879 "non-negative"),
14880 WARN_STRICT_OVERFLOW_MISC);
14881 return ret;
14882 }
14883
14884
14885 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14886 For floating point we further ensure that the value is not denormal.
14887 Similar logic is present in nonzero_address in rtlanal.c.
14888
14889 If the return value is based on the assumption that signed overflow
14890 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14891 change *STRICT_OVERFLOW_P. */
14892
14893 bool
14894 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14895 bool *strict_overflow_p)
14896 {
14897 switch (code)
14898 {
14899 case ABS_EXPR:
14900 return tree_expr_nonzero_warnv_p (op0,
14901 strict_overflow_p);
14902
14903 case NOP_EXPR:
14904 {
14905 tree inner_type = TREE_TYPE (op0);
14906 tree outer_type = type;
14907
14908 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14909 && tree_expr_nonzero_warnv_p (op0,
14910 strict_overflow_p));
14911 }
14912 break;
14913
14914 case NON_LVALUE_EXPR:
14915 return tree_expr_nonzero_warnv_p (op0,
14916 strict_overflow_p);
14917
14918 default:
14919 break;
14920 }
14921
14922 return false;
14923 }
14924
14925 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14926 For floating point we further ensure that the value is not denormal.
14927 Similar logic is present in nonzero_address in rtlanal.c.
14928
14929 If the return value is based on the assumption that signed overflow
14930 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14931 change *STRICT_OVERFLOW_P. */
14932
14933 bool
14934 tree_binary_nonzero_warnv_p (enum tree_code code,
14935 tree type,
14936 tree op0,
14937 tree op1, bool *strict_overflow_p)
14938 {
14939 bool sub_strict_overflow_p;
14940 switch (code)
14941 {
14942 case POINTER_PLUS_EXPR:
14943 case PLUS_EXPR:
14944 if (TYPE_OVERFLOW_UNDEFINED (type))
14945 {
14946 /* In the presence of negative values it is hard
14947 to say anything. */
14948 sub_strict_overflow_p = false;
14949 if (!tree_expr_nonnegative_warnv_p (op0,
14950 &sub_strict_overflow_p)
14951 || !tree_expr_nonnegative_warnv_p (op1,
14952 &sub_strict_overflow_p))
14953 return false;
14954 /* For the sum to be nonzero, one of the operands must be positive
14955 and the other non-negative. We don't set *STRICT_OVERFLOW_P here:
14956 even if the addition overflows, on a two's-complement machine the
14957 wrapped sum of a positive and a non-negative value can never be zero. */
14958 return (tree_expr_nonzero_warnv_p (op0,
14959 strict_overflow_p)
14960 || tree_expr_nonzero_warnv_p (op1,
14961 strict_overflow_p));
14962 }
14963 break;
14964
14965 case MULT_EXPR:
14966 if (TYPE_OVERFLOW_UNDEFINED (type))
14967 {
14968 if (tree_expr_nonzero_warnv_p (op0,
14969 strict_overflow_p)
14970 && tree_expr_nonzero_warnv_p (op1,
14971 strict_overflow_p))
14972 {
14973 *strict_overflow_p = true;
14974 return true;
14975 }
14976 }
14977 break;
14978
14979 case MIN_EXPR:
14980 sub_strict_overflow_p = false;
14981 if (tree_expr_nonzero_warnv_p (op0,
14982 &sub_strict_overflow_p)
14983 && tree_expr_nonzero_warnv_p (op1,
14984 &sub_strict_overflow_p))
14985 {
14986 if (sub_strict_overflow_p)
14987 *strict_overflow_p = true;
14988 }
14989 break;
14990
14991 case MAX_EXPR:
14992 sub_strict_overflow_p = false;
14993 if (tree_expr_nonzero_warnv_p (op0,
14994 &sub_strict_overflow_p))
14995 {
14996 if (sub_strict_overflow_p)
14997 *strict_overflow_p = true;
14998
14999 /* When both operands are nonzero, then MAX must be too. */
15000 if (tree_expr_nonzero_warnv_p (op1,
15001 strict_overflow_p))
15002 return true;
15003
15004 /* MAX where operand 0 is positive is positive. */
15005 return tree_expr_nonnegative_warnv_p (op0,
15006 strict_overflow_p);
15007 }
15008 /* MAX where operand 1 is positive is positive. */
15009 else if (tree_expr_nonzero_warnv_p (op1,
15010 &sub_strict_overflow_p)
15011 && tree_expr_nonnegative_warnv_p (op1,
15012 &sub_strict_overflow_p))
15013 {
15014 if (sub_strict_overflow_p)
15015 *strict_overflow_p = true;
15016 return true;
15017 }
15018 break;
15019
15020 case BIT_IOR_EXPR:
15021 return (tree_expr_nonzero_warnv_p (op1,
15022 strict_overflow_p)
15023 || tree_expr_nonzero_warnv_p (op0,
15024 strict_overflow_p));
15025
15026 default:
15027 break;
15028 }
15029
15030 return false;
15031 }
15032
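/* Illustrative sketch (editorial addition, not part of GCC): the
   two's-complement argument above -- the wrapped sum of two
   non-negative values, at least one nonzero, can never be zero --
   checked exhaustively at 8 bits.  Guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int x, y;

  for (x = 0; x <= 127; x++)            /* Non-negative 8-bit range.  */
    for (y = 0; y <= 127; y++)
      if (x != 0 || y != 0)
        {
          /* Wrapping 8-bit addition: x + y <= 254 < 256, so the sum
             cannot wrap all the way around to zero.  */
          uint8_t sum = (uint8_t) (x + y);
          assert (sum != 0);
        }
  return 0;
}
#endif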
15033 /* Return true when T is an address and is known to be nonzero.
15034 For floating point we further ensure that T is not denormal.
15035 Similar logic is present in nonzero_address in rtlanal.c.
15036
15037 If the return value is based on the assumption that signed overflow
15038 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15039 change *STRICT_OVERFLOW_P. */
15040
15041 bool
15042 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15043 {
15044 bool sub_strict_overflow_p;
15045 switch (TREE_CODE (t))
15046 {
15047 case INTEGER_CST:
15048 return !integer_zerop (t);
15049
15050 case ADDR_EXPR:
15051 {
15052 tree base = get_base_address (TREE_OPERAND (t, 0));
15053
15054 if (!base)
15055 return false;
15056
15057 /* Weak declarations may link to NULL. Other things may also be NULL,
15058 so protect with -fdelete-null-pointer-checks; this is not needed for
15059 variables allocated on the stack, which cannot be NULL. */
15060 if (DECL_P (base)
15061 && (flag_delete_null_pointer_checks
15062 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15063 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15064
15065 /* Constants are never weak. */
15066 if (CONSTANT_CLASS_P (base))
15067 return true;
15068
15069 return false;
15070 }
15071
15072 case COND_EXPR:
15073 sub_strict_overflow_p = false;
15074 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15075 &sub_strict_overflow_p)
15076 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15077 &sub_strict_overflow_p))
15078 {
15079 if (sub_strict_overflow_p)
15080 *strict_overflow_p = true;
15081 return true;
15082 }
15083 break;
15084
15085 default:
15086 break;
15087 }
15088 return false;
15089 }
15090
15091 /* Return true when T is an address and is known to be nonzero.
15092 For floating point we further ensure that T is not denormal.
15093 Similar logic is present in nonzero_address in rtlanal.c.
15094
15095 If the return value is based on the assumption that signed overflow
15096 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15097 change *STRICT_OVERFLOW_P. */
15098
15099 bool
15100 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15101 {
15102 tree type = TREE_TYPE (t);
15103 enum tree_code code;
15104
15105 /* Doing something useful for floating point would need more work. */
15106 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15107 return false;
15108
15109 code = TREE_CODE (t);
15110 switch (TREE_CODE_CLASS (code))
15111 {
15112 case tcc_unary:
15113 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15114 strict_overflow_p);
15115 case tcc_binary:
15116 case tcc_comparison:
15117 return tree_binary_nonzero_warnv_p (code, type,
15118 TREE_OPERAND (t, 0),
15119 TREE_OPERAND (t, 1),
15120 strict_overflow_p);
15121 case tcc_constant:
15122 case tcc_declaration:
15123 case tcc_reference:
15124 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15125
15126 default:
15127 break;
15128 }
15129
15130 switch (code)
15131 {
15132 case TRUTH_NOT_EXPR:
15133 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15134 strict_overflow_p);
15135
15136 case TRUTH_AND_EXPR:
15137 case TRUTH_OR_EXPR:
15138 case TRUTH_XOR_EXPR:
15139 return tree_binary_nonzero_warnv_p (code, type,
15140 TREE_OPERAND (t, 0),
15141 TREE_OPERAND (t, 1),
15142 strict_overflow_p);
15143
15144 case COND_EXPR:
15145 case CONSTRUCTOR:
15146 case OBJ_TYPE_REF:
15147 case ASSERT_EXPR:
15148 case ADDR_EXPR:
15149 case WITH_SIZE_EXPR:
15150 case EXC_PTR_EXPR:
15151 case SSA_NAME:
15152 case FILTER_EXPR:
15153 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15154
15155 case COMPOUND_EXPR:
15156 case MODIFY_EXPR:
15157 case BIND_EXPR:
15158 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15159 strict_overflow_p);
15160
15161 case SAVE_EXPR:
15162 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15163 strict_overflow_p);
15164
15165 case CALL_EXPR:
15166 return alloca_call_p (t);
15167
15168 default:
15169 break;
15170 }
15171 return false;
15172 }
15173
15174 /* Return true when T is an address and is known to be nonzero.
15175 Handle warnings about undefined signed overflow. */
15176
15177 bool
15178 tree_expr_nonzero_p (tree t)
15179 {
15180 bool ret, strict_overflow_p;
15181
15182 strict_overflow_p = false;
15183 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15184 if (strict_overflow_p)
15185 fold_overflow_warning (("assuming signed overflow does not occur when "
15186 "determining that expression is always "
15187 "non-zero"),
15188 WARN_STRICT_OVERFLOW_MISC);
15189 return ret;
15190 }
15191
15192 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15193 attempt to fold the expression to a constant without modifying TYPE,
15194 OP0 or OP1.
15195
15196 If the expression can be simplified to a constant, then return
15197 the constant. If the expression cannot be simplified to a
15198 constant, then return NULL_TREE. */
15199
15200 tree
15201 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15202 {
15203 tree tem = fold_binary (code, type, op0, op1);
15204 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15205 }
15206
15207 /* Given the components of a unary expression CODE, TYPE and OP0,
15208 attempt to fold the expression to a constant without modifying
15209 TYPE or OP0.
15210
15211 If the expression can be simplified to a constant, then return
15212 the constant. If the expression cannot be simplified to a
15213 constant, then return NULL_TREE. */
15214
15215 tree
15216 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15217 {
15218 tree tem = fold_unary (code, type, op0);
15219 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15220 }
15221
15222 /* If EXP represents referencing an element in a constant string
15223 (either via pointer arithmetic or array indexing), return the
15224 tree representing the value accessed, otherwise return NULL. */
15225
15226 tree
15227 fold_read_from_constant_string (tree exp)
15228 {
15229 if ((TREE_CODE (exp) == INDIRECT_REF
15230 || TREE_CODE (exp) == ARRAY_REF)
15231 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15232 {
15233 tree exp1 = TREE_OPERAND (exp, 0);
15234 tree index;
15235 tree string;
15236
15237 if (TREE_CODE (exp) == INDIRECT_REF)
15238 string = string_constant (exp1, &index);
15239 else
15240 {
15241 tree low_bound = array_ref_low_bound (exp);
15242 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15243
15244 /* Optimize the special case of a zero lower bound.
15245
15246 We convert the low_bound to sizetype to avoid some problems
15247 with constant folding. (E.g. suppose the lower bound is 1,
15248 and its mode is QI. Without the conversion, (ARRAY
15249 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15250 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15251 if (! integer_zerop (low_bound))
15252 index = size_diffop (index, fold_convert (sizetype, low_bound));
15253
15254 string = exp1;
15255 }
15256
15257 if (string
15258 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15259 && TREE_CODE (string) == STRING_CST
15260 && TREE_CODE (index) == INTEGER_CST
15261 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15262 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15263 == MODE_INT)
15264 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15265 return build_int_cst_type (TREE_TYPE (exp),
15266 (TREE_STRING_POINTER (string)
15267 [TREE_INT_CST_LOW (index)]));
15268 }
15269 return NULL;
15270 }
15271
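/* Illustrative sketch (editorial addition, not part of GCC): the
   narrow-type pitfall described in the comment above.  Subtracting a
   QImode lower bound without widening it first turns -1 into +255.
   Guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stddef.h>

int
main (void)
{
  const char *array = "xhello";
  size_t index = 1;                /* First valid index; low bound 1.  */
  unsigned char low_bound = 1;

  /* Folding in the narrow type: -(unsigned char)1 wraps to 255.  */
  unsigned char wrapped = (unsigned char) -low_bound;
  assert (wrapped == 255);

  /* Widening first, as the code above does via sizetype, is safe.  */
  assert (array[index - (size_t) low_bound] == 'x');

  /* The folding itself: reading from a constant string.  */
  assert ("hello"[1] == 'e');
  return 0;
}
#endif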
15272 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15273 an integer constant, real, or fixed-point constant.
15274
15275 TYPE is the type of the result. */
15276
15277 static tree
15278 fold_negate_const (tree arg0, tree type)
15279 {
15280 tree t = NULL_TREE;
15281
15282 switch (TREE_CODE (arg0))
15283 {
15284 case INTEGER_CST:
15285 {
15286 unsigned HOST_WIDE_INT low;
15287 HOST_WIDE_INT high;
15288 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15289 TREE_INT_CST_HIGH (arg0),
15290 &low, &high);
15291 t = force_fit_type_double (type, low, high, 1,
15292 (overflow | TREE_OVERFLOW (arg0))
15293 && !TYPE_UNSIGNED (type));
15294 break;
15295 }
15296
15297 case REAL_CST:
15298 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15299 break;
15300
15301 case FIXED_CST:
15302 {
15303 FIXED_VALUE_TYPE f;
15304 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15305 &(TREE_FIXED_CST (arg0)), NULL,
15306 TYPE_SATURATING (type));
15307 t = build_fixed (type, f);
15308 /* Propagate overflow flags. */
15309 if (overflow_p | TREE_OVERFLOW (arg0))
15310 TREE_OVERFLOW (t) = 1;
15311 break;
15312 }
15313
15314 default:
15315 gcc_unreachable ();
15316 }
15317
15318 return t;
15319 }
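
/* Illustrative sketch (editorial addition): overflow propagation.  For
   a signed type, -TYPE_MIN_VALUE is not representable.

     tree m5   = fold_negate_const (build_int_cst (integer_type_node, 5),
                                    integer_type_node);
     tree wrap = fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
                                    integer_type_node);

   M5 is the INTEGER_CST -5 with no overflow; WRAP has the same bit
   pattern as the minimum value but TREE_OVERFLOW set, courtesy of
   force_fit_type_double above.  */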
15320
15321 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15322 an integer constant or real constant.
15323
15324 TYPE is the type of the result. */
15325
15326 tree
15327 fold_abs_const (tree arg0, tree type)
15328 {
15329 tree t = NULL_TREE;
15330
15331 switch (TREE_CODE (arg0))
15332 {
15333 case INTEGER_CST:
15334 /* If the type is unsigned, then the absolute value is
15335 the same as the ordinary value. */
15336 if (TYPE_UNSIGNED (type))
15337 t = arg0;
15338 /* Similarly, if the value is non-negative. */
15339 else if (INT_CST_LT (integer_minus_one_node, arg0))
15340 t = arg0;
15341 /* If the value is negative, then the absolute value is
15342 its negation. */
15343 else
15344 {
15345 unsigned HOST_WIDE_INT low;
15346 HOST_WIDE_INT high;
15347 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15348 TREE_INT_CST_HIGH (arg0),
15349 &low, &high);
15350 t = force_fit_type_double (type, low, high, -1,
15351 overflow | TREE_OVERFLOW (arg0));
15352 }
15353 break;
15354
15355 case REAL_CST:
15356 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15357 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15358 else
15359 t = arg0;
15360 break;
15361
15362 default:
15363 gcc_unreachable ();
15364 }
15365
15366 return t;
15367 }
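
/* Illustrative sketch (editorial addition):

     tree a = fold_abs_const (build_int_cst (integer_type_node, -7),
                              integer_type_node);

   A is the INTEGER_CST 7.  As with negation, the absolute value of
   TYPE_MIN_VALUE in a signed type overflows, and the result comes back
   flagged with TREE_OVERFLOW.  */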
15368
15369 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15370 constant. TYPE is the type of the result. */
15371
15372 static tree
15373 fold_not_const (tree arg0, tree type)
15374 {
15375 tree t = NULL_TREE;
15376
15377 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15378
15379 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15380 ~TREE_INT_CST_HIGH (arg0), 0,
15381 TREE_OVERFLOW (arg0));
15382
15383 return t;
15384 }
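
/* Illustrative sketch (editorial addition):

     tree t = fold_not_const (integer_zero_node, integer_type_node);

   T is the INTEGER_CST -1, i.e. all bits set.  Note the assertion
   above: unlike the NULL_TREE-returning helpers earlier in the file,
   callers must ensure the operand is an INTEGER_CST before calling.  */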
15385
15386 /* Given CODE, a relational operator, the target type, TYPE and two
15387 constant operands OP0 and OP1, return the result of the
15388 relational operation. If the result is not a compile time
15389 constant, then return NULL_TREE. */
15390
15391 static tree
15392 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15393 {
15394 int result, invert;
15395
15396 /* From here on, the only cases we handle are when the result is
15397 known to be a constant. */
15398
15399 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15400 {
15401 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15402 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15403
15404 /* Handle the cases where either operand is a NaN. */
15405 if (real_isnan (c0) || real_isnan (c1))
15406 {
15407 switch (code)
15408 {
15409 case EQ_EXPR:
15410 case ORDERED_EXPR:
15411 result = 0;
15412 break;
15413
15414 case NE_EXPR:
15415 case UNORDERED_EXPR:
15416 case UNLT_EXPR:
15417 case UNLE_EXPR:
15418 case UNGT_EXPR:
15419 case UNGE_EXPR:
15420 case UNEQ_EXPR:
15421 result = 1;
15422 break;
15423
15424 case LT_EXPR:
15425 case LE_EXPR:
15426 case GT_EXPR:
15427 case GE_EXPR:
15428 case LTGT_EXPR:
15429 if (flag_trapping_math)
15430 return NULL_TREE;
15431 result = 0;
15432 break;
15433
15434 default:
15435 gcc_unreachable ();
15436 }
15437
15438 return constant_boolean_node (result, type);
15439 }
15440
15441 return constant_boolean_node (real_compare (code, c0, c1), type);
15442 }
15443
15444 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15445 {
15446 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15447 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15448 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15449 }
15450
15451 /* Handle equality/inequality of complex constants. */
15452 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15453 {
15454 tree rcond = fold_relational_const (code, type,
15455 TREE_REALPART (op0),
15456 TREE_REALPART (op1));
15457 tree icond = fold_relational_const (code, type,
15458 TREE_IMAGPART (op0),
15459 TREE_IMAGPART (op1));
15460 if (code == EQ_EXPR)
15461 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15462 else if (code == NE_EXPR)
15463 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15464 else
15465 return NULL_TREE;
15466 }
15467
15468 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15469
15470 To compute GT, swap the arguments and do LT.
15471 To compute GE, do LT and invert the result.
15472 To compute LE, swap the arguments, do LT and invert the result.
15473 To compute NE, do EQ and invert the result.
15474
15475 Therefore, the code below must handle only EQ and LT. */
15476
15477 if (code == LE_EXPR || code == GT_EXPR)
15478 {
15479 tree tem = op0;
15480 op0 = op1;
15481 op1 = tem;
15482 code = swap_tree_comparison (code);
15483 }
15484
15485 /* Note that it is safe to invert for real values here because we
15486 have already handled the one case where it matters. */
15487
15488 invert = 0;
15489 if (code == NE_EXPR || code == GE_EXPR)
15490 {
15491 invert = 1;
15492 code = invert_tree_comparison (code, false);
15493 }
15494
15495 /* Compute a result for LT or EQ if args permit;
15496 otherwise return NULL_TREE. */
15497 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15498 {
15499 if (code == EQ_EXPR)
15500 result = tree_int_cst_equal (op0, op1);
15501 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15502 result = INT_CST_LT_UNSIGNED (op0, op1);
15503 else
15504 result = INT_CST_LT (op0, op1);
15505 }
15506 else
15507 return NULL_TREE;
15508
15509 if (invert)
15510 result ^= 1;
15511 return constant_boolean_node (result, type);
15512 }
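
/* Illustrative sketch (editorial addition): the NaN paths, using
   real_nan from real.h to manufacture a quiet NaN.

     REAL_VALUE_TYPE r;
     real_nan (&r, "", 1, TYPE_MODE (double_type_node));
     tree nan = build_real (double_type_node, r);
     tree two = build_real (double_type_node, dconst2);
     tree ne = fold_relational_const (NE_EXPR, boolean_type_node, nan, two);
     tree lt = fold_relational_const (LT_EXPR, boolean_type_node, nan, two);

   NE folds to boolean true, since NaN compares unequal to everything,
   while LT is NULL_TREE under the default -ftrapping-math: folding it
   away would lose the invalid-operand exception.  */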
15513
15514 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15515 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15516 itself. */
15517
15518 tree
15519 fold_build_cleanup_point_expr (tree type, tree expr)
15520 {
15521 /* If the expression does not have side effects then we don't have to wrap
15522 it with a cleanup point expression. */
15523 if (!TREE_SIDE_EFFECTS (expr))
15524 return expr;
15525
15526 /* If the expression is a return, check whether the operand of the
15527 return, or the right-hand side of the modify expression inside it,
15528 has no side effects. If either is free of side effects, we need not
15529 wrap the expression in a cleanup point expression. Note we don't check
15530 the left-hand side of the modify because it should always be the return decl. */
15531 if (TREE_CODE (expr) == RETURN_EXPR)
15532 {
15533 tree op = TREE_OPERAND (expr, 0);
15534 if (!op || !TREE_SIDE_EFFECTS (op))
15535 return expr;
15536 op = TREE_OPERAND (op, 1);
15537 if (!TREE_SIDE_EFFECTS (op))
15538 return expr;
15539 }
15540
15541 return build1 (CLEANUP_POINT_EXPR, type, expr);
15542 }
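
/* Illustrative sketch (editorial addition): only trees with
   TREE_SIDE_EFFECTS get wrapped.

     tree c = build_int_cst (integer_type_node, 42);
     tree w = fold_build_cleanup_point_expr (integer_type_node, c);

   W == C, because a bare constant needs no cleanup point.  A CALL_EXPR
   argument, by contrast, would come back wrapped in a
   CLEANUP_POINT_EXPR.  */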
15543
15544 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15545 of an indirection through OP0, or NULL_TREE if no simplification is
15546 possible. */
15547
15548 tree
15549 fold_indirect_ref_1 (tree type, tree op0)
15550 {
15551 tree sub = op0;
15552 tree subtype;
15553
15554 STRIP_NOPS (sub);
15555 subtype = TREE_TYPE (sub);
15556 if (!POINTER_TYPE_P (subtype))
15557 return NULL_TREE;
15558
15559 if (TREE_CODE (sub) == ADDR_EXPR)
15560 {
15561 tree op = TREE_OPERAND (sub, 0);
15562 tree optype = TREE_TYPE (op);
15563 /* *&CONST_DECL -> to the value of the const decl. */
15564 if (TREE_CODE (op) == CONST_DECL)
15565 return DECL_INITIAL (op);
15566 /* *&p => p; make sure to handle *&"str"[cst] here. */
15567 if (type == optype)
15568 {
15569 tree fop = fold_read_from_constant_string (op);
15570 if (fop)
15571 return fop;
15572 else
15573 return op;
15574 }
15575 /* *(foo *)&fooarray => fooarray[0] */
15576 else if (TREE_CODE (optype) == ARRAY_TYPE
15577 && type == TREE_TYPE (optype))
15578 {
15579 tree type_domain = TYPE_DOMAIN (optype);
15580 tree min_val = size_zero_node;
15581 if (type_domain && TYPE_MIN_VALUE (type_domain))
15582 min_val = TYPE_MIN_VALUE (type_domain);
15583 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15584 }
15585 /* *(foo *)&complexfoo => __real__ complexfoo */
15586 else if (TREE_CODE (optype) == COMPLEX_TYPE
15587 && type == TREE_TYPE (optype))
15588 return fold_build1 (REALPART_EXPR, type, op);
15589 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15590 else if (TREE_CODE (optype) == VECTOR_TYPE
15591 && type == TREE_TYPE (optype))
15592 {
15593 tree part_width = TYPE_SIZE (type);
15594 tree index = bitsize_int (0);
15595 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15596 }
15597 }
15598
15599 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15600 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15601 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15602 {
15603 tree op00 = TREE_OPERAND (sub, 0);
15604 tree op01 = TREE_OPERAND (sub, 1);
15605 tree op00type;
15606
15607 STRIP_NOPS (op00);
15608 op00type = TREE_TYPE (op00);
15609 if (TREE_CODE (op00) == ADDR_EXPR
15610 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15611 && type == TREE_TYPE (TREE_TYPE (op00type)))
15612 {
15613 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15614 tree part_width = TYPE_SIZE (type);
15615 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15616 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15617 tree index = bitsize_int (indexi);
15618
15619 if (offset/part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15620 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15621 part_width, index);
15622
15623 }
15624 }
15625
15626
15627 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15628 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15629 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15630 {
15631 tree op00 = TREE_OPERAND (sub, 0);
15632 tree op01 = TREE_OPERAND (sub, 1);
15633 tree op00type;
15634
15635 STRIP_NOPS (op00);
15636 op00type = TREE_TYPE (op00);
15637 if (TREE_CODE (op00) == ADDR_EXPR
15638 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15639 && type == TREE_TYPE (TREE_TYPE (op00type)))
15640 {
15641 tree size = TYPE_SIZE_UNIT (type);
15642 if (tree_int_cst_equal (size, op01))
15643 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15644 }
15645 }
15646
15647 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15648 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15649 && type == TREE_TYPE (TREE_TYPE (subtype)))
15650 {
15651 tree type_domain;
15652 tree min_val = size_zero_node;
15653 sub = build_fold_indirect_ref (sub);
15654 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15655 if (type_domain && TYPE_MIN_VALUE (type_domain))
15656 min_val = TYPE_MIN_VALUE (type_domain);
15657 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15658 }
15659
15660 return NULL_TREE;
15661 }
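
/* Illustrative sketch (editorial addition): the *&"str"[cst] case noted
   above, again assuming build_string_literal produces &"hi"[0].

     tree ptr  = build_string_literal (3, "hi");
     tree elem = TREE_TYPE (TREE_TYPE (ptr));
     tree val  = fold_indirect_ref_1 (elem, ptr);

   VAL is the INTEGER_CST 'h': the ADDR_EXPR is stripped and the read
   from the constant string folds via fold_read_from_constant_string.
   When no simplification applies, NULL_TREE is returned and the
   wrappers below fall back to an explicit INDIRECT_REF or the original
   tree.  */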
15662
15663 /* Builds an expression for an indirection through T, simplifying some
15664 cases. */
15665
15666 tree
15667 build_fold_indirect_ref (tree t)
15668 {
15669 tree type = TREE_TYPE (TREE_TYPE (t));
15670 tree sub = fold_indirect_ref_1 (type, t);
15671
15672 if (sub)
15673 return sub;
15674 else
15675 return build1 (INDIRECT_REF, type, t);
15676 }
15677
15678 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15679
15680 tree
15681 fold_indirect_ref (tree t)
15682 {
15683 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15684
15685 if (sub)
15686 return sub;
15687 else
15688 return t;
15689 }
15690
15691 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15692 whose result is ignored. The type of the returned tree need not be
15693 the same as the original expression. */
15694
15695 tree
15696 fold_ignored_result (tree t)
15697 {
15698 if (!TREE_SIDE_EFFECTS (t))
15699 return integer_zero_node;
15700
15701 for (;;)
15702 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15703 {
15704 case tcc_unary:
15705 t = TREE_OPERAND (t, 0);
15706 break;
15707
15708 case tcc_binary:
15709 case tcc_comparison:
15710 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15711 t = TREE_OPERAND (t, 0);
15712 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15713 t = TREE_OPERAND (t, 1);
15714 else
15715 return t;
15716 break;
15717
15718 case tcc_expression:
15719 switch (TREE_CODE (t))
15720 {
15721 case COMPOUND_EXPR:
15722 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15723 return t;
15724 t = TREE_OPERAND (t, 0);
15725 break;
15726
15727 case COND_EXPR:
15728 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15729 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15730 return t;
15731 t = TREE_OPERAND (t, 0);
15732 break;
15733
15734 default:
15735 return t;
15736 }
15737 break;
15738
15739 default:
15740 return t;
15741 }
15742 }
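
/* Illustrative sketch (editorial addition): an expression with no side
   effects is discarded outright.

     tree sum  = fold_build2 (PLUS_EXPR, integer_type_node,
                              integer_one_node, integer_zero_node);
     tree kept = fold_ignored_result (sum);

   KEPT is integer_zero_node.  Given (x = f (), y + 1) instead, the loop
   peels the side-effect-free second operand of the COMPOUND_EXPR and
   returns the assignment, which must still execute.  */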
15743
15744 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15745 This can only be applied to objects of a sizetype. */
15746
15747 tree
15748 round_up (tree value, int divisor)
15749 {
15750 tree div = NULL_TREE;
15751
15752 gcc_assert (divisor > 0);
15753 if (divisor == 1)
15754 return value;
15755
15756 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15757 have to do anything. Only do this when VALUE is not a constant,
15758 because for a constant the check is more expensive than simply
15759 doing the rounding. */
15760 if (TREE_CODE (value) != INTEGER_CST)
15761 {
15762 div = build_int_cst (TREE_TYPE (value), divisor);
15763
15764 if (multiple_of_p (TREE_TYPE (value), value, div))
15765 return value;
15766 }
15767
15768 /* If divisor is a power of two, simplify this to bit manipulation. */
15769 if (divisor == (divisor & -divisor))
15770 {
15771 if (TREE_CODE (value) == INTEGER_CST)
15772 {
15773 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15774 unsigned HOST_WIDE_INT high;
15775 bool overflow_p;
15776
15777 if ((low & (divisor - 1)) == 0)
15778 return value;
15779
15780 overflow_p = TREE_OVERFLOW (value);
15781 high = TREE_INT_CST_HIGH (value);
15782 low &= ~(divisor - 1);
15783 low += divisor;
15784 if (low == 0)
15785 {
15786 high++;
15787 if (high == 0)
15788 overflow_p = true;
15789 }
15790
15791 return force_fit_type_double (TREE_TYPE (value), low, high,
15792 -1, overflow_p);
15793 }
15794 else
15795 {
15796 tree t;
15797
15798 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15799 value = size_binop (PLUS_EXPR, value, t);
15800 t = build_int_cst (TREE_TYPE (value), -divisor);
15801 value = size_binop (BIT_AND_EXPR, value, t);
15802 }
15803 }
15804 else
15805 {
15806 if (!div)
15807 div = build_int_cst (TREE_TYPE (value), divisor);
15808 value = size_binop (CEIL_DIV_EXPR, value, div);
15809 value = size_binop (MULT_EXPR, value, div);
15810 }
15811
15812 return value;
15813 }
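
/* Illustrative sketch (editorial addition): both strategies above.

     tree a = round_up (size_int (37), 8);    (power of two: (37 + 7) & -8)
     tree b = round_up (size_int (37), 12);   (general: 4 * 12 via CEIL_DIV)

   A is the INTEGER_CST 40 and B the INTEGER_CST 48; with constant
   inputs the size_binop calls fold on the spot.  */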
15814
15815 /* Likewise, but round down. */
15816
15817 tree
15818 round_down (tree value, int divisor)
15819 {
15820 tree div = NULL_TREE;
15821
15822 gcc_assert (divisor > 0);
15823 if (divisor == 1)
15824 return value;
15825
15826 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15827 have to do anything. Only do this when VALUE is not a constant,
15828 because for a constant the check is more expensive than simply
15829 doing the rounding. */
15830 if (TREE_CODE (value) != INTEGER_CST)
15831 {
15832 div = build_int_cst (TREE_TYPE (value), divisor);
15833
15834 if (multiple_of_p (TREE_TYPE (value), value, div))
15835 return value;
15836 }
15837
15838 /* If divisor is a power of two, simplify this to bit manipulation. */
15839 if (divisor == (divisor & -divisor))
15840 {
15841 tree t;
15842
15843 t = build_int_cst (TREE_TYPE (value), -divisor);
15844 value = size_binop (BIT_AND_EXPR, value, t);
15845 }
15846 else
15847 {
15848 if (!div)
15849 div = build_int_cst (TREE_TYPE (value), divisor);
15850 value = size_binop (FLOOR_DIV_EXPR, value, div);
15851 value = size_binop (MULT_EXPR, value, div);
15852 }
15853
15854 return value;
15855 }
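
/* Illustrative sketch (editorial addition):

     tree a = round_down (size_int (37), 8);

   A is the INTEGER_CST 32, i.e. 37 & -8.  The power-of-two path needs
   no INTEGER_CST special case here: masking with -DIVISOR cannot
   overflow the way adding DIVISOR - 1 can in round_up.  */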
15856
15857 /* Returns a pointer to the base of the object addressed by EXP and
15858 extracts the offset of the access, storing it in *PBITPOS and
15859 *POFFSET. */
15860
15861 static tree
15862 split_address_to_core_and_offset (tree exp,
15863 HOST_WIDE_INT *pbitpos, tree *poffset)
15864 {
15865 tree core;
15866 enum machine_mode mode;
15867 int unsignedp, volatilep;
15868 HOST_WIDE_INT bitsize;
15869
15870 if (TREE_CODE (exp) == ADDR_EXPR)
15871 {
15872 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15873 poffset, &mode, &unsignedp, &volatilep,
15874 false);
15875 core = build_fold_addr_expr (core);
15876 }
15877 else
15878 {
15879 core = exp;
15880 *pbitpos = 0;
15881 *poffset = NULL_TREE;
15882 }
15883
15884 return core;
15885 }
15886
15887 /* Returns true if addresses of E1 and E2 differ by a constant, false
15888 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15889
15890 bool
15891 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15892 {
15893 tree core1, core2;
15894 HOST_WIDE_INT bitpos1, bitpos2;
15895 tree toffset1, toffset2, tdiff, type;
15896
15897 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15898 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15899
15900 if (bitpos1 % BITS_PER_UNIT != 0
15901 || bitpos2 % BITS_PER_UNIT != 0
15902 || !operand_equal_p (core1, core2, 0))
15903 return false;
15904
15905 if (toffset1 && toffset2)
15906 {
15907 type = TREE_TYPE (toffset1);
15908 if (type != TREE_TYPE (toffset2))
15909 toffset2 = fold_convert (type, toffset2);
15910
15911 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15912 if (!cst_and_fits_in_hwi (tdiff))
15913 return false;
15914
15915 *diff = int_cst_value (tdiff);
15916 }
15917 else if (toffset1 || toffset2)
15918 {
15919 /* If only one of the offsets is non-constant, the difference cannot
15920 be a constant. */
15921 return false;
15922 }
15923 else
15924 *diff = 0;
15925
15926 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15927 return true;
15928 }
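
/* Illustrative sketch (editorial addition, low-level): two addresses
   into the same string constant.  STR digs the STRING_CST back out of
   build_string_literal's result; ELEM is its element type.

     tree ptr  = build_string_literal (5, "abcd");
     tree str  = TREE_OPERAND (TREE_OPERAND (ptr, 0), 0);
     tree elem = TREE_TYPE (TREE_TYPE (ptr));
     tree e3 = build_fold_addr_expr
       (build4 (ARRAY_REF, elem, str, size_int (3), NULL_TREE, NULL_TREE));
     tree e1 = build_fold_addr_expr
       (build4 (ARRAY_REF, elem, str, size_int (1), NULL_TREE, NULL_TREE));
     HOST_WIDE_INT diff;
     bool ok = ptr_difference_const (e3, e1, &diff);

   OK is true and DIFF is 2: get_inner_reference reduces both addresses
   to the same STRING_CST core at constant bit positions 24 and 8.  */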
15929
15930 /* Simplify the floating point expression EXP when the sign of the
15931 result is not significant. Return NULL_TREE if no simplification
15932 is possible. */
15933
15934 tree
15935 fold_strip_sign_ops (tree exp)
15936 {
15937 tree arg0, arg1;
15938
15939 switch (TREE_CODE (exp))
15940 {
15941 case ABS_EXPR:
15942 case NEGATE_EXPR:
15943 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15944 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15945
15946 case MULT_EXPR:
15947 case RDIV_EXPR:
15948 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15949 return NULL_TREE;
15950 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15951 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15952 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15953 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15954 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15955 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15956 break;
15957
15958 case COMPOUND_EXPR:
15959 arg0 = TREE_OPERAND (exp, 0);
15960 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15961 if (arg1)
15962 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15963 break;
15964
15965 case COND_EXPR:
15966 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15967 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15968 if (arg0 || arg1)
15969 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15970 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15971 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15972 break;
15973
15974 case CALL_EXPR:
15975 {
15976 const enum built_in_function fcode = builtin_mathfn_code (exp);
15977 switch (fcode)
15978 {
15979 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15980 /* Strip copysign function call, return the 1st argument. */
15981 arg0 = CALL_EXPR_ARG (exp, 0);
15982 arg1 = CALL_EXPR_ARG (exp, 1);
15983 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15984
15985 default:
15986 /* Strip sign ops from the argument of "odd" math functions. */
15987 if (negate_mathfn_p (fcode))
15988 {
15989 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15990 if (arg0)
15991 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15992 }
15993 break;
15994 }
15995 }
15996 break;
15997
15998 default:
15999 break;
16000 }
16001 return NULL_TREE;
16002 }
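
/* Illustrative sketch (editorial addition): a negation whose sign the
   caller does not care about.

     tree two = build_real (double_type_node, dconst2);
     tree neg = build1 (NEGATE_EXPR, double_type_node, two);
     tree stripped = fold_strip_sign_ops (neg);

   STRIPPED == TWO: the NEGATE_EXPR is peeled off.  The builtins folder
   uses this to rewrite calls such as pow (-x, 2.0) into pow (x, 2.0),
   where the sign of the argument cannot affect the result.  */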