1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
45
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
68 #include "gimple.h"
69
70 /* Nonzero if we are folding constants inside an initializer; zero
71 otherwise. */
72 int folding_initializer = 0;
73
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
78 COMPCODE_FALSE = 0,
79 COMPCODE_LT = 1,
80 COMPCODE_EQ = 2,
81 COMPCODE_LE = 3,
82 COMPCODE_GT = 4,
83 COMPCODE_LTGT = 5,
84 COMPCODE_GE = 6,
85 COMPCODE_ORD = 7,
86 COMPCODE_UNORD = 8,
87 COMPCODE_UNLT = 9,
88 COMPCODE_UNEQ = 10,
89 COMPCODE_UNLE = 11,
90 COMPCODE_UNGT = 12,
91 COMPCODE_NE = 13,
92 COMPCODE_UNGE = 14,
93 COMPCODE_TRUE = 15
94 };
95
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
115 tree *, tree *);
116 static int all_ones_mask_p (const_tree, int);
117 static tree sign_bit_p (tree, const_tree);
118 static int simple_operand_p (const_tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 extern tree make_range (tree, int *, tree *, tree *, bool *);
123 extern tree build_range_check (tree, tree, int, tree, tree);
124 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
125 tree, tree);
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 tree, tree, tree);
138 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
139 static tree fold_div_compare (enum tree_code, tree, tree, tree);
140 static bool reorder_operands_p (const_tree, const_tree);
141 static tree fold_negate_const (tree, tree);
142 static tree fold_not_const (tree, tree);
143 static tree fold_relational_const (enum tree_code, tree, tree, tree);
144 static tree fold_convert_const (enum tree_code, tree, tree);
145
146
147 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
149 and SUM1. Then this yields nonzero if overflow occurred during the
150 addition.
151
152 Overflow occurs if A and B have the same sign, but A and SUM differ in
153 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
154 sign. */
155 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
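/* Illustrative sketch (not in the original source): with 8-bit values
   for brevity, a = 0x70, b = 0x70 gives sum = 0xE0.  The operands agree
   in sign, so ~(a ^ b) = 0xFF has the sign bit set, and (a ^ sum) = 0x90
   has the sign bit set as well; their AND is negative and the macro
   reports overflow.  For a = 0x70, b = 0x90 (sum = 0x00) the operands
   differ in sign, ~(a ^ b) = 0x1F clears the sign bit, and no overflow
   is reported.  */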
156 \f
157 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
158 We do that by representing the two-word integer in 4 words, with only
159 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
160 number. The value of the word is LOWPART + HIGHPART * BASE. */
161
162 #define LOWPART(x) \
163 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
164 #define HIGHPART(x) \
165 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
166 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
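/* Illustrative sketch (not in the original source): on a host where
   HOST_BITS_PER_WIDE_INT is 32, BASE is 0x10000, and the value
   0x12345678 splits into LOWPART 0x5678 and HIGHPART 0x1234; the
   identity LOWPART + HIGHPART * BASE reconstructs 0x12345678.  */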
167
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
171
172 static void
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
174 {
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
179 }
180
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184
185 static void
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 HOST_WIDE_INT *hi)
188 {
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
191 }
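/* Illustrative round trip (not in the original source), again assuming
   a 32-bit HOST_WIDE_INT: encode (words, 0xDEADBEEF, 0x00000001) stores
   {0xBEEF, 0xDEAD, 0x0001, 0x0000}, and decode recombines the pairs as
   words[0] + words[1] * BASE == 0xDEADBEEF and
   words[2] + words[3] * BASE == 0x00000001.  */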
192 \f
193 /* Force the double-word integer L1, H1 to be within the range of the
194 integer type TYPE. Stores the properly truncated and sign-extended
195 double-word integer in *LV, *HV. Returns true if the operation
196 overflows, that is, argument and result are different. */
197
198 int
199 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
200 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
201 {
202 unsigned HOST_WIDE_INT low0 = l1;
203 HOST_WIDE_INT high0 = h1;
204 unsigned int prec;
205 int sign_extended_type;
206
207 if (POINTER_TYPE_P (type)
208 || TREE_CODE (type) == OFFSET_TYPE)
209 prec = POINTER_SIZE;
210 else
211 prec = TYPE_PRECISION (type);
212
213 /* Size types *are* sign extended. */
214 sign_extended_type = (!TYPE_UNSIGNED (type)
215 || (TREE_CODE (type) == INTEGER_TYPE
216 && TYPE_IS_SIZETYPE (type)));
217
218 /* First clear all bits that are beyond the type's precision. */
219 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
220 ;
221 else if (prec > HOST_BITS_PER_WIDE_INT)
222 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
223 else
224 {
225 h1 = 0;
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
228 }
229
230 /* Then do sign extension if necessary. */
231 if (!sign_extended_type)
232 /* No sign extension */;
233 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 /* Correct width already. */;
235 else if (prec > HOST_BITS_PER_WIDE_INT)
236 {
237 /* Sign extend top half? */
238 if (h1 & ((unsigned HOST_WIDE_INT)1
239 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
240 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
241 }
242 else if (prec == HOST_BITS_PER_WIDE_INT)
243 {
244 if ((HOST_WIDE_INT)l1 < 0)
245 h1 = -1;
246 }
247 else
248 {
249 /* Sign extend bottom half? */
250 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
251 {
252 h1 = -1;
253 l1 |= (HOST_WIDE_INT)(-1) << prec;
254 }
255 }
256
257 *lv = l1;
258 *hv = h1;
259
260 /* If the value didn't fit, signal overflow. */
261 return l1 != low0 || h1 != high0;
262 }
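/* Illustrative sketch (not in the original source): forcing l1 = 0xFF,
   h1 = 0 into a signed 8-bit type first masks the value to 8 bits and
   then sign extends, since bit 7 is set: *lv becomes all ones and *hv
   becomes -1, i.e. the value -1.  Because -1 differs from the original
   255, the function returns nonzero to signal overflow.  */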
263
264 /* We force the double-int HIGH:LOW to the range of the type TYPE by
265    sign or zero extending it.
266    OVERFLOWABLE indicates whether we are interested in overflow of the
267    value: when it is >0 we are only interested in signed overflow, and
268    when it is <0 we are interested in any overflow.  OVERFLOWED
269    indicates whether overflow has already occurred.  We force the
270    value to be within the range of TYPE (by setting to 0 or 1 all the
271    bits outside the type's range).
272    We set TREE_OVERFLOW if
273    OVERFLOWED is nonzero,
274    or OVERFLOWABLE is >0 and signed overflow occurs,
275    or OVERFLOWABLE is <0 and any overflow occurs.
276    We return a new tree node for the extended double-int.  The node
277    is shared if no overflow flags are set. */
278
279 tree
280 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
281 HOST_WIDE_INT high, int overflowable,
282 bool overflowed)
283 {
284 int sign_extended_type;
285 bool overflow;
286
287 /* Size types *are* sign extended. */
288 sign_extended_type = (!TYPE_UNSIGNED (type)
289 || (TREE_CODE (type) == INTEGER_TYPE
290 && TYPE_IS_SIZETYPE (type)));
291
292 overflow = fit_double_type (low, high, &low, &high, type);
293
294 /* If we need to set overflow flags, return a new unshared node. */
295 if (overflowed || overflow)
296 {
297 if (overflowed
298 || overflowable < 0
299 || (overflowable > 0 && sign_extended_type))
300 {
301 tree t = make_node (INTEGER_CST);
302 TREE_INT_CST_LOW (t) = low;
303 TREE_INT_CST_HIGH (t) = high;
304 TREE_TYPE (t) = type;
305 TREE_OVERFLOW (t) = 1;
306 return t;
307 }
308 }
309
310 /* Else build a shared node. */
311 return build_int_cst_wide (type, low, high);
312 }
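/* Illustrative usage sketch (not in the original source): callers such
   as int_const_binop below pass the raw double-int result together
   with the operands' overflow state,

     t = force_fit_type_double (type, low, hi, 1, overflow);

   which truncates LOW:HI to TYPE and returns an unshared node with
   TREE_OVERFLOW set only when the overflow matters for TYPE.  */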
313 \f
314 /* Add two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows according to UNSIGNED_P.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
319
320 int
321 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
324 bool unsigned_p)
325 {
326 unsigned HOST_WIDE_INT l;
327 HOST_WIDE_INT h;
328
329 l = l1 + l2;
330 h = h1 + h2 + (l < l1);
331
332 *lv = l;
333 *hv = h;
334
335 if (unsigned_p)
336 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
337 else
338 return OVERFLOW_SUM_SIGN (h1, h2, h);
339 }
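/* Illustrative sketch (not in the original source): the carry into the
   high word is (l < l1), which is 1 exactly when the low-word addition
   wrapped.  E.g. with 32-bit words, l1 = 0xFFFFFFFF and l2 = 1 give
   l = 0, so (l < l1) supplies the carry and h = h1 + h2 + 1.  */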
340
341 /* Negate a doubleword integer with doubleword result.
342 Return nonzero if the operation overflows, assuming it's signed.
343 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
344 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
345
346 int
347 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
348 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
349 {
350 if (l1 == 0)
351 {
352 *lv = 0;
353 *hv = - h1;
354 return (*hv & h1) < 0;
355 }
356 else
357 {
358 *lv = -l1;
359 *hv = ~h1;
360 return 0;
361 }
362 }
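/* Illustrative sketch (not in the original source): negation overflows
   only for the most negative doubleword value, where l1 == 0 and h1 is
   the minimum HOST_WIDE_INT.  In that case *hv = -h1 == h1, so
   (*hv & h1) has the sign bit set and the function reports overflow;
   every other input negates exactly and returns 0.  */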
363 \f
364 /* Multiply two doubleword integers with doubleword result.
365 Return nonzero if the operation overflows according to UNSIGNED_P.
366 Each argument is given as two `HOST_WIDE_INT' pieces.
367 One argument is L1 and H1; the other, L2 and H2.
368 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
369
370 int
371 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
372 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
373 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
374 bool unsigned_p)
375 {
376 HOST_WIDE_INT arg1[4];
377 HOST_WIDE_INT arg2[4];
378 HOST_WIDE_INT prod[4 * 2];
379 unsigned HOST_WIDE_INT carry;
380 int i, j, k;
381 unsigned HOST_WIDE_INT toplow, neglow;
382 HOST_WIDE_INT tophigh, neghigh;
383
384 encode (arg1, l1, h1);
385 encode (arg2, l2, h2);
386
387 memset (prod, 0, sizeof prod);
388
389 for (i = 0; i < 4; i++)
390 {
391 carry = 0;
392 for (j = 0; j < 4; j++)
393 {
394 k = i + j;
395 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
396 carry += arg1[i] * arg2[j];
397 /* Since prod[k] <= 0xFFFF, this sum <= 0xFFFFFFFF. */
398 carry += prod[k];
399 prod[k] = LOWPART (carry);
400 carry = HIGHPART (carry);
401 }
402 prod[i + 4] = carry;
403 }
404
405 decode (prod, lv, hv);
406 decode (prod + 4, &toplow, &tophigh);
407
408 /* Unsigned overflow is immediate. */
409 if (unsigned_p)
410 return (toplow | tophigh) != 0;
411
412 /* Check for signed overflow by calculating the signed representation of the
413 top half of the result; it should agree with the low half's sign bit. */
414 if (h1 < 0)
415 {
416 neg_double (l2, h2, &neglow, &neghigh);
417 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
418 }
419 if (h2 < 0)
420 {
421 neg_double (l1, h1, &neglow, &neghigh);
422 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
423 }
424 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
425 }
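/* Illustrative sketch (not in the original source): the loops above are
   schoolbook multiplication in base 2**(HOST_BITS_PER_WIDE_INT / 2).
   With 16-bit digits, 0x00010002 * 0x00000003 forms the digit products
   2 * 3 = 6 at position 0 and 1 * 3 = 3 at position 1, giving the
   product 0x00030006 with no carries.  */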
426 \f
427 /* Shift the doubleword integer in L1, H1 left by COUNT places
428 keeping only PREC bits of result.
429 Shift right if COUNT is negative.
430 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
431 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
432
433 void
434 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
435 HOST_WIDE_INT count, unsigned int prec,
436 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
437 {
438 unsigned HOST_WIDE_INT signmask;
439
440 if (count < 0)
441 {
442 rshift_double (l1, h1, -count, prec, lv, hv, arith);
443 return;
444 }
445
446 if (SHIFT_COUNT_TRUNCATED)
447 count %= prec;
448
449 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
450 {
451 /* Shifting by the host word size is undefined according to the
452 ANSI standard, so we must handle this as a special case. */
453 *hv = 0;
454 *lv = 0;
455 }
456 else if (count >= HOST_BITS_PER_WIDE_INT)
457 {
458 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
459 *lv = 0;
460 }
461 else
462 {
463 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
464 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
465 *lv = l1 << count;
466 }
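  /* Note (illustrative, not in the original source): the two-step shift
     l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 above avoids the
     undefined full-width shift that a single
     l1 >> (HOST_BITS_PER_WIDE_INT - count) would perform when count == 0.  */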
467
468 /* Sign extend all bits that are beyond the precision. */
469
470 signmask = -((prec > HOST_BITS_PER_WIDE_INT
471 ? ((unsigned HOST_WIDE_INT) *hv
472 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
473 : (*lv >> (prec - 1))) & 1);
474
475 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
476 ;
477 else if (prec >= HOST_BITS_PER_WIDE_INT)
478 {
479 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
480 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
481 }
482 else
483 {
484 *hv = signmask;
485 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
486 *lv |= signmask << prec;
487 }
488 }
489
490 /* Shift the doubleword integer in L1, H1 right by COUNT places
491 keeping only PREC bits of result. COUNT must be positive.
492 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
493 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
494
495 void
496 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
497 HOST_WIDE_INT count, unsigned int prec,
498 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
499 int arith)
500 {
501 unsigned HOST_WIDE_INT signmask;
502
503 signmask = (arith
504 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
505 : 0);
506
507 if (SHIFT_COUNT_TRUNCATED)
508 count %= prec;
509
510 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
511 {
512 /* Shifting by the host word size is undefined according to the
513 ANSI standard, so we must handle this as a special case. */
514 *hv = 0;
515 *lv = 0;
516 }
517 else if (count >= HOST_BITS_PER_WIDE_INT)
518 {
519 *hv = 0;
520 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
521 }
522 else
523 {
524 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
525 *lv = ((l1 >> count)
526 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
527 }
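  /* As in lshift_double above, the two-step shift
     h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1 sidesteps the
     undefined behavior of shifting by the full word width when count
     is 0 (an illustrative note, not in the original source).  */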
528
529 /* Zero / sign extend all bits that are beyond the precision. */
530
531 if (count >= (HOST_WIDE_INT)prec)
532 {
533 *hv = signmask;
534 *lv = signmask;
535 }
536 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
537 ;
538 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
539 {
540 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
541 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
542 }
543 else
544 {
545 *hv = signmask;
546 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
547 *lv |= signmask << (prec - count);
548 }
549 }
550 \f
551 /* Rotate the doubleword integer in L1, H1 left by COUNT places
552 keeping only PREC bits of result.
553 Rotate right if COUNT is negative.
554 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
555
556 void
557 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
558 HOST_WIDE_INT count, unsigned int prec,
559 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
560 {
561 unsigned HOST_WIDE_INT s1l, s2l;
562 HOST_WIDE_INT s1h, s2h;
563
564 count %= prec;
565 if (count < 0)
566 count += prec;
567
568 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
569 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
570 *lv = s1l | s2l;
571 *hv = s1h | s2h;
572 }
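/* Illustrative sketch (not in the original source): a left rotate by
   COUNT within PREC bits is the OR of a logical left shift by COUNT and
   a logical right shift by PREC - COUNT.  E.g. rotating the 8-bit value
   0xB1 left by 4 gives (0xB1 << 4 | 0xB1 >> 4) & 0xFF == 0x1B.  */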
573
574 /* Rotate the doubleword integer in L1, H1 right by COUNT places
575    keeping only PREC bits of result.  Rotate left if COUNT is negative.
576 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
577
578 void
579 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
580 HOST_WIDE_INT count, unsigned int prec,
581 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
582 {
583 unsigned HOST_WIDE_INT s1l, s2l;
584 HOST_WIDE_INT s1h, s2h;
585
586 count %= prec;
587 if (count < 0)
588 count += prec;
589
590 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
591 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
592 *lv = s1l | s2l;
593 *hv = s1h | s2h;
594 }
595 \f
596 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
597 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
598 CODE is a tree code for a kind of division, one of
599 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
600 or EXACT_DIV_EXPR
601 It controls how the quotient is rounded to an integer.
602 Return nonzero if the operation overflows.
603 UNS nonzero says do unsigned division. */
604
605 int
606 div_and_round_double (enum tree_code code, int uns,
607 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
608 HOST_WIDE_INT hnum_orig,
609 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
610 HOST_WIDE_INT hden_orig,
611 unsigned HOST_WIDE_INT *lquo,
612 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
613 HOST_WIDE_INT *hrem)
614 {
615 int quo_neg = 0;
616 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
617 HOST_WIDE_INT den[4], quo[4];
618 int i, j;
619 unsigned HOST_WIDE_INT work;
620 unsigned HOST_WIDE_INT carry = 0;
621 unsigned HOST_WIDE_INT lnum = lnum_orig;
622 HOST_WIDE_INT hnum = hnum_orig;
623 unsigned HOST_WIDE_INT lden = lden_orig;
624 HOST_WIDE_INT hden = hden_orig;
625 int overflow = 0;
626
627 if (hden == 0 && lden == 0)
628 overflow = 1, lden = 1;
629
630 /* Calculate quotient sign and convert operands to unsigned. */
631 if (!uns)
632 {
633 if (hnum < 0)
634 {
635 quo_neg = ~ quo_neg;
636 /* (minimum integer) / (-1) is the only overflow case. */
637 if (neg_double (lnum, hnum, &lnum, &hnum)
638 && ((HOST_WIDE_INT) lden & hden) == -1)
639 overflow = 1;
640 }
641 if (hden < 0)
642 {
643 quo_neg = ~ quo_neg;
644 neg_double (lden, hden, &lden, &hden);
645 }
646 }
647
648 if (hnum == 0 && hden == 0)
649 { /* single precision */
650 *hquo = *hrem = 0;
651 /* This unsigned division rounds toward zero. */
652 *lquo = lnum / lden;
653 goto finish_up;
654 }
655
656 if (hnum == 0)
657 { /* trivial case: dividend < divisor */
658 /* hden != 0 already checked. */
659 *hquo = *lquo = 0;
660 *hrem = hnum;
661 *lrem = lnum;
662 goto finish_up;
663 }
664
665 memset (quo, 0, sizeof quo);
666
667 memset (num, 0, sizeof num); /* to zero the extra scaling element */
668 memset (den, 0, sizeof den);
669
670 encode (num, lnum, hnum);
671 encode (den, lden, hden);
672
673 /* Special code for when the divisor < BASE. */
674 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
675 {
676 /* hnum != 0 already checked. */
677 for (i = 4 - 1; i >= 0; i--)
678 {
679 work = num[i] + carry * BASE;
680 quo[i] = work / lden;
681 carry = work % lden;
682 }
683 }
684 else
685 {
686 /* Full double precision division,
687 with thanks to Don Knuth's "Seminumerical Algorithms". */
688 int num_hi_sig, den_hi_sig;
689 unsigned HOST_WIDE_INT quo_est, scale;
690
691 /* Find the highest nonzero divisor digit. */
692 for (i = 4 - 1;; i--)
693 if (den[i] != 0)
694 {
695 den_hi_sig = i;
696 break;
697 }
698
699 /* Ensure that the first digit of the divisor is at least BASE/2.
700 This is required by the quotient digit estimation algorithm. */
701
702 scale = BASE / (den[den_hi_sig] + 1);
703 if (scale > 1)
704 { /* scale divisor and dividend */
705 carry = 0;
706 for (i = 0; i <= 4 - 1; i++)
707 {
708 work = (num[i] * scale) + carry;
709 num[i] = LOWPART (work);
710 carry = HIGHPART (work);
711 }
712
713 num[4] = carry;
714 carry = 0;
715 for (i = 0; i <= 4 - 1; i++)
716 {
717 work = (den[i] * scale) + carry;
718 den[i] = LOWPART (work);
719 carry = HIGHPART (work);
720 if (den[i] != 0) den_hi_sig = i;
721 }
722 }
723
724 num_hi_sig = 4;
725
726 /* Main loop */
727 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
728 {
729 /* Guess the next quotient digit, quo_est, by dividing the first
730    two remaining dividend digits by the high order divisor digit.
731    quo_est is never low and is at most 2 high.  */
732 unsigned HOST_WIDE_INT tmp;
733
734 num_hi_sig = i + den_hi_sig + 1;
735 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
736 if (num[num_hi_sig] != den[den_hi_sig])
737 quo_est = work / den[den_hi_sig];
738 else
739 quo_est = BASE - 1;
740
741 /* Refine quo_est so it's usually correct, and at most one high. */
742 tmp = work - quo_est * den[den_hi_sig];
743 if (tmp < BASE
744 && (den[den_hi_sig - 1] * quo_est
745 > (tmp * BASE + num[num_hi_sig - 2])))
746 quo_est--;
747
748 /* Try QUO_EST as the quotient digit, by multiplying the
749 divisor by QUO_EST and subtracting from the remaining dividend.
750 Keep in mind that QUO_EST is the I - 1st digit. */
751
752 carry = 0;
753 for (j = 0; j <= den_hi_sig; j++)
754 {
755 work = quo_est * den[j] + carry;
756 carry = HIGHPART (work);
757 work = num[i + j] - LOWPART (work);
758 num[i + j] = LOWPART (work);
759 carry += HIGHPART (work) != 0;
760 }
761
762 /* If quo_est was high by one, then num[i] went negative and
763 we need to correct things. */
764 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
765 {
766 quo_est--;
767 carry = 0; /* add divisor back in */
768 for (j = 0; j <= den_hi_sig; j++)
769 {
770 work = num[i + j] + den[j] + carry;
771 carry = HIGHPART (work);
772 num[i + j] = LOWPART (work);
773 }
774
775 num [num_hi_sig] += carry;
776 }
777
778 /* Store the quotient digit. */
779 quo[i] = quo_est;
780 }
781 }
782
783 decode (quo, lquo, hquo);
784
785 finish_up:
786 /* If result is negative, make it so. */
787 if (quo_neg)
788 neg_double (*lquo, *hquo, lquo, hquo);
789
790 /* Compute trial remainder: rem = num - (quo * den) */
791 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
792 neg_double (*lrem, *hrem, lrem, hrem);
793 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
794
795 switch (code)
796 {
797 case TRUNC_DIV_EXPR:
798 case TRUNC_MOD_EXPR: /* round toward zero */
799 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
800 return overflow;
801
802 case FLOOR_DIV_EXPR:
803 case FLOOR_MOD_EXPR: /* round toward negative infinity */
804 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
805 {
806 /* quo = quo - 1; */
807 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
808 lquo, hquo);
809 }
810 else
811 return overflow;
812 break;
813
814 case CEIL_DIV_EXPR:
815 case CEIL_MOD_EXPR: /* round toward positive infinity */
816 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
817 {
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
819 lquo, hquo);
820 }
821 else
822 return overflow;
823 break;
824
825 case ROUND_DIV_EXPR:
826 case ROUND_MOD_EXPR: /* round to closest integer */
827 {
828 unsigned HOST_WIDE_INT labs_rem = *lrem;
829 HOST_WIDE_INT habs_rem = *hrem;
830 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
831 HOST_WIDE_INT habs_den = hden, htwice;
832
833 /* Get absolute values. */
834 if (*hrem < 0)
835 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
836 if (hden < 0)
837 neg_double (lden, hden, &labs_den, &habs_den);
838
839 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
840 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
841 labs_rem, habs_rem, &ltwice, &htwice);
842
843 if (((unsigned HOST_WIDE_INT) habs_den
844 < (unsigned HOST_WIDE_INT) htwice)
845 || (((unsigned HOST_WIDE_INT) habs_den
846 == (unsigned HOST_WIDE_INT) htwice)
847 && (labs_den <= ltwice)))
848 {
849 if (*hquo < 0)
850 /* quo = quo - 1; */
851 add_double (*lquo, *hquo,
852 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
853 else
854 /* quo = quo + 1; */
855 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
856 lquo, hquo);
857 }
858 else
859 return overflow;
860 }
861 break;
862
863 default:
864 gcc_unreachable ();
865 }
866
867 /* Compute true remainder: rem = num - (quo * den) */
868 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
869 neg_double (*lrem, *hrem, lrem, hrem);
870 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
871 return overflow;
872 }
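/* Illustrative sketch (not in the original source): dividing -7 by 2
   produces a truncated quotient of -3 with remainder -1.
   TRUNC_DIV_EXPR keeps -3; FLOOR_DIV_EXPR adjusts to -4 (negative
   quotient, nonzero remainder); CEIL_DIV_EXPR keeps -3; and
   ROUND_DIV_EXPR gives -4, since 2 * |rem| >= |den| rounds the halfway
   case away from zero.  The true remainder is then recomputed as
   num - quo * den for whichever quotient was chosen.  */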
873
874 /* If ARG2 divides ARG1 with zero remainder, carries out the division
875 of type CODE and returns the quotient.
876 Otherwise returns NULL_TREE. */
877
878 tree
879 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
880 {
881 unsigned HOST_WIDE_INT int1l, int2l;
882 HOST_WIDE_INT int1h, int2h;
883 unsigned HOST_WIDE_INT quol, reml;
884 HOST_WIDE_INT quoh, remh;
885 tree type = TREE_TYPE (arg1);
886 int uns = TYPE_UNSIGNED (type);
887
888 int1l = TREE_INT_CST_LOW (arg1);
889 int1h = TREE_INT_CST_HIGH (arg1);
890 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
891 &obj[some_exotic_number]. */
892 if (POINTER_TYPE_P (type))
893 {
894 uns = false;
895 type = signed_type_for (type);
896 fit_double_type (int1l, int1h, &int1l, &int1h,
897 type);
898 }
899 else
900 fit_double_type (int1l, int1h, &int1l, &int1h, type);
901 int2l = TREE_INT_CST_LOW (arg2);
902 int2h = TREE_INT_CST_HIGH (arg2);
903
904 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
905 &quol, &quoh, &reml, &remh);
906 if (remh != 0 || reml != 0)
907 return NULL_TREE;
908
909 return build_int_cst_wide (type, quol, quoh);
910 }
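/* Illustrative usage sketch (not in the original source): for the
   integer constants 12 and 4, div_if_zero_remainder (EXACT_DIV_EXPR,
   arg1, arg2) returns the constant 3; for 13 and 4 the remainder is
   nonzero and NULL_TREE is returned instead.  */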
911 \f
912 /* This is nonzero if we should defer warnings about undefined
913 overflow. This facility exists because these warnings are a
914 special case. The code to estimate loop iterations does not want
915 to issue any warnings, since it works with expressions which do not
916 occur in user code. Various bits of cleanup code call fold(), but
917 only use the result if it has certain characteristics (e.g., is a
918 constant); that code only wants to issue a warning if the result is
919 used. */
920
921 static int fold_deferring_overflow_warnings;
922
923 /* If a warning about undefined overflow is deferred, this is the
924 warning. Note that this may cause us to turn two warnings into
925 one, but that is fine since it is sufficient to only give one
926 warning per expression. */
927
928 static const char* fold_deferred_overflow_warning;
929
930 /* If a warning about undefined overflow is deferred, this is the
931 level at which the warning should be emitted. */
932
933 static enum warn_strict_overflow_code fold_deferred_overflow_code;
934
935 /* Start deferring overflow warnings. We could use a stack here to
936 permit nested calls, but at present it is not necessary. */
937
938 void
939 fold_defer_overflow_warnings (void)
940 {
941 ++fold_deferring_overflow_warnings;
942 }
943
944 /* Stop deferring overflow warnings. If there is a pending warning,
945 and ISSUE is true, then issue the warning if appropriate. STMT is
946 the statement with which the warning should be associated (used for
947 location information); STMT may be NULL. CODE is the level of the
948 warning--a warn_strict_overflow_code value. This function will use
949 the smaller of CODE and the deferred code when deciding whether to
950 issue the warning. CODE may be zero to mean to always use the
951 deferred code. */
952
953 void
954 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
955 {
956 const char *warnmsg;
957 location_t locus;
958
959 gcc_assert (fold_deferring_overflow_warnings > 0);
960 --fold_deferring_overflow_warnings;
961 if (fold_deferring_overflow_warnings > 0)
962 {
963 if (fold_deferred_overflow_warning != NULL
964 && code != 0
965 && code < (int) fold_deferred_overflow_code)
966 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
967 return;
968 }
969
970 warnmsg = fold_deferred_overflow_warning;
971 fold_deferred_overflow_warning = NULL;
972
973 if (!issue || warnmsg == NULL)
974 return;
975
976 if (gimple_no_warning_p (stmt))
977 return;
978
979 /* Use the smallest code level when deciding to issue the
980 warning. */
981 if (code == 0 || code > (int) fold_deferred_overflow_code)
982 code = fold_deferred_overflow_code;
983
984 if (!issue_strict_overflow_warning (code))
985 return;
986
987 if (stmt == NULL)
988 locus = input_location;
989 else
990 locus = gimple_location (stmt);
991 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
992 }
993
994 /* Stop deferring overflow warnings, ignoring any deferred
995 warnings. */
996
997 void
998 fold_undefer_and_ignore_overflow_warnings (void)
999 {
1000 fold_undefer_overflow_warnings (false, NULL, 0);
1001 }
1002
1003 /* Whether we are deferring overflow warnings. */
1004
1005 bool
1006 fold_deferring_overflow_warnings_p (void)
1007 {
1008 return fold_deferring_overflow_warnings > 0;
1009 }
1010
1011 /* This is called when we fold something based on the fact that signed
1012 overflow is undefined. */
1013
1014 static void
1015 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1016 {
1017 if (fold_deferring_overflow_warnings > 0)
1018 {
1019 if (fold_deferred_overflow_warning == NULL
1020 || wc < fold_deferred_overflow_code)
1021 {
1022 fold_deferred_overflow_warning = gmsgid;
1023 fold_deferred_overflow_code = wc;
1024 }
1025 }
1026 else if (issue_strict_overflow_warning (wc))
1027 warning (OPT_Wstrict_overflow, gmsgid);
1028 }
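/* Illustrative usage sketch (not in the original source; result_is_used
   is a hypothetical flag and stmt the associated statement, which may
   be NULL): a caller that folds speculatively brackets the work as

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE && result_is_used,
                                     stmt, WARN_STRICT_OVERFLOW_MISC);

   so an "assuming signed overflow does not occur" diagnostic is issued
   only when the folded result is actually used.  */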
1029 \f
1030 /* Return true if the built-in mathematical function specified by CODE
1031 is odd, i.e. -f(x) == f(-x). */
1032
1033 static bool
1034 negate_mathfn_p (enum built_in_function code)
1035 {
1036 switch (code)
1037 {
1038 CASE_FLT_FN (BUILT_IN_ASIN):
1039 CASE_FLT_FN (BUILT_IN_ASINH):
1040 CASE_FLT_FN (BUILT_IN_ATAN):
1041 CASE_FLT_FN (BUILT_IN_ATANH):
1042 CASE_FLT_FN (BUILT_IN_CASIN):
1043 CASE_FLT_FN (BUILT_IN_CASINH):
1044 CASE_FLT_FN (BUILT_IN_CATAN):
1045 CASE_FLT_FN (BUILT_IN_CATANH):
1046 CASE_FLT_FN (BUILT_IN_CBRT):
1047 CASE_FLT_FN (BUILT_IN_CPROJ):
1048 CASE_FLT_FN (BUILT_IN_CSIN):
1049 CASE_FLT_FN (BUILT_IN_CSINH):
1050 CASE_FLT_FN (BUILT_IN_CTAN):
1051 CASE_FLT_FN (BUILT_IN_CTANH):
1052 CASE_FLT_FN (BUILT_IN_ERF):
1053 CASE_FLT_FN (BUILT_IN_LLROUND):
1054 CASE_FLT_FN (BUILT_IN_LROUND):
1055 CASE_FLT_FN (BUILT_IN_ROUND):
1056 CASE_FLT_FN (BUILT_IN_SIN):
1057 CASE_FLT_FN (BUILT_IN_SINH):
1058 CASE_FLT_FN (BUILT_IN_TAN):
1059 CASE_FLT_FN (BUILT_IN_TANH):
1060 CASE_FLT_FN (BUILT_IN_TRUNC):
1061 return true;
1062
1063 CASE_FLT_FN (BUILT_IN_LLRINT):
1064 CASE_FLT_FN (BUILT_IN_LRINT):
1065 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1066 CASE_FLT_FN (BUILT_IN_RINT):
1067 return !flag_rounding_math;
1068
1069 default:
1070 break;
1071 }
1072 return false;
1073 }
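/* Illustrative sketch (not in the original source): sin is odd, so the
   folder may rewrite -sin (x) as sin (-x).  rint is treated as odd only
   when -frounding-math is off, because under a directed rounding mode
   rint (-x) and -rint (x) can differ (e.g. rounding upward,
   rint (-2.3) == -2.0 while -rint (2.3) == -3.0).  */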
1074
1075 /* Check whether we may negate an integer constant T without causing
1076 overflow. */
1077
1078 bool
1079 may_negate_without_overflow_p (const_tree t)
1080 {
1081 unsigned HOST_WIDE_INT val;
1082 unsigned int prec;
1083 tree type;
1084
1085 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086
1087 type = TREE_TYPE (t);
1088 if (TYPE_UNSIGNED (type))
1089 return false;
1090
1091 prec = TYPE_PRECISION (type);
1092 if (prec > HOST_BITS_PER_WIDE_INT)
1093 {
1094 if (TREE_INT_CST_LOW (t) != 0)
1095 return true;
1096 prec -= HOST_BITS_PER_WIDE_INT;
1097 val = TREE_INT_CST_HIGH (t);
1098 }
1099 else
1100 val = TREE_INT_CST_LOW (t);
1101 if (prec < HOST_BITS_PER_WIDE_INT)
1102 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1103 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1104 }
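/* Illustrative sketch (not in the original source): for a 32-bit signed
   type the only value whose negation overflows is INT_MIN, whose bit
   pattern is 1 << 31; the comparison above tests the (masked) value
   against exactly that single pattern.  */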
1105
1106 /* Determine whether an expression T can be cheaply negated using
1107 the function negate_expr without introducing undefined overflow. */
1108
1109 static bool
1110 negate_expr_p (tree t)
1111 {
1112 tree type;
1113
1114 if (t == 0)
1115 return false;
1116
1117 type = TREE_TYPE (t);
1118
1119 STRIP_SIGN_NOPS (t);
1120 switch (TREE_CODE (t))
1121 {
1122 case INTEGER_CST:
1123 if (TYPE_OVERFLOW_WRAPS (type))
1124 return true;
1125
1126 /* Check that -CST will not overflow type. */
1127 return may_negate_without_overflow_p (t);
1128 case BIT_NOT_EXPR:
1129 return (INTEGRAL_TYPE_P (type)
1130 && TYPE_OVERFLOW_WRAPS (type));
1131
1132 case FIXED_CST:
1133 case REAL_CST:
1134 case NEGATE_EXPR:
1135 return true;
1136
1137 case COMPLEX_CST:
1138 return negate_expr_p (TREE_REALPART (t))
1139 && negate_expr_p (TREE_IMAGPART (t));
1140
1141 case COMPLEX_EXPR:
1142 return negate_expr_p (TREE_OPERAND (t, 0))
1143 && negate_expr_p (TREE_OPERAND (t, 1));
1144
1145 case CONJ_EXPR:
1146 return negate_expr_p (TREE_OPERAND (t, 0));
1147
1148 case PLUS_EXPR:
1149 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1150 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1151 return false;
1152 /* -(A + B) -> (-B) - A. */
1153 if (negate_expr_p (TREE_OPERAND (t, 1))
1154 && reorder_operands_p (TREE_OPERAND (t, 0),
1155 TREE_OPERAND (t, 1)))
1156 return true;
1157 /* -(A + B) -> (-A) - B. */
1158 return negate_expr_p (TREE_OPERAND (t, 0));
1159
1160 case MINUS_EXPR:
1161 /* We can't turn -(A-B) into B-A when we honor signed zeros or rounding. */
1162 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1163 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1164 && reorder_operands_p (TREE_OPERAND (t, 0),
1165 TREE_OPERAND (t, 1));
1166
1167 case MULT_EXPR:
1168 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1169 break;
1170
1171 /* Fall through. */
1172
1173 case RDIV_EXPR:
1174 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1175 return negate_expr_p (TREE_OPERAND (t, 1))
1176 || negate_expr_p (TREE_OPERAND (t, 0));
1177 break;
1178
1179 case TRUNC_DIV_EXPR:
1180 case ROUND_DIV_EXPR:
1181 case FLOOR_DIV_EXPR:
1182 case CEIL_DIV_EXPR:
1183 case EXACT_DIV_EXPR:
1184 /* In general we can't negate A / B, because if A is INT_MIN and
1185 B is 1, we may turn this into INT_MIN / -1 which is undefined
1186 and actually traps on some architectures. But if overflow is
1187 undefined, we can negate, because - (INT_MIN / 1) is an
1188 overflow. */
1189 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1190 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1191 break;
1192 return negate_expr_p (TREE_OPERAND (t, 1))
1193 || negate_expr_p (TREE_OPERAND (t, 0));
1194
1195 case NOP_EXPR:
1196 /* Negate -((double)float) as (double)(-float). */
1197 if (TREE_CODE (type) == REAL_TYPE)
1198 {
1199 tree tem = strip_float_extensions (t);
1200 if (tem != t)
1201 return negate_expr_p (tem);
1202 }
1203 break;
1204
1205 case CALL_EXPR:
1206 /* Negate -f(x) as f(-x). */
1207 if (negate_mathfn_p (builtin_mathfn_code (t)))
1208 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1209 break;
1210
1211 case RSHIFT_EXPR:
1212 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1213 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1214 {
1215 tree op1 = TREE_OPERAND (t, 1);
1216 if (TREE_INT_CST_HIGH (op1) == 0
1217 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1218 == TREE_INT_CST_LOW (op1))
1219 return true;
1220 }
1221 break;
1222
1223 default:
1224 break;
1225 }
1226 return false;
1227 }
1228
1229 /* Given T, an expression, return a folded tree for -T, or NULL_TREE
1230    if no simplification is possible.
1231 If negate_expr_p would return true for T, NULL_TREE will never be
1232 returned. */
1233
1234 static tree
1235 fold_negate_expr (tree t)
1236 {
1237 tree type = TREE_TYPE (t);
1238 tree tem;
1239
1240 switch (TREE_CODE (t))
1241 {
1242 /* Convert - (~A) to A + 1. */
1243 case BIT_NOT_EXPR:
1244 if (INTEGRAL_TYPE_P (type))
1245 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1246 build_int_cst (type, 1));
1247 break;
1248
1249 case INTEGER_CST:
1250 tem = fold_negate_const (t, type);
1251 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1252 || !TYPE_OVERFLOW_TRAPS (type))
1253 return tem;
1254 break;
1255
1256 case REAL_CST:
1257 tem = fold_negate_const (t, type);
1258 /* Two's complement FP formats, such as c4x, may overflow. */
1259 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1260 return tem;
1261 break;
1262
1263 case FIXED_CST:
1264 tem = fold_negate_const (t, type);
1265 return tem;
1266
1267 case COMPLEX_CST:
1268 {
1269 tree rpart = negate_expr (TREE_REALPART (t));
1270 tree ipart = negate_expr (TREE_IMAGPART (t));
1271
1272 if ((TREE_CODE (rpart) == REAL_CST
1273 && TREE_CODE (ipart) == REAL_CST)
1274 || (TREE_CODE (rpart) == INTEGER_CST
1275 && TREE_CODE (ipart) == INTEGER_CST))
1276 return build_complex (type, rpart, ipart);
1277 }
1278 break;
1279
1280 case COMPLEX_EXPR:
1281 if (negate_expr_p (t))
1282 return fold_build2 (COMPLEX_EXPR, type,
1283 fold_negate_expr (TREE_OPERAND (t, 0)),
1284 fold_negate_expr (TREE_OPERAND (t, 1)));
1285 break;
1286
1287 case CONJ_EXPR:
1288 if (negate_expr_p (t))
1289 return fold_build1 (CONJ_EXPR, type,
1290 fold_negate_expr (TREE_OPERAND (t, 0)));
1291 break;
1292
1293 case NEGATE_EXPR:
1294 return TREE_OPERAND (t, 0);
1295
1296 case PLUS_EXPR:
1297 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1298 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1299 {
1300 /* -(A + B) -> (-B) - A. */
1301 if (negate_expr_p (TREE_OPERAND (t, 1))
1302 && reorder_operands_p (TREE_OPERAND (t, 0),
1303 TREE_OPERAND (t, 1)))
1304 {
1305 tem = negate_expr (TREE_OPERAND (t, 1));
1306 return fold_build2 (MINUS_EXPR, type,
1307 tem, TREE_OPERAND (t, 0));
1308 }
1309
1310 /* -(A + B) -> (-A) - B. */
1311 if (negate_expr_p (TREE_OPERAND (t, 0)))
1312 {
1313 tem = negate_expr (TREE_OPERAND (t, 0));
1314 return fold_build2 (MINUS_EXPR, type,
1315 tem, TREE_OPERAND (t, 1));
1316 }
1317 }
1318 break;
1319
1320 case MINUS_EXPR:
1321 /* - (A - B) -> B - A */
1322 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1323 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1324 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1325 return fold_build2 (MINUS_EXPR, type,
1326 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1327 break;
1328
1329 case MULT_EXPR:
1330 if (TYPE_UNSIGNED (type))
1331 break;
1332
1333 /* Fall through. */
1334
1335 case RDIV_EXPR:
1336 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1337 {
1338 tem = TREE_OPERAND (t, 1);
1339 if (negate_expr_p (tem))
1340 return fold_build2 (TREE_CODE (t), type,
1341 TREE_OPERAND (t, 0), negate_expr (tem));
1342 tem = TREE_OPERAND (t, 0);
1343 if (negate_expr_p (tem))
1344 return fold_build2 (TREE_CODE (t), type,
1345 negate_expr (tem), TREE_OPERAND (t, 1));
1346 }
1347 break;
1348
1349 case TRUNC_DIV_EXPR:
1350 case ROUND_DIV_EXPR:
1351 case FLOOR_DIV_EXPR:
1352 case CEIL_DIV_EXPR:
1353 case EXACT_DIV_EXPR:
1354 /* In general we can't negate A / B, because if A is INT_MIN and
1355 B is 1, we may turn this into INT_MIN / -1 which is undefined
1356 and actually traps on some architectures. But if overflow is
1357 undefined, we can negate, because - (INT_MIN / 1) is an
1358 overflow. */
1359 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1360 {
1361 const char * const warnmsg = G_("assuming signed overflow does not "
1362 "occur when negating a division");
1363 tem = TREE_OPERAND (t, 1);
1364 if (negate_expr_p (tem))
1365 {
1366 if (INTEGRAL_TYPE_P (type)
1367 && (TREE_CODE (tem) != INTEGER_CST
1368 || integer_onep (tem)))
1369 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1370 return fold_build2 (TREE_CODE (t), type,
1371 TREE_OPERAND (t, 0), negate_expr (tem));
1372 }
1373 tem = TREE_OPERAND (t, 0);
1374 if (negate_expr_p (tem))
1375 {
1376 if (INTEGRAL_TYPE_P (type)
1377 && (TREE_CODE (tem) != INTEGER_CST
1378 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1379 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1380 return fold_build2 (TREE_CODE (t), type,
1381 negate_expr (tem), TREE_OPERAND (t, 1));
1382 }
1383 }
1384 break;
1385
1386 case NOP_EXPR:
1387 /* Convert -((double)float) into (double)(-float). */
1388 if (TREE_CODE (type) == REAL_TYPE)
1389 {
1390 tem = strip_float_extensions (t);
1391 if (tem != t && negate_expr_p (tem))
1392 return fold_convert (type, negate_expr (tem));
1393 }
1394 break;
1395
1396 case CALL_EXPR:
1397 /* Negate -f(x) as f(-x). */
1398 if (negate_mathfn_p (builtin_mathfn_code (t))
1399 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1400 {
1401 tree fndecl, arg;
1402
1403 fndecl = get_callee_fndecl (t);
1404 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1405 return build_call_expr (fndecl, 1, arg);
1406 }
1407 break;
1408
1409 case RSHIFT_EXPR:
1410 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1411 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1412 {
1413 tree op1 = TREE_OPERAND (t, 1);
1414 if (TREE_INT_CST_HIGH (op1) == 0
1415 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1416 == TREE_INT_CST_LOW (op1))
1417 {
1418 tree ntype = TYPE_UNSIGNED (type)
1419 ? signed_type_for (type)
1420 : unsigned_type_for (type);
1421 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1422 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1423 return fold_convert (type, temp);
1424 }
1425 }
1426 break;
1427
1428 default:
1429 break;
1430 }
1431
1432 return NULL_TREE;
1433 }
1434
1435 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1436    negated in a simpler way.  Also allow T to be NULL_TREE, in which
1437    case return NULL_TREE. */
1438
1439 static tree
1440 negate_expr (tree t)
1441 {
1442 tree type, tem;
1443
1444 if (t == NULL_TREE)
1445 return NULL_TREE;
1446
1447 type = TREE_TYPE (t);
1448 STRIP_SIGN_NOPS (t);
1449
1450 tem = fold_negate_expr (t);
1451 if (!tem)
1452 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1453 return fold_convert (type, tem);
1454 }
1455 \f
1456 /* Split a tree IN into constant, literal, and variable parts that could be
1457 combined with CODE to make IN. "constant" means an expression with
1458 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1459 commutative arithmetic operation. Store the constant part into *CONP,
1460 the literal in *LITP and return the variable part. If a part isn't
1461 present, set it to null. If the tree does not decompose in this way,
1462 return the entire tree as the variable part and the other parts as null.
1463
1464    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In
1465    that case, we negate an operand that was subtracted, except when it
1466    is a literal, in which case we use *MINUS_LITP instead.
1467
1468 If NEGATE_P is true, we are negating all of IN, again except a literal
1469 for which we use *MINUS_LITP instead.
1470
1471 If IN is itself a literal or constant, return it as appropriate.
1472
1473 Note that we do not guarantee that any of the three values will be the
1474 same type as IN, but they will have the same signedness and mode. */
1475
1476 static tree
1477 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1478 tree *minus_litp, int negate_p)
1479 {
1480 tree var = 0;
1481
1482 *conp = 0;
1483 *litp = 0;
1484 *minus_litp = 0;
1485
1486 /* Strip any conversions that don't change the machine mode or signedness. */
1487 STRIP_SIGN_NOPS (in);
1488
1489 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1490 || TREE_CODE (in) == FIXED_CST)
1491 *litp = in;
1492 else if (TREE_CODE (in) == code
1493 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1494 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1495 /* We can associate addition and subtraction together (even
1496 though the C standard doesn't say so) for integers because
1497 the value is not affected. For reals, the value might be
1498 affected, so we can't. */
1499 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1500 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1501 {
1502 tree op0 = TREE_OPERAND (in, 0);
1503 tree op1 = TREE_OPERAND (in, 1);
1504 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1505 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1506
1507 /* First see if either of the operands is a literal, then a constant. */
1508 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1509 || TREE_CODE (op0) == FIXED_CST)
1510 *litp = op0, op0 = 0;
1511 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1512 || TREE_CODE (op1) == FIXED_CST)
1513 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1514
1515 if (op0 != 0 && TREE_CONSTANT (op0))
1516 *conp = op0, op0 = 0;
1517 else if (op1 != 0 && TREE_CONSTANT (op1))
1518 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1519
1520 /* If we haven't dealt with either operand, this is not a case we can
1521 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1522 if (op0 != 0 && op1 != 0)
1523 var = in;
1524 else if (op0 != 0)
1525 var = op0;
1526 else
1527 var = op1, neg_var_p = neg1_p;
1528
1529 /* Now do any needed negations. */
1530 if (neg_litp_p)
1531 *minus_litp = *litp, *litp = 0;
1532 if (neg_conp_p)
1533 *conp = negate_expr (*conp);
1534 if (neg_var_p)
1535 var = negate_expr (var);
1536 }
1537 else if (TREE_CONSTANT (in))
1538 *conp = in;
1539 else
1540 var = in;
1541
1542 if (negate_p)
1543 {
1544 if (*litp)
1545 *minus_litp = *litp, *litp = 0;
1546 else if (*minus_litp)
1547 *litp = *minus_litp, *minus_litp = 0;
1548 *conp = negate_expr (*conp);
1549 var = negate_expr (var);
1550 }
1551
1552 return var;
1553 }
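/* Illustrative sketch (not in the original source): splitting
   IN = a - 5 with CODE == PLUS_EXPR returns the variable part a, sets
   no constant part, and stores the literal 5 in *MINUS_LITP because it
   was subtracted; splitting x + 3 returns x with the literal 3 stored
   in *LITP.  */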
1554
1555 /* Re-associate trees split by the above function. T1 and T2 are either
1556 expressions to associate or null. Return the new expression, if any. If
1557 we build an operation, do it in TYPE and with CODE. */
1558
1559 static tree
1560 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1561 {
1562 if (t1 == 0)
1563 return t2;
1564 else if (t2 == 0)
1565 return t1;
1566
1567 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1568 try to fold this since we will have infinite recursion. But do
1569 deal with any NEGATE_EXPRs. */
1570 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1571 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1572 {
1573 if (code == PLUS_EXPR)
1574 {
1575 if (TREE_CODE (t1) == NEGATE_EXPR)
1576 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1577 fold_convert (type, TREE_OPERAND (t1, 0)));
1578 else if (TREE_CODE (t2) == NEGATE_EXPR)
1579 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1580 fold_convert (type, TREE_OPERAND (t2, 0)));
1581 else if (integer_zerop (t2))
1582 return fold_convert (type, t1);
1583 }
1584 else if (code == MINUS_EXPR)
1585 {
1586 if (integer_zerop (t2))
1587 return fold_convert (type, t1);
1588 }
1589
1590 return build2 (code, type, fold_convert (type, t1),
1591 fold_convert (type, t2));
1592 }
1593
1594 return fold_build2 (code, type, fold_convert (type, t1),
1595 fold_convert (type, t2));
1596 }
1597 \f
1598 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1599 for use in int_const_binop, size_binop and size_diffop. */
1600
1601 static bool
1602 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1603 {
1604 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1605 return false;
1606 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1607 return false;
1608
1609 switch (code)
1610 {
1611 case LSHIFT_EXPR:
1612 case RSHIFT_EXPR:
1613 case LROTATE_EXPR:
1614 case RROTATE_EXPR:
1615 return true;
1616
1617 default:
1618 break;
1619 }
1620
1621 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1622 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1623 && TYPE_MODE (type1) == TYPE_MODE (type2);
1624 }
1625
1626
1627 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1628 to produce a new constant. Return NULL_TREE if we don't know how
1629 to evaluate CODE at compile-time.
1630
1631 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1632
1633 tree
1634 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1635 {
1636 unsigned HOST_WIDE_INT int1l, int2l;
1637 HOST_WIDE_INT int1h, int2h;
1638 unsigned HOST_WIDE_INT low;
1639 HOST_WIDE_INT hi;
1640 unsigned HOST_WIDE_INT garbagel;
1641 HOST_WIDE_INT garbageh;
1642 tree t;
1643 tree type = TREE_TYPE (arg1);
1644 int uns = TYPE_UNSIGNED (type);
1645 int is_sizetype
1646 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1647 int overflow = 0;
1648
1649 int1l = TREE_INT_CST_LOW (arg1);
1650 int1h = TREE_INT_CST_HIGH (arg1);
1651 int2l = TREE_INT_CST_LOW (arg2);
1652 int2h = TREE_INT_CST_HIGH (arg2);
1653
1654 switch (code)
1655 {
1656 case BIT_IOR_EXPR:
1657 low = int1l | int2l, hi = int1h | int2h;
1658 break;
1659
1660 case BIT_XOR_EXPR:
1661 low = int1l ^ int2l, hi = int1h ^ int2h;
1662 break;
1663
1664 case BIT_AND_EXPR:
1665 low = int1l & int2l, hi = int1h & int2h;
1666 break;
1667
1668 case RSHIFT_EXPR:
1669 int2l = -int2l;
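      /* ... fall through ... */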
1670 case LSHIFT_EXPR:
1671 /* It's unclear from the C standard whether shifts can overflow.
1672 The following code ignores overflow; perhaps a C standard
1673 interpretation ruling is needed. */
1674 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1675 &low, &hi, !uns);
1676 break;
1677
1678 case RROTATE_EXPR:
1679 int2l = - int2l;
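      /* ... fall through ... */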
1680 case LROTATE_EXPR:
1681 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1682 &low, &hi);
1683 break;
1684
1685 case PLUS_EXPR:
1686 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1687 break;
1688
1689 case MINUS_EXPR:
1690 neg_double (int2l, int2h, &low, &hi);
1691 add_double (int1l, int1h, low, hi, &low, &hi);
1692 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1693 break;
1694
1695 case MULT_EXPR:
1696 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1697 break;
1698
1699 case TRUNC_DIV_EXPR:
1700 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1701 case EXACT_DIV_EXPR:
1702 /* This is a shortcut for a common special case. */
1703 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1704 && !TREE_OVERFLOW (arg1)
1705 && !TREE_OVERFLOW (arg2)
1706 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1707 {
1708 if (code == CEIL_DIV_EXPR)
1709 int1l += int2l - 1;
1710
1711 low = int1l / int2l, hi = 0;
1712 break;
1713 }
1714
1715 /* ... fall through ... */
1716
1717 case ROUND_DIV_EXPR:
1718 if (int2h == 0 && int2l == 0)
1719 return NULL_TREE;
1720 if (int2h == 0 && int2l == 1)
1721 {
1722 low = int1l, hi = int1h;
1723 break;
1724 }
1725 if (int1l == int2l && int1h == int2h
1726 && ! (int1l == 0 && int1h == 0))
1727 {
1728 low = 1, hi = 0;
1729 break;
1730 }
1731 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1732 &low, &hi, &garbagel, &garbageh);
1733 break;
1734
1735 case TRUNC_MOD_EXPR:
1736 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1737 /* This is a shortcut for a common special case. */
1738 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1739 && !TREE_OVERFLOW (arg1)
1740 && !TREE_OVERFLOW (arg2)
1741 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1742 {
1743 if (code == CEIL_MOD_EXPR)
1744 int1l += int2l - 1;
1745 low = int1l % int2l, hi = 0;
1746 break;
1747 }
1748
1749 /* ... fall through ... */
1750
1751 case ROUND_MOD_EXPR:
1752 if (int2h == 0 && int2l == 0)
1753 return NULL_TREE;
1754 overflow = div_and_round_double (code, uns,
1755 int1l, int1h, int2l, int2h,
1756 &garbagel, &garbageh, &low, &hi);
1757 break;
1758
1759 case MIN_EXPR:
1760 case MAX_EXPR:
1761 if (uns)
1762 low = (((unsigned HOST_WIDE_INT) int1h
1763 < (unsigned HOST_WIDE_INT) int2h)
1764 || (((unsigned HOST_WIDE_INT) int1h
1765 == (unsigned HOST_WIDE_INT) int2h)
1766 && int1l < int2l));
1767 else
1768 low = (int1h < int2h
1769 || (int1h == int2h && int1l < int2l));
1770
1771 if (low == (code == MIN_EXPR))
1772 low = int1l, hi = int1h;
1773 else
1774 low = int2l, hi = int2h;
1775 break;
1776
1777 default:
1778 return NULL_TREE;
1779 }
1780
1781 if (notrunc)
1782 {
1783 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1784
1785 /* Propagate overflow flags ourselves. */
1786 if (((!uns || is_sizetype) && overflow)
1787 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1788 {
1789 t = copy_node (t);
1790 TREE_OVERFLOW (t) = 1;
1791 }
1792 }
1793 else
1794 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1795 ((!uns || is_sizetype) && overflow)
1796 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1797
1798 return t;
1799 }
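/* Illustrative sketch (not in the original source): combining the
   unsigned char constants 200 and 100 with PLUS_EXPR computes 300 in
   double-int arithmetic, and force_fit_type_double then truncates it
   to 44 for the 8-bit type; per the (!uns || is_sizetype) test above,
   overflow flags are propagated only for signed or sizetype
   operands.  */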
1800
1801 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1802 constant. We assume ARG1 and ARG2 have the same data type, or at least
1803 are the same kind of constant and the same machine mode. Return zero if
1804 combining the constants is not allowed in the current operating mode.
1805
1806 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1807
1808 static tree
1809 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1810 {
1811 /* Sanity check for the recursive cases. */
1812 if (!arg1 || !arg2)
1813 return NULL_TREE;
1814
1815 STRIP_NOPS (arg1);
1816 STRIP_NOPS (arg2);
1817
1818 if (TREE_CODE (arg1) == INTEGER_CST)
1819 return int_const_binop (code, arg1, arg2, notrunc);
1820
1821 if (TREE_CODE (arg1) == REAL_CST)
1822 {
1823 enum machine_mode mode;
1824 REAL_VALUE_TYPE d1;
1825 REAL_VALUE_TYPE d2;
1826 REAL_VALUE_TYPE value;
1827 REAL_VALUE_TYPE result;
1828 bool inexact;
1829 tree t, type;
1830
1831 /* The following codes are handled by real_arithmetic. */
1832 switch (code)
1833 {
1834 case PLUS_EXPR:
1835 case MINUS_EXPR:
1836 case MULT_EXPR:
1837 case RDIV_EXPR:
1838 case MIN_EXPR:
1839 case MAX_EXPR:
1840 break;
1841
1842 default:
1843 return NULL_TREE;
1844 }
1845
1846 d1 = TREE_REAL_CST (arg1);
1847 d2 = TREE_REAL_CST (arg2);
1848
1849 type = TREE_TYPE (arg1);
1850 mode = TYPE_MODE (type);
1851
1852 /* Don't perform operation if we honor signaling NaNs and
1853 either operand is a NaN. */
1854 if (HONOR_SNANS (mode)
1855 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1856 return NULL_TREE;
1857
1858 /* Don't perform operation if it would raise a division
1859 by zero exception. */
1860 if (code == RDIV_EXPR
1861 && REAL_VALUES_EQUAL (d2, dconst0)
1862 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1863 return NULL_TREE;
1864
1865 /* If either operand is a NaN, just return it. Otherwise, set up
1866 for floating-point trap; we return an overflow. */
1867 if (REAL_VALUE_ISNAN (d1))
1868 return arg1;
1869 else if (REAL_VALUE_ISNAN (d2))
1870 return arg2;
1871
1872 inexact = real_arithmetic (&value, code, &d1, &d2);
1873 real_convert (&result, mode, &value);
1874
1875 /* Don't constant fold this floating point operation if
1876 the result has overflowed and flag_trapping_math. */
1877 if (flag_trapping_math
1878 && MODE_HAS_INFINITIES (mode)
1879 && REAL_VALUE_ISINF (result)
1880 && !REAL_VALUE_ISINF (d1)
1881 && !REAL_VALUE_ISINF (d2))
1882 return NULL_TREE;
1883
1884 /* Don't constant fold this floating point operation if the
1885 result may depend upon the run-time rounding mode and
1886 flag_rounding_math is set, or if GCC's software emulation
1887 is unable to accurately represent the result. */
1888 if ((flag_rounding_math
1889 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1890 && (inexact || !real_identical (&result, &value)))
1891 return NULL_TREE;
1892
1893 t = build_real (type, result);
1894
1895 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1896 return t;
1897 }
1898
1899 if (TREE_CODE (arg1) == FIXED_CST)
1900 {
1901 FIXED_VALUE_TYPE f1;
1902 FIXED_VALUE_TYPE f2;
1903 FIXED_VALUE_TYPE result;
1904 tree t, type;
1905 int sat_p;
1906 bool overflow_p;
1907
1908 /* The following codes are handled by fixed_arithmetic. */
1909 switch (code)
1910 {
1911 case PLUS_EXPR:
1912 case MINUS_EXPR:
1913 case MULT_EXPR:
1914 case TRUNC_DIV_EXPR:
1915 f2 = TREE_FIXED_CST (arg2);
1916 break;
1917
1918 case LSHIFT_EXPR:
1919 case RSHIFT_EXPR:
1920 f2.data.high = TREE_INT_CST_HIGH (arg2);
1921 f2.data.low = TREE_INT_CST_LOW (arg2);
1922 f2.mode = SImode;
1923 break;
1924
1925 default:
1926 return NULL_TREE;
1927 }
1928
1929 f1 = TREE_FIXED_CST (arg1);
1930 type = TREE_TYPE (arg1);
1931 sat_p = TYPE_SATURATING (type);
1932 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1933 t = build_fixed (type, result);
1934 /* Propagate overflow flags. */
1935 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 return t;
1938 }
1939
1940 if (TREE_CODE (arg1) == COMPLEX_CST)
1941 {
1942 tree type = TREE_TYPE (arg1);
1943 tree r1 = TREE_REALPART (arg1);
1944 tree i1 = TREE_IMAGPART (arg1);
1945 tree r2 = TREE_REALPART (arg2);
1946 tree i2 = TREE_IMAGPART (arg2);
1947 tree real, imag;
1948
1949 switch (code)
1950 {
1951 case PLUS_EXPR:
1952 case MINUS_EXPR:
1953 real = const_binop (code, r1, r2, notrunc);
1954 imag = const_binop (code, i1, i2, notrunc);
1955 break;
1956
1957 case MULT_EXPR:
1958 real = const_binop (MINUS_EXPR,
1959 const_binop (MULT_EXPR, r1, r2, notrunc),
1960 const_binop (MULT_EXPR, i1, i2, notrunc),
1961 notrunc);
1962 imag = const_binop (PLUS_EXPR,
1963 const_binop (MULT_EXPR, r1, i2, notrunc),
1964 const_binop (MULT_EXPR, i1, r2, notrunc),
1965 notrunc);
1966 break;
1967
1968 case RDIV_EXPR:
1969 {
1970 tree magsquared
1971 = const_binop (PLUS_EXPR,
1972 const_binop (MULT_EXPR, r2, r2, notrunc),
1973 const_binop (MULT_EXPR, i2, i2, notrunc),
1974 notrunc);
1975 tree t1
1976 = const_binop (PLUS_EXPR,
1977 const_binop (MULT_EXPR, r1, r2, notrunc),
1978 const_binop (MULT_EXPR, i1, i2, notrunc),
1979 notrunc);
1980 tree t2
1981 = const_binop (MINUS_EXPR,
1982 const_binop (MULT_EXPR, i1, r2, notrunc),
1983 const_binop (MULT_EXPR, r1, i2, notrunc),
1984 notrunc);
1985
1986 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1987 code = TRUNC_DIV_EXPR;
1988
1989 real = const_binop (code, t1, magsquared, notrunc);
1990 imag = const_binop (code, t2, magsquared, notrunc);
1991 }
1992 break;
1993
1994 default:
1995 return NULL_TREE;
1996 }
1997
1998 if (real && imag)
1999 return build_complex (type, real, imag);
2000 }
2001
2002 if (TREE_CODE (arg1) == VECTOR_CST)
2003 {
2004 tree type = TREE_TYPE (arg1);
2005 int count = TYPE_VECTOR_SUBPARTS (type), i;
2006 tree elements1, elements2, list = NULL_TREE;
2007
2008 if (TREE_CODE (arg2) != VECTOR_CST)
2009 return NULL_TREE;
2010
2011 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2012 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2013
2014 for (i = 0; i < count; i++)
2015 {
2016 tree elem1, elem2, elem;
2017
2018 /* The trailing elements can be empty and should be treated as 0. */
2019 if (!elements1)
2020 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2021 else
2022 {
2023 elem1 = TREE_VALUE (elements1);
2024 elements1 = TREE_CHAIN (elements1);
2025 }
2026
2027 if (!elements2)
2028 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2029 else
2030 {
2031 elem2 = TREE_VALUE (elements2);
2032 elements2 = TREE_CHAIN (elements2);
2033 }
2034
2035 elem = const_binop (code, elem1, elem2, notrunc);
2036
2037 /* It is possible that const_binop cannot handle the given
2038 code and returns NULL_TREE. */
2039 if (elem == NULL_TREE)
2040 return NULL_TREE;
2041
2042 list = tree_cons (NULL_TREE, elem, list);
2043 }
2044 return build_vector (type, nreverse (list));
2045 }
2046 return NULL_TREE;
2047 }
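
/* The COMPLEX_CST handling in const_binop above implements the textbook
   identities (a+bi)(c+di) = (ac-bd) + (ad+bc)i and
   (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c*c+d*d).  A sketch in plain
   double arithmetic (illustrative only; the real code recurses through
   const_binop on the component constants). */
#if 0
static void
example_complex_div (double a, double b, double c, double d,
                     double *re, double *im)
{
  double magsquared = c * c + d * d;    /* |c+di|^2 */
  *re = (a * c + b * d) / magsquared;   /* t1 / magsquared */
  *im = (b * c - a * d) / magsquared;   /* t2 / magsquared */
}
#endif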
2048
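/* A usage sketch (hypothetical caller, illustrative only): folding the
   constant product 2 * 3 into the INTEGER_CST 6.  const_binop returns
   NULL_TREE whenever it cannot combine its operands. */
#if 0
static tree
example_fold_product (void)
{
  tree a = build_int_cst (integer_type_node, 2);
  tree b = build_int_cst (integer_type_node, 3);
  return const_binop (MULT_EXPR, a, b, /*notrunc=*/0);
}
#endif
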
2049 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2050 indicates which particular sizetype to create. */
2051
2052 tree
2053 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2054 {
2055 return build_int_cst (sizetype_tab[(int) kind], number);
2056 }
2057 \f
2058 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2059 is a tree code. The type of the result is taken from the operands.
2060 Both must be equivalent integer types, a la int_binop_types_match_p.
2061 If the operands are constant, so is the result. */
2062
2063 tree
2064 size_binop (enum tree_code code, tree arg0, tree arg1)
2065 {
2066 tree type = TREE_TYPE (arg0);
2067
2068 if (arg0 == error_mark_node || arg1 == error_mark_node)
2069 return error_mark_node;
2070
2071 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2072 TREE_TYPE (arg1)));
2073
2074 /* Handle the special case of two integer constants faster. */
2075 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2076 {
2077 /* And some specific cases even faster than that. */
2078 if (code == PLUS_EXPR)
2079 {
2080 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2081 return arg1;
2082 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2083 return arg0;
2084 }
2085 else if (code == MINUS_EXPR)
2086 {
2087 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2088 return arg0;
2089 }
2090 else if (code == MULT_EXPR)
2091 {
2092 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2093 return arg1;
2094 }
2095
2096 /* Handle general case of two integer constants. */
2097 return int_const_binop (code, arg0, arg1, 0);
2098 }
2099
2100 return fold_build2 (code, type, arg0, arg1);
2101 }
2102
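/* A hedged sketch of a typical caller (hypothetical, illustrative only):
   computing a byte offset in sizetype.  When both operands are
   INTEGER_CSTs the result is folded immediately; the fast paths above
   also return X unchanged for X + 0 and X * 1. */
#if 0
static tree
example_byte_offset (tree index, tree elt_size)
{
  return size_binop (MULT_EXPR, fold_convert (sizetype, index),
                     fold_convert (sizetype, elt_size));
}
#endif
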
2103 /* Given two values, either both of sizetype or both of bitsizetype,
2104 compute the difference between the two values. Return the value
2105 in signed type corresponding to the type of the operands. */
2106
2107 tree
2108 size_diffop (tree arg0, tree arg1)
2109 {
2110 tree type = TREE_TYPE (arg0);
2111 tree ctype;
2112
2113 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2114 TREE_TYPE (arg1)));
2115
2116 /* If the type is already signed, just do the simple thing. */
2117 if (!TYPE_UNSIGNED (type))
2118 return size_binop (MINUS_EXPR, arg0, arg1);
2119
2120 if (type == sizetype)
2121 ctype = ssizetype;
2122 else if (type == bitsizetype)
2123 ctype = sbitsizetype;
2124 else
2125 ctype = signed_type_for (type);
2126
2127 /* If either operand is not a constant, do the conversions to the signed
2128 type and subtract. The hardware will do the right thing with any
2129 overflow in the subtraction. */
2130 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2131 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2132 fold_convert (ctype, arg1));
2133
2134 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2135 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2136 overflow) and negate (which can't either). Special-case a result
2137 of zero while we're here. */
2138 if (tree_int_cst_equal (arg0, arg1))
2139 return build_int_cst (ctype, 0);
2140 else if (tree_int_cst_lt (arg1, arg0))
2141 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2142 else
2143 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2144 fold_convert (ctype, size_binop (MINUS_EXPR,
2145 arg1, arg0)));
2146 }
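
/* A scalar mirror of the constant case above (illustrative only):
   subtract the smaller value from the larger and negate if needed, so
   that neither intermediate step overflows the signed result. */
#if 0
static HOST_WIDE_INT
example_unsigned_diff (unsigned HOST_WIDE_INT a, unsigned HOST_WIDE_INT b)
{
  return a >= b ? (HOST_WIDE_INT) (a - b)
                : -(HOST_WIDE_INT) (b - a);
}
#endif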
2147 \f
2148 /* A subroutine of fold_convert_const handling conversions of an
2149 INTEGER_CST to another integer type. */
2150
2151 static tree
2152 fold_convert_const_int_from_int (tree type, const_tree arg1)
2153 {
2154 tree t;
2155
2156 /* Given an integer constant, make new constant with new type,
2157 appropriately sign-extended or truncated. */
2158 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2159 TREE_INT_CST_HIGH (arg1),
2160 /* Don't set the overflow when
2161 converting from a pointer, */
2162 !POINTER_TYPE_P (TREE_TYPE (arg1))
2163 /* or to a sizetype with same signedness
2164 and the precision is unchanged.
2165 ??? sizetype is always sign-extended,
2166 but its signedness depends on the
2167 frontend. Thus we see spurious overflows
2168 here if we do not check this. */
2169 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2170 == TYPE_PRECISION (type))
2171 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2172 == TYPE_UNSIGNED (type))
2173 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2174 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2175 || (TREE_CODE (type) == INTEGER_TYPE
2176 && TYPE_IS_SIZETYPE (type)))),
2177 (TREE_INT_CST_HIGH (arg1) < 0
2178 && (TYPE_UNSIGNED (type)
2179 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2180 | TREE_OVERFLOW (arg1));
2181
2182 return t;
2183 }
2184
2185 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2186 to an integer type. */
2187
2188 static tree
2189 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2190 {
2191 int overflow = 0;
2192 tree t;
2193
2194 /* The following code implements the floating point to integer
2195 conversion rules required by the Java Language Specification,
2196 that IEEE NaNs are mapped to zero and values that overflow
2197 the target precision saturate, i.e. values greater than
2198 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2199 are mapped to INT_MIN. These semantics are allowed by the
2200 C and C++ standards that simply state that the behavior of
2201 FP-to-integer conversion is unspecified upon overflow. */
2202
2203 HOST_WIDE_INT high, low;
2204 REAL_VALUE_TYPE r;
2205 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2206
2207 switch (code)
2208 {
2209 case FIX_TRUNC_EXPR:
2210 real_trunc (&r, VOIDmode, &x);
2211 break;
2212
2213 default:
2214 gcc_unreachable ();
2215 }
2216
2217 /* If R is NaN, return zero and show we have an overflow. */
2218 if (REAL_VALUE_ISNAN (r))
2219 {
2220 overflow = 1;
2221 high = 0;
2222 low = 0;
2223 }
2224
2225 /* See if R is less than the lower bound or greater than the
2226 upper bound. */
2227
2228 if (! overflow)
2229 {
2230 tree lt = TYPE_MIN_VALUE (type);
2231 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2232 if (REAL_VALUES_LESS (r, l))
2233 {
2234 overflow = 1;
2235 high = TREE_INT_CST_HIGH (lt);
2236 low = TREE_INT_CST_LOW (lt);
2237 }
2238 }
2239
2240 if (! overflow)
2241 {
2242 tree ut = TYPE_MAX_VALUE (type);
2243 if (ut)
2244 {
2245 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2246 if (REAL_VALUES_LESS (u, r))
2247 {
2248 overflow = 1;
2249 high = TREE_INT_CST_HIGH (ut);
2250 low = TREE_INT_CST_LOW (ut);
2251 }
2252 }
2253 }
2254
2255 if (! overflow)
2256 REAL_VALUE_TO_INT (&low, &high, r);
2257
2258 t = force_fit_type_double (type, low, high, -1,
2259 overflow | TREE_OVERFLOW (arg1));
2260 return t;
2261 }
2262
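/* A scalar sketch of the saturation rule above (illustrative only, plain
   double/long arithmetic; isnan is from <math.h>): NaN maps to zero,
   out-of-range values clamp to the type bounds, and in-range values
   truncate toward zero. */
#if 0
#include <math.h>
static long
example_saturating_fix (double x, long lo, long hi)
{
  if (isnan (x))
    return 0;          /* NaN maps to zero, as above. */
  if (x < (double) lo)
    return lo;         /* Saturate at TYPE_MIN_VALUE. */
  if (x > (double) hi)
    return hi;         /* Saturate at TYPE_MAX_VALUE. */
  return (long) x;     /* In range: truncate toward zero. */
}
#endif
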
2263 /* A subroutine of fold_convert_const handling conversions of a
2264 FIXED_CST to an integer type. */
2265
2266 static tree
2267 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2268 {
2269 tree t;
2270 double_int temp, temp_trunc;
2271 unsigned int mode;
2272
2273 /* Right shift FIXED_CST to temp by fbit. */
2274 temp = TREE_FIXED_CST (arg1).data;
2275 mode = TREE_FIXED_CST (arg1).mode;
2276 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2277 {
2278 lshift_double (temp.low, temp.high,
2279 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2280 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2281
2282 /* Left shift temp to temp_trunc by fbit. */
2283 lshift_double (temp.low, temp.high,
2284 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2285 &temp_trunc.low, &temp_trunc.high,
2286 SIGNED_FIXED_POINT_MODE_P (mode));
2287 }
2288 else
2289 {
2290 temp.low = 0;
2291 temp.high = 0;
2292 temp_trunc.low = 0;
2293 temp_trunc.high = 0;
2294 }
2295
2296 /* If FIXED_CST is negative, we need to round the value toward 0.
2297 We do that by adding 1 to temp when the fractional bits are nonzero. */
2298 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2299 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2300 {
2301 double_int one;
2302 one.low = 1;
2303 one.high = 0;
2304 temp = double_int_add (temp, one);
2305 }
2306
2307 /* Given a fixed-point constant, make new constant with new type,
2308 appropriately sign-extended or truncated. */
2309 t = force_fit_type_double (type, temp.low, temp.high, -1,
2310 (temp.high < 0
2311 && (TYPE_UNSIGNED (type)
2312 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2313 | TREE_OVERFLOW (arg1));
2314
2315 return t;
2316 }
2317
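/* A single-word sketch of the rounding above (illustrative only; assumes
   two's complement arithmetic and arithmetic right shift on
   HOST_WIDE_INT, as the double_int code effectively does): the shift
   rounds toward negative infinity, so negative values with nonzero
   fractional bits need a +1 correction to round toward zero instead. */
#if 0
static HOST_WIDE_INT
example_fixed_to_int (HOST_WIDE_INT v, int fbit)
{
  HOST_WIDE_INT q = v >> fbit;      /* Rounds toward -infinity. */
  if (v < 0 && (q << fbit) != v)    /* Fractional bits were lost. */
    q += 1;                         /* Correct toward zero. */
  return q;
}
#endif
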
2318 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2319 to another floating point type. */
2320
2321 static tree
2322 fold_convert_const_real_from_real (tree type, const_tree arg1)
2323 {
2324 REAL_VALUE_TYPE value;
2325 tree t;
2326
2327 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2328 t = build_real (type, value);
2329
2330 /* If converting an infinity or NAN to a representation that doesn't
2331 have one, set the overflow bit so that we can produce some kind of
2332 error message at the appropriate point if necessary. It's not the
2333 most user-friendly message, but it's better than nothing. */
2334 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2335 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2336 TREE_OVERFLOW (t) = 1;
2337 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2338 && !MODE_HAS_NANS (TYPE_MODE (type)))
2339 TREE_OVERFLOW (t) = 1;
2340 /* Regular overflow, conversion produced an infinity in a mode that
2341 can't represent them. */
2342 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2343 && REAL_VALUE_ISINF (value)
2344 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2345 TREE_OVERFLOW (t) = 1;
2346 else
2347 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2348 return t;
2349 }
2350
2351 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2352 to a floating point type. */
2353
2354 static tree
2355 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2356 {
2357 REAL_VALUE_TYPE value;
2358 tree t;
2359
2360 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2361 t = build_real (type, value);
2362
2363 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2364 return t;
2365 }
2366
2367 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2368 to another fixed-point type. */
2369
2370 static tree
2371 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2372 {
2373 FIXED_VALUE_TYPE value;
2374 tree t;
2375 bool overflow_p;
2376
2377 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2378 TYPE_SATURATING (type));
2379 t = build_fixed (type, value);
2380
2381 /* Propagate overflow flags. */
2382 if (overflow_p | TREE_OVERFLOW (arg1))
2383 TREE_OVERFLOW (t) = 1;
2384 return t;
2385 }
2386
2387 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2388 to a fixed-point type. */
2389
2390 static tree
2391 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2392 {
2393 FIXED_VALUE_TYPE value;
2394 tree t;
2395 bool overflow_p;
2396
2397 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2398 TREE_INT_CST (arg1),
2399 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2400 TYPE_SATURATING (type));
2401 t = build_fixed (type, value);
2402
2403 /* Propagate overflow flags. */
2404 if (overflow_p | TREE_OVERFLOW (arg1))
2405 TREE_OVERFLOW (t) = 1;
2406 return t;
2407 }
2408
2409 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2410 to a fixed-point type. */
2411
2412 static tree
2413 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2414 {
2415 FIXED_VALUE_TYPE value;
2416 tree t;
2417 bool overflow_p;
2418
2419 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2420 &TREE_REAL_CST (arg1),
2421 TYPE_SATURATING (type));
2422 t = build_fixed (type, value);
2423
2424 /* Propagate overflow flags. */
2425 if (overflow_p | TREE_OVERFLOW (arg1))
2426 TREE_OVERFLOW (t) = 1;
2427 return t;
2428 }
2429
2430 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2431 type TYPE. If no simplification can be done return NULL_TREE. */
2432
2433 static tree
2434 fold_convert_const (enum tree_code code, tree type, tree arg1)
2435 {
2436 if (TREE_TYPE (arg1) == type)
2437 return arg1;
2438
2439 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2440 || TREE_CODE (type) == OFFSET_TYPE)
2441 {
2442 if (TREE_CODE (arg1) == INTEGER_CST)
2443 return fold_convert_const_int_from_int (type, arg1);
2444 else if (TREE_CODE (arg1) == REAL_CST)
2445 return fold_convert_const_int_from_real (code, type, arg1);
2446 else if (TREE_CODE (arg1) == FIXED_CST)
2447 return fold_convert_const_int_from_fixed (type, arg1);
2448 }
2449 else if (TREE_CODE (type) == REAL_TYPE)
2450 {
2451 if (TREE_CODE (arg1) == INTEGER_CST)
2452 return build_real_from_int_cst (type, arg1);
2453 else if (TREE_CODE (arg1) == REAL_CST)
2454 return fold_convert_const_real_from_real (type, arg1);
2455 else if (TREE_CODE (arg1) == FIXED_CST)
2456 return fold_convert_const_real_from_fixed (type, arg1);
2457 }
2458 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2459 {
2460 if (TREE_CODE (arg1) == FIXED_CST)
2461 return fold_convert_const_fixed_from_fixed (type, arg1);
2462 else if (TREE_CODE (arg1) == INTEGER_CST)
2463 return fold_convert_const_fixed_from_int (type, arg1);
2464 else if (TREE_CODE (arg1) == REAL_CST)
2465 return fold_convert_const_fixed_from_real (type, arg1);
2466 }
2467 return NULL_TREE;
2468 }
2469
2470 /* Construct a vector of zero elements of vector type TYPE. */
2471
2472 static tree
2473 build_zero_vector (tree type)
2474 {
2475 tree elem, list;
2476 int i, units;
2477
2478 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2479 units = TYPE_VECTOR_SUBPARTS (type);
2480
2481 list = NULL_TREE;
2482 for (i = 0; i < units; i++)
2483 list = tree_cons (NULL_TREE, elem, list);
2484 return build_vector (type, list);
2485 }
2486
2487 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2488
2489 bool
2490 fold_convertible_p (const_tree type, const_tree arg)
2491 {
2492 tree orig = TREE_TYPE (arg);
2493
2494 if (type == orig)
2495 return true;
2496
2497 if (TREE_CODE (arg) == ERROR_MARK
2498 || TREE_CODE (type) == ERROR_MARK
2499 || TREE_CODE (orig) == ERROR_MARK)
2500 return false;
2501
2502 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2503 return true;
2504
2505 switch (TREE_CODE (type))
2506 {
2507 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2508 case POINTER_TYPE: case REFERENCE_TYPE:
2509 case OFFSET_TYPE:
2510 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2511 || TREE_CODE (orig) == OFFSET_TYPE)
2512 return true;
2513 return (TREE_CODE (orig) == VECTOR_TYPE
2514 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2515
2516 case REAL_TYPE:
2517 case FIXED_POINT_TYPE:
2518 case COMPLEX_TYPE:
2519 case VECTOR_TYPE:
2520 case VOID_TYPE:
2521 return TREE_CODE (type) == TREE_CODE (orig);
2522
2523 default:
2524 return false;
2525 }
2526 }
2527
2528 /* Convert expression ARG to type TYPE. Used by the middle-end for
2529 simple conversions in preference to calling the front-end's convert. */
2530
2531 tree
2532 fold_convert (tree type, tree arg)
2533 {
2534 tree orig = TREE_TYPE (arg);
2535 tree tem;
2536
2537 if (type == orig)
2538 return arg;
2539
2540 if (TREE_CODE (arg) == ERROR_MARK
2541 || TREE_CODE (type) == ERROR_MARK
2542 || TREE_CODE (orig) == ERROR_MARK)
2543 return error_mark_node;
2544
2545 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2546 return fold_build1 (NOP_EXPR, type, arg);
2547
2548 switch (TREE_CODE (type))
2549 {
2550 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2551 case POINTER_TYPE: case REFERENCE_TYPE:
2552 case OFFSET_TYPE:
2553 if (TREE_CODE (arg) == INTEGER_CST)
2554 {
2555 tem = fold_convert_const (NOP_EXPR, type, arg);
2556 if (tem != NULL_TREE)
2557 return tem;
2558 }
2559 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2560 || TREE_CODE (orig) == OFFSET_TYPE)
2561 return fold_build1 (NOP_EXPR, type, arg);
2562 if (TREE_CODE (orig) == COMPLEX_TYPE)
2563 {
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2566 }
2567 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2568 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2569 return fold_build1 (NOP_EXPR, type, arg);
2570
2571 case REAL_TYPE:
2572 if (TREE_CODE (arg) == INTEGER_CST)
2573 {
2574 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2575 if (tem != NULL_TREE)
2576 return tem;
2577 }
2578 else if (TREE_CODE (arg) == REAL_CST)
2579 {
2580 tem = fold_convert_const (NOP_EXPR, type, arg);
2581 if (tem != NULL_TREE)
2582 return tem;
2583 }
2584 else if (TREE_CODE (arg) == FIXED_CST)
2585 {
2586 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2587 if (tem != NULL_TREE)
2588 return tem;
2589 }
2590
2591 switch (TREE_CODE (orig))
2592 {
2593 case INTEGER_TYPE:
2594 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2595 case POINTER_TYPE: case REFERENCE_TYPE:
2596 return fold_build1 (FLOAT_EXPR, type, arg);
2597
2598 case REAL_TYPE:
2599 return fold_build1 (NOP_EXPR, type, arg);
2600
2601 case FIXED_POINT_TYPE:
2602 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2603
2604 case COMPLEX_TYPE:
2605 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2606 return fold_convert (type, tem);
2607
2608 default:
2609 gcc_unreachable ();
2610 }
2611
2612 case FIXED_POINT_TYPE:
2613 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2614 || TREE_CODE (arg) == REAL_CST)
2615 {
2616 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2617 if (tem != NULL_TREE)
2618 return tem;
2619 }
2620
2621 switch (TREE_CODE (orig))
2622 {
2623 case FIXED_POINT_TYPE:
2624 case INTEGER_TYPE:
2625 case ENUMERAL_TYPE:
2626 case BOOLEAN_TYPE:
2627 case REAL_TYPE:
2628 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2629
2630 case COMPLEX_TYPE:
2631 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2632 return fold_convert (type, tem);
2633
2634 default:
2635 gcc_unreachable ();
2636 }
2637
2638 case COMPLEX_TYPE:
2639 switch (TREE_CODE (orig))
2640 {
2641 case INTEGER_TYPE:
2642 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2643 case POINTER_TYPE: case REFERENCE_TYPE:
2644 case REAL_TYPE:
2645 case FIXED_POINT_TYPE:
2646 return fold_build2 (COMPLEX_EXPR, type,
2647 fold_convert (TREE_TYPE (type), arg),
2648 fold_convert (TREE_TYPE (type),
2649 integer_zero_node));
2650 case COMPLEX_TYPE:
2651 {
2652 tree rpart, ipart;
2653
2654 if (TREE_CODE (arg) == COMPLEX_EXPR)
2655 {
2656 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2657 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2658 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2659 }
2660
2661 arg = save_expr (arg);
2662 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2663 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2664 rpart = fold_convert (TREE_TYPE (type), rpart);
2665 ipart = fold_convert (TREE_TYPE (type), ipart);
2666 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2667 }
2668
2669 default:
2670 gcc_unreachable ();
2671 }
2672
2673 case VECTOR_TYPE:
2674 if (integer_zerop (arg))
2675 return build_zero_vector (type);
2676 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2677 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2678 || TREE_CODE (orig) == VECTOR_TYPE);
2679 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2680
2681 case VOID_TYPE:
2682 tem = fold_ignored_result (arg);
2683 if (TREE_CODE (tem) == MODIFY_EXPR)
2684 return tem;
2685 return fold_build1 (NOP_EXPR, type, tem);
2686
2687 default:
2688 gcc_unreachable ();
2689 }
2690 }
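
/* A usage sketch (hypothetical, illustrative only): converting an
   integer expression to complex double.  Per the COMPLEX_TYPE case
   above this builds COMPLEX_EXPR <(double) arg, 0.0>. */
#if 0
static tree
example_int_to_complex (tree arg)
{
  return fold_convert (complex_double_type_node, arg);
}
#endif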
2691 \f
2692 /* Return false if expr can be assumed not to be an lvalue, true
2693 otherwise. */
2694
2695 static bool
2696 maybe_lvalue_p (const_tree x)
2697 {
2698 /* We only need to wrap lvalue tree codes. */
2699 switch (TREE_CODE (x))
2700 {
2701 case VAR_DECL:
2702 case PARM_DECL:
2703 case RESULT_DECL:
2704 case LABEL_DECL:
2705 case FUNCTION_DECL:
2706 case SSA_NAME:
2707
2708 case COMPONENT_REF:
2709 case INDIRECT_REF:
2710 case ALIGN_INDIRECT_REF:
2711 case MISALIGNED_INDIRECT_REF:
2712 case ARRAY_REF:
2713 case ARRAY_RANGE_REF:
2714 case BIT_FIELD_REF:
2715 case OBJ_TYPE_REF:
2716
2717 case REALPART_EXPR:
2718 case IMAGPART_EXPR:
2719 case PREINCREMENT_EXPR:
2720 case PREDECREMENT_EXPR:
2721 case SAVE_EXPR:
2722 case TRY_CATCH_EXPR:
2723 case WITH_CLEANUP_EXPR:
2724 case COMPOUND_EXPR:
2725 case MODIFY_EXPR:
2726 case TARGET_EXPR:
2727 case COND_EXPR:
2728 case BIND_EXPR:
2729 case MIN_EXPR:
2730 case MAX_EXPR:
2731 break;
2732
2733 default:
2734 /* Assume the worst for front-end tree codes. */
2735 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2736 break;
2737 return false;
2738 }
2739
2740 return true;
2741 }
2742
2743 /* Return an expr equal to X but certainly not valid as an lvalue. */
2744
2745 tree
2746 non_lvalue (tree x)
2747 {
2748 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2749 us. */
2750 if (in_gimple_form)
2751 return x;
2752
2753 if (! maybe_lvalue_p (x))
2754 return x;
2755 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2756 }
2757
2758 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2759 Zero means allow extended lvalues. */
2760
2761 int pedantic_lvalues;
2762
2763 /* When pedantic, return an expr equal to X but certainly not valid as a
2764 pedantic lvalue. Otherwise, return X. */
2765
2766 static tree
2767 pedantic_non_lvalue (tree x)
2768 {
2769 if (pedantic_lvalues)
2770 return non_lvalue (x);
2771 else
2772 return x;
2773 }
2774 \f
2775 /* Given a tree comparison code, return the code that is the logical inverse
2776 of the given code. It is not safe to do this for floating-point
2777 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2778 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2779
2780 enum tree_code
2781 invert_tree_comparison (enum tree_code code, bool honor_nans)
2782 {
2783 if (honor_nans && flag_trapping_math)
2784 return ERROR_MARK;
2785
2786 switch (code)
2787 {
2788 case EQ_EXPR:
2789 return NE_EXPR;
2790 case NE_EXPR:
2791 return EQ_EXPR;
2792 case GT_EXPR:
2793 return honor_nans ? UNLE_EXPR : LE_EXPR;
2794 case GE_EXPR:
2795 return honor_nans ? UNLT_EXPR : LT_EXPR;
2796 case LT_EXPR:
2797 return honor_nans ? UNGE_EXPR : GE_EXPR;
2798 case LE_EXPR:
2799 return honor_nans ? UNGT_EXPR : GT_EXPR;
2800 case LTGT_EXPR:
2801 return UNEQ_EXPR;
2802 case UNEQ_EXPR:
2803 return LTGT_EXPR;
2804 case UNGT_EXPR:
2805 return LE_EXPR;
2806 case UNGE_EXPR:
2807 return LT_EXPR;
2808 case UNLT_EXPR:
2809 return GE_EXPR;
2810 case UNLE_EXPR:
2811 return GT_EXPR;
2812 case ORDERED_EXPR:
2813 return UNORDERED_EXPR;
2814 case UNORDERED_EXPR:
2815 return ORDERED_EXPR;
2816 default:
2817 gcc_unreachable ();
2818 }
2819 }
2820
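/* An IEEE sanity check for the table above (illustrative only):
   inverting LT_EXPR while honoring NaNs must yield UNGE_EXPR, because
   !(a < b) is true not only when a >= b but also when either operand
   is a NaN. */
#if 0
static int
example_inverted_lt (double a, double b)
{
  return !(a < b);   /* Semantics of UNGE_EXPR, not GE_EXPR. */
}
#endif
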
2821 /* Similar, but return the comparison that results if the operands are
2822 swapped. This is safe for floating-point. */
2823
2824 enum tree_code
2825 swap_tree_comparison (enum tree_code code)
2826 {
2827 switch (code)
2828 {
2829 case EQ_EXPR:
2830 case NE_EXPR:
2831 case ORDERED_EXPR:
2832 case UNORDERED_EXPR:
2833 case LTGT_EXPR:
2834 case UNEQ_EXPR:
2835 return code;
2836 case GT_EXPR:
2837 return LT_EXPR;
2838 case GE_EXPR:
2839 return LE_EXPR;
2840 case LT_EXPR:
2841 return GT_EXPR;
2842 case LE_EXPR:
2843 return GE_EXPR;
2844 case UNGT_EXPR:
2845 return UNLT_EXPR;
2846 case UNGE_EXPR:
2847 return UNLE_EXPR;
2848 case UNLT_EXPR:
2849 return UNGT_EXPR;
2850 case UNLE_EXPR:
2851 return UNGE_EXPR;
2852 default:
2853 gcc_unreachable ();
2854 }
2855 }
2856
2857
2858 /* Convert a comparison tree code from an enum tree_code representation
2859 into a compcode bit-based encoding. This function is the inverse of
2860 compcode_to_comparison. */
2861
2862 static enum comparison_code
2863 comparison_to_compcode (enum tree_code code)
2864 {
2865 switch (code)
2866 {
2867 case LT_EXPR:
2868 return COMPCODE_LT;
2869 case EQ_EXPR:
2870 return COMPCODE_EQ;
2871 case LE_EXPR:
2872 return COMPCODE_LE;
2873 case GT_EXPR:
2874 return COMPCODE_GT;
2875 case NE_EXPR:
2876 return COMPCODE_NE;
2877 case GE_EXPR:
2878 return COMPCODE_GE;
2879 case ORDERED_EXPR:
2880 return COMPCODE_ORD;
2881 case UNORDERED_EXPR:
2882 return COMPCODE_UNORD;
2883 case UNLT_EXPR:
2884 return COMPCODE_UNLT;
2885 case UNEQ_EXPR:
2886 return COMPCODE_UNEQ;
2887 case UNLE_EXPR:
2888 return COMPCODE_UNLE;
2889 case UNGT_EXPR:
2890 return COMPCODE_UNGT;
2891 case LTGT_EXPR:
2892 return COMPCODE_LTGT;
2893 case UNGE_EXPR:
2894 return COMPCODE_UNGE;
2895 default:
2896 gcc_unreachable ();
2897 }
2898 }
2899
2900 /* Convert a compcode bit-based encoding of a comparison operator back
2901 to GCC's enum tree_code representation. This function is the
2902 inverse of comparison_to_compcode. */
2903
2904 static enum tree_code
2905 compcode_to_comparison (enum comparison_code code)
2906 {
2907 switch (code)
2908 {
2909 case COMPCODE_LT:
2910 return LT_EXPR;
2911 case COMPCODE_EQ:
2912 return EQ_EXPR;
2913 case COMPCODE_LE:
2914 return LE_EXPR;
2915 case COMPCODE_GT:
2916 return GT_EXPR;
2917 case COMPCODE_NE:
2918 return NE_EXPR;
2919 case COMPCODE_GE:
2920 return GE_EXPR;
2921 case COMPCODE_ORD:
2922 return ORDERED_EXPR;
2923 case COMPCODE_UNORD:
2924 return UNORDERED_EXPR;
2925 case COMPCODE_UNLT:
2926 return UNLT_EXPR;
2927 case COMPCODE_UNEQ:
2928 return UNEQ_EXPR;
2929 case COMPCODE_UNLE:
2930 return UNLE_EXPR;
2931 case COMPCODE_UNGT:
2932 return UNGT_EXPR;
2933 case COMPCODE_LTGT:
2934 return LTGT_EXPR;
2935 case COMPCODE_UNGE:
2936 return UNGE_EXPR;
2937 default:
2938 gcc_unreachable ();
2939 }
2940 }
2941
2942 /* Return a tree for the comparison which is the combination of
2943 doing the AND or OR (depending on CODE) of the two operations LCODE
2944 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2945 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2946 if this makes the transformation invalid. */
2947
2948 tree
2949 combine_comparisons (enum tree_code code, enum tree_code lcode,
2950 enum tree_code rcode, tree truth_type,
2951 tree ll_arg, tree lr_arg)
2952 {
2953 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2954 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2955 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2956 int compcode;
2957
2958 switch (code)
2959 {
2960 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2961 compcode = lcompcode & rcompcode;
2962 break;
2963
2964 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2965 compcode = lcompcode | rcompcode;
2966 break;
2967
2968 default:
2969 return NULL_TREE;
2970 }
2971
2972 if (!honor_nans)
2973 {
2974 /* Eliminate unordered comparisons, as well as LTGT and ORD
2975 which are not used unless the mode has NaNs. */
2976 compcode &= ~COMPCODE_UNORD;
2977 if (compcode == COMPCODE_LTGT)
2978 compcode = COMPCODE_NE;
2979 else if (compcode == COMPCODE_ORD)
2980 compcode = COMPCODE_TRUE;
2981 }
2982 else if (flag_trapping_math)
2983 {
2984 /* Check that the original operation and the optimized ones will trap
2985 under the same condition. */
2986 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2987 && (lcompcode != COMPCODE_EQ)
2988 && (lcompcode != COMPCODE_ORD);
2989 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2990 && (rcompcode != COMPCODE_EQ)
2991 && (rcompcode != COMPCODE_ORD);
2992 bool trap = (compcode & COMPCODE_UNORD) == 0
2993 && (compcode != COMPCODE_EQ)
2994 && (compcode != COMPCODE_ORD);
2995
2996 /* In a short-circuited boolean expression the LHS might be
2997 such that the RHS, if evaluated, will never trap. For
2998 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2999 if neither x nor y is NaN. (This is a mixed blessing: for
3000 example, the expression above will never trap, hence
3001 optimizing it to x < y would be invalid). */
3002 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3003 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3004 rtrap = false;
3005
3006 /* If the comparison was short-circuited, and only the RHS
3007 trapped, we may now generate a spurious trap. */
3008 if (rtrap && !ltrap
3009 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3010 return NULL_TREE;
3011
3012 /* If we changed the conditions that cause a trap, we lose. */
3013 if ((ltrap || rtrap) != trap)
3014 return NULL_TREE;
3015 }
3016
3017 if (compcode == COMPCODE_TRUE)
3018 return constant_boolean_node (true, truth_type);
3019 else if (compcode == COMPCODE_FALSE)
3020 return constant_boolean_node (false, truth_type);
3021 else
3022 {
3023 enum tree_code tcode;
3024
3025 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3026 return fold_build2 (tcode, truth_type, ll_arg, lr_arg);
3027 }
3028 }
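
/* The bit-based encoding makes the combination above a single bitwise
   operation (illustrative only): for (x < y) || (x == y),
   COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE, so the
   disjunction folds to x <= y, modulo the NaN checks above. */
#if 0
static enum comparison_code
example_combine_or (enum comparison_code a, enum comparison_code b)
{
  return (enum comparison_code) (a | b);
}
#endif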
3029 \f
3030 /* Return nonzero if two operands (typically of the same tree node)
3031 are necessarily equal. If either argument has side-effects this
3032 function returns zero. FLAGS modifies behavior as follows:
3033
3034 If OEP_ONLY_CONST is set, only return nonzero for constants.
3035 This function tests whether the operands are indistinguishable;
3036 it does not test whether they are equal using C's == operation.
3037 The distinction is important for IEEE floating point, because
3038 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3039 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3040
3041 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3042 even though it may hold multiple values during a function.
3043 This is because a GCC tree node guarantees that nothing else is
3044 executed between the evaluation of its "operands" (which may often
3045 be evaluated in arbitrary order). Hence if the operands themselves
3046 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3047 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3048 unset means assuming isochronic (or instantaneous) tree equivalence.
3049 Unless comparing arbitrary expression trees, such as from different
3050 statements, this flag can usually be left unset.
3051
3052 If OEP_PURE_SAME is set, then pure functions with identical arguments
3053 are considered the same. It is used when the caller has other ways
3054 to ensure that global memory is unchanged in between. */
3055
3056 int
3057 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3058 {
3059 /* If either is ERROR_MARK, they aren't equal. */
3060 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3061 return 0;
3062
3063 /* Check equality of integer constants before bailing out due to
3064 precision differences. */
3065 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3066 return tree_int_cst_equal (arg0, arg1);
3067
3068 /* If both types don't have the same signedness, then we can't consider
3069 them equal. We must check this before the STRIP_NOPS calls
3070 because they may change the signedness of the arguments. As pointers
3071 strictly don't have a signedness, require either two pointers or
3072 two non-pointers as well. */
3073 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3074 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3075 return 0;
3076
3077 /* If both types don't have the same precision, then it is not safe
3078 to strip NOPs. */
3079 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3080 return 0;
3081
3082 STRIP_NOPS (arg0);
3083 STRIP_NOPS (arg1);
3084
3085 /* In case both args are comparisons but with different comparison
3086 code, try to swap the comparison operands of one arg to produce
3087 a match and compare that variant. */
3088 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3089 && COMPARISON_CLASS_P (arg0)
3090 && COMPARISON_CLASS_P (arg1))
3091 {
3092 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3093
3094 if (TREE_CODE (arg0) == swap_code)
3095 return operand_equal_p (TREE_OPERAND (arg0, 0),
3096 TREE_OPERAND (arg1, 1), flags)
3097 && operand_equal_p (TREE_OPERAND (arg0, 1),
3098 TREE_OPERAND (arg1, 0), flags);
3099 }
3100
3101 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3102 /* This is needed for conversions and for COMPONENT_REF.
3103 Might as well play it safe and always test this. */
3104 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3105 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3106 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3107 return 0;
3108
3109 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3110 We don't care about side effects in that case because the SAVE_EXPR
3111 takes care of that for us. In all other cases, two expressions are
3112 equal if they have no side effects. If we have two identical
3113 expressions with side effects that should be treated the same due
3114 to the only side effects being identical SAVE_EXPR's, that will
3115 be detected in the recursive calls below. */
3116 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3117 && (TREE_CODE (arg0) == SAVE_EXPR
3118 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3119 return 1;
3120
3121 /* Next handle constant cases, those for which we can return 1 even
3122 if ONLY_CONST is set. */
3123 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3124 switch (TREE_CODE (arg0))
3125 {
3126 case INTEGER_CST:
3127 return tree_int_cst_equal (arg0, arg1);
3128
3129 case FIXED_CST:
3130 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3131 TREE_FIXED_CST (arg1));
3132
3133 case REAL_CST:
3134 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3135 TREE_REAL_CST (arg1)))
3136 return 1;
3137
3138
3139 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3140 {
3141 /* If we do not distinguish between signed and unsigned zero,
3142 consider them equal. */
3143 if (real_zerop (arg0) && real_zerop (arg1))
3144 return 1;
3145 }
3146 return 0;
3147
3148 case VECTOR_CST:
3149 {
3150 tree v1, v2;
3151
3152 v1 = TREE_VECTOR_CST_ELTS (arg0);
3153 v2 = TREE_VECTOR_CST_ELTS (arg1);
3154 while (v1 && v2)
3155 {
3156 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3157 flags))
3158 return 0;
3159 v1 = TREE_CHAIN (v1);
3160 v2 = TREE_CHAIN (v2);
3161 }
3162
3163 return v1 == v2;
3164 }
3165
3166 case COMPLEX_CST:
3167 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3168 flags)
3169 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3170 flags));
3171
3172 case STRING_CST:
3173 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3174 && ! memcmp (TREE_STRING_POINTER (arg0),
3175 TREE_STRING_POINTER (arg1),
3176 TREE_STRING_LENGTH (arg0)));
3177
3178 case ADDR_EXPR:
3179 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3180 0);
3181 default:
3182 break;
3183 }
3184
3185 if (flags & OEP_ONLY_CONST)
3186 return 0;
3187
3188 /* Define macros to test an operand from arg0 and arg1 for equality and a
3189 variant that allows null and views null as being different from any
3190 non-null value. In the latter case, if either is null, then both
3191 must be; otherwise, do the normal comparison. */
3192 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3193 TREE_OPERAND (arg1, N), flags)
3194
3195 #define OP_SAME_WITH_NULL(N) \
3196 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3197 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3198
3199 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3200 {
3201 case tcc_unary:
3202 /* Two conversions are equal only if signedness and modes match. */
3203 switch (TREE_CODE (arg0))
3204 {
3205 CASE_CONVERT:
3206 case FIX_TRUNC_EXPR:
3207 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3208 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3209 return 0;
3210 break;
3211 default:
3212 break;
3213 }
3214
3215 return OP_SAME (0);
3216
3217
3218 case tcc_comparison:
3219 case tcc_binary:
3220 if (OP_SAME (0) && OP_SAME (1))
3221 return 1;
3222
3223 /* For commutative ops, allow the other order. */
3224 return (commutative_tree_code (TREE_CODE (arg0))
3225 && operand_equal_p (TREE_OPERAND (arg0, 0),
3226 TREE_OPERAND (arg1, 1), flags)
3227 && operand_equal_p (TREE_OPERAND (arg0, 1),
3228 TREE_OPERAND (arg1, 0), flags));
3229
3230 case tcc_reference:
3231 /* If either of the pointer (or reference) expressions we are
3232 dereferencing contain a side effect, these cannot be equal. */
3233 if (TREE_SIDE_EFFECTS (arg0)
3234 || TREE_SIDE_EFFECTS (arg1))
3235 return 0;
3236
3237 switch (TREE_CODE (arg0))
3238 {
3239 case INDIRECT_REF:
3240 case ALIGN_INDIRECT_REF:
3241 case MISALIGNED_INDIRECT_REF:
3242 case REALPART_EXPR:
3243 case IMAGPART_EXPR:
3244 return OP_SAME (0);
3245
3246 case ARRAY_REF:
3247 case ARRAY_RANGE_REF:
3248 /* Operands 2 and 3 may be null.
3249 Compare the array index by value if it is constant first as we
3250 may have different types but same value here. */
3251 return (OP_SAME (0)
3252 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3253 TREE_OPERAND (arg1, 1))
3254 || OP_SAME (1))
3255 && OP_SAME_WITH_NULL (2)
3256 && OP_SAME_WITH_NULL (3));
3257
3258 case COMPONENT_REF:
3259 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3260 may be NULL when we're called to compare MEM_EXPRs. */
3261 return OP_SAME_WITH_NULL (0)
3262 && OP_SAME (1)
3263 && OP_SAME_WITH_NULL (2);
3264
3265 case BIT_FIELD_REF:
3266 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3267
3268 default:
3269 return 0;
3270 }
3271
3272 case tcc_expression:
3273 switch (TREE_CODE (arg0))
3274 {
3275 case ADDR_EXPR:
3276 case TRUTH_NOT_EXPR:
3277 return OP_SAME (0);
3278
3279 case TRUTH_ANDIF_EXPR:
3280 case TRUTH_ORIF_EXPR:
3281 return OP_SAME (0) && OP_SAME (1);
3282
3283 case TRUTH_AND_EXPR:
3284 case TRUTH_OR_EXPR:
3285 case TRUTH_XOR_EXPR:
3286 if (OP_SAME (0) && OP_SAME (1))
3287 return 1;
3288
3289 /* Otherwise take into account this is a commutative operation. */
3290 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3291 TREE_OPERAND (arg1, 1), flags)
3292 && operand_equal_p (TREE_OPERAND (arg0, 1),
3293 TREE_OPERAND (arg1, 0), flags));
3294
3295 case COND_EXPR:
3296 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3297
3298 default:
3299 return 0;
3300 }
3301
3302 case tcc_vl_exp:
3303 switch (TREE_CODE (arg0))
3304 {
3305 case CALL_EXPR:
3306 /* If the CALL_EXPRs call different functions, then they
3307 clearly cannot be equal. */
3308 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3309 flags))
3310 return 0;
3311
3312 {
3313 unsigned int cef = call_expr_flags (arg0);
3314 if (flags & OEP_PURE_SAME)
3315 cef &= ECF_CONST | ECF_PURE;
3316 else
3317 cef &= ECF_CONST;
3318 if (!cef)
3319 return 0;
3320 }
3321
3322 /* Now see if all the arguments are the same. */
3323 {
3324 const_call_expr_arg_iterator iter0, iter1;
3325 const_tree a0, a1;
3326 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3327 a1 = first_const_call_expr_arg (arg1, &iter1);
3328 a0 && a1;
3329 a0 = next_const_call_expr_arg (&iter0),
3330 a1 = next_const_call_expr_arg (&iter1))
3331 if (! operand_equal_p (a0, a1, flags))
3332 return 0;
3333
3334 /* If we get here and both argument lists are exhausted
3335 then the CALL_EXPRs are equal. */
3336 return ! (a0 || a1);
3337 }
3338 default:
3339 return 0;
3340 }
3341
3342 case tcc_declaration:
3343 /* Consider __builtin_sqrt equal to sqrt. */
3344 return (TREE_CODE (arg0) == FUNCTION_DECL
3345 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3346 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3347 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3348
3349 default:
3350 return 0;
3351 }
3352
3353 #undef OP_SAME
3354 #undef OP_SAME_WITH_NULL
3355 }
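
/* A reminder of why identity differs from C equality (illustrative
   only): REAL_VALUES_IDENTICAL distinguishes -0.0 from 0.0, so the
   REAL_CST case above returns 0 for them when signed zeros are
   honored, even though -0.0 == 0.0 evaluates to 1 in C. */
#if 0
static int
example_signed_zero (void)
{
  return -0.0 == 0.0;   /* 1: C equality, unlike identity. */
}
#endif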
3356 \f
3357 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3358 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3359
3360 When in doubt, return 0. */
3361
3362 static int
3363 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3364 {
3365 int unsignedp1, unsignedpo;
3366 tree primarg0, primarg1, primother;
3367 unsigned int correct_width;
3368
3369 if (operand_equal_p (arg0, arg1, 0))
3370 return 1;
3371
3372 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3373 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3374 return 0;
3375
3376 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3377 and see if the inner values are the same. This removes any
3378 signedness comparison, which doesn't matter here. */
3379 primarg0 = arg0, primarg1 = arg1;
3380 STRIP_NOPS (primarg0);
3381 STRIP_NOPS (primarg1);
3382 if (operand_equal_p (primarg0, primarg1, 0))
3383 return 1;
3384
3385 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3386 actual comparison operand, ARG0.
3387
3388 First throw away any conversions to wider types
3389 already present in the operands. */
3390
3391 primarg1 = get_narrower (arg1, &unsignedp1);
3392 primother = get_narrower (other, &unsignedpo);
3393
3394 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3395 if (unsignedp1 == unsignedpo
3396 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3397 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3398 {
3399 tree type = TREE_TYPE (arg0);
3400
3401 /* Make sure shorter operand is extended the right way
3402 to match the longer operand. */
3403 primarg1 = fold_convert (signed_or_unsigned_type_for
3404 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3405
3406 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3407 return 1;
3408 }
3409
3410 return 0;
3411 }
3412 \f
3413 /* See if ARG is an expression that is either a comparison or is performing
3414 arithmetic on comparisons. The comparisons must only be comparing
3415 two different values, which will be stored in *CVAL1 and *CVAL2; if
3416 they are nonzero it means that some operands have already been found.
3417 No variables may be used anywhere else in the expression except in the
3418 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3419 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3420
3421 If this is true, return 1. Otherwise, return zero. */
3422
3423 static int
3424 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3425 {
3426 enum tree_code code = TREE_CODE (arg);
3427 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3428
3429 /* We can handle some of the tcc_expression cases here. */
3430 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3431 tclass = tcc_unary;
3432 else if (tclass == tcc_expression
3433 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3434 || code == COMPOUND_EXPR))
3435 tclass = tcc_binary;
3436
3437 else if (tclass == tcc_expression && code == SAVE_EXPR
3438 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3439 {
3440 /* If we've already found a CVAL1 or CVAL2, this expression is
3441 too complex to handle. */
3442 if (*cval1 || *cval2)
3443 return 0;
3444
3445 tclass = tcc_unary;
3446 *save_p = 1;
3447 }
3448
3449 switch (tclass)
3450 {
3451 case tcc_unary:
3452 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3453
3454 case tcc_binary:
3455 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3456 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3457 cval1, cval2, save_p));
3458
3459 case tcc_constant:
3460 return 1;
3461
3462 case tcc_expression:
3463 if (code == COND_EXPR)
3464 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3465 cval1, cval2, save_p)
3466 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3467 cval1, cval2, save_p)
3468 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3469 cval1, cval2, save_p));
3470 return 0;
3471
3472 case tcc_comparison:
3473 /* First see if we can handle the first operand, then the second. For
3474 the second operand, we know *CVAL1 can't be zero. It must be that
3475 one side of the comparison is each of the values; test for the
3476 case where this isn't true by failing if the two operands
3477 are the same. */
3478
3479 if (operand_equal_p (TREE_OPERAND (arg, 0),
3480 TREE_OPERAND (arg, 1), 0))
3481 return 0;
3482
3483 if (*cval1 == 0)
3484 *cval1 = TREE_OPERAND (arg, 0);
3485 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3486 ;
3487 else if (*cval2 == 0)
3488 *cval2 = TREE_OPERAND (arg, 0);
3489 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3490 ;
3491 else
3492 return 0;
3493
3494 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3495 ;
3496 else if (*cval2 == 0)
3497 *cval2 = TREE_OPERAND (arg, 1);
3498 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3499 ;
3500 else
3501 return 0;
3502
3503 return 1;
3504
3505 default:
3506 return 0;
3507 }
3508 }
3509 \f
3510 /* ARG is a tree that is known to contain just arithmetic operations and
3511 comparisons. Evaluate the operations in the tree substituting NEW0 for
3512 any occurrence of OLD0 as an operand of a comparison and likewise for
3513 NEW1 and OLD1. */
3514
3515 static tree
3516 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3517 {
3518 tree type = TREE_TYPE (arg);
3519 enum tree_code code = TREE_CODE (arg);
3520 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3521
3522 /* We can handle some of the tcc_expression cases here. */
3523 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3524 tclass = tcc_unary;
3525 else if (tclass == tcc_expression
3526 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3527 tclass = tcc_binary;
3528
3529 switch (tclass)
3530 {
3531 case tcc_unary:
3532 return fold_build1 (code, type,
3533 eval_subst (TREE_OPERAND (arg, 0),
3534 old0, new0, old1, new1));
3535
3536 case tcc_binary:
3537 return fold_build2 (code, type,
3538 eval_subst (TREE_OPERAND (arg, 0),
3539 old0, new0, old1, new1),
3540 eval_subst (TREE_OPERAND (arg, 1),
3541 old0, new0, old1, new1));
3542
3543 case tcc_expression:
3544 switch (code)
3545 {
3546 case SAVE_EXPR:
3547 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3548
3549 case COMPOUND_EXPR:
3550 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3551
3552 case COND_EXPR:
3553 return fold_build3 (code, type,
3554 eval_subst (TREE_OPERAND (arg, 0),
3555 old0, new0, old1, new1),
3556 eval_subst (TREE_OPERAND (arg, 1),
3557 old0, new0, old1, new1),
3558 eval_subst (TREE_OPERAND (arg, 2),
3559 old0, new0, old1, new1));
3560 default:
3561 break;
3562 }
3563 /* Fall through - ??? */
3564
3565 case tcc_comparison:
3566 {
3567 tree arg0 = TREE_OPERAND (arg, 0);
3568 tree arg1 = TREE_OPERAND (arg, 1);
3569
3570 /* We need to check both for exact equality and tree equality. The
3571 former will be true if the operand has a side-effect. In that
3572 case, we know the operand occurred exactly once. */
3573
3574 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3575 arg0 = new0;
3576 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3577 arg0 = new1;
3578
3579 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3580 arg1 = new0;
3581 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3582 arg1 = new1;
3583
3584 return fold_build2 (code, type, arg0, arg1);
3585 }
3586
3587 default:
3588 return arg;
3589 }
3590 }
3591 \f
3592 /* Return a tree for the case when the result of an expression is RESULT
3593 converted to TYPE and OMITTED was previously an operand of the expression
3594 but is now not needed (e.g., we folded OMITTED * 0).
3595
3596 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3597 the conversion of RESULT to TYPE. */
3598
3599 tree
3600 omit_one_operand (tree type, tree result, tree omitted)
3601 {
3602 tree t = fold_convert (type, result);
3603
3604 /* If the resulting operand is an empty statement, just return the omitted
3605 statement cast to void. */
3606 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3607 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3608
3609 if (TREE_SIDE_EFFECTS (omitted))
3610 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3611
3612 return non_lvalue (t);
3613 }
3614
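/* A usage sketch (hypothetical, illustrative only): folding X * 0 down
   to 0.  If X has side effects, say a call, the result is the
   COMPOUND_EXPR (X, 0) so X is still evaluated exactly once. */
#if 0
static tree
example_fold_mult_zero (tree type, tree x)
{
  return omit_one_operand (type, build_int_cst (type, 0), x);
}
#endif
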
3615 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3616
3617 static tree
3618 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3619 {
3620 tree t = fold_convert (type, result);
3621
3622 /* If the resulting operand is an empty statement, just return the omitted
3623 statement cast to void. */
3624 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3625 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3626
3627 if (TREE_SIDE_EFFECTS (omitted))
3628 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3629
3630 return pedantic_non_lvalue (t);
3631 }
3632
3633 /* Return a tree for the case when the result of an expression is RESULT
3634 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3635 of the expression but are now not needed.
3636
3637 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3638 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3639 evaluated before OMITTED2. Otherwise, if neither has side effects,
3640 just do the conversion of RESULT to TYPE. */
3641
3642 tree
3643 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3644 {
3645 tree t = fold_convert (type, result);
3646
3647 if (TREE_SIDE_EFFECTS (omitted2))
3648 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3649 if (TREE_SIDE_EFFECTS (omitted1))
3650 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3651
3652 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3653 }
3654
3655 \f
3656 /* Return a simplified tree node for the truth-negation of ARG. This
3657 never alters ARG itself. We assume that ARG is an operation that
3658 returns a truth value (0 or 1).
3659
3660 FIXME: one would think we would fold the result, but it causes
3661 problems with the dominator optimizer. */
3662
3663 tree
3664 fold_truth_not_expr (tree arg)
3665 {
3666 tree t, type = TREE_TYPE (arg);
3667 enum tree_code code = TREE_CODE (arg);
3668
3669 /* If this is a comparison, we can simply invert it, except for
3670 floating-point non-equality comparisons, in which case we just
3671 enclose a TRUTH_NOT_EXPR around what we have. */
3672
3673 if (TREE_CODE_CLASS (code) == tcc_comparison)
3674 {
3675 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3676 if (FLOAT_TYPE_P (op_type)
3677 && flag_trapping_math
3678 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3679 && code != NE_EXPR && code != EQ_EXPR)
3680 return NULL_TREE;
3681
3682 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3683 if (code == ERROR_MARK)
3684 return NULL_TREE;
3685
3686 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3687 if (EXPR_HAS_LOCATION (arg))
3688 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3689 return t;
3690 }
3691
3692 switch (code)
3693 {
3694 case INTEGER_CST:
3695 return constant_boolean_node (integer_zerop (arg), type);
3696
3697 case TRUTH_AND_EXPR:
3698 t = build2 (TRUTH_OR_EXPR, type,
3699 invert_truthvalue (TREE_OPERAND (arg, 0)),
3700 invert_truthvalue (TREE_OPERAND (arg, 1)));
3701 break;
3702
3703 case TRUTH_OR_EXPR:
3704 t = build2 (TRUTH_AND_EXPR, type,
3705 invert_truthvalue (TREE_OPERAND (arg, 0)),
3706 invert_truthvalue (TREE_OPERAND (arg, 1)));
3707 break;
3708
3709 case TRUTH_XOR_EXPR:
3710 /* Here we can invert either operand. We invert the first operand
3711 unless the second operand is a TRUTH_NOT_EXPR in which case our
3712 result is the XOR of the first operand with the inside of the
3713 negation of the second operand. */
3714
3715 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3716 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3717 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3718 else
3719 t = build2 (TRUTH_XOR_EXPR, type,
3720 invert_truthvalue (TREE_OPERAND (arg, 0)),
3721 TREE_OPERAND (arg, 1));
3722 break;
3723
3724 case TRUTH_ANDIF_EXPR:
3725 t = build2 (TRUTH_ORIF_EXPR, type,
3726 invert_truthvalue (TREE_OPERAND (arg, 0)),
3727 invert_truthvalue (TREE_OPERAND (arg, 1)));
3728 break;
3729
3730 case TRUTH_ORIF_EXPR:
3731 t = build2 (TRUTH_ANDIF_EXPR, type,
3732 invert_truthvalue (TREE_OPERAND (arg, 0)),
3733 invert_truthvalue (TREE_OPERAND (arg, 1)));
3734 break;
3735
3736 case TRUTH_NOT_EXPR:
3737 return TREE_OPERAND (arg, 0);
3738
3739 case COND_EXPR:
3740 {
3741 tree arg1 = TREE_OPERAND (arg, 1);
3742 tree arg2 = TREE_OPERAND (arg, 2);
3743 /* A COND_EXPR may have a throw as one operand, which
3744 then has void type. Just leave void operands
3745 as they are. */
3746 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3747 VOID_TYPE_P (TREE_TYPE (arg1))
3748 ? arg1 : invert_truthvalue (arg1),
3749 VOID_TYPE_P (TREE_TYPE (arg2))
3750 ? arg2 : invert_truthvalue (arg2));
3751 break;
3752 }
3753
3754 case COMPOUND_EXPR:
3755 t = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3756 invert_truthvalue (TREE_OPERAND (arg, 1)));
3757 break;
3758
3759 case NON_LVALUE_EXPR:
3760 return invert_truthvalue (TREE_OPERAND (arg, 0));
3761
3762 CASE_CONVERT:
3763 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3764 {
3765 t = build1 (TRUTH_NOT_EXPR, type, arg);
3766 break;
3767 }
3768
3769 /* ... fall through ... */
3770
3771 case FLOAT_EXPR:
3772 t = build1 (TREE_CODE (arg), type,
3773 invert_truthvalue (TREE_OPERAND (arg, 0)));
3774 break;
3775
3776 case BIT_AND_EXPR:
3777 if (!integer_onep (TREE_OPERAND (arg, 1)))
3778 return NULL_TREE;
3779 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3780 break;
3781
3782 case SAVE_EXPR:
3783 t = build1 (TRUTH_NOT_EXPR, type, arg);
3784 break;
3785
3786 case CLEANUP_POINT_EXPR:
3787 t = build1 (CLEANUP_POINT_EXPR, type,
3788 invert_truthvalue (TREE_OPERAND (arg, 0)));
3789 break;
3790
3791 default:
3792 t = NULL_TREE;
3793 break;
3794 }
3795
3796 if (t && EXPR_HAS_LOCATION (arg))
3797 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3798
3799 return t;
3800 }
3801
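/* For example, fold_truth_not_expr rewrites !(a < b) on integral
   operands as a >= b, and !(a && b) as !a || !b.  For a floating-point
   a < b under -ftrapping-math, the inverted comparison a >= b would
   behave differently on NaN operands, so NULL_TREE is returned and the
   caller must keep an explicit TRUTH_NOT_EXPR.  */
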
3802 /* Return a simplified tree node for the truth-negation of ARG. This
3803 never alters ARG itself. Unlike fold_truth_not_expr, this function
3804 cannot fail: if nothing simpler is possible, it wraps ARG in an
3805 explicit TRUTH_NOT_EXPR.
3806 FIXME: one would think we would fold the result, but it causes
3807 problems with the dominator optimizer. */
3808
3809 tree
3810 invert_truthvalue (tree arg)
3811 {
3812 tree tem;
3813
3814 if (TREE_CODE (arg) == ERROR_MARK)
3815 return arg;
3816
3817 tem = fold_truth_not_expr (arg);
3818 if (!tem)
3819 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3820
3821 return tem;
3822 }
3823
3824 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3825 operands are another bit-wise operation with a common input. If so,
3826 distribute the bit operations to save an operation and possibly two if
3827 constants are involved. For example, convert
3828 (A | B) & (A | C) into A | (B & C)
3829 Further simplification will occur if B and C are constants.
3830
3831 If this optimization cannot be done, 0 will be returned. */
3832
3833 static tree
3834 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3835 {
3836 tree common;
3837 tree left, right;
3838
3839 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3840 || TREE_CODE (arg0) == code
3841 || (TREE_CODE (arg0) != BIT_AND_EXPR
3842 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3843 return 0;
3844
3845 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3846 {
3847 common = TREE_OPERAND (arg0, 0);
3848 left = TREE_OPERAND (arg0, 1);
3849 right = TREE_OPERAND (arg1, 1);
3850 }
3851 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3852 {
3853 common = TREE_OPERAND (arg0, 0);
3854 left = TREE_OPERAND (arg0, 1);
3855 right = TREE_OPERAND (arg1, 0);
3856 }
3857 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3858 {
3859 common = TREE_OPERAND (arg0, 1);
3860 left = TREE_OPERAND (arg0, 0);
3861 right = TREE_OPERAND (arg1, 1);
3862 }
3863 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3864 {
3865 common = TREE_OPERAND (arg0, 1);
3866 left = TREE_OPERAND (arg0, 0);
3867 right = TREE_OPERAND (arg1, 0);
3868 }
3869 else
3870 return 0;
3871
3872 common = fold_convert (type, common);
3873 left = fold_convert (type, left);
3874 right = fold_convert (type, right);
3875 return fold_build2 (TREE_CODE (arg0), type, common,
3876 fold_build2 (code, type, left, right));
3877 }
3878
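/* For example, distribute_bit_expr rewrites (x | 4) & (x | 6) as
   x | (4 & 6), which folds further to x | 4: one bit operation instead
   of three.  The common operand may appear on either side of either
   argument, hence the four operand_equal_p tests above.  */
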
3879 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3880 with code CODE. This optimization is unsafe, so it is only used when unsafe math optimizations are permitted. */
3881 static tree
3882 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3883 {
3884 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3885 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3886
3887 /* (A / C) +- (B / C) -> (A +- B) / C. */
3888 if (mul0 == mul1
3889 && operand_equal_p (TREE_OPERAND (arg0, 1),
3890 TREE_OPERAND (arg1, 1), 0))
3891 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3892 fold_build2 (code, type,
3893 TREE_OPERAND (arg0, 0),
3894 TREE_OPERAND (arg1, 0)),
3895 TREE_OPERAND (arg0, 1));
3896
3897 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3898 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3899 TREE_OPERAND (arg1, 0), 0)
3900 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3901 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3902 {
3903 REAL_VALUE_TYPE r0, r1;
3904 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3905 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3906 if (!mul0)
3907 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3908 if (!mul1)
3909 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3910 real_arithmetic (&r0, code, &r0, &r1);
3911 return fold_build2 (MULT_EXPR, type,
3912 TREE_OPERAND (arg0, 0),
3913 build_real (type, r0));
3914 }
3915
3916 return NULL_TREE;
3917 }
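
/* For example, a / 5.0 + b / 5.0 becomes (a + b) / 5.0, and
   a / 2.0 + a / 4.0 becomes a * 0.75 by evaluating 1/2.0 + 1/4.0 at
   compile time.  Either rewrite can change rounding, which is why this
   is only done when unsafe math optimizations are permitted.  */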
3918 \f
3919 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3920 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3921
3922 static tree
3923 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3924 HOST_WIDE_INT bitpos, int unsignedp)
3925 {
3926 tree result, bftype;
3927
3928 if (bitpos == 0)
3929 {
3930 tree size = TYPE_SIZE (TREE_TYPE (inner));
3931 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3932 || POINTER_TYPE_P (TREE_TYPE (inner)))
3933 && host_integerp (size, 0)
3934 && tree_low_cst (size, 0) == bitsize)
3935 return fold_convert (type, inner);
3936 }
3937
3938 bftype = type;
3939 if (TYPE_PRECISION (bftype) != bitsize
3940 || TYPE_UNSIGNED (bftype) == !unsignedp)
3941 bftype = build_nonstandard_integer_type (bitsize, 0);
3942
3943 result = build3 (BIT_FIELD_REF, bftype, inner,
3944 size_int (bitsize), bitsize_int (bitpos));
3945
3946 if (bftype != type)
3947 result = fold_convert (type, result);
3948
3949 return result;
3950 }
3951
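/* For example, with BITSIZE 8 and BITPOS 16 this builds
   BIT_FIELD_REF <inner, 8, 16>, going through an intermediate 8-bit
   integer type if TYPE has a different precision or signedness.  When
   BITPOS is 0 and INNER already has exactly BITSIZE bits, the
   reference degenerates to a simple conversion, as handled above.  */
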
3952 /* Optimize a bit-field compare.
3953
3954 There are two cases: First is a compare against a constant and the
3955 second is a comparison of two items where the fields are at the same
3956 bit position relative to the start of a chunk (byte, halfword, word)
3957 large enough to contain it. In these cases we can avoid the shift
3958 implicit in bitfield extractions.
3959
3960 For constants, we emit a compare of the shifted constant with the
3961 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3962 compared. For two fields at the same position, we do the ANDs with the
3963 similar mask and compare the result of the ANDs.
3964
3965 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3966 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3967 are the left and right operands of the comparison, respectively.
3968
3969 If the optimization described above can be done, we return the resulting
3970 tree. Otherwise we return zero. */
3971
3972 static tree
3973 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3974 tree lhs, tree rhs)
3975 {
3976 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3977 tree type = TREE_TYPE (lhs);
3978 tree signed_type, unsigned_type;
3979 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3980 enum machine_mode lmode, rmode, nmode;
3981 int lunsignedp, runsignedp;
3982 int lvolatilep = 0, rvolatilep = 0;
3983 tree linner, rinner = NULL_TREE;
3984 tree mask;
3985 tree offset;
3986
3987 /* Get all the information about the extractions being done. If the bit size
3988 is the same as the size of the underlying object, we aren't doing an
3989 extraction at all and so can do nothing. We also don't want to
3990 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3991 then will no longer be able to replace it. */
3992 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3993 &lunsignedp, &lvolatilep, false);
3994 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3995 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3996 return 0;
3997
3998 if (!const_p)
3999 {
4000 /* If this is not a constant, we can only do something if bit positions,
4001 sizes, and signedness are the same. */
4002 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4003 &runsignedp, &rvolatilep, false);
4004
4005 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4006 || lunsignedp != runsignedp || offset != 0
4007 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4008 return 0;
4009 }
4010
4011 /* See if we can find a mode to refer to this field. We should be able to,
4012 but fail if we can't. */
4013 nmode = get_best_mode (lbitsize, lbitpos,
4014 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4015 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4016 TYPE_ALIGN (TREE_TYPE (rinner))),
4017 word_mode, lvolatilep || rvolatilep);
4018 if (nmode == VOIDmode)
4019 return 0;
4020
4021 /* Set signed and unsigned types of the precision of this mode for the
4022 shifts below. */
4023 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4024 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4025
4026 /* Compute the bit position and size for the new reference and our offset
4027 within it. If the new reference is the same size as the original, we
4028 won't optimize anything, so return zero. */
4029 nbitsize = GET_MODE_BITSIZE (nmode);
4030 nbitpos = lbitpos & ~ (nbitsize - 1);
4031 lbitpos -= nbitpos;
4032 if (nbitsize == lbitsize)
4033 return 0;
4034
4035 if (BYTES_BIG_ENDIAN)
4036 lbitpos = nbitsize - lbitsize - lbitpos;
4037
4038 /* Make the mask to be used against the extracted field. */
4039 mask = build_int_cst_type (unsigned_type, -1);
4040 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4041 mask = const_binop (RSHIFT_EXPR, mask,
4042 size_int (nbitsize - lbitsize - lbitpos), 0);
4043
4044 if (! const_p)
4045 /* If not comparing with constant, just rework the comparison
4046 and return. */
4047 return fold_build2 (code, compare_type,
4048 fold_build2 (BIT_AND_EXPR, unsigned_type,
4049 make_bit_field_ref (linner,
4050 unsigned_type,
4051 nbitsize, nbitpos,
4052 1),
4053 mask),
4054 fold_build2 (BIT_AND_EXPR, unsigned_type,
4055 make_bit_field_ref (rinner,
4056 unsigned_type,
4057 nbitsize, nbitpos,
4058 1),
4059 mask));
4060
4061 /* Otherwise, we are handling the constant case. See if the constant is too
4062 big for the field. Warn and return a tree for 0 (false) if so. We do
4063 this not only for its own sake, but to avoid having to test for this
4064 error case below. If we didn't, we might generate wrong code.
4065
4066 For unsigned fields, the constant shifted right by the field length should
4067 be all zero. For signed fields, the high-order bits should agree with
4068 the sign bit. */
4069
4070 if (lunsignedp)
4071 {
4072 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4073 fold_convert (unsigned_type, rhs),
4074 size_int (lbitsize), 0)))
4075 {
4076 warning (0, "comparison is always %d due to width of bit-field",
4077 code == NE_EXPR);
4078 return constant_boolean_node (code == NE_EXPR, compare_type);
4079 }
4080 }
4081 else
4082 {
4083 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4084 size_int (lbitsize - 1), 0);
4085 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4086 {
4087 warning (0, "comparison is always %d due to width of bit-field",
4088 code == NE_EXPR);
4089 return constant_boolean_node (code == NE_EXPR, compare_type);
4090 }
4091 }
4092
4093 /* Single-bit compares should always be against zero. */
4094 if (lbitsize == 1 && ! integer_zerop (rhs))
4095 {
4096 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4097 rhs = build_int_cst (type, 0);
4098 }
4099
4100 /* Make a new bitfield reference, shift the constant over the
4101 appropriate number of bits and mask it with the computed mask
4102 (in case this was a signed field). If we changed it, make a new one. */
4103 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4104 if (lvolatilep)
4105 {
4106 TREE_SIDE_EFFECTS (lhs) = 1;
4107 TREE_THIS_VOLATILE (lhs) = 1;
4108 }
4109
4110 rhs = const_binop (BIT_AND_EXPR,
4111 const_binop (LSHIFT_EXPR,
4112 fold_convert (unsigned_type, rhs),
4113 size_int (lbitpos), 0),
4114 mask, 0);
4115
4116 return build2 (code, compare_type,
4117 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4118 rhs);
4119 }
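
/* Worked example (a sketch; assumes a little-endian target where
   get_best_mode picks HImode):

     struct s { unsigned a : 3; unsigned b : 9; } x;  ...  x.b == 5

   Field b occupies bits 3..11, so NBITSIZE is 16, the mask is
   0x1ff << 3 == 0xff8, and the comparison becomes

     (HALFWORD & 0xff8) == 5 << 3

   where HALFWORD is the 16-bit unit containing b: one load and one
   AND, with no shift of the extracted field.  */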
4120 \f
4121 /* Subroutine for fold_truthop: decode a field reference.
4122
4123 If EXP is a comparison reference, we return the innermost reference.
4124
4125 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4126 set to the starting bit number.
4127
4128 If the innermost field can be completely contained in a mode-sized
4129 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4130
4131 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4132 otherwise it is not changed.
4133
4134 *PUNSIGNEDP is set to the signedness of the field.
4135
4136 *PMASK is set to the mask used. This is either contained in a
4137 BIT_AND_EXPR or derived from the width of the field.
4138
4139 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4140
4141 Return 0 if this is not a component reference or is one that we can't
4142 do anything with. */
4143
4144 static tree
4145 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4146 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4147 int *punsignedp, int *pvolatilep,
4148 tree *pmask, tree *pand_mask)
4149 {
4150 tree outer_type = 0;
4151 tree and_mask = 0;
4152 tree mask, inner, offset;
4153 tree unsigned_type;
4154 unsigned int precision;
4155
4156 /* All the optimizations using this function assume integer fields.
4157 There are problems with FP fields since the type_for_size call
4158 below can fail for, e.g., XFmode. */
4159 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4160 return 0;
4161
4162 /* We are interested in the bare arrangement of bits, so strip everything
4163 that doesn't affect the machine mode. However, record the type of the
4164 outermost expression if it may matter below. */
4165 if (CONVERT_EXPR_P (exp)
4166 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4167 outer_type = TREE_TYPE (exp);
4168 STRIP_NOPS (exp);
4169
4170 if (TREE_CODE (exp) == BIT_AND_EXPR)
4171 {
4172 and_mask = TREE_OPERAND (exp, 1);
4173 exp = TREE_OPERAND (exp, 0);
4174 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4175 if (TREE_CODE (and_mask) != INTEGER_CST)
4176 return 0;
4177 }
4178
4179 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4180 punsignedp, pvolatilep, false);
4181 if ((inner == exp && and_mask == 0)
4182 || *pbitsize < 0 || offset != 0
4183 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4184 return 0;
4185
4186 /* If the number of bits in the reference is the same as the bitsize of
4187 the outer type, then the outer type gives the signedness. Otherwise
4188 (in case of a small bitfield) the signedness is unchanged. */
4189 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4190 *punsignedp = TYPE_UNSIGNED (outer_type);
4191
4192 /* Compute the mask to access the bitfield. */
4193 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4194 precision = TYPE_PRECISION (unsigned_type);
4195
4196 mask = build_int_cst_type (unsigned_type, -1);
4197
4198 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4199 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4200
4201 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4202 if (and_mask != 0)
4203 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4204 fold_convert (unsigned_type, and_mask), mask);
4205
4206 *pmask = mask;
4207 *pand_mask = and_mask;
4208 return inner;
4209 }
4210
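/* For example, for a 3-bit field the chosen unsigned type might have
   precision 8, so the mask is built as (-1 << 5) >> 5 == 0x07.  If EXP
   was additionally wrapped in a BIT_AND_EXPR with 0x5, the final
   *PMASK is 0x07 & 0x05 == 0x05 and *PAND_MASK is 0x05.  */
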
4211 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4212 bit positions. */
4213
4214 static int
4215 all_ones_mask_p (const_tree mask, int size)
4216 {
4217 tree type = TREE_TYPE (mask);
4218 unsigned int precision = TYPE_PRECISION (type);
4219 tree tmask;
4220
4221 tmask = build_int_cst_type (signed_type_for (type), -1);
4222
4223 return
4224 tree_int_cst_equal (mask,
4225 const_binop (RSHIFT_EXPR,
4226 const_binop (LSHIFT_EXPR, tmask,
4227 size_int (precision - size),
4228 0),
4229 size_int (precision - size), 0));
4230 }
4231
4232 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4233 represents the sign bit of EXP's type. If EXP represents a sign
4234 or zero extension, also test VAL against the unextended type.
4235 The return value is the (sub)expression whose sign bit is VAL,
4236 or NULL_TREE otherwise. */
4237
4238 static tree
4239 sign_bit_p (tree exp, const_tree val)
4240 {
4241 unsigned HOST_WIDE_INT mask_lo, lo;
4242 HOST_WIDE_INT mask_hi, hi;
4243 int width;
4244 tree t;
4245
4246 /* Tree EXP must have an integral type. */
4247 t = TREE_TYPE (exp);
4248 if (! INTEGRAL_TYPE_P (t))
4249 return NULL_TREE;
4250
4251 /* Tree VAL must be an integer constant. */
4252 if (TREE_CODE (val) != INTEGER_CST
4253 || TREE_OVERFLOW (val))
4254 return NULL_TREE;
4255
4256 width = TYPE_PRECISION (t);
4257 if (width > HOST_BITS_PER_WIDE_INT)
4258 {
4259 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4260 lo = 0;
4261
4262 mask_hi = ((unsigned HOST_WIDE_INT) -1
4263 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4264 mask_lo = -1;
4265 }
4266 else
4267 {
4268 hi = 0;
4269 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4270
4271 mask_hi = 0;
4272 mask_lo = ((unsigned HOST_WIDE_INT) -1
4273 >> (HOST_BITS_PER_WIDE_INT - width));
4274 }
4275
4276 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4277 treat VAL as if it were unsigned. */
4278 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4279 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4280 return exp;
4281
4282 /* Handle extension from a narrower type. */
4283 if (TREE_CODE (exp) == NOP_EXPR
4284 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4285 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4286
4287 return NULL_TREE;
4288 }
4289
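/* For example, for a 32-bit int X, sign_bit_p (x, val) returns X when
   VAL is 0x80000000.  If EXP is (int) C for an 8-bit signed char C,
   the NOP_EXPR case recurses on C, so VAL == 0x80 is also recognized
   as the sign bit of the unextended operand.  */
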
4290 /* Subroutine for fold_truthop: determine if an operand is simple enough
4291 to be evaluated unconditionally. */
4292
4293 static int
4294 simple_operand_p (const_tree exp)
4295 {
4296 /* Strip any conversions that don't change the machine mode. */
4297 STRIP_NOPS (exp);
4298
4299 return (CONSTANT_CLASS_P (exp)
4300 || TREE_CODE (exp) == SSA_NAME
4301 || (DECL_P (exp)
4302 && ! TREE_ADDRESSABLE (exp)
4303 && ! TREE_THIS_VOLATILE (exp)
4304 && ! DECL_NONLOCAL (exp)
4305 /* Don't regard global variables as simple. They may be
4306 allocated in ways unknown to the compiler (shared memory,
4307 #pragma weak, etc). */
4308 && ! TREE_PUBLIC (exp)
4309 && ! DECL_EXTERNAL (exp)
4310 /* Loading a static variable is unduly expensive, but global
4311 registers aren't expensive. */
4312 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4313 }
4314 \f
4315 /* The following functions are subroutines to fold_range_test and allow it to
4316 try to change a logical combination of comparisons into a range test.
4317
4318 For example, both
4319 X == 2 || X == 3 || X == 4 || X == 5
4320 and
4321 X >= 2 && X <= 5
4322 are converted to
4323 (unsigned) (X - 2) <= 3
4324
4325 We describe each set of comparisons as being either inside or outside
4326 a range, using a variable named like IN_P, and then describe the
4327 range with a lower and upper bound. If one of the bounds is omitted,
4328 it represents either the highest or lowest value of the type.
4329
4330 In the comments below, we represent a range by two numbers in brackets
4331 preceded by a "+" to designate being inside that range, or a "-" to
4332 designate being outside that range, so the condition can be inverted by
4333 flipping the prefix. An omitted bound is represented by a "-". For
4334 example, "- [-, 10]" means being outside the range starting at the lowest
4335 possible value and ending at 10, in other words, being greater than 10.
4336 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4337 always false.
4338
4339 We set up things so that the missing bounds are handled in a consistent
4340 manner so neither a missing bound nor "true" and "false" need to be
4341 handled using a special case. */
4342
4343 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4344 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4345 and UPPER1_P are nonzero if the respective argument is an upper bound
4346 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4347 must be specified for a comparison. ARG1 will be converted to ARG0's
4348 type if both are specified. */
4349
4350 static tree
4351 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4352 tree arg1, int upper1_p)
4353 {
4354 tree tem;
4355 int result;
4356 int sgn0, sgn1;
4357
4358 /* If neither arg represents infinity, do the normal operation.
4359 Else, if not a comparison, return infinity. Else handle the special
4360 comparison rules. Note that most of the cases below won't occur, but
4361 are handled for consistency. */
4362
4363 if (arg0 != 0 && arg1 != 0)
4364 {
4365 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4366 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4367 STRIP_NOPS (tem);
4368 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4369 }
4370
4371 if (TREE_CODE_CLASS (code) != tcc_comparison)
4372 return 0;
4373
4374 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4375 for neither. In real mathematics, we cannot assume open-ended ranges
4376 are the same. But this is computer arithmetic, where numbers are
4377 finite. We can therefore replace any unbounded end of a range by a
4378 value Z greater than any representable number, which permits us to
4379 treat unbounded ranges as equal. */
4380 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4381 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4382 switch (code)
4383 {
4384 case EQ_EXPR:
4385 result = sgn0 == sgn1;
4386 break;
4387 case NE_EXPR:
4388 result = sgn0 != sgn1;
4389 break;
4390 case LT_EXPR:
4391 result = sgn0 < sgn1;
4392 break;
4393 case LE_EXPR:
4394 result = sgn0 <= sgn1;
4395 break;
4396 case GT_EXPR:
4397 result = sgn0 > sgn1;
4398 break;
4399 case GE_EXPR:
4400 result = sgn0 >= sgn1;
4401 break;
4402 default:
4403 gcc_unreachable ();
4404 }
4405
4406 return constant_boolean_node (result, type);
4407 }
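
/* For example, a missing upper bound compares greater than any
   constant: with ARG0 omitted as an upper bound (SGN0 == 1) and ARG1 a
   constant (SGN1 == 0), GT_EXPR yields true.  Likewise two omitted
   upper bounds compare equal, which is what lets missing bounds be
   handled uniformly by the callers below.  */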
4408 \f
4409 /* Given EXP, a logical expression, set the range it is testing into
4410 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4411 actually being tested. *PLOW and *PHIGH will be made of the same
4412 type as the returned expression. If EXP is not a comparison, we
4413 will most likely not be returning a useful value and range. Set
4414 *STRICT_OVERFLOW_P to true if the return value is only valid
4415 because signed overflow is undefined; otherwise, do not change
4416 *STRICT_OVERFLOW_P. */
4417
4418 tree
4419 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4420 bool *strict_overflow_p)
4421 {
4422 enum tree_code code;
4423 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4424 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4425 int in_p, n_in_p;
4426 tree low, high, n_low, n_high;
4427
4428 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4429 and see if we can refine the range. Some of the cases below may not
4430 happen, but it doesn't seem worth worrying about this. We "continue"
4431 the outer loop when we've changed something; otherwise we "break"
4432 the switch, which will "break" the while. */
4433
4434 in_p = 0;
4435 low = high = build_int_cst (TREE_TYPE (exp), 0);
4436
4437 while (1)
4438 {
4439 code = TREE_CODE (exp);
4440 exp_type = TREE_TYPE (exp);
4441
4442 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4443 {
4444 if (TREE_OPERAND_LENGTH (exp) > 0)
4445 arg0 = TREE_OPERAND (exp, 0);
4446 if (TREE_CODE_CLASS (code) == tcc_comparison
4447 || TREE_CODE_CLASS (code) == tcc_unary
4448 || TREE_CODE_CLASS (code) == tcc_binary)
4449 arg0_type = TREE_TYPE (arg0);
4450 if (TREE_CODE_CLASS (code) == tcc_binary
4451 || TREE_CODE_CLASS (code) == tcc_comparison
4452 || (TREE_CODE_CLASS (code) == tcc_expression
4453 && TREE_OPERAND_LENGTH (exp) > 1))
4454 arg1 = TREE_OPERAND (exp, 1);
4455 }
4456
4457 switch (code)
4458 {
4459 case TRUTH_NOT_EXPR:
4460 in_p = ! in_p, exp = arg0;
4461 continue;
4462
4463 case EQ_EXPR: case NE_EXPR:
4464 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4465 /* We can only do something if the range is testing for zero
4466 and if the second operand is an integer constant. Note that
4467 saying something is "in" the range we make is done by
4468 complementing IN_P, since it is initially set for the case of
4469 being not equal to zero; "out" means leaving it alone.
4470 if (low == 0 || high == 0
4471 || ! integer_zerop (low) || ! integer_zerop (high)
4472 || TREE_CODE (arg1) != INTEGER_CST)
4473 break;
4474
4475 switch (code)
4476 {
4477 case NE_EXPR: /* - [c, c] */
4478 low = high = arg1;
4479 break;
4480 case EQ_EXPR: /* + [c, c] */
4481 in_p = ! in_p, low = high = arg1;
4482 break;
4483 case GT_EXPR: /* - [-, c] */
4484 low = 0, high = arg1;
4485 break;
4486 case GE_EXPR: /* + [c, -] */
4487 in_p = ! in_p, low = arg1, high = 0;
4488 break;
4489 case LT_EXPR: /* - [c, -] */
4490 low = arg1, high = 0;
4491 break;
4492 case LE_EXPR: /* + [-, c] */
4493 in_p = ! in_p, low = 0, high = arg1;
4494 break;
4495 default:
4496 gcc_unreachable ();
4497 }
4498
4499 /* If this is an unsigned comparison, we also know that EXP is
4500 greater than or equal to zero. We base the range tests we make
4501 on that fact, so we record it here so we can parse existing
4502 range tests. We test arg0_type since often the return type
4503 of, e.g. EQ_EXPR, is boolean. */
4504 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4505 {
4506 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4507 in_p, low, high, 1,
4508 build_int_cst (arg0_type, 0),
4509 NULL_TREE))
4510 break;
4511
4512 in_p = n_in_p, low = n_low, high = n_high;
4513
4514 /* If the high bound is missing, but we have a nonzero low
4515 bound, reverse the range so it goes from zero to the low bound
4516 minus 1. */
4517 if (high == 0 && low && ! integer_zerop (low))
4518 {
4519 in_p = ! in_p;
4520 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4521 integer_one_node, 0);
4522 low = build_int_cst (arg0_type, 0);
4523 }
4524 }
4525
4526 exp = arg0;
4527 continue;
4528
4529 case NEGATE_EXPR:
4530 /* (-x) IN [a,b] -> x in [-b, -a] */
4531 n_low = range_binop (MINUS_EXPR, exp_type,
4532 build_int_cst (exp_type, 0),
4533 0, high, 1);
4534 n_high = range_binop (MINUS_EXPR, exp_type,
4535 build_int_cst (exp_type, 0),
4536 0, low, 0);
4537 low = n_low, high = n_high;
4538 exp = arg0;
4539 continue;
4540
4541 case BIT_NOT_EXPR:
4542 /* ~ X -> -X - 1 */
4543 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4544 build_int_cst (exp_type, 1));
4545 continue;
4546
4547 case PLUS_EXPR: case MINUS_EXPR:
4548 if (TREE_CODE (arg1) != INTEGER_CST)
4549 break;
4550
4551 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4552 move a constant to the other side. */
4553 if (!TYPE_UNSIGNED (arg0_type)
4554 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4555 break;
4556
4557 /* If EXP is signed, any overflow in the computation is undefined,
4558 so we don't worry about it so long as our computations on
4559 the bounds don't overflow. For unsigned, overflow is defined
4560 and this is exactly the right thing. */
4561 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4562 arg0_type, low, 0, arg1, 0);
4563 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4564 arg0_type, high, 1, arg1, 0);
4565 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4566 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4567 break;
4568
4569 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4570 *strict_overflow_p = true;
4571
4572 /* Check for an unsigned range which has wrapped around the maximum
4573 value thus making n_high < n_low, and normalize it. */
4574 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4575 {
4576 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4577 integer_one_node, 0);
4578 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4579 integer_one_node, 0);
4580
4581 /* If the range is of the form +/- [ x+1, x ], we won't
4582 be able to normalize it. But then, it represents the
4583 whole range or the empty set, so make it
4584 +/- [ -, - ]. */
4585 if (tree_int_cst_equal (n_low, low)
4586 && tree_int_cst_equal (n_high, high))
4587 low = high = 0;
4588 else
4589 in_p = ! in_p;
4590 }
4591 else
4592 low = n_low, high = n_high;
4593
4594 exp = arg0;
4595 continue;
4596
4597 CASE_CONVERT: case NON_LVALUE_EXPR:
4598 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4599 break;
4600
4601 if (! INTEGRAL_TYPE_P (arg0_type)
4602 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4603 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4604 break;
4605
4606 n_low = low, n_high = high;
4607
4608 if (n_low != 0)
4609 n_low = fold_convert (arg0_type, n_low);
4610
4611 if (n_high != 0)
4612 n_high = fold_convert (arg0_type, n_high);
4613
4615 /* If we're converting arg0, which has an unsigned type, to exp,
4616 which has a signed type, we will be doing the comparison as unsigned.
4617 The tests above have already verified that LOW and HIGH
4618 are both positive.
4619
4620 So we have to ensure that we will handle large unsigned
4621 values the same way that the current signed bounds treat
4622 negative values. */
4623
4624 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4625 {
4626 tree high_positive;
4627 tree equiv_type;
4628 /* For fixed-point modes, we need to pass the saturating flag
4629 as the 2nd parameter. */
4630 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4631 equiv_type = lang_hooks.types.type_for_mode
4632 (TYPE_MODE (arg0_type),
4633 TYPE_SATURATING (arg0_type));
4634 else
4635 equiv_type = lang_hooks.types.type_for_mode
4636 (TYPE_MODE (arg0_type), 1);
4637
4638 /* A range without an upper bound is, naturally, unbounded.
4639 Since convert would have cropped a very large value, use
4640 the max value for the destination type. */
4641 high_positive
4642 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4643 : TYPE_MAX_VALUE (arg0_type);
4644
4645 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4646 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4647 fold_convert (arg0_type,
4648 high_positive),
4649 build_int_cst (arg0_type, 1));
4650
4651 /* If the low bound is specified, "and" the range with the
4652 range for which the original unsigned value will be
4653 positive. */
4654 if (low != 0)
4655 {
4656 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4657 1, n_low, n_high, 1,
4658 fold_convert (arg0_type,
4659 integer_zero_node),
4660 high_positive))
4661 break;
4662
4663 in_p = (n_in_p == in_p);
4664 }
4665 else
4666 {
4667 /* Otherwise, "or" the range with the range of the input
4668 that will be interpreted as negative. */
4669 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4670 0, n_low, n_high, 1,
4671 fold_convert (arg0_type,
4672 integer_zero_node),
4673 high_positive))
4674 break;
4675
4676 in_p = (in_p != n_in_p);
4677 }
4678 }
4679
4680 exp = arg0;
4681 low = n_low, high = n_high;
4682 continue;
4683
4684 default:
4685 break;
4686 }
4687
4688 break;
4689 }
4690
4691 /* If EXP is a constant, we can evaluate whether this is true or false. */
4692 if (TREE_CODE (exp) == INTEGER_CST)
4693 {
4694 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4695 exp, 0, low, 0))
4696 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4697 exp, 1, high, 1)));
4698 low = high = 0;
4699 exp = 0;
4700 }
4701
4702 *pin_p = in_p, *plow = low, *phigh = high;
4703 return exp;
4704 }
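
/* For example, for EXP == (x >= 2) with unsigned X, the GE_EXPR case
   first records + [2, -]; because X is unsigned and the upper bound is
   missing, the range is then normalized to - [0, 1], i.e. X outside
   [0, 1].  For EXP == !(x == 3), the TRUTH_NOT_EXPR case flips IN_P
   around the EQ_EXPR range, giving - [3, 3].  */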
4705 \f
4706 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4707 type, TYPE, return an expression to test if EXP is in (or out of, depending
4708 on IN_P) the range. Return 0 if the test couldn't be created. */
4709
4710 tree
4711 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4712 {
4713 tree etype = TREE_TYPE (exp), value;
4714
4715 #ifdef HAVE_canonicalize_funcptr_for_compare
4716 /* Disable this optimization for function pointer expressions
4717 on targets that require function pointer canonicalization. */
4718 if (HAVE_canonicalize_funcptr_for_compare
4719 && TREE_CODE (etype) == POINTER_TYPE
4720 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4721 return NULL_TREE;
4722 #endif
4723
4724 if (! in_p)
4725 {
4726 value = build_range_check (type, exp, 1, low, high);
4727 if (value != 0)
4728 return invert_truthvalue (value);
4729
4730 return 0;
4731 }
4732
4733 if (low == 0 && high == 0)
4734 return build_int_cst (type, 1);
4735
4736 if (low == 0)
4737 return fold_build2 (LE_EXPR, type, exp,
4738 fold_convert (etype, high));
4739
4740 if (high == 0)
4741 return fold_build2 (GE_EXPR, type, exp,
4742 fold_convert (etype, low));
4743
4744 if (operand_equal_p (low, high, 0))
4745 return fold_build2 (EQ_EXPR, type, exp,
4746 fold_convert (etype, low));
4747
4748 if (integer_zerop (low))
4749 {
4750 if (! TYPE_UNSIGNED (etype))
4751 {
4752 etype = unsigned_type_for (etype);
4753 high = fold_convert (etype, high);
4754 exp = fold_convert (etype, exp);
4755 }
4756 return build_range_check (type, exp, 1, 0, high);
4757 }
4758
4759 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4760 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4761 {
4762 unsigned HOST_WIDE_INT lo;
4763 HOST_WIDE_INT hi;
4764 int prec;
4765
4766 prec = TYPE_PRECISION (etype);
4767 if (prec <= HOST_BITS_PER_WIDE_INT)
4768 {
4769 hi = 0;
4770 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4771 }
4772 else
4773 {
4774 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4775 lo = (unsigned HOST_WIDE_INT) -1;
4776 }
4777
4778 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4779 {
4780 if (TYPE_UNSIGNED (etype))
4781 {
4782 tree signed_etype = signed_type_for (etype);
4783 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4784 etype
4785 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4786 else
4787 etype = signed_etype;
4788 exp = fold_convert (etype, exp);
4789 }
4790 return fold_build2 (GT_EXPR, type, exp,
4791 build_int_cst (etype, 0));
4792 }
4793 }
4794
4795 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4796 This requires wrap-around arithmetic for the type of the expression.
4797 First make sure that arithmetic in this type is valid, then make sure
4798 that it wraps around. */
4799 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4800 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4801 TYPE_UNSIGNED (etype));
4802
4803 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4804 {
4805 tree utype, minv, maxv;
4806
4807 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4808 for the type in question, as we rely on this here. */
4809 utype = unsigned_type_for (etype);
4810 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4811 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4812 integer_one_node, 1);
4813 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4814
4815 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4816 minv, 1, maxv, 1)))
4817 etype = utype;
4818 else
4819 return 0;
4820 }
4821
4822 high = fold_convert (etype, high);
4823 low = fold_convert (etype, low);
4824 exp = fold_convert (etype, exp);
4825
4826 value = const_binop (MINUS_EXPR, high, low, 0);
4827
4829 if (POINTER_TYPE_P (etype))
4830 {
4831 if (value != 0 && !TREE_OVERFLOW (value))
4832 {
4833 low = fold_convert (sizetype, low);
4834 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4835 return build_range_check (type,
4836 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4837 1, build_int_cst (etype, 0), value);
4838 }
4839 return 0;
4840 }
4841
4842 if (value != 0 && !TREE_OVERFLOW (value))
4843 return build_range_check (type,
4844 fold_build2 (MINUS_EXPR, etype, exp, low),
4845 1, build_int_cst (etype, 0), value);
4846
4847 return 0;
4848 }
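
/* For example, the check for X in + [2, 5] is built as
   (unsigned) (X - 2) <= 3: one subtraction and one unsigned
   comparison.  A signed X is first converted to the corresponding
   unsigned type, so that the subtraction is done in a type whose
   arithmetic wraps.  */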
4849 \f
4850 /* Return the predecessor of VAL in its type, handling the infinite case. */
4851
4852 static tree
4853 range_predecessor (tree val)
4854 {
4855 tree type = TREE_TYPE (val);
4856
4857 if (INTEGRAL_TYPE_P (type)
4858 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4859 return 0;
4860 else
4861 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4862 }
4863
4864 /* Return the successor of VAL in its type, handling the infinite case. */
4865
4866 static tree
4867 range_successor (tree val)
4868 {
4869 tree type = TREE_TYPE (val);
4870
4871 if (INTEGRAL_TYPE_P (type)
4872 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4873 return 0;
4874 else
4875 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4876 }
4877
4878 /* Given two ranges, see if we can merge them into one. Return 1 if we
4879 can, 0 if we can't. Set the output range into the specified parameters. */
4880
4881 bool
4882 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4883 tree high0, int in1_p, tree low1, tree high1)
4884 {
4885 int no_overlap;
4886 int subset;
4887 int temp;
4888 tree tem;
4889 int in_p;
4890 tree low, high;
4891 int lowequal = ((low0 == 0 && low1 == 0)
4892 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4893 low0, 0, low1, 0)));
4894 int highequal = ((high0 == 0 && high1 == 0)
4895 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4896 high0, 1, high1, 1)));
4897
4898 /* Make range 0 be the range that starts first, or ends last if they
4899 start at the same value. Swap them if this is not the case.
4900 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4901 low0, 0, low1, 0))
4902 || (lowequal
4903 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4904 high1, 1, high0, 1))))
4905 {
4906 temp = in0_p, in0_p = in1_p, in1_p = temp;
4907 tem = low0, low0 = low1, low1 = tem;
4908 tem = high0, high0 = high1, high1 = tem;
4909 }
4910
4911 /* Now flag two cases, whether the ranges are disjoint or whether the
4912 second range is totally subsumed in the first. Note that the tests
4913 below are simplified by the ones above. */
4914 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4915 high0, 1, low1, 0));
4916 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4917 high1, 1, high0, 1));
4918
4919 /* We now have four cases, depending on whether we are including or
4920 excluding the two ranges. */
4921 if (in0_p && in1_p)
4922 {
4923 /* If they don't overlap, the result is false. If the second range
4924 is a subset it is the result. Otherwise, the range is from the start
4925 of the second to the end of the first. */
4926 if (no_overlap)
4927 in_p = 0, low = high = 0;
4928 else if (subset)
4929 in_p = 1, low = low1, high = high1;
4930 else
4931 in_p = 1, low = low1, high = high0;
4932 }
4933
4934 else if (in0_p && ! in1_p)
4935 {
4936 /* If they don't overlap, the result is the first range. If they are
4937 equal, the result is false. If the second range is a subset of the
4938 first, and the ranges begin at the same place, we go from just after
4939 the end of the second range to the end of the first. If the second
4940 range is not a subset of the first, or if it is a subset and both
4941 ranges end at the same place, the range starts at the start of the
4942 first range and ends just before the second range.
4943 Otherwise, we can't describe this as a single range. */
4944 if (no_overlap)
4945 in_p = 1, low = low0, high = high0;
4946 else if (lowequal && highequal)
4947 in_p = 0, low = high = 0;
4948 else if (subset && lowequal)
4949 {
4950 low = range_successor (high1);
4951 high = high0;
4952 in_p = 1;
4953 if (low == 0)
4954 {
4955 /* We are in the weird situation where high0 > high1 but
4956 high1 has no successor. Punt. */
4957 return 0;
4958 }
4959 }
4960 else if (! subset || highequal)
4961 {
4962 low = low0;
4963 high = range_predecessor (low1);
4964 in_p = 1;
4965 if (high == 0)
4966 {
4967 /* low0 < low1 but low1 has no predecessor. Punt. */
4968 return 0;
4969 }
4970 }
4971 else
4972 return 0;
4973 }
4974
4975 else if (! in0_p && in1_p)
4976 {
4977 /* If they don't overlap, the result is the second range. If the second
4978 is a subset of the first, the result is false. Otherwise,
4979 the range starts just after the first range and ends at the
4980 end of the second. */
4981 if (no_overlap)
4982 in_p = 1, low = low1, high = high1;
4983 else if (subset || highequal)
4984 in_p = 0, low = high = 0;
4985 else
4986 {
4987 low = range_successor (high0);
4988 high = high1;
4989 in_p = 1;
4990 if (low == 0)
4991 {
4992 /* high1 > high0 but high0 has no successor. Punt. */
4993 return 0;
4994 }
4995 }
4996 }
4997
4998 else
4999 {
5000 /* The case where we are excluding both ranges. Here the complex case
5001 is if they don't overlap. In that case, the only time we have a
5002 range is if they are adjacent. If the second is a subset of the
5003 first, the result is the first. Otherwise, the range to exclude
5004 starts at the beginning of the first range and ends at the end of the
5005 second. */
5006 if (no_overlap)
5007 {
5008 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5009 range_successor (high0),
5010 1, low1, 0)))
5011 in_p = 0, low = low0, high = high1;
5012 else
5013 {
5014 /* Canonicalize - [min, x] into - [-, x]. */
5015 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5016 switch (TREE_CODE (TREE_TYPE (low0)))
5017 {
5018 case ENUMERAL_TYPE:
5019 if (TYPE_PRECISION (TREE_TYPE (low0))
5020 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5021 break;
5022 /* FALLTHROUGH */
5023 case INTEGER_TYPE:
5024 if (tree_int_cst_equal (low0,
5025 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5026 low0 = 0;
5027 break;
5028 case POINTER_TYPE:
5029 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5030 && integer_zerop (low0))
5031 low0 = 0;
5032 break;
5033 default:
5034 break;
5035 }
5036
5037 /* Canonicalize - [x, max] into - [x, -]. */
5038 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5039 switch (TREE_CODE (TREE_TYPE (high1)))
5040 {
5041 case ENUMERAL_TYPE:
5042 if (TYPE_PRECISION (TREE_TYPE (high1))
5043 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5044 break;
5045 /* FALLTHROUGH */
5046 case INTEGER_TYPE:
5047 if (tree_int_cst_equal (high1,
5048 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5049 high1 = 0;
5050 break;
5051 case POINTER_TYPE:
5052 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5053 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5054 high1, 1,
5055 integer_one_node, 1)))
5056 high1 = 0;
5057 break;
5058 default:
5059 break;
5060 }
5061
5062 /* The ranges might be also adjacent between the maximum and
5063 minimum values of the given type. For
5064 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5065 return + [x + 1, y - 1]. */
5066 if (low0 == 0 && high1 == 0)
5067 {
5068 low = range_successor (high0);
5069 high = range_predecessor (low1);
5070 if (low == 0 || high == 0)
5071 return 0;
5072
5073 in_p = 1;
5074 }
5075 else
5076 return 0;
5077 }
5078 }
5079 else if (subset)
5080 in_p = 0, low = low0, high = high0;
5081 else
5082 in_p = 0, low = low0, high = high1;
5083 }
5084
5085 *pin_p = in_p, *plow = low, *phigh = high;
5086 return 1;
5087 }
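
/* For example, merging + [2, 5] with + [4, 9] (both "in") yields
   + [4, 5], the overlap of the two ranges.  Merging - [-, 3] with
   - [7, -] (both "out") yields + [4, 6]: excluding everything up to 3
   and everything from 7 upwards is the same as being inside [4, 6].  */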
5088 \f
5089
5090 /* Subroutine of fold, looking inside expressions of the form
5091 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5092 of the COND_EXPR. This function is also used to optimize
5093 A op B ? C : A, by reversing the comparison first.
5094
5095 Return a folded expression whose code is not a COND_EXPR
5096 anymore, or NULL_TREE if no folding opportunity is found. */
5097
5098 static tree
5099 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5100 {
5101 enum tree_code comp_code = TREE_CODE (arg0);
5102 tree arg00 = TREE_OPERAND (arg0, 0);
5103 tree arg01 = TREE_OPERAND (arg0, 1);
5104 tree arg1_type = TREE_TYPE (arg1);
5105 tree tem;
5106
5107 STRIP_NOPS (arg1);
5108 STRIP_NOPS (arg2);
5109
5110 /* If we have A op 0 ? A : -A, consider applying the following
5111 transformations:
5112
5113 A == 0? A : -A same as -A
5114 A != 0? A : -A same as A
5115 A >= 0? A : -A same as abs (A)
5116 A > 0? A : -A same as abs (A)
5117 A <= 0? A : -A same as -abs (A)
5118 A < 0? A : -A same as -abs (A)
5119
5120 None of these transformations work for modes with signed
5121 zeros. If A is +/-0, the first two transformations will
5122 change the sign of the result (from +0 to -0, or vice
5123 versa). The last four will fix the sign of the result,
5124 even though the original expressions could be positive or
5125 negative, depending on the sign of A.
5126
5127 Note that all these transformations are correct if A is
5128 NaN, since the two alternatives (A and -A) are also NaNs. */
5129 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5130 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5131 ? real_zerop (arg01)
5132 : integer_zerop (arg01))
5133 && ((TREE_CODE (arg2) == NEGATE_EXPR
5134 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5135 /* In the case that A is of the form X-Y, '-A' (arg2) may
5136 have already been folded to Y-X, check for that. */
5137 || (TREE_CODE (arg1) == MINUS_EXPR
5138 && TREE_CODE (arg2) == MINUS_EXPR
5139 && operand_equal_p (TREE_OPERAND (arg1, 0),
5140 TREE_OPERAND (arg2, 1), 0)
5141 && operand_equal_p (TREE_OPERAND (arg1, 1),
5142 TREE_OPERAND (arg2, 0), 0))))
5143 switch (comp_code)
5144 {
5145 case EQ_EXPR:
5146 case UNEQ_EXPR:
5147 tem = fold_convert (arg1_type, arg1);
5148 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5149 case NE_EXPR:
5150 case LTGT_EXPR:
5151 return pedantic_non_lvalue (fold_convert (type, arg1));
5152 case UNGE_EXPR:
5153 case UNGT_EXPR:
5154 if (flag_trapping_math)
5155 break;
5156 /* Fall through. */
5157 case GE_EXPR:
5158 case GT_EXPR:
5159 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5160 arg1 = fold_convert (signed_type_for
5161 (TREE_TYPE (arg1)), arg1);
5162 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5163 return pedantic_non_lvalue (fold_convert (type, tem));
5164 case UNLE_EXPR:
5165 case UNLT_EXPR:
5166 if (flag_trapping_math)
5167 break;
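/* Fall through. */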
5168 case LE_EXPR:
5169 case LT_EXPR:
5170 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5171 arg1 = fold_convert (signed_type_for
5172 (TREE_TYPE (arg1)), arg1);
5173 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5174 return negate_expr (fold_convert (type, tem));
5175 default:
5176 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5177 break;
5178 }
5179
5180 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5181 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5182 both transformations are correct when A is NaN: A != 0
5183 is then true, and A == 0 is false. */
5184
5185 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5186 && integer_zerop (arg01) && integer_zerop (arg2))
5187 {
5188 if (comp_code == NE_EXPR)
5189 return pedantic_non_lvalue (fold_convert (type, arg1));
5190 else if (comp_code == EQ_EXPR)
5191 return build_int_cst (type, 0);
5192 }
5193
5194 /* Try some transformations of A op B ? A : B.
5195
5196 A == B? A : B same as B
5197 A != B? A : B same as A
5198 A >= B? A : B same as max (A, B)
5199 A > B? A : B same as max (B, A)
5200 A <= B? A : B same as min (A, B)
5201 A < B? A : B same as min (B, A)
5202
5203 As above, these transformations don't work in the presence
5204 of signed zeros. For example, if A and B are zeros of
5205 opposite sign, the first two transformations will change
5206 the sign of the result. In the last four, the original
5207 expressions give different results for (A=+0, B=-0) and
5208 (A=-0, B=+0), but the transformed expressions do not.
5209
5210 The first two transformations are correct if either A or B
5211 is a NaN. In the first transformation, the condition will
5212 be false, and B will indeed be chosen. In the case of the
5213 second transformation, the condition A != B will be true,
5214 and A will be chosen.
5215
5216 The conversions to max() and min() are not correct if B is
5217 a number and A is not. The conditions in the original
5218 expressions will be false, so all four give B. The min()
5219 and max() versions would give a NaN instead. */
5220 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5221 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5222 /* Avoid these transformations if the COND_EXPR may be used
5223 as an lvalue in the C++ front-end. PR c++/19199. */
5224 && (in_gimple_form
5225 || (strcmp (lang_hooks.name, "GNU C++") != 0
5226 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5227 || ! maybe_lvalue_p (arg1)
5228 || ! maybe_lvalue_p (arg2)))
5229 {
5230 tree comp_op0 = arg00;
5231 tree comp_op1 = arg01;
5232 tree comp_type = TREE_TYPE (comp_op0);
5233
5234 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5235 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5236 {
5237 comp_type = type;
5238 comp_op0 = arg1;
5239 comp_op1 = arg2;
5240 }
5241
5242 switch (comp_code)
5243 {
5244 case EQ_EXPR:
5245 return pedantic_non_lvalue (fold_convert (type, arg2));
5246 case NE_EXPR:
5247 return pedantic_non_lvalue (fold_convert (type, arg1));
5248 case LE_EXPR:
5249 case LT_EXPR:
5250 case UNLE_EXPR:
5251 case UNLT_EXPR:
5252 /* In C++ a ?: expression can be an lvalue, so put the
5253 operand which will be used if they are equal first
5254 so that we can convert this back to the
5255 corresponding COND_EXPR. */
5256 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5257 {
5258 comp_op0 = fold_convert (comp_type, comp_op0);
5259 comp_op1 = fold_convert (comp_type, comp_op1);
5260 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5261 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5262 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5263 return pedantic_non_lvalue (fold_convert (type, tem));
5264 }
5265 break;
5266 case GE_EXPR:
5267 case GT_EXPR:
5268 case UNGE_EXPR:
5269 case UNGT_EXPR:
5270 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5271 {
5272 comp_op0 = fold_convert (comp_type, comp_op0);
5273 comp_op1 = fold_convert (comp_type, comp_op1);
5274 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5275 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5276 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5277 return pedantic_non_lvalue (fold_convert (type, tem));
5278 }
5279 break;
5280 case UNEQ_EXPR:
5281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5282 return pedantic_non_lvalue (fold_convert (type, arg2));
5283 break;
5284 case LTGT_EXPR:
5285 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5286 return pedantic_non_lvalue (fold_convert (type, arg1));
5287 break;
5288 default:
5289 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5290 break;
5291 }
5292 }
5293
5294 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5295 we might still be able to simplify this. For example,
5296 if C1 is one less or one more than C2, this might have started
5297 out as a MIN or MAX and been transformed by this function.
5298 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5299
5300 if (INTEGRAL_TYPE_P (type)
5301 && TREE_CODE (arg01) == INTEGER_CST
5302 && TREE_CODE (arg2) == INTEGER_CST)
5303 switch (comp_code)
5304 {
5305 case EQ_EXPR:
5306 /* We can replace A with C1 in this case. */
5307 arg1 = fold_convert (type, arg01);
5308 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5309
5310 case LT_EXPR:
5311 /* If C1 is C2 + 1, this is min(A, C2). */
5312 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5313 OEP_ONLY_CONST)
5314 && operand_equal_p (arg01,
5315 const_binop (PLUS_EXPR, arg2,
5316 build_int_cst (type, 1), 0),
5317 OEP_ONLY_CONST))
5318 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5319 type,
5320 fold_convert (type, arg1),
5321 arg2));
5322 break;
5323
5324 case LE_EXPR:
5325 /* If C1 is C2 - 1, this is min(A, C2). */
5326 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5327 OEP_ONLY_CONST)
5328 && operand_equal_p (arg01,
5329 const_binop (MINUS_EXPR, arg2,
5330 build_int_cst (type, 1), 0),
5331 OEP_ONLY_CONST))
5332 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5333 type,
5334 fold_convert (type, arg1),
5335 arg2));
5336 break;
5337
5338 case GT_EXPR:
5339 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5340 MAX_EXPR, to preserve the signedness of the comparison. */
5341 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5342 OEP_ONLY_CONST)
5343 && operand_equal_p (arg01,
5344 const_binop (MINUS_EXPR, arg2,
5345 build_int_cst (type, 1), 0),
5346 OEP_ONLY_CONST))
5347 return pedantic_non_lvalue (fold_convert (type,
5348 fold_build2 (MAX_EXPR, TREE_TYPE (arg00),
5349 arg00,
5350 fold_convert (TREE_TYPE (arg00),
5351 arg2))));
5352 break;
5353
5354 case GE_EXPR:
5355 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5356 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5357 OEP_ONLY_CONST)
5358 && operand_equal_p (arg01,
5359 const_binop (PLUS_EXPR, arg2,
5360 build_int_cst (type, 1), 0),
5361 OEP_ONLY_CONST))
5362 return pedantic_non_lvalue (fold_convert (type,
5363 fold_build2 (MAX_EXPR, TREE_TYPE (arg00),
5364 arg00,
5365 fold_convert (TREE_TYPE (arg00),
5366 arg2))));
5367 break;
5368 case NE_EXPR:
5369 break;
5370 default:
5371 gcc_unreachable ();
5372 }
5373
5374 return NULL_TREE;
5375 }
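
/* For example, x > 0 ? x : -x becomes ABS_EXPR <x>, and
   x < y ? x : y becomes MIN_EXPR <x, y> when NaNs need not be
   honored; x < 48 ? x : 47 is recognized as MIN_EXPR <x, 47> by the
   constant cases above.  */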
5376
5377
5378 \f
5379 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5380 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5381 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5382 false) >= 2)
5383 #endif
5384
5385 /* EXP is some logical combination of boolean tests. See if we can
5386 merge it into some range test. Return the new tree if so. */
5387
5388 static tree
5389 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5390 {
5391 int or_op = (code == TRUTH_ORIF_EXPR
5392 || code == TRUTH_OR_EXPR);
5393 int in0_p, in1_p, in_p;
5394 tree low0, low1, low, high0, high1, high;
5395 bool strict_overflow_p = false;
5396 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5397 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5398 tree tem;
5399 const char * const warnmsg = G_("assuming signed overflow does not occur "
5400 "when simplifying range test");
5401
5402 /* If this is an OR operation, invert both sides; we will invert
5403 again at the end. */
5404 if (or_op)
5405 in0_p = ! in0_p, in1_p = ! in1_p;
5406
5407 /* If both expressions are the same, if we can merge the ranges, and we
5408 can build the range test, return it or it inverted. If one of the
5409 ranges is always true or always false, consider it to be the same
5410 expression as the other. */
5411 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5412 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5413 in1_p, low1, high1)
5414 && 0 != (tem = (build_range_check (type,
5415 lhs != 0 ? lhs
5416 : rhs != 0 ? rhs : integer_zero_node,
5417 in_p, low, high))))
5418 {
5419 if (strict_overflow_p)
5420 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5421 return or_op ? invert_truthvalue (tem) : tem;
5422 }
5423
5424 /* On machines where the branch cost is expensive, if this is a
5425 short-circuited branch and the underlying object on both sides
5426 is the same, make a non-short-circuit operation. */
5427 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5428 && lhs != 0 && rhs != 0
5429 && (code == TRUTH_ANDIF_EXPR
5430 || code == TRUTH_ORIF_EXPR)
5431 && operand_equal_p (lhs, rhs, 0))
5432 {
5433 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5434 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5435 which cases we can't do this. */
5436 if (simple_operand_p (lhs))
5437 return build2 (code == TRUTH_ANDIF_EXPR
5438 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5439 type, op0, op1);
5440
5441 else if (lang_hooks.decls.global_bindings_p () == 0
5442 && ! CONTAINS_PLACEHOLDER_P (lhs))
5443 {
5444 tree common = save_expr (lhs);
5445
5446 if (0 != (lhs = build_range_check (type, common,
5447 or_op ? ! in0_p : in0_p,
5448 low0, high0))
5449 && (0 != (rhs = build_range_check (type, common,
5450 or_op ? ! in1_p : in1_p,
5451 low1, high1))))
5452 {
5453 if (strict_overflow_p)
5454 fold_overflow_warning (warnmsg,
5455 WARN_STRICT_OVERFLOW_COMPARISON);
5456 return build2 (code == TRUTH_ANDIF_EXPR
5457 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5458 type, lhs, rhs);
5459 }
5460 }
5461 }
5462
5463 return 0;
5464 }
5465 \f
5466 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5467 bit value. Arrange things so the extra bits will be set to zero if and
5468 only if C is sign-extended to its full width. If MASK is nonzero,
5469 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5470
5471 static tree
5472 unextend (tree c, int p, int unsignedp, tree mask)
5473 {
5474 tree type = TREE_TYPE (c);
5475 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5476 tree temp;
5477
5478 if (p == modesize || unsignedp)
5479 return c;
5480
5481 /* We work by getting just the sign bit into the low-order bit, then
5482 into the high-order bit, then sign-extend. We then XOR that value
5483 with C. */
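/* E.g., with P == 4 and MODESIZE == 8, C == 0x0a gives TEMP == 0xf0
   after the shifts below, and C ^ TEMP == 0xfa, the sign extension of
   the four-bit value 1010 (ignoring MASK).  */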
5484 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5485 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5486
5487 /* We must use a signed type in order to get an arithmetic right shift.
5488 However, we must also avoid introducing accidental overflows, so that
5489 a subsequent call to integer_zerop will work. Hence we must
5490 do the type conversion here. At this point, the constant is either
5491 zero or one, and the conversion to a signed type can never overflow.
5492 We could get an overflow if this conversion is done anywhere else. */
5493 if (TYPE_UNSIGNED (type))
5494 temp = fold_convert (signed_type_for (type), temp);
5495
5496 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5497 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5498 if (mask != 0)
5499 temp = const_binop (BIT_AND_EXPR, temp,
5500 fold_convert (TREE_TYPE (c), mask), 0);
5501 /* If necessary, convert the type back to match the type of C. */
5502 if (TYPE_UNSIGNED (type))
5503 temp = fold_convert (type, temp);
5504
5505 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5506 }
5507 \f
5508 /* Find ways of folding logical expressions of LHS and RHS:
5509 Try to merge two comparisons to the same innermost item.
5510 Look for range tests like "ch >= '0' && ch <= '9'".
5511 Look for combinations of simple terms on machines with expensive branches
5512 and evaluate the RHS unconditionally.
5513
5514 For example, if we have p->a == 2 && p->b == 4 and we can make an
5515 object large enough to span both A and B, we can do this with a comparison
5516 against the object ANDed with a mask.
5517
5518 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5519 operations to do this with one comparison.
5520
5521 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5522 function and the one above.
5523
5524 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5525 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5526
5527 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5528 two operands.
5529
5530 We return the simplified tree or 0 if no optimization is possible. */
5531
5532 static tree
5533 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5534 {
5535 /* If this is the "or" of two comparisons, we can do something if
5536 the comparisons are NE_EXPR. If this is the "and", we can do something
5537 if the comparisons are EQ_EXPR. I.e.,
5538 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5539
5540 WANTED_CODE is the comparison code required on each side. For single
5541 bit fields, we can convert EQ_EXPR to NE_EXPR so we need not reject
5542 the "wrong" comparison for one-bit fields. */
5543
5544 enum tree_code wanted_code;
5545 enum tree_code lcode, rcode;
5546 tree ll_arg, lr_arg, rl_arg, rr_arg;
5547 tree ll_inner, lr_inner, rl_inner, rr_inner;
5548 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5549 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5550 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5551 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5552 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5553 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5554 enum machine_mode lnmode, rnmode;
5555 tree ll_mask, lr_mask, rl_mask, rr_mask;
5556 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5557 tree l_const, r_const;
5558 tree lntype, rntype, result;
5559 HOST_WIDE_INT first_bit, end_bit;
5560 int volatilep;
5561 tree orig_lhs = lhs, orig_rhs = rhs;
5562 enum tree_code orig_code = code;
5563
5564 /* Start by getting the comparison codes. Fail if anything is volatile.
5565 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5566 it were surrounded with a NE_EXPR. */
5567
5568 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5569 return 0;
5570
5571 lcode = TREE_CODE (lhs);
5572 rcode = TREE_CODE (rhs);
5573
5574 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5575 {
5576 lhs = build2 (NE_EXPR, truth_type, lhs,
5577 build_int_cst (TREE_TYPE (lhs), 0));
5578 lcode = NE_EXPR;
5579 }
5580
5581 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5582 {
5583 rhs = build2 (NE_EXPR, truth_type, rhs,
5584 build_int_cst (TREE_TYPE (rhs), 0));
5585 rcode = NE_EXPR;
5586 }
5587
5588 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5589 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5590 return 0;
5591
5592 ll_arg = TREE_OPERAND (lhs, 0);
5593 lr_arg = TREE_OPERAND (lhs, 1);
5594 rl_arg = TREE_OPERAND (rhs, 0);
5595 rr_arg = TREE_OPERAND (rhs, 1);
5596
5597 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5598 if (simple_operand_p (ll_arg)
5599 && simple_operand_p (lr_arg))
5600 {
5601 tree result;
5602 if (operand_equal_p (ll_arg, rl_arg, 0)
5603 && operand_equal_p (lr_arg, rr_arg, 0))
5604 {
5605 result = combine_comparisons (code, lcode, rcode,
5606 truth_type, ll_arg, lr_arg);
5607 if (result)
5608 return result;
5609 }
5610 else if (operand_equal_p (ll_arg, rr_arg, 0)
5611 && operand_equal_p (lr_arg, rl_arg, 0))
5612 {
5613 result = combine_comparisons (code, lcode,
5614 swap_tree_comparison (rcode),
5615 truth_type, ll_arg, lr_arg);
5616 if (result)
5617 return result;
5618 }
5619 }
5620
5621 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5622 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5623
5624 /* If the RHS can be evaluated unconditionally and its operands are
5625 simple, it wins to evaluate the RHS unconditionally on machines
5626 with expensive branches. In this case, this isn't a comparison
5627 that can be merged. Avoid doing this if the RHS is a floating-point
5628 comparison since those can trap. */
5629
5630 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5631 false) >= 2
5632 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5633 && simple_operand_p (rl_arg)
5634 && simple_operand_p (rr_arg))
5635 {
5636 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5637 if (code == TRUTH_OR_EXPR
5638 && lcode == NE_EXPR && integer_zerop (lr_arg)
5639 && rcode == NE_EXPR && integer_zerop (rr_arg)
5640 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5641 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5642 return build2 (NE_EXPR, truth_type,
5643 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5644 ll_arg, rl_arg),
5645 build_int_cst (TREE_TYPE (ll_arg), 0));
5646
5647 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5648 if (code == TRUTH_AND_EXPR
5649 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5650 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5651 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5652 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5653 return build2 (EQ_EXPR, truth_type,
5654 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5655 ll_arg, rl_arg),
5656 build_int_cst (TREE_TYPE (ll_arg), 0));
5657
5658 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5659 {
5660 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5661 return build2 (code, truth_type, lhs, rhs);
5662 return NULL_TREE;
5663 }
5664 }
5665
5666 /* See if the comparisons can be merged. Then get all the parameters for
5667 each side. */
5668
5669 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5670 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5671 return 0;
5672
5673 volatilep = 0;
5674 ll_inner = decode_field_reference (ll_arg,
5675 &ll_bitsize, &ll_bitpos, &ll_mode,
5676 &ll_unsignedp, &volatilep, &ll_mask,
5677 &ll_and_mask);
5678 lr_inner = decode_field_reference (lr_arg,
5679 &lr_bitsize, &lr_bitpos, &lr_mode,
5680 &lr_unsignedp, &volatilep, &lr_mask,
5681 &lr_and_mask);
5682 rl_inner = decode_field_reference (rl_arg,
5683 &rl_bitsize, &rl_bitpos, &rl_mode,
5684 &rl_unsignedp, &volatilep, &rl_mask,
5685 &rl_and_mask);
5686 rr_inner = decode_field_reference (rr_arg,
5687 &rr_bitsize, &rr_bitpos, &rr_mode,
5688 &rr_unsignedp, &volatilep, &rr_mask,
5689 &rr_and_mask);
5690
5691 /* The inner operation on the lhs of each comparison must be the same
5692 if we are to be able to do anything.
5693 Then see if we have constants. If not, the same must be true for
5694 the rhs's. */
5695 if (volatilep || ll_inner == 0 || rl_inner == 0
5696 || ! operand_equal_p (ll_inner, rl_inner, 0))
5697 return 0;
5698
5699 if (TREE_CODE (lr_arg) == INTEGER_CST
5700 && TREE_CODE (rr_arg) == INTEGER_CST)
5701 l_const = lr_arg, r_const = rr_arg;
5702 else if (lr_inner == 0 || rr_inner == 0
5703 || ! operand_equal_p (lr_inner, rr_inner, 0))
5704 return 0;
5705 else
5706 l_const = r_const = 0;
5707
5708 /* If either comparison code is not correct for our logical operation,
5709 fail. However, we can convert a one-bit comparison against zero into
5710 the opposite comparison against that bit being set in the field. */
5711
5712 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5713 if (lcode != wanted_code)
5714 {
5715 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5716 {
5717 /* Make the left operand unsigned, since we are only interested
5718 in the value of one bit. Otherwise we are doing the wrong
5719 thing below. */
5720 ll_unsignedp = 1;
5721 l_const = ll_mask;
5722 }
5723 else
5724 return 0;
5725 }
5726
5727 /* This is analogous to the code for l_const above. */
5728 if (rcode != wanted_code)
5729 {
5730 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5731 {
5732 rl_unsignedp = 1;
5733 r_const = rl_mask;
5734 }
5735 else
5736 return 0;
5737 }
5738
5739 /* See if we can find a mode that contains both fields being compared on
5740 the left. If we can't, fail. Otherwise, update all constants and masks
5741 to be relative to a field of that size. */
5742 first_bit = MIN (ll_bitpos, rl_bitpos);
5743 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5744 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5745 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5746 volatilep);
5747 if (lnmode == VOIDmode)
5748 return 0;
5749
5750 lnbitsize = GET_MODE_BITSIZE (lnmode);
5751 lnbitpos = first_bit & ~ (lnbitsize - 1);
5752 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5753 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5754
5755 if (BYTES_BIG_ENDIAN)
5756 {
5757 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5758 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5759 }
5760
5761 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5762 size_int (xll_bitpos), 0);
5763 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5764 size_int (xrl_bitpos), 0);
5765
5766 if (l_const)
5767 {
5768 l_const = fold_convert (lntype, l_const);
5769 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5770 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5771 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5772 fold_build1 (BIT_NOT_EXPR,
5773 lntype, ll_mask),
5774 0)))
5775 {
5776 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5777
5778 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5779 }
5780 }
5781 if (r_const)
5782 {
5783 r_const = fold_convert (lntype, r_const);
5784 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5785 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5786 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5787 fold_build1 (BIT_NOT_EXPR,
5788 lntype, rl_mask),
5789 0)))
5790 {
5791 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5792
5793 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5794 }
5795 }
5796
5797 /* If the right sides are not constant, do the same for them. Also,
5798 disallow this optimization if a size or signedness mismatch occurs
5799 between the left and right sides. */
5800 if (l_const == 0)
5801 {
5802 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5803 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5804 /* Make sure the two fields on the right
5805 correspond to the left without being swapped. */
5806 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5807 return 0;
5808
5809 first_bit = MIN (lr_bitpos, rr_bitpos);
5810 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5811 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5812 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5813 volatilep);
5814 if (rnmode == VOIDmode)
5815 return 0;
5816
5817 rnbitsize = GET_MODE_BITSIZE (rnmode);
5818 rnbitpos = first_bit & ~ (rnbitsize - 1);
5819 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5820 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5821
5822 if (BYTES_BIG_ENDIAN)
5823 {
5824 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5825 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5826 }
5827
5828 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5829 size_int (xlr_bitpos), 0);
5830 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5831 size_int (xrr_bitpos), 0);
5832
5833 /* Make a mask that corresponds to both fields being compared.
5834 Do this for both items being compared. If the operands are the
5835 same size and the bits being compared are in the same position
5836 then we can do this by masking both and comparing the masked
5837 results. */
5838 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5839 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5840 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5841 {
5842 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5843 ll_unsignedp || rl_unsignedp);
5844 if (! all_ones_mask_p (ll_mask, lnbitsize))
5845 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5846
5847 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5848 lr_unsignedp || rr_unsignedp);
5849 if (! all_ones_mask_p (lr_mask, rnbitsize))
5850 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5851
5852 return build2 (wanted_code, truth_type, lhs, rhs);
5853 }
5854
5855 /* There is still another way we can do something: If both pairs of
5856 fields being compared are adjacent, we may be able to make a wider
5857 field containing them both.
5858
5859 Note that we still must mask the lhs/rhs expressions. Furthermore,
5860 the mask must be shifted to account for the shift done by
5861 make_bit_field_ref. */
5862 if ((ll_bitsize + ll_bitpos == rl_bitpos
5863 && lr_bitsize + lr_bitpos == rr_bitpos)
5864 || (ll_bitpos == rl_bitpos + rl_bitsize
5865 && lr_bitpos == rr_bitpos + rr_bitsize))
5866 {
5867 tree type;
5868
5869 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5870 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5871 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5872 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5873
5874 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5875 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5876 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5877 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5878
5879 /* Convert to the smaller type before masking out unwanted bits. */
5880 type = lntype;
5881 if (lntype != rntype)
5882 {
5883 if (lnbitsize > rnbitsize)
5884 {
5885 lhs = fold_convert (rntype, lhs);
5886 ll_mask = fold_convert (rntype, ll_mask);
5887 type = rntype;
5888 }
5889 else if (lnbitsize < rnbitsize)
5890 {
5891 rhs = fold_convert (lntype, rhs);
5892 lr_mask = fold_convert (lntype, lr_mask);
5893 type = lntype;
5894 }
5895 }
5896
5897 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5898 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5899
5900 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5901 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5902
5903 return build2 (wanted_code, truth_type, lhs, rhs);
5904 }
5905
5906 return 0;
5907 }
5908
5909 /* Handle the case of comparisons with constants. If there is something in
5910 common between the masks, those bits of the constants must be the same.
5911 If not, the condition is always false. Test for this to avoid generating
5912 incorrect code below. */
5913 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5914 if (! integer_zerop (result)
5915 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5916 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5917 {
5918 if (wanted_code == NE_EXPR)
5919 {
5920 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5921 return constant_boolean_node (true, truth_type);
5922 }
5923 else
5924 {
5925 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5926 return constant_boolean_node (false, truth_type);
5927 }
5928 }
5929
5930 /* Construct the expression we will return. First get the component
5931 reference we will make. Unless the mask is all ones the width of
5932 that field, perform the mask operation. Then compare with the
5933 merged constant. */
5934 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5935 ll_unsignedp || rl_unsignedp);
5936
5937 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5938 if (! all_ones_mask_p (ll_mask, lnbitsize))
5939 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5940
5941 return build2 (wanted_code, truth_type, result,
5942 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5943 }
5944 \f
5945 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5946 constant. */
5947
5948 static tree
5949 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5950 {
5951 tree arg0 = op0;
5952 enum tree_code op_code;
5953 tree comp_const;
5954 tree minmax_const;
5955 int consts_equal, consts_lt;
5956 tree inner;
5957
5958 STRIP_SIGN_NOPS (arg0);
5959
5960 op_code = TREE_CODE (arg0);
5961 minmax_const = TREE_OPERAND (arg0, 1);
5962 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5963 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5964 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5965 inner = TREE_OPERAND (arg0, 0);
5966
5967 /* If something does not permit us to optimize, return the original tree. */
5968 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5969 || TREE_CODE (comp_const) != INTEGER_CST
5970 || TREE_OVERFLOW (comp_const)
5971 || TREE_CODE (minmax_const) != INTEGER_CST
5972 || TREE_OVERFLOW (minmax_const))
5973 return NULL_TREE;
5974
5975 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5976 and GT_EXPR, doing the rest with recursive calls using logical
5977 simplifications. */
5978 switch (code)
5979 {
5980 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5981 {
5982 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5983 type, op0, op1);
5984 if (tem)
5985 return invert_truthvalue (tem);
5986 return NULL_TREE;
5987 }
5988
5989 case GE_EXPR:
5990 return
5991 fold_build2 (TRUTH_ORIF_EXPR, type,
5992 optimize_minmax_comparison
5993 (EQ_EXPR, type, arg0, comp_const),
5994 optimize_minmax_comparison
5995 (GT_EXPR, type, arg0, comp_const));
5996
5997 case EQ_EXPR:
5998 if (op_code == MAX_EXPR && consts_equal)
5999 /* MAX (X, 0) == 0 -> X <= 0 */
6000 return fold_build2 (LE_EXPR, type, inner, comp_const);
6001
6002 else if (op_code == MAX_EXPR && consts_lt)
6003 /* MAX (X, 0) == 5 -> X == 5 */
6004 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6005
6006 else if (op_code == MAX_EXPR)
6007 /* MAX (X, 0) == -1 -> false */
6008 return omit_one_operand (type, integer_zero_node, inner);
6009
6010 else if (consts_equal)
6011 /* MIN (X, 0) == 0 -> X >= 0 */
6012 return fold_build2 (GE_EXPR, type, inner, comp_const);
6013
6014 else if (consts_lt)
6015 /* MIN (X, 0) == 5 -> false */
6016 return omit_one_operand (type, integer_zero_node, inner);
6017
6018 else
6019 /* MIN (X, 0) == -1 -> X == -1 */
6020 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6021
6022 case GT_EXPR:
6023 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6024 /* MAX (X, 0) > 0 -> X > 0
6025 MAX (X, 0) > 5 -> X > 5 */
6026 return fold_build2 (GT_EXPR, type, inner, comp_const);
6027
6028 else if (op_code == MAX_EXPR)
6029 /* MAX (X, 0) > -1 -> true */
6030 return omit_one_operand (type, integer_one_node, inner);
6031
6032 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6033 /* MIN (X, 0) > 0 -> false
6034 MIN (X, 0) > 5 -> false */
6035 return omit_one_operand (type, integer_zero_node, inner);
6036
6037 else
6038 /* MIN (X, 0) > -1 -> X > -1 */
6039 return fold_build2 (GT_EXPR, type, inner, comp_const);
6040
6041 default:
6042 return NULL_TREE;
6043 }
6044 }
6045 \f
6046 /* T is an integer expression that is being multiplied, divided, or taken a
6047 modulus (CODE says which and what kind of divide or modulus) by a
6048 constant C. See if we can eliminate that operation by folding it with
6049 other operations already in T. WIDE_TYPE, if non-null, is a type that
6050 should be used for the computation if wider than our type.
6051
6052 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6053 (X * 2) + (Y * 4). We must, however, be assured that either the original
6054 expression would not overflow or that overflow is undefined for the type
6055 in the language in question.
6056
6057 If we return a non-null expression, it is an equivalent form of the
6058 original computation, but need not be in the original type.
6059
6060 We set *STRICT_OVERFLOW_P to true if the return value depends on
6061 signed overflow being undefined. Otherwise we do not change
6062 *STRICT_OVERFLOW_P. */
6063
6064 static tree
6065 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6066 bool *strict_overflow_p)
6067 {
6068 /* To avoid exponential search depth, refuse to allow recursion past
6069 three levels. Beyond that (1) it's highly unlikely that we'll find
6070 something interesting and (2) we've probably processed it before
6071 when we built the inner expression. */
6072
6073 static int depth;
6074 tree ret;
6075
6076 if (depth > 3)
6077 return NULL;
6078
6079 depth++;
6080 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6081 depth--;
6082
6083 return ret;
6084 }
6085
6086 static tree
6087 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6088 bool *strict_overflow_p)
6089 {
6090 tree type = TREE_TYPE (t);
6091 enum tree_code tcode = TREE_CODE (t);
6092 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6093 > GET_MODE_SIZE (TYPE_MODE (type)))
6094 ? wide_type : type);
6095 tree t1, t2;
6096 int same_p = tcode == code;
6097 tree op0 = NULL_TREE, op1 = NULL_TREE;
6098 bool sub_strict_overflow_p;
6099
6100 /* Don't deal with constants of zero here; they confuse the code below. */
6101 if (integer_zerop (c))
6102 return NULL_TREE;
6103
6104 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6105 op0 = TREE_OPERAND (t, 0);
6106
6107 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6108 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6109
6110 /* Note that we need not handle conditional operations here since fold
6111 already handles those cases. So just do arithmetic here. */
6112 switch (tcode)
6113 {
6114 case INTEGER_CST:
6115 /* For a constant, we can always simplify if we are a multiply
6116 or (for divide and modulus) if it is a multiple of our constant. */
6117 if (code == MULT_EXPR
6118 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6119 return const_binop (code, fold_convert (ctype, t),
6120 fold_convert (ctype, c), 0);
6121 break;
6122
6123 CASE_CONVERT: case NON_LVALUE_EXPR:
6124 /* If op0 is an expression ... */
6125 if ((COMPARISON_CLASS_P (op0)
6126 || UNARY_CLASS_P (op0)
6127 || BINARY_CLASS_P (op0)
6128 || VL_EXP_CLASS_P (op0)
6129 || EXPRESSION_CLASS_P (op0))
6130 /* ... and has wrapping overflow, and its type is smaller
6131 than ctype, then we cannot pass through as widening. */
6132 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6133 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6134 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6135 && (TYPE_PRECISION (ctype)
6136 > TYPE_PRECISION (TREE_TYPE (op0))))
6137 /* ... or this is a truncation (t is narrower than op0),
6138 then we cannot pass through this narrowing. */
6139 || (TYPE_PRECISION (type)
6140 < TYPE_PRECISION (TREE_TYPE (op0)))
6141 /* ... or signedness changes for division or modulus,
6142 then we cannot pass through this conversion. */
6143 || (code != MULT_EXPR
6144 && (TYPE_UNSIGNED (ctype)
6145 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6146 /* ... or has undefined overflow while the converted to
6147 type has not, we cannot do the operation in the inner type
6148 as that would introduce undefined overflow. */
6149 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6150 && !TYPE_OVERFLOW_UNDEFINED (type))))
6151 break;
6152
6153 /* Pass the constant down and see if we can make a simplification. If
6154 we can, replace this expression with the inner simplification for
6155 possible later conversion to our or some other type. */
6156 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6157 && TREE_CODE (t2) == INTEGER_CST
6158 && !TREE_OVERFLOW (t2)
6159 && (0 != (t1 = extract_muldiv (op0, t2, code,
6160 code == MULT_EXPR
6161 ? ctype : NULL_TREE,
6162 strict_overflow_p))))
6163 return t1;
6164 break;
6165
6166 case ABS_EXPR:
6167 /* If widening the type changes it from signed to unsigned, then we
6168 must avoid building ABS_EXPR itself as unsigned. */
6169 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6170 {
6171 tree cstype = (*signed_type_for) (ctype);
6172 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6173 != 0)
6174 {
6175 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6176 return fold_convert (ctype, t1);
6177 }
6178 break;
6179 }
6180 /* If the constant is negative, we cannot simplify this. */
6181 if (tree_int_cst_sgn (c) == -1)
6182 break;
6183 /* FALLTHROUGH */
6184 case NEGATE_EXPR:
6185 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6186 != 0)
6187 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6188 break;
6189
6190 case MIN_EXPR: case MAX_EXPR:
6191 /* If widening the type changes the signedness, then we can't perform
6192 this optimization as that changes the result. */
6193 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6194 break;
6195
6196 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6197 sub_strict_overflow_p = false;
6198 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6199 &sub_strict_overflow_p)) != 0
6200 && (t2 = extract_muldiv (op1, c, code, wide_type,
6201 &sub_strict_overflow_p)) != 0)
6202 {
6203 if (tree_int_cst_sgn (c) < 0)
6204 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6205 if (sub_strict_overflow_p)
6206 *strict_overflow_p = true;
6207 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6208 fold_convert (ctype, t2));
6209 }
6210 break;
6211
6212 case LSHIFT_EXPR: case RSHIFT_EXPR:
6213 /* If the second operand is constant, this is a multiplication
6214 or floor division, by a power of two, so we can treat it that
6215 way unless the multiplier or divisor overflows. Signed
6216 left-shift overflow is implementation-defined rather than
6217 undefined in C90, so do not convert signed left shift into
6218 multiplication. */
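/* E.g., for unsigned X, "(X << 3) / 8" is handled as "(X * 8) / 8"
   and can cancel below, while a signed "X << 3" is left alone.  */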
6219 if (TREE_CODE (op1) == INTEGER_CST
6220 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6221 /* const_binop may not detect overflow correctly,
6222 so check for it explicitly here. */
6223 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6224 && TREE_INT_CST_HIGH (op1) == 0
6225 && 0 != (t1 = fold_convert (ctype,
6226 const_binop (LSHIFT_EXPR,
6227 size_one_node,
6228 op1, 0)))
6229 && !TREE_OVERFLOW (t1))
6230 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6231 ? MULT_EXPR : FLOOR_DIV_EXPR,
6232 ctype, fold_convert (ctype, op0), t1),
6233 c, code, wide_type, strict_overflow_p);
6234 break;
6235
6236 case PLUS_EXPR: case MINUS_EXPR:
6237 /* See if we can eliminate the operation on both sides. If we can, we
6238 can return a new PLUS or MINUS. If we can't, the only remaining
6239 cases where we can do anything are if the second operand is a
6240 constant. */
6241 sub_strict_overflow_p = false;
6242 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6243 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6244 if (t1 != 0 && t2 != 0
6245 && (code == MULT_EXPR
6246 /* If not multiplication, we can only do this if both operands
6247 are divisible by c. */
6248 || (multiple_of_p (ctype, op0, c)
6249 && multiple_of_p (ctype, op1, c))))
6250 {
6251 if (sub_strict_overflow_p)
6252 *strict_overflow_p = true;
6253 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6254 fold_convert (ctype, t2));
6255 }
6256
6257 /* If this was a subtraction, negate OP1 and set it to be an addition.
6258 This simplifies the logic below. */
6259 if (tcode == MINUS_EXPR)
6260 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6261
6262 if (TREE_CODE (op1) != INTEGER_CST)
6263 break;
6264
6265 /* If either OP1 or C is negative, this optimization is not safe for
6266 some of the division and remainder types while for others we need
6267 to change the code. */
6268 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6269 {
6270 if (code == CEIL_DIV_EXPR)
6271 code = FLOOR_DIV_EXPR;
6272 else if (code == FLOOR_DIV_EXPR)
6273 code = CEIL_DIV_EXPR;
6274 else if (code != MULT_EXPR
6275 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6276 break;
6277 }
6278
6279 /* If it's a multiply or a division/modulus operation of a multiple
6280 of our constant, do the operation and verify it doesn't overflow. */
6281 if (code == MULT_EXPR
6282 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6283 {
6284 op1 = const_binop (code, fold_convert (ctype, op1),
6285 fold_convert (ctype, c), 0);
6286 /* We allow the constant to overflow with wrapping semantics. */
6287 if (op1 == 0
6288 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6289 break;
6290 }
6291 else
6292 break;
6293
6294 /* If we have an unsigned type that is not a sizetype, we cannot widen
6295 the operation since it will change the result if the original
6296 computation overflowed. */
6297 if (TYPE_UNSIGNED (ctype)
6298 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6299 && ctype != type)
6300 break;
6301
6302 /* If we were able to eliminate our operation from the first side,
6303 apply our operation to the second side and reform the PLUS. */
6304 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6305 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6306
6307 /* The last case is if we are a multiply. In that case, we can
6308 apply the distributive law to commute the multiply and addition
6309 if the multiplication of the constants doesn't overflow. */
6310 if (code == MULT_EXPR)
6311 return fold_build2 (tcode, ctype,
6312 fold_build2 (code, ctype,
6313 fold_convert (ctype, op0),
6314 fold_convert (ctype, c)),
6315 op1);
6316
6317 break;
6318
6319 case MULT_EXPR:
6320 /* We have a special case here if we are doing something like
6321 (C * 8) % 4 since we know that's zero. */
6322 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6323 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6324 /* If the multiplication can overflow we cannot optimize this.
6325 ??? Until we can properly mark individual operations as
6326 not overflowing we need to treat sizetype special here as
6327 stor-layout relies on this optimization to make
6328 DECL_FIELD_BIT_OFFSET always a constant. */
6329 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6330 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6331 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6332 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6333 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6334 {
6335 *strict_overflow_p = true;
6336 return omit_one_operand (type, integer_zero_node, op0);
6337 }
6338
6339 /* ... fall through ... */
6340
6341 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6342 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6343 /* If we can extract our operation from the LHS, do so and return a
6344 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6345 do something only if the second operand is a constant. */
6346 if (same_p
6347 && (t1 = extract_muldiv (op0, c, code, wide_type,
6348 strict_overflow_p)) != 0)
6349 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6350 fold_convert (ctype, op1));
6351 else if (tcode == MULT_EXPR && code == MULT_EXPR
6352 && (t1 = extract_muldiv (op1, c, code, wide_type,
6353 strict_overflow_p)) != 0)
6354 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6355 fold_convert (ctype, t1));
6356 else if (TREE_CODE (op1) != INTEGER_CST)
6357 return 0;
6358
6359 /* If these are the same operation types, we can associate them
6360 assuming no overflow. */
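/* E.g., "(X * 4) * 2" associates to "X * 8" when the constant product
   4 * 2 does not overflow.  */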
6361 if (tcode == code
6362 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6363 fold_convert (ctype, c), 1))
6364 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6365 TREE_INT_CST_HIGH (t1),
6366 (TYPE_UNSIGNED (ctype)
6367 && tcode != MULT_EXPR) ? -1 : 1,
6368 TREE_OVERFLOW (t1)))
6369 && !TREE_OVERFLOW (t1))
6370 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6371
6372 /* If these operations "cancel" each other, we have the main
6373 optimizations of this pass, which occur when either constant is a
6374 multiple of the other, in which case we replace this with an
6375 operation of either CODE or TCODE.
6376
6377 If we have an unsigned type that is not a sizetype, we cannot do
6378 this since it will change the result if the original computation
6379 overflowed. */
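/* E.g., "(X * 8) / 4" becomes "X * 2" and "(X * 4) / 8" becomes
   "X / 2" under the conditions above.  */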
6380 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6381 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6382 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6383 || (tcode == MULT_EXPR
6384 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6385 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6386 && code != MULT_EXPR)))
6387 {
6388 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6389 {
6390 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6391 *strict_overflow_p = true;
6392 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6393 fold_convert (ctype,
6394 const_binop (TRUNC_DIV_EXPR,
6395 op1, c, 0)));
6396 }
6397 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6398 {
6399 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6400 *strict_overflow_p = true;
6401 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6402 fold_convert (ctype,
6403 const_binop (TRUNC_DIV_EXPR,
6404 c, op1, 0)));
6405 }
6406 }
6407 break;
6408
6409 default:
6410 break;
6411 }
6412
6413 return 0;
6414 }
6415 \f
6416 /* Return a node which has the indicated constant VALUE (either 0 or
6417 1), and is of the indicated TYPE. */
6418
6419 tree
6420 constant_boolean_node (int value, tree type)
6421 {
6422 if (type == integer_type_node)
6423 return value ? integer_one_node : integer_zero_node;
6424 else if (type == boolean_type_node)
6425 return value ? boolean_true_node : boolean_false_node;
6426 else
6427 return build_int_cst (type, value);
6428 }
6429
6430
6431 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6432 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6433 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6434 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6435 COND is the first argument to CODE; otherwise (as in the example
6436 given here), it is the second argument. TYPE is the type of the
6437 original expression. Return NULL_TREE if no simplification is
6438 possible. */
6439
6440 static tree
6441 fold_binary_op_with_conditional_arg (enum tree_code code,
6442 tree type, tree op0, tree op1,
6443 tree cond, tree arg, int cond_first_p)
6444 {
6445 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6446 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6447 tree test, true_value, false_value;
6448 tree lhs = NULL_TREE;
6449 tree rhs = NULL_TREE;
6450
6451 /* This transformation is only worthwhile if we don't have to wrap
6452 ARG in a SAVE_EXPR, and the operation can be simplified on at least
6453 one of the branches once it is pushed inside the COND_EXPR. */
6454 if (!TREE_CONSTANT (arg))
6455 return NULL_TREE;
6456
6457 if (TREE_CODE (cond) == COND_EXPR)
6458 {
6459 test = TREE_OPERAND (cond, 0);
6460 true_value = TREE_OPERAND (cond, 1);
6461 false_value = TREE_OPERAND (cond, 2);
6462 /* If this operand is a void expression (such as a throw), it does not make
6463 sense to try to perform a logical or arithmetic operation
6464 involving it. */
6465 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6466 lhs = true_value;
6467 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6468 rhs = false_value;
6469 }
6470 else
6471 {
6472 tree testtype = TREE_TYPE (cond);
6473 test = cond;
6474 true_value = constant_boolean_node (true, testtype);
6475 false_value = constant_boolean_node (false, testtype);
6476 }
6477
6478 arg = fold_convert (arg_type, arg);
6479 if (lhs == 0)
6480 {
6481 true_value = fold_convert (cond_type, true_value);
6482 if (cond_first_p)
6483 lhs = fold_build2 (code, type, true_value, arg);
6484 else
6485 lhs = fold_build2 (code, type, arg, true_value);
6486 }
6487 if (rhs == 0)
6488 {
6489 false_value = fold_convert (cond_type, false_value);
6490 if (cond_first_p)
6491 rhs = fold_build2 (code, type, false_value, arg);
6492 else
6493 rhs = fold_build2 (code, type, arg, false_value);
6494 }
6495
6496 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6497 return fold_convert (type, test);
6498 }
6499
6500 \f
6501 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6502
6503 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6504 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6505 ADDEND is the same as X.
6506
6507 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6508 and finite. The problematic cases are when X is zero, and its mode
6509 has signed zeros. In the case of rounding towards -infinity,
6510 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6511 modes, X + 0 is not the same as X because -0 + 0 is 0. */
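/* E.g., absent sign-dependent rounding, "x - 0.0" can fold to "x" even
   when x may be -0.0, since -0.0 - 0.0 is -0.0; "x + 0.0" cannot, since
   -0.0 + 0.0 is +0.0.  */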
6512
6513 bool
6514 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6515 {
6516 if (!real_zerop (addend))
6517 return false;
6518
6519 /* Don't allow the fold with -fsignaling-nans. */
6520 if (HONOR_SNANS (TYPE_MODE (type)))
6521 return false;
6522
6523 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6524 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6525 return true;
6526
6527 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6528 if (TREE_CODE (addend) == REAL_CST
6529 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6530 negate = !negate;
6531
6532 /* The mode has signed zeros, and we have to honor their sign.
6533 In this situation, there is only one case we can return true for.
6534 X - 0 is the same as X unless rounding towards -infinity must
6535 be honored. */
6536 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6537 }
6538
6539 /* Subroutine of fold() that checks comparisons of built-in math
6540 functions against real constants.
6541
6542 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6543 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6544 is the type of the result and ARG0 and ARG1 are the operands of the
6545 comparison. ARG1 must be a TREE_REAL_CST.
6546
6547 The function returns the constant folded tree if a simplification
6548 can be made, and NULL_TREE otherwise. */
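/* E.g., "sqrt (x) > 2.0" can become "x > 4.0"; for negative x, sqrt
   yields NaN and both forms are false.  */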
6549
6550 static tree
6551 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6552 tree type, tree arg0, tree arg1)
6553 {
6554 REAL_VALUE_TYPE c;
6555
6556 if (BUILTIN_SQRT_P (fcode))
6557 {
6558 tree arg = CALL_EXPR_ARG (arg0, 0);
6559 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6560
6561 c = TREE_REAL_CST (arg1);
6562 if (REAL_VALUE_NEGATIVE (c))
6563 {
6564 /* sqrt(x) < y is always false, if y is negative. */
6565 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6566 return omit_one_operand (type, integer_zero_node, arg);
6567
6568 /* sqrt(x) > y is always true, if y is negative and we
6569 don't care about NaNs, i.e. negative values of x. */
6570 if (code == NE_EXPR || !HONOR_NANS (mode))
6571 return omit_one_operand (type, integer_one_node, arg);
6572
6573 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6574 return fold_build2 (GE_EXPR, type, arg,
6575 build_real (TREE_TYPE (arg), dconst0));
6576 }
6577 else if (code == GT_EXPR || code == GE_EXPR)
6578 {
6579 REAL_VALUE_TYPE c2;
6580
6581 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6582 real_convert (&c2, mode, &c2);
6583
6584 if (REAL_VALUE_ISINF (c2))
6585 {
6586 /* sqrt(x) > y is x == +Inf, when y is very large. */
6587 if (HONOR_INFINITIES (mode))
6588 return fold_build2 (EQ_EXPR, type, arg,
6589 build_real (TREE_TYPE (arg), c2));
6590
6591 /* sqrt(x) > y is always false, when y is very large
6592 and we don't care about infinities. */
6593 return omit_one_operand (type, integer_zero_node, arg);
6594 }
6595
6596 /* sqrt(x) > c is the same as x > c*c. */
6597 return fold_build2 (code, type, arg,
6598 build_real (TREE_TYPE (arg), c2));
6599 }
6600 else if (code == LT_EXPR || code == LE_EXPR)
6601 {
6602 REAL_VALUE_TYPE c2;
6603
6604 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6605 real_convert (&c2, mode, &c2);
6606
6607 if (REAL_VALUE_ISINF (c2))
6608 {
6609 /* sqrt(x) < y is always true, when y is a very large
6610 value and we don't care about NaNs or Infinities. */
6611 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6612 return omit_one_operand (type, integer_one_node, arg);
6613
6614 /* sqrt(x) < y is x != +Inf when y is very large and we
6615 don't care about NaNs. */
6616 if (! HONOR_NANS (mode))
6617 return fold_build2 (NE_EXPR, type, arg,
6618 build_real (TREE_TYPE (arg), c2));
6619
6620 /* sqrt(x) < y is x >= 0 when y is very large and we
6621 don't care about Infinities. */
6622 if (! HONOR_INFINITIES (mode))
6623 return fold_build2 (GE_EXPR, type, arg,
6624 build_real (TREE_TYPE (arg), dconst0));
6625
6626 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6627 if (lang_hooks.decls.global_bindings_p () != 0
6628 || CONTAINS_PLACEHOLDER_P (arg))
6629 return NULL_TREE;
6630
6631 arg = save_expr (arg);
6632 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6633 fold_build2 (GE_EXPR, type, arg,
6634 build_real (TREE_TYPE (arg),
6635 dconst0)),
6636 fold_build2 (NE_EXPR, type, arg,
6637 build_real (TREE_TYPE (arg),
6638 c2)));
6639 }
6640
6641 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6642 if (! HONOR_NANS (mode))
6643 return fold_build2 (code, type, arg,
6644 build_real (TREE_TYPE (arg), c2));
6645
6646 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6647 if (lang_hooks.decls.global_bindings_p () == 0
6648 && ! CONTAINS_PLACEHOLDER_P (arg))
6649 {
6650 arg = save_expr (arg);
6651 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6652 fold_build2 (GE_EXPR, type, arg,
6653 build_real (TREE_TYPE (arg),
6654 dconst0)),
6655 fold_build2 (code, type, arg,
6656 build_real (TREE_TYPE (arg),
6657 c2)));
6658 }
6659 }
6660 }
6661
6662 return NULL_TREE;
6663 }
6664
6665 /* Subroutine of fold() that optimizes comparisons against Infinities,
6666 either +Inf or -Inf.
6667
6668 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6669 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6670 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6671
6672 The function returns the constant folded tree if a simplification
6673 can be made, and NULL_TREE otherwise. */
6674
6675 static tree
6676 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6677 {
6678 enum machine_mode mode;
6679 REAL_VALUE_TYPE max;
6680 tree temp;
6681 bool neg;
6682
6683 mode = TYPE_MODE (TREE_TYPE (arg0));
6684
6685 /* For negative infinity swap the sense of the comparison. */
6686 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6687 if (neg)
6688 code = swap_tree_comparison (code);
6689
6690 switch (code)
6691 {
6692 case GT_EXPR:
6693 /* x > +Inf is always false, if we ignore sNaNs. */
6694 if (HONOR_SNANS (mode))
6695 return NULL_TREE;
6696 return omit_one_operand (type, integer_zero_node, arg0);
6697
6698 case LE_EXPR:
6699 /* x <= +Inf is always true, if we don't care about NaNs. */
6700 if (! HONOR_NANS (mode))
6701 return omit_one_operand (type, integer_one_node, arg0);
6702
6703 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6704 if (lang_hooks.decls.global_bindings_p () == 0
6705 && ! CONTAINS_PLACEHOLDER_P (arg0))
6706 {
6707 arg0 = save_expr (arg0);
6708 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6709 }
6710 break;
6711
6712 case EQ_EXPR:
6713 case GE_EXPR:
6714 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6715 real_maxval (&max, neg, mode);
6716 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6717 arg0, build_real (TREE_TYPE (arg0), max));
6718
6719 case LT_EXPR:
6720 /* x < +Inf is always equal to x <= DBL_MAX. */
6721 real_maxval (&max, neg, mode);
6722 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6723 arg0, build_real (TREE_TYPE (arg0), max));
6724
6725 case NE_EXPR:
6726 /* x != +Inf is always equal to !(x > DBL_MAX). */
6727 real_maxval (&max, neg, mode);
6728 if (! HONOR_NANS (mode))
6729 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6730 arg0, build_real (TREE_TYPE (arg0), max));
6731
6732 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6733 arg0, build_real (TREE_TYPE (arg0), max));
6734 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6735
6736 default:
6737 break;
6738 }
6739
6740 return NULL_TREE;
6741 }
6742
6743 /* Subroutine of fold() that optimizes comparisons of a division by
6744 a nonzero integer constant against an integer constant, i.e.
6745 X/C1 op C2.
6746
6747 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6748 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6749 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6750
6751 The function returns the constant folded tree if a simplification
6752 can be made, and NULL_TREE otherwise. */
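/* E.g., with truncating division, "X / 4 == 3" holds exactly when X is
   in [12, 15], so the comparison can become the range check
   "X >= 12 && X <= 15".  */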
6753
6754 static tree
6755 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6756 {
6757 tree prod, tmp, hi, lo;
6758 tree arg00 = TREE_OPERAND (arg0, 0);
6759 tree arg01 = TREE_OPERAND (arg0, 1);
6760 unsigned HOST_WIDE_INT lpart;
6761 HOST_WIDE_INT hpart;
6762 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6763 bool neg_overflow;
6764 int overflow;
6765
6766 /* We have to do this the hard way to detect unsigned overflow.
6767 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6768 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6769 TREE_INT_CST_HIGH (arg01),
6770 TREE_INT_CST_LOW (arg1),
6771 TREE_INT_CST_HIGH (arg1),
6772 &lpart, &hpart, unsigned_p);
6773 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6774 -1, overflow);
6775 neg_overflow = false;
6776
6777 if (unsigned_p)
6778 {
6779 tmp = int_const_binop (MINUS_EXPR, arg01,
6780 build_int_cst (TREE_TYPE (arg01), 1), 0);
6781 lo = prod;
6782
6783 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6784 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6785 TREE_INT_CST_HIGH (prod),
6786 TREE_INT_CST_LOW (tmp),
6787 TREE_INT_CST_HIGH (tmp),
6788 &lpart, &hpart, unsigned_p);
6789 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6790 -1, overflow | TREE_OVERFLOW (prod));
6791 }
6792 else if (tree_int_cst_sgn (arg01) >= 0)
6793 {
6794 tmp = int_const_binop (MINUS_EXPR, arg01,
6795 build_int_cst (TREE_TYPE (arg01), 1), 0);
6796 switch (tree_int_cst_sgn (arg1))
6797 {
6798 case -1:
6799 neg_overflow = true;
6800 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6801 hi = prod;
6802 break;
6803
6804 case 0:
6805 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6806 hi = tmp;
6807 break;
6808
6809 case 1:
6810 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6811 lo = prod;
6812 break;
6813
6814 default:
6815 gcc_unreachable ();
6816 }
6817 }
6818 else
6819 {
6820 /* A negative divisor reverses the relational operators. */
6821 code = swap_tree_comparison (code);
6822
6823 tmp = int_const_binop (PLUS_EXPR, arg01,
6824 build_int_cst (TREE_TYPE (arg01), 1), 0);
6825 switch (tree_int_cst_sgn (arg1))
6826 {
6827 case -1:
6828 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6829 lo = prod;
6830 break;
6831
6832 case 0:
6833 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6834 lo = tmp;
6835 break;
6836
6837 case 1:
6838 neg_overflow = true;
6839 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6840 hi = prod;
6841 break;
6842
6843 default:
6844 gcc_unreachable ();
6845 }
6846 }
6847
6848 switch (code)
6849 {
6850 case EQ_EXPR:
6851 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6852 return omit_one_operand (type, integer_zero_node, arg00);
6853 if (TREE_OVERFLOW (hi))
6854 return fold_build2 (GE_EXPR, type, arg00, lo);
6855 if (TREE_OVERFLOW (lo))
6856 return fold_build2 (LE_EXPR, type, arg00, hi);
6857 return build_range_check (type, arg00, 1, lo, hi);
6858
6859 case NE_EXPR:
6860 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6861 return omit_one_operand (type, integer_one_node, arg00);
6862 if (TREE_OVERFLOW (hi))
6863 return fold_build2 (LT_EXPR, type, arg00, lo);
6864 if (TREE_OVERFLOW (lo))
6865 return fold_build2 (GT_EXPR, type, arg00, hi);
6866 return build_range_check (type, arg00, 0, lo, hi);
6867
6868 case LT_EXPR:
6869 if (TREE_OVERFLOW (lo))
6870 {
6871 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6872 return omit_one_operand (type, tmp, arg00);
6873 }
6874 return fold_build2 (LT_EXPR, type, arg00, lo);
6875
6876 case LE_EXPR:
6877 if (TREE_OVERFLOW (hi))
6878 {
6879 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6880 return omit_one_operand (type, tmp, arg00);
6881 }
6882 return fold_build2 (LE_EXPR, type, arg00, hi);
6883
6884 case GT_EXPR:
6885 if (TREE_OVERFLOW (hi))
6886 {
6887 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6888 return omit_one_operand (type, tmp, arg00);
6889 }
6890 return fold_build2 (GT_EXPR, type, arg00, hi);
6891
6892 case GE_EXPR:
6893 if (TREE_OVERFLOW (lo))
6894 {
6895 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6896 return omit_one_operand (type, tmp, arg00);
6897 }
6898 return fold_build2 (GE_EXPR, type, arg00, lo);
6899
6900 default:
6901 break;
6902 }
6903
6904 return NULL_TREE;
6905 }
6906
6907
6908 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6909 equality/inequality test, then return a simplified form of the test
6910 using a sign testing. Otherwise return NULL. TYPE is the desired
6911 result type. */
6912
6913 static tree
6914 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6915 tree result_type)
6916 {
6917 /* If this is testing a single bit, we can optimize the test. */
6918 if ((code == NE_EXPR || code == EQ_EXPR)
6919 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6920 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6921 {
6922 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6923 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6924 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6925
6926 if (arg00 != NULL_TREE
6927 /* This is only a win if casting to a signed type is cheap,
6928 i.e. when arg00's type is not a partial mode. */
6929 && TYPE_PRECISION (TREE_TYPE (arg00))
6930 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6931 {
6932 tree stype = signed_type_for (TREE_TYPE (arg00));
6933 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6934 result_type, fold_convert (stype, arg00),
6935 build_int_cst (stype, 0));
6936 }
6937 }
6938
6939 return NULL_TREE;
6940 }
6941
6942 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6943 equality/inequality test, then return a simplified form of
6944 the test using shifts and logical operations. Otherwise return
6945 NULL. RESULT_TYPE is the desired result type. */
6946
6947 tree
6948 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6949 tree result_type)
6950 {
6951 /* If this is testing a single bit, we can optimize the test. */
6952 if ((code == NE_EXPR || code == EQ_EXPR)
6953 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6954 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6955 {
6956 tree inner = TREE_OPERAND (arg0, 0);
6957 tree type = TREE_TYPE (arg0);
6958 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6959 enum machine_mode operand_mode = TYPE_MODE (type);
6960 int ops_unsigned;
6961 tree signed_type, unsigned_type, intermediate_type;
6962 tree tem, one;
6963
6964 /* First, see if we can fold the single bit test into a sign-bit
6965 test. */
6966 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6967 result_type);
6968 if (tem)
6969 return tem;
6970
6971 /* Otherwise we have (A & C) != 0 where C is a single bit,
6972 convert that into ((A >> C2) & 1), where C2 = log2(C).
6973 Similarly for (A & C) == 0. */
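/* E.g., "(A & 8) != 0" becomes "(A >> 3) & 1", and "(A & 8) == 0"
   becomes "((A >> 3) ^ 1) & 1".  */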
6974
6975 /* If INNER is a right shift of a constant and it plus BITNUM does
6976 not overflow, adjust BITNUM and INNER. */
6977 if (TREE_CODE (inner) == RSHIFT_EXPR
6978 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6979 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6980 && bitnum < TYPE_PRECISION (type)
6981 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6982 bitnum - TYPE_PRECISION (type)))
6983 {
6984 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6985 inner = TREE_OPERAND (inner, 0);
6986 }
6987
6988 /* If we are going to be able to omit the AND below, we must do our
6989 operations as unsigned. If we must use the AND, we have a choice.
6990 Normally unsigned is faster, but for some machines signed is. */
6991 #ifdef LOAD_EXTEND_OP
6992 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6993 && !flag_syntax_only) ? 0 : 1;
6994 #else
6995 ops_unsigned = 1;
6996 #endif
6997
6998 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6999 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7000 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7001 inner = fold_convert (intermediate_type, inner);
7002
7003 if (bitnum != 0)
7004 inner = build2 (RSHIFT_EXPR, intermediate_type,
7005 inner, size_int (bitnum));
7006
7007 one = build_int_cst (intermediate_type, 1);
7008
7009 if (code == EQ_EXPR)
7010 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
7011
7012 /* Put the AND last so it can combine with more things. */
7013 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7014
7015 /* Make sure to return the proper type. */
7016 inner = fold_convert (result_type, inner);
7017
7018 return inner;
7019 }
7020 return NULL_TREE;
7021 }
7022
7023 /* Check whether we are allowed to reorder operands arg0 and arg1,
7024 such that the evaluation of arg1 occurs before arg0. */
7025
7026 static bool
7027 reorder_operands_p (const_tree arg0, const_tree arg1)
7028 {
7029 if (! flag_evaluation_order)
7030 return true;
7031 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7032 return true;
7033 return ! TREE_SIDE_EFFECTS (arg0)
7034 && ! TREE_SIDE_EFFECTS (arg1);
7035 }
7036
7037 /* Test whether it is preferable to swap two operands, ARG0 and
7038 ARG1, for example because ARG0 is an integer constant and ARG1
7039 isn't. If REORDER is true, only recommend swapping if we can
7040 evaluate the operands in reverse order. */
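/* E.g. for a commutative operator the caller uses this to
canonicalize 1 + x as x + 1: constants are put second, then
SSA_NAMEs (ordered by version number), then DECLs. */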
7041
7042 bool
7043 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7044 {
7045 STRIP_SIGN_NOPS (arg0);
7046 STRIP_SIGN_NOPS (arg1);
7047
7048 if (TREE_CODE (arg1) == INTEGER_CST)
7049 return 0;
7050 if (TREE_CODE (arg0) == INTEGER_CST)
7051 return 1;
7052
7053 if (TREE_CODE (arg1) == REAL_CST)
7054 return 0;
7055 if (TREE_CODE (arg0) == REAL_CST)
7056 return 1;
7057
7058 if (TREE_CODE (arg1) == FIXED_CST)
7059 return 0;
7060 if (TREE_CODE (arg0) == FIXED_CST)
7061 return 1;
7062
7063 if (TREE_CODE (arg1) == COMPLEX_CST)
7064 return 0;
7065 if (TREE_CODE (arg0) == COMPLEX_CST)
7066 return 1;
7067
7068 if (TREE_CONSTANT (arg1))
7069 return 0;
7070 if (TREE_CONSTANT (arg0))
7071 return 1;
7072
7073 if (optimize_function_for_size_p (cfun))
7074 return 0;
7075
7076 if (reorder && flag_evaluation_order
7077 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7078 return 0;
7079
7080 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7081 for commutative and comparison operators. Ensuring a canonical
7082 form allows the optimizers to find additional redundancies without
7083 having to explicitly check for both orderings. */
7084 if (TREE_CODE (arg0) == SSA_NAME
7085 && TREE_CODE (arg1) == SSA_NAME
7086 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7087 return 1;
7088
7089 /* Put SSA_NAMEs last. */
7090 if (TREE_CODE (arg1) == SSA_NAME)
7091 return 0;
7092 if (TREE_CODE (arg0) == SSA_NAME)
7093 return 1;
7094
7095 /* Put variables last. */
7096 if (DECL_P (arg1))
7097 return 0;
7098 if (DECL_P (arg0))
7099 return 1;
7100
7101 return 0;
7102 }
7103
7104 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7105 ARG0 is extended to a wider type. */
7106
7107 static tree
7108 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7109 {
7110 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7111 tree arg1_unw;
7112 tree shorter_type, outer_type;
7113 tree min, max;
7114 bool above, below;
7115
7116 if (arg0_unw == arg0)
7117 return NULL_TREE;
7118 shorter_type = TREE_TYPE (arg0_unw);
7119
7120 #ifdef HAVE_canonicalize_funcptr_for_compare
7121 /* Disable this optimization if we're casting a function pointer
7122 type on targets that require function pointer canonicalization. */
7123 if (HAVE_canonicalize_funcptr_for_compare
7124 && TREE_CODE (shorter_type) == POINTER_TYPE
7125 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7126 return NULL_TREE;
7127 #endif
7128
7129 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7130 return NULL_TREE;
7131
7132 arg1_unw = get_unwidened (arg1, NULL_TREE);
7133
7134 /* If possible, express the comparison in the shorter mode. */
7135 if ((code == EQ_EXPR || code == NE_EXPR
7136 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7137 && (TREE_TYPE (arg1_unw) == shorter_type
7138 || ((TYPE_PRECISION (shorter_type)
7139 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7140 && (TYPE_UNSIGNED (shorter_type)
7141 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7142 || (TREE_CODE (arg1_unw) == INTEGER_CST
7143 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7144 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7145 && int_fits_type_p (arg1_unw, shorter_type))))
7146 return fold_build2 (code, type, arg0_unw,
7147 fold_convert (shorter_type, arg1_unw));
7148
7149 if (TREE_CODE (arg1_unw) != INTEGER_CST
7150 || TREE_CODE (shorter_type) != INTEGER_TYPE
7151 || !int_fits_type_p (arg1_unw, shorter_type))
7152 return NULL_TREE;
7153
7154 /* If we are comparing with an integer that does not fit into the range
7155 of the shorter type, the result is known. */
7156 outer_type = TREE_TYPE (arg1_unw);
7157 min = lower_bound_in_type (outer_type, shorter_type);
7158 max = upper_bound_in_type (outer_type, shorter_type);
7159
7160 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7161 max, arg1_unw));
7162 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7163 arg1_unw, min));
7164
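/* E.g. if SHORTER_TYPE is signed char and ARG1 is 300, ABOVE is true:
EQ folds to 0, NE to 1, LT/LE to 1 and GT/GE to 0, while ARG0 is
still evaluated for its side effects. */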
7165 switch (code)
7166 {
7167 case EQ_EXPR:
7168 if (above || below)
7169 return omit_one_operand (type, integer_zero_node, arg0);
7170 break;
7171
7172 case NE_EXPR:
7173 if (above || below)
7174 return omit_one_operand (type, integer_one_node, arg0);
7175 break;
7176
7177 case LT_EXPR:
7178 case LE_EXPR:
7179 if (above)
7180 return omit_one_operand (type, integer_one_node, arg0);
7181 else if (below)
7182 return omit_one_operand (type, integer_zero_node, arg0);
7183 break;

7184 case GT_EXPR:
7185 case GE_EXPR:
7186 if (above)
7187 return omit_one_operand (type, integer_zero_node, arg0);
7188 else if (below)
7189 return omit_one_operand (type, integer_one_node, arg0);
7190 break;

7191 default:
7192 break;
7193 }
7194
7195 return NULL_TREE;
7196 }
7197
7198 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7199 ARG0 just the signedness is changed. */
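/* E.g. for an unsigned int u, (int) u == 5 becomes u == 5U, since
equality is unaffected by the change of sign. */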
7200
7201 static tree
7202 fold_sign_changed_comparison (enum tree_code code, tree type,
7203 tree arg0, tree arg1)
7204 {
7205 tree arg0_inner;
7206 tree inner_type, outer_type;
7207
7208 if (!CONVERT_EXPR_P (arg0))
7209 return NULL_TREE;
7210
7211 outer_type = TREE_TYPE (arg0);
7212 arg0_inner = TREE_OPERAND (arg0, 0);
7213 inner_type = TREE_TYPE (arg0_inner);
7214
7215 #ifdef HAVE_canonicalize_funcptr_for_compare
7216 /* Disable this optimization if we're casting a function pointer
7217 type on targets that require function pointer canonicalization. */
7218 if (HAVE_canonicalize_funcptr_for_compare
7219 && TREE_CODE (inner_type) == POINTER_TYPE
7220 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7221 return NULL_TREE;
7222 #endif
7223
7224 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7225 return NULL_TREE;
7226
7227 if (TREE_CODE (arg1) != INTEGER_CST
7228 && !(CONVERT_EXPR_P (arg1)
7229 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7230 return NULL_TREE;
7231
7232 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7233 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7234 && code != NE_EXPR
7235 && code != EQ_EXPR)
7236 return NULL_TREE;
7237
7238 if (TREE_CODE (arg1) == INTEGER_CST)
7239 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7240 TREE_INT_CST_HIGH (arg1), 0,
7241 TREE_OVERFLOW (arg1));
7242 else
7243 arg1 = fold_convert (inner_type, arg1);
7244
7245 return fold_build2 (code, type, arg0_inner, arg1);
7246 }
7247
7248 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7249 the step of the array. Reconstructs s and delta in the case of s * delta
7250 being an integer constant (and thus already folded).
7251 ADDR is the address. OP1 is the multiplicative expression.
7252 If the function succeeds, the new address expression is returned. Otherwise
7253 NULL_TREE is returned. */
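/* E.g. with 4-byte int elements, &a[2] p+ 4 * i becomes &a[2 + i],
and the already folded &a[2] p+ 8 becomes &a[2 + 2]. */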
7254
7255 static tree
7256 try_move_mult_to_index (tree addr, tree op1)
7257 {
7258 tree s, delta, step;
7259 tree ref = TREE_OPERAND (addr, 0), pref;
7260 tree ret, pos;
7261 tree itype;
7262 bool mdim = false;
7263
7264 /* Strip the nops that might be added when converting op1 to sizetype. */
7265 STRIP_NOPS (op1);
7266
7267 /* Canonicalize op1 into a possibly non-constant delta
7268 and an INTEGER_CST s. */
7269 if (TREE_CODE (op1) == MULT_EXPR)
7270 {
7271 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7272
7273 STRIP_NOPS (arg0);
7274 STRIP_NOPS (arg1);
7275
7276 if (TREE_CODE (arg0) == INTEGER_CST)
7277 {
7278 s = arg0;
7279 delta = arg1;
7280 }
7281 else if (TREE_CODE (arg1) == INTEGER_CST)
7282 {
7283 s = arg1;
7284 delta = arg0;
7285 }
7286 else
7287 return NULL_TREE;
7288 }
7289 else if (TREE_CODE (op1) == INTEGER_CST)
7290 {
7291 delta = op1;
7292 s = NULL_TREE;
7293 }
7294 else
7295 {
7296 /* Pretend OP1 is DELTA * 1. */
7297 delta = op1;
7298 s = integer_one_node;
7299 }
7300
7301 for (;; ref = TREE_OPERAND (ref, 0))
7302 {
7303 if (TREE_CODE (ref) == ARRAY_REF)
7304 {
7305 /* Remember if this was a multi-dimensional array. */
7306 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7307 mdim = true;
7308
7309 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7310 if (! itype)
7311 continue;
7312
7313 step = array_ref_element_size (ref);
7314 if (TREE_CODE (step) != INTEGER_CST)
7315 continue;
7316
7317 if (s)
7318 {
7319 if (! tree_int_cst_equal (step, s))
7320 continue;
7321 }
7322 else
7323 {
7324 /* Check whether DELTA is a multiple of STEP. */
7325 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7326 if (! tmp)
7327 continue;
7328 delta = tmp;
7329 }
7330
7331 /* Only fold here if we can verify we do not overflow one
7332 dimension of a multi-dimensional array. */
7333 if (mdim)
7334 {
7335 tree tmp;
7336
7337 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7338 || !INTEGRAL_TYPE_P (itype)
7339 || !TYPE_MAX_VALUE (itype)
7340 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7341 continue;
7342
7343 tmp = fold_binary (PLUS_EXPR, itype,
7344 fold_convert (itype,
7345 TREE_OPERAND (ref, 1)),
7346 fold_convert (itype, delta));
7347 if (!tmp
7348 || TREE_CODE (tmp) != INTEGER_CST
7349 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7350 continue;
7351 }
7352
7353 break;
7354 }
7355 else
7356 mdim = false;
7357
7358 if (!handled_component_p (ref))
7359 return NULL_TREE;
7360 }
7361
7362 /* We found a suitable array reference. So copy everything up to it,
7363 and replace the index. */
7364
7365 pref = TREE_OPERAND (addr, 0);
7366 ret = copy_node (pref);
7367 pos = ret;
7368
7369 while (pref != ref)
7370 {
7371 pref = TREE_OPERAND (pref, 0);
7372 TREE_OPERAND (pos, 0) = copy_node (pref);
7373 pos = TREE_OPERAND (pos, 0);
7374 }
7375
7376 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7377 fold_convert (itype,
7378 TREE_OPERAND (pos, 1)),
7379 fold_convert (itype, delta));
7380
7381 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7382 }
7383
7384
7385 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7386 means A >= Y && A != MAX, but in this case we know that
7387 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
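/* E.g. i < n && i + 1 > j becomes i < n && i >= j: the bound i < n
guarantees that i + 1 cannot wrap around. */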
7388
7389 static tree
7390 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7391 {
7392 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7393
7394 if (TREE_CODE (bound) == LT_EXPR)
7395 a = TREE_OPERAND (bound, 0);
7396 else if (TREE_CODE (bound) == GT_EXPR)
7397 a = TREE_OPERAND (bound, 1);
7398 else
7399 return NULL_TREE;
7400
7401 typea = TREE_TYPE (a);
7402 if (!INTEGRAL_TYPE_P (typea)
7403 && !POINTER_TYPE_P (typea))
7404 return NULL_TREE;
7405
7406 if (TREE_CODE (ineq) == LT_EXPR)
7407 {
7408 a1 = TREE_OPERAND (ineq, 1);
7409 y = TREE_OPERAND (ineq, 0);
7410 }
7411 else if (TREE_CODE (ineq) == GT_EXPR)
7412 {
7413 a1 = TREE_OPERAND (ineq, 0);
7414 y = TREE_OPERAND (ineq, 1);
7415 }
7416 else
7417 return NULL_TREE;
7418
7419 if (TREE_TYPE (a1) != typea)
7420 return NULL_TREE;
7421
7422 if (POINTER_TYPE_P (typea))
7423 {
7424 /* Convert the pointers to signed integers before taking the difference. */
7425 tree ta = fold_convert (ssizetype, a);
7426 tree ta1 = fold_convert (ssizetype, a1);
7427 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7428 }
7429 else
7430 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7431
7432 if (!diff || !integer_onep (diff))
7433 return NULL_TREE;
7434
7435 return fold_build2 (GE_EXPR, type, a, y);
7436 }
7437
7438 /* Fold a sum or difference of at least one multiplication.
7439 Returns the folded tree or NULL if no simplification could be made. */
7440
7441 static tree
7442 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7443 {
7444 tree arg00, arg01, arg10, arg11;
7445 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7446
7447 /* (A * C) +- (B * C) -> (A+-B) * C.
7448 (A * C) +- A -> A * (C+-1).
7449 We are most concerned about the case where C is a constant,
7450 but other combinations show up during loop reduction. Since
7451 it is not difficult, try all four possibilities. */
7452
7453 if (TREE_CODE (arg0) == MULT_EXPR)
7454 {
7455 arg00 = TREE_OPERAND (arg0, 0);
7456 arg01 = TREE_OPERAND (arg0, 1);
7457 }
7458 else if (TREE_CODE (arg0) == INTEGER_CST)
7459 {
7460 arg00 = build_one_cst (type);
7461 arg01 = arg0;
7462 }
7463 else
7464 {
7465 /* We cannot generate constant 1 for fract. */
7466 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7467 return NULL_TREE;
7468 arg00 = arg0;
7469 arg01 = build_one_cst (type);
7470 }
7471 if (TREE_CODE (arg1) == MULT_EXPR)
7472 {
7473 arg10 = TREE_OPERAND (arg1, 0);
7474 arg11 = TREE_OPERAND (arg1, 1);
7475 }
7476 else if (TREE_CODE (arg1) == INTEGER_CST)
7477 {
7478 arg10 = build_one_cst (type);
7479 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7480 the purpose of this canonicalization. */
7481 if (TREE_INT_CST_HIGH (arg1) == -1
7482 && negate_expr_p (arg1)
7483 && code == PLUS_EXPR)
7484 {
7485 arg11 = negate_expr (arg1);
7486 code = MINUS_EXPR;
7487 }
7488 else
7489 arg11 = arg1;
7490 }
7491 else
7492 {
7493 /* We cannot generate constant 1 for fract. */
7494 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7495 return NULL_TREE;
7496 arg10 = arg1;
7497 arg11 = build_one_cst (type);
7498 }
7499 same = NULL_TREE;
7500
7501 if (operand_equal_p (arg01, arg11, 0))
7502 same = arg01, alt0 = arg00, alt1 = arg10;
7503 else if (operand_equal_p (arg00, arg10, 0))
7504 same = arg00, alt0 = arg01, alt1 = arg11;
7505 else if (operand_equal_p (arg00, arg11, 0))
7506 same = arg00, alt0 = arg01, alt1 = arg10;
7507 else if (operand_equal_p (arg01, arg10, 0))
7508 same = arg01, alt0 = arg00, alt1 = arg11;
7509
7510 /* No identical multiplicands; see if we can find a common
7511 power-of-two factor in non-power-of-two multiplies. This
7512 can help in multi-dimensional array access. */
7513 else if (host_integerp (arg01, 0)
7514 && host_integerp (arg11, 0))
7515 {
7516 HOST_WIDE_INT int01, int11, tmp;
7517 bool swap = false;
7518 tree maybe_same;
7519 int01 = TREE_INT_CST_LOW (arg01);
7520 int11 = TREE_INT_CST_LOW (arg11);
7521
7522 /* Move min of absolute values to int11. */
7523 if ((int01 >= 0 ? int01 : -int01)
7524 < (int11 >= 0 ? int11 : -int11))
7525 {
7526 tmp = int01, int01 = int11, int11 = tmp;
7527 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7528 maybe_same = arg01;
7529 swap = true;
7530 }
7531 else
7532 maybe_same = arg11;
7533
7534 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7535 /* The other multiplicand should not be a constant, otherwise
7536 we would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7537 would increase the number of multiplications necessary. */
7538 && TREE_CODE (arg10) != INTEGER_CST)
7539 {
7540 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7541 build_int_cst (TREE_TYPE (arg00),
7542 int01 / int11));
7543 alt1 = arg10;
7544 same = maybe_same;
7545 if (swap)
7546 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7547 }
7548 }
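/* E.g. i * 12 + j * 4 becomes (i * 3 + j) * 4 here, exposing the
common factor 4 so it can combine with addressing arithmetic. */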
7549
7550 if (same)
7551 return fold_build2 (MULT_EXPR, type,
7552 fold_build2 (code, type,
7553 fold_convert (type, alt0),
7554 fold_convert (type, alt1)),
7555 fold_convert (type, same));
7556
7557 return NULL_TREE;
7558 }
7559
7560 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7561 specified by EXPR into the buffer PTR of length LEN bytes.
7562 Return the number of bytes placed in the buffer, or zero
7563 upon failure. */
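/* E.g. the 32-bit INTEGER_CST 0x01020304 is encoded as the bytes
04 03 02 01 on a little-endian target and as 01 02 03 04 on a
big-endian one. */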
7564
7565 static int
7566 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7567 {
7568 tree type = TREE_TYPE (expr);
7569 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7570 int byte, offset, word, words;
7571 unsigned char value;
7572
7573 if (total_bytes > len)
7574 return 0;
7575 words = total_bytes / UNITS_PER_WORD;
7576
7577 for (byte = 0; byte < total_bytes; byte++)
7578 {
7579 int bitpos = byte * BITS_PER_UNIT;
7580 if (bitpos < HOST_BITS_PER_WIDE_INT)
7581 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7582 else
7583 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7584 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7585
7586 if (total_bytes > UNITS_PER_WORD)
7587 {
7588 word = byte / UNITS_PER_WORD;
7589 if (WORDS_BIG_ENDIAN)
7590 word = (words - 1) - word;
7591 offset = word * UNITS_PER_WORD;
7592 if (BYTES_BIG_ENDIAN)
7593 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7594 else
7595 offset += byte % UNITS_PER_WORD;
7596 }
7597 else
7598 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7599 ptr[offset] = value;
7600 }
7601 return total_bytes;
7602 }
7603
7604
7605 /* Subroutine of native_encode_expr. Encode the REAL_CST
7606 specified by EXPR into the buffer PTR of length LEN bytes.
7607 Return the number of bytes placed in the buffer, or zero
7608 upon failure. */
7609
7610 static int
7611 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7612 {
7613 tree type = TREE_TYPE (expr);
7614 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7615 int byte, offset, word, words, bitpos;
7616 unsigned char value;
7617
7618 /* There are always 32 bits in each long, no matter the size of
7619 the host's long. We handle floating point representations with
7620 up to 192 bits. */
7621 long tmp[6];
7622
7623 if (total_bytes > len)
7624 return 0;
7625 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7626
7627 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7628
7629 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7630 bitpos += BITS_PER_UNIT)
7631 {
7632 byte = (bitpos / BITS_PER_UNIT) & 3;
7633 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7634
7635 if (UNITS_PER_WORD < 4)
7636 {
7637 word = byte / UNITS_PER_WORD;
7638 if (WORDS_BIG_ENDIAN)
7639 word = (words - 1) - word;
7640 offset = word * UNITS_PER_WORD;
7641 if (BYTES_BIG_ENDIAN)
7642 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7643 else
7644 offset += byte % UNITS_PER_WORD;
7645 }
7646 else
7647 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7648 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7649 }
7650 return total_bytes;
7651 }
7652
7653 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7654 specified by EXPR into the buffer PTR of length LEN bytes.
7655 Return the number of bytes placed in the buffer, or zero
7656 upon failure. */
7657
7658 static int
7659 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7660 {
7661 int rsize, isize;
7662 tree part;
7663
7664 part = TREE_REALPART (expr);
7665 rsize = native_encode_expr (part, ptr, len);
7666 if (rsize == 0)
7667 return 0;
7668 part = TREE_IMAGPART (expr);
7669 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7670 if (isize != rsize)
7671 return 0;
7672 return rsize + isize;
7673 }
7674
7675
7676 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7677 specified by EXPR into the buffer PTR of length LEN bytes.
7678 Return the number of bytes placed in the buffer, or zero
7679 upon failure. */
7680
7681 static int
7682 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7683 {
7684 int i, size, offset, count;
7685 tree itype, elem, elements;
7686
7687 offset = 0;
7688 elements = TREE_VECTOR_CST_ELTS (expr);
7689 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7690 itype = TREE_TYPE (TREE_TYPE (expr));
7691 size = GET_MODE_SIZE (TYPE_MODE (itype));
7692 for (i = 0; i < count; i++)
7693 {
7694 if (elements)
7695 {
7696 elem = TREE_VALUE (elements);
7697 elements = TREE_CHAIN (elements);
7698 }
7699 else
7700 elem = NULL_TREE;
7701
7702 if (elem)
7703 {
7704 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7705 return 0;
7706 }
7707 else
7708 {
7709 if (offset + size > len)
7710 return 0;
7711 memset (ptr+offset, 0, size);
7712 }
7713 offset += size;
7714 }
7715 return offset;
7716 }
7717
7718
7719 /* Subroutine of native_encode_expr. Encode the STRING_CST
7720 specified by EXPR into the buffer PTR of length LEN bytes.
7721 Return the number of bytes placed in the buffer, or zero
7722 upon failure. */
7723
7724 static int
7725 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7726 {
7727 tree type = TREE_TYPE (expr);
7728 HOST_WIDE_INT total_bytes;
7729
7730 if (TREE_CODE (type) != ARRAY_TYPE
7731 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7732 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7733 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7734 return 0;
7735 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7736 if (total_bytes > len)
7737 return 0;
7738 if (TREE_STRING_LENGTH (expr) < total_bytes)
7739 {
7740 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7741 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7742 total_bytes - TREE_STRING_LENGTH (expr));
7743 }
7744 else
7745 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7746 return total_bytes;
7747 }
7748
7749
7750 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7751 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7752 buffer PTR of length LEN bytes. Return the number of bytes
7753 placed in the buffer, or zero upon failure. */
7754
7755 int
7756 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7757 {
7758 switch (TREE_CODE (expr))
7759 {
7760 case INTEGER_CST:
7761 return native_encode_int (expr, ptr, len);
7762
7763 case REAL_CST:
7764 return native_encode_real (expr, ptr, len);
7765
7766 case COMPLEX_CST:
7767 return native_encode_complex (expr, ptr, len);
7768
7769 case VECTOR_CST:
7770 return native_encode_vector (expr, ptr, len);
7771
7772 case STRING_CST:
7773 return native_encode_string (expr, ptr, len);
7774
7775 default:
7776 return 0;
7777 }
7778 }
7779
7780
7781 /* Subroutine of native_interpret_expr. Interpret the contents of
7782 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7783 If the buffer cannot be interpreted, return NULL_TREE. */
7784
7785 static tree
7786 native_interpret_int (tree type, const unsigned char *ptr, int len)
7787 {
7788 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7789 int byte, offset, word, words;
7790 unsigned char value;
7791 unsigned HOST_WIDE_INT lo = 0;
7792 HOST_WIDE_INT hi = 0;
7793
7794 if (total_bytes > len)
7795 return NULL_TREE;
7796 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7797 return NULL_TREE;
7798 words = total_bytes / UNITS_PER_WORD;
7799
7800 for (byte = 0; byte < total_bytes; byte++)
7801 {
7802 int bitpos = byte * BITS_PER_UNIT;
7803 if (total_bytes > UNITS_PER_WORD)
7804 {
7805 word = byte / UNITS_PER_WORD;
7806 if (WORDS_BIG_ENDIAN)
7807 word = (words - 1) - word;
7808 offset = word * UNITS_PER_WORD;
7809 if (BYTES_BIG_ENDIAN)
7810 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7811 else
7812 offset += byte % UNITS_PER_WORD;
7813 }
7814 else
7815 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7816 value = ptr[offset];
7817
7818 if (bitpos < HOST_BITS_PER_WIDE_INT)
7819 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7820 else
7821 hi |= (unsigned HOST_WIDE_INT) value
7822 << (bitpos - HOST_BITS_PER_WIDE_INT);
7823 }
7824
7825 return build_int_cst_wide_type (type, lo, hi);
7826 }
7827
7828
7829 /* Subroutine of native_interpret_expr. Interpret the contents of
7830 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7831 If the buffer cannot be interpreted, return NULL_TREE. */
7832
7833 static tree
7834 native_interpret_real (tree type, const unsigned char *ptr, int len)
7835 {
7836 enum machine_mode mode = TYPE_MODE (type);
7837 int total_bytes = GET_MODE_SIZE (mode);
7838 int byte, offset, word, words, bitpos;
7839 unsigned char value;
7840 /* There are always 32 bits in each long, no matter the size of
7841 the host's long. We handle floating point representations with
7842 up to 192 bits. */
7843 REAL_VALUE_TYPE r;
7844 long tmp[6];
7845
7847 if (total_bytes > len || total_bytes > 24)
7848 return NULL_TREE;
7849 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7850
7851 memset (tmp, 0, sizeof (tmp));
7852 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7853 bitpos += BITS_PER_UNIT)
7854 {
7855 byte = (bitpos / BITS_PER_UNIT) & 3;
7856 if (UNITS_PER_WORD < 4)
7857 {
7858 word = byte / UNITS_PER_WORD;
7859 if (WORDS_BIG_ENDIAN)
7860 word = (words - 1) - word;
7861 offset = word * UNITS_PER_WORD;
7862 if (BYTES_BIG_ENDIAN)
7863 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7864 else
7865 offset += byte % UNITS_PER_WORD;
7866 }
7867 else
7868 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7869 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7870
7871 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7872 }
7873
7874 real_from_target (&r, tmp, mode);
7875 return build_real (type, r);
7876 }
7877
7878
7879 /* Subroutine of native_interpret_expr. Interpret the contents of
7880 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7881 If the buffer cannot be interpreted, return NULL_TREE. */
7882
7883 static tree
7884 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7885 {
7886 tree etype, rpart, ipart;
7887 int size;
7888
7889 etype = TREE_TYPE (type);
7890 size = GET_MODE_SIZE (TYPE_MODE (etype));
7891 if (size * 2 > len)
7892 return NULL_TREE;
7893 rpart = native_interpret_expr (etype, ptr, size);
7894 if (!rpart)
7895 return NULL_TREE;
7896 ipart = native_interpret_expr (etype, ptr+size, size);
7897 if (!ipart)
7898 return NULL_TREE;
7899 return build_complex (type, rpart, ipart);
7900 }
7901
7902
7903 /* Subroutine of native_interpret_expr. Interpret the contents of
7904 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7905 If the buffer cannot be interpreted, return NULL_TREE. */
7906
7907 static tree
7908 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7909 {
7910 tree etype, elem, elements;
7911 int i, size, count;
7912
7913 etype = TREE_TYPE (type);
7914 size = GET_MODE_SIZE (TYPE_MODE (etype));
7915 count = TYPE_VECTOR_SUBPARTS (type);
7916 if (size * count > len)
7917 return NULL_TREE;
7918
7919 elements = NULL_TREE;
7920 for (i = count - 1; i >= 0; i--)
7921 {
7922 elem = native_interpret_expr (etype, ptr+(i*size), size);
7923 if (!elem)
7924 return NULL_TREE;
7925 elements = tree_cons (NULL_TREE, elem, elements);
7926 }
7927 return build_vector (type, elements);
7928 }
7929
7930
7931 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7932 the buffer PTR of length LEN as a constant of type TYPE. For
7933 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7934 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7935 return NULL_TREE. */
7936
7937 tree
7938 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7939 {
7940 switch (TREE_CODE (type))
7941 {
7942 case INTEGER_TYPE:
7943 case ENUMERAL_TYPE:
7944 case BOOLEAN_TYPE:
7945 return native_interpret_int (type, ptr, len);
7946
7947 case REAL_TYPE:
7948 return native_interpret_real (type, ptr, len);
7949
7950 case COMPLEX_TYPE:
7951 return native_interpret_complex (type, ptr, len);
7952
7953 case VECTOR_TYPE:
7954 return native_interpret_vector (type, ptr, len);
7955
7956 default:
7957 return NULL_TREE;
7958 }
7959 }
7960
7961
7962 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7963 TYPE at compile-time. If we're unable to perform the conversion
7964 return NULL_TREE. */
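/* E.g. on a target with IEEE single-precision floats this folds
VIEW_CONVERT_EXPR<int>(1.0f) to 0x3f800000, by encoding the
REAL_CST into BUFFER and interpreting the bytes back as an int. */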
7965
7966 static tree
7967 fold_view_convert_expr (tree type, tree expr)
7968 {
7969 /* We support up to 512-bit values (for V8DFmode). */
7970 unsigned char buffer[64];
7971 int len;
7972
7973 /* Check that the host and target are sane. */
7974 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7975 return NULL_TREE;
7976
7977 len = native_encode_expr (expr, buffer, sizeof (buffer));
7978 if (len == 0)
7979 return NULL_TREE;
7980
7981 return native_interpret_expr (type, buffer, len);
7982 }
7983
7984 /* Build an expression for the address of T. Folds away INDIRECT_REF
7985 to avoid confusing the gimplify process. */
7986
7987 tree
7988 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7989 {
7990 /* The size of the object is not relevant when talking about its address. */
7991 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7992 t = TREE_OPERAND (t, 0);
7993
7994 /* Note: this does not apply to ALIGN_INDIRECT_REF. */
7995 if (TREE_CODE (t) == INDIRECT_REF
7996 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7997 {
7998 t = TREE_OPERAND (t, 0);
7999
8000 if (TREE_TYPE (t) != ptrtype)
8001 t = build1 (NOP_EXPR, ptrtype, t);
8002 }
8003 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8004 {
8005 t = build_fold_addr_expr (TREE_OPERAND (t, 0));
8006
8007 if (TREE_TYPE (t) != ptrtype)
8008 t = fold_convert (ptrtype, t);
8009 }
8010 else
8011 t = build1 (ADDR_EXPR, ptrtype, t);
8012
8013 return t;
8014 }
8015
8016 /* Build an expression for the address of T. */
8017
8018 tree
8019 build_fold_addr_expr (tree t)
8020 {
8021 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8022
8023 return build_fold_addr_expr_with_type (t, ptrtype);
8024 }
8025
8026 /* Fold a unary expression of code CODE and type TYPE with operand
8027 OP0. Return the folded expression if folding is successful.
8028 Otherwise, return NULL_TREE. */
8029
8030 tree
8031 fold_unary (enum tree_code code, tree type, tree op0)
8032 {
8033 tree tem;
8034 tree arg0;
8035 enum tree_code_class kind = TREE_CODE_CLASS (code);
8036
8037 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8038 && TREE_CODE_LENGTH (code) == 1);
8039
8040 arg0 = op0;
8041 if (arg0)
8042 {
8043 if (CONVERT_EXPR_CODE_P (code)
8044 || code == FLOAT_EXPR || code == ABS_EXPR)
8045 {
8046 /* Don't use STRIP_NOPS, because signedness of argument type
8047 matters. */
8048 STRIP_SIGN_NOPS (arg0);
8049 }
8050 else
8051 {
8052 /* Strip any conversions that don't change the mode. This
8053 is safe for every expression, except for a comparison
8054 expression because its signedness is derived from its
8055 operands.
8056
8057 Note that this is done as an internal manipulation within
8058 the constant folder, in order to find the simplest
8059 representation of the arguments so that their form can be
8060 studied. In any case, the appropriate type conversions
8061 should be put back in the tree that will get out of the
8062 constant folder. */
8063 STRIP_NOPS (arg0);
8064 }
8065 }
8066
8067 if (TREE_CODE_CLASS (code) == tcc_unary)
8068 {
8069 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8070 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8071 fold_build1 (code, type,
8072 fold_convert (TREE_TYPE (op0),
8073 TREE_OPERAND (arg0, 1))));
8074 else if (TREE_CODE (arg0) == COND_EXPR)
8075 {
8076 tree arg01 = TREE_OPERAND (arg0, 1);
8077 tree arg02 = TREE_OPERAND (arg0, 2);
8078 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8079 arg01 = fold_build1 (code, type,
8080 fold_convert (TREE_TYPE (op0), arg01));
8081 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8082 arg02 = fold_build1 (code, type,
8083 fold_convert (TREE_TYPE (op0), arg02));
8084 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8085 arg01, arg02);
8086
8087 /* If this was a conversion, and all we did was to move it
8088 inside the COND_EXPR, bring it back out. But leave it if
8089 it is a conversion from integer to integer and the
8090 result precision is no wider than a word since such a
8091 conversion is cheap and may be optimized away by combine,
8092 while it couldn't if it were outside the COND_EXPR. Then return
8093 so we don't get into an infinite recursion loop taking the
8094 conversion out and then back in. */
8095
8096 if ((CONVERT_EXPR_CODE_P (code)
8097 || code == NON_LVALUE_EXPR)
8098 && TREE_CODE (tem) == COND_EXPR
8099 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8100 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8101 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
8102 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
8103 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8104 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8105 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8106 && (INTEGRAL_TYPE_P
8107 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8108 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8109 || flag_syntax_only))
8110 tem = build1 (code, type,
8111 build3 (COND_EXPR,
8112 TREE_TYPE (TREE_OPERAND
8113 (TREE_OPERAND (tem, 1), 0)),
8114 TREE_OPERAND (tem, 0),
8115 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8116 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8117 return tem;
8118 }
8119 else if (COMPARISON_CLASS_P (arg0))
8120 {
8121 if (TREE_CODE (type) == BOOLEAN_TYPE)
8122 {
8123 arg0 = copy_node (arg0);
8124 TREE_TYPE (arg0) = type;
8125 return arg0;
8126 }
8127 else if (TREE_CODE (type) != INTEGER_TYPE)
8128 return fold_build3 (COND_EXPR, type, arg0,
8129 fold_build1 (code, type,
8130 integer_one_node),
8131 fold_build1 (code, type,
8132 integer_zero_node));
8133 }
8134 }
8135
8136 switch (code)
8137 {
8138 case PAREN_EXPR:
8139 /* Re-association barriers around constants and other re-association
8140 barriers can be removed. */
8141 if (CONSTANT_CLASS_P (op0)
8142 || TREE_CODE (op0) == PAREN_EXPR)
8143 return fold_convert (type, op0);
8144 return NULL_TREE;
8145
8146 CASE_CONVERT:
8147 case FLOAT_EXPR:
8148 case FIX_TRUNC_EXPR:
8149 if (TREE_TYPE (op0) == type)
8150 return op0;
8151
8152 /* If we have (type) (a CMP b) and type is an integral type, return
8153 new expression involving the new type. */
8154 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8155 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8156 TREE_OPERAND (op0, 1));
8157
8158 /* Handle cases of two conversions in a row. */
8159 if (CONVERT_EXPR_P (op0))
8160 {
8161 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8162 tree inter_type = TREE_TYPE (op0);
8163 int inside_int = INTEGRAL_TYPE_P (inside_type);
8164 int inside_ptr = POINTER_TYPE_P (inside_type);
8165 int inside_float = FLOAT_TYPE_P (inside_type);
8166 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8167 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8168 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8169 int inter_int = INTEGRAL_TYPE_P (inter_type);
8170 int inter_ptr = POINTER_TYPE_P (inter_type);
8171 int inter_float = FLOAT_TYPE_P (inter_type);
8172 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8173 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8174 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8175 int final_int = INTEGRAL_TYPE_P (type);
8176 int final_ptr = POINTER_TYPE_P (type);
8177 int final_float = FLOAT_TYPE_P (type);
8178 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8179 unsigned int final_prec = TYPE_PRECISION (type);
8180 int final_unsignedp = TYPE_UNSIGNED (type);
8181
8182 /* In addition to the cases of two conversions in a row
8183 handled below, if we are converting something to its own
8184 type via an object of identical or wider precision, neither
8185 conversion is needed. */
8186 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8187 && (((inter_int || inter_ptr) && final_int)
8188 || (inter_float && final_float))
8189 && inter_prec >= final_prec)
8190 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8191
8192 /* Likewise, if the intermediate and initial types are either both
8193 float or both integer, we don't need the middle conversion if the
8194 former is wider than the latter and doesn't change the signedness
8195 (for integers). Avoid this if the final type is a pointer since
8196 then we sometimes need the middle conversion. Likewise if the
8197 final type has a precision not equal to the size of its mode. */
8198 if (((inter_int && inside_int)
8199 || (inter_float && inside_float)
8200 || (inter_vec && inside_vec))
8201 && inter_prec >= inside_prec
8202 && (inter_float || inter_vec
8203 || inter_unsignedp == inside_unsignedp)
8204 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8205 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8206 && ! final_ptr
8207 && (! final_vec || inter_prec == inside_prec))
8208 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8209
8210 /* If we have a sign-extension of a zero-extended value, we can
8211 replace that by a single zero-extension. */
8212 if (inside_int && inter_int && final_int
8213 && inside_prec < inter_prec && inter_prec < final_prec
8214 && inside_unsignedp && !inter_unsignedp)
8215 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8216
8217 /* Two conversions in a row are not needed unless:
8218 - some conversion is floating-point (overstrict for now), or
8219 - some conversion is a vector (overstrict for now), or
8220 - the intermediate type is narrower than both initial and
8221 final, or
8222 - the intermediate type and innermost type differ in signedness,
8223 and the outermost type is wider than the intermediate, or
8224 - the initial type is a pointer type and the precisions of the
8225 intermediate and final types differ, or
8226 - the final type is a pointer type and the precisions of the
8227 initial and intermediate types differ. */
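/* E.g. with 32-bit int and 64-bit long, (int)(long)x for an
unsigned int x meets none of the exceptions above, so the
intermediate widening to long is dropped, leaving (int)x. */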
8228 if (! inside_float && ! inter_float && ! final_float
8229 && ! inside_vec && ! inter_vec && ! final_vec
8230 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8231 && ! (inside_int && inter_int
8232 && inter_unsignedp != inside_unsignedp
8233 && inter_prec < final_prec)
8234 && ((inter_unsignedp && inter_prec > inside_prec)
8235 == (final_unsignedp && final_prec > inter_prec))
8236 && ! (inside_ptr && inter_prec != final_prec)
8237 && ! (final_ptr && inside_prec != inter_prec)
8238 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8239 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8240 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8241 }
8242
8243 /* Handle (T *)&A.B.C for A being of type T and B and C
8244 living at offset zero. This occurs frequently in
8245 C++ upcasting and then accessing the base. */
8246 if (TREE_CODE (op0) == ADDR_EXPR
8247 && POINTER_TYPE_P (type)
8248 && handled_component_p (TREE_OPERAND (op0, 0)))
8249 {
8250 HOST_WIDE_INT bitsize, bitpos;
8251 tree offset;
8252 enum machine_mode mode;
8253 int unsignedp, volatilep;
8254 tree base = TREE_OPERAND (op0, 0);
8255 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8256 &mode, &unsignedp, &volatilep, false);
8257 /* If the reference was to a (constant) zero offset, we can use
8258 the address of the base if it has the same base type
8259 as the result type. */
8260 if (! offset && bitpos == 0
8261 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8262 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8263 return fold_convert (type, build_fold_addr_expr (base));
8264 }
8265
8266 if (TREE_CODE (op0) == MODIFY_EXPR
8267 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8268 /* Detect assigning a bitfield. */
8269 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8270 && DECL_BIT_FIELD
8271 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8272 {
8273 /* Don't leave an assignment inside a conversion
8274 unless assigning a bitfield. */
8275 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8276 /* First do the assignment, then return converted constant. */
8277 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8278 TREE_NO_WARNING (tem) = 1;
8279 TREE_USED (tem) = 1;
8280 return tem;
8281 }
8282
8283 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8284 constant (if x has signed type, the sign bit cannot be set
8285 in c). This folds extension into the BIT_AND_EXPR.
8286 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8287 very likely don't have maximal range for their precision and this
8288 transformation effectively doesn't preserve non-maximal ranges. */
8289 if (TREE_CODE (type) == INTEGER_TYPE
8290 && TREE_CODE (op0) == BIT_AND_EXPR
8291 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8292 {
8293 tree and_expr = op0;
8294 tree and0 = TREE_OPERAND (and_expr, 0);
8295 tree and1 = TREE_OPERAND (and_expr, 1);
8296 int change = 0;
8297
8298 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8299 || (TYPE_PRECISION (type)
8300 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8301 change = 1;
8302 else if (TYPE_PRECISION (TREE_TYPE (and1))
8303 <= HOST_BITS_PER_WIDE_INT
8304 && host_integerp (and1, 1))
8305 {
8306 unsigned HOST_WIDE_INT cst;
8307
8308 cst = tree_low_cst (and1, 1);
8309 cst &= (HOST_WIDE_INT) -1
8310 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8311 change = (cst == 0);
8312 #ifdef LOAD_EXTEND_OP
8313 if (change
8314 && !flag_syntax_only
8315 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8316 == ZERO_EXTEND))
8317 {
8318 tree uns = unsigned_type_for (TREE_TYPE (and0));
8319 and0 = fold_convert (uns, and0);
8320 and1 = fold_convert (uns, and1);
8321 }
8322 #endif
8323 }
8324 if (change)
8325 {
8326 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8327 TREE_INT_CST_HIGH (and1), 0,
8328 TREE_OVERFLOW (and1));
8329 return fold_build2 (BIT_AND_EXPR, type,
8330 fold_convert (type, and0), tem);
8331 }
8332 }
8333
8334 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8335 when one of the new casts will fold away. Conservatively we assume
8336 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8337 if (POINTER_TYPE_P (type)
8338 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8339 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8340 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8341 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8342 {
8343 tree arg00 = TREE_OPERAND (arg0, 0);
8344 tree arg01 = TREE_OPERAND (arg0, 1);
8345
8346 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8347 fold_convert (sizetype, arg01));
8348 }
8349
8350 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8351 of the same precision, and X has an integer type not narrower than
8352 T1 or T2, i.e. the cast (T2)X isn't an extension. */
8353 if (INTEGRAL_TYPE_P (type)
8354 && TREE_CODE (op0) == BIT_NOT_EXPR
8355 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8356 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8357 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8358 {
8359 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8360 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8361 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8362 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8363 }
8364
8365 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8366 type of X and Y (integer types only). */
8367 if (INTEGRAL_TYPE_P (type)
8368 && TREE_CODE (op0) == MULT_EXPR
8369 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8370 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8371 {
8372 /* Be careful not to introduce new overflows. */
8373 tree mult_type;
8374 if (TYPE_OVERFLOW_WRAPS (type))
8375 mult_type = type;
8376 else
8377 mult_type = unsigned_type_for (type);
8378
8379 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8380 {
8381 tem = fold_build2 (MULT_EXPR, mult_type,
8382 fold_convert (mult_type,
8383 TREE_OPERAND (op0, 0)),
8384 fold_convert (mult_type,
8385 TREE_OPERAND (op0, 1)));
8386 return fold_convert (type, tem);
8387 }
8388 }
8389
8390 tem = fold_convert_const (code, type, op0);
8391 return tem ? tem : NULL_TREE;
8392
8393 case FIXED_CONVERT_EXPR:
8394 tem = fold_convert_const (code, type, arg0);
8395 return tem ? tem : NULL_TREE;
8396
8397 case VIEW_CONVERT_EXPR:
8398 if (TREE_TYPE (op0) == type)
8399 return op0;
8400 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8401 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8402
8403 /* For integral conversions with the same precision or pointer
8404 conversions use a NOP_EXPR instead. */
8405 if ((INTEGRAL_TYPE_P (type)
8406 || POINTER_TYPE_P (type))
8407 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8408 || POINTER_TYPE_P (TREE_TYPE (op0)))
8409 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8410 return fold_convert (type, op0);
8411
8412 /* Strip inner integral conversions that do not change the precision. */
8413 if (CONVERT_EXPR_P (op0)
8414 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8415 || POINTER_TYPE_P (TREE_TYPE (op0)))
8416 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8417 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8418 && (TYPE_PRECISION (TREE_TYPE (op0))
8419 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8420 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8421
8422 return fold_view_convert_expr (type, op0);
8423
8424 case NEGATE_EXPR:
8425 tem = fold_negate_expr (arg0);
8426 if (tem)
8427 return fold_convert (type, tem);
8428 return NULL_TREE;
8429
8430 case ABS_EXPR:
8431 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8432 return fold_abs_const (arg0, type);
8433 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8434 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8435 /* Convert fabs((double)float) into (double)fabsf(float). */
8436 else if (TREE_CODE (arg0) == NOP_EXPR
8437 && TREE_CODE (type) == REAL_TYPE)
8438 {
8439 tree targ0 = strip_float_extensions (arg0);
8440 if (targ0 != arg0)
8441 return fold_convert (type, fold_build1 (ABS_EXPR,
8442 TREE_TYPE (targ0),
8443 targ0));
8444 }
8445 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8446 else if (TREE_CODE (arg0) == ABS_EXPR)
8447 return arg0;
8448 else if (tree_expr_nonnegative_p (arg0))
8449 return arg0;
8450
8451 /* Strip sign ops from argument. */
8452 if (TREE_CODE (type) == REAL_TYPE)
8453 {
8454 tem = fold_strip_sign_ops (arg0);
8455 if (tem)
8456 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8457 }
8458 return NULL_TREE;
8459
8460 case CONJ_EXPR:
8461 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8462 return fold_convert (type, arg0);
8463 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8464 {
8465 tree itype = TREE_TYPE (type);
8466 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8467 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8468 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8469 }
8470 if (TREE_CODE (arg0) == COMPLEX_CST)
8471 {
8472 tree itype = TREE_TYPE (type);
8473 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8474 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8475 return build_complex (type, rpart, negate_expr (ipart));
8476 }
8477 if (TREE_CODE (arg0) == CONJ_EXPR)
8478 return fold_convert (type, TREE_OPERAND (arg0, 0));
8479 return NULL_TREE;
8480
8481 case BIT_NOT_EXPR:
8482 if (TREE_CODE (arg0) == INTEGER_CST)
8483 return fold_not_const (arg0, type);
8484 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8485 return fold_convert (type, TREE_OPERAND (arg0, 0));
8486 /* Convert ~ (-A) to A - 1. */
8487 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8488 return fold_build2 (MINUS_EXPR, type,
8489 fold_convert (type, TREE_OPERAND (arg0, 0)),
8490 build_int_cst (type, 1));
8491 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8492 else if (INTEGRAL_TYPE_P (type)
8493 && ((TREE_CODE (arg0) == MINUS_EXPR
8494 && integer_onep (TREE_OPERAND (arg0, 1)))
8495 || (TREE_CODE (arg0) == PLUS_EXPR
8496 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8497 return fold_build1 (NEGATE_EXPR, type,
8498 fold_convert (type, TREE_OPERAND (arg0, 0)));
8499 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8500 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8501 && (tem = fold_unary (BIT_NOT_EXPR, type,
8502 fold_convert (type,
8503 TREE_OPERAND (arg0, 0)))))
8504 return fold_build2 (BIT_XOR_EXPR, type, tem,
8505 fold_convert (type, TREE_OPERAND (arg0, 1)));
8506 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8507 && (tem = fold_unary (BIT_NOT_EXPR, type,
8508 fold_convert (type,
8509 TREE_OPERAND (arg0, 1)))))
8510 return fold_build2 (BIT_XOR_EXPR, type,
8511 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8512 /* Perform BIT_NOT_EXPR on each element individually. */
8513 else if (TREE_CODE (arg0) == VECTOR_CST)
8514 {
8515 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8516 int count = TYPE_VECTOR_SUBPARTS (type), i;
8517
8518 for (i = 0; i < count; i++)
8519 {
8520 if (elements)
8521 {
8522 elem = TREE_VALUE (elements);
8523 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8524 if (elem == NULL_TREE)
8525 break;
8526 elements = TREE_CHAIN (elements);
8527 }
8528 else
8529 elem = build_int_cst (TREE_TYPE (type), -1);
8530 list = tree_cons (NULL_TREE, elem, list);
8531 }
8532 if (i == count)
8533 return build_vector (type, nreverse (list));
8534 }
8535
8536 return NULL_TREE;
8537
8538 case TRUTH_NOT_EXPR:
8539 /* The argument to invert_truthvalue must have Boolean type. */
8540 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8541 arg0 = fold_convert (boolean_type_node, arg0);
8542
8543 /* Note that the operand of this must be an int
8544 and its values must be 0 or 1.
8545 ("true" is a fixed value perhaps depending on the language,
8546 but we don't handle values other than 1 correctly yet.) */
8547 tem = fold_truth_not_expr (arg0);
8548 if (!tem)
8549 return NULL_TREE;
8550 return fold_convert (type, tem);
8551
8552 case REALPART_EXPR:
8553 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8554 return fold_convert (type, arg0);
8555 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8556 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8557 TREE_OPERAND (arg0, 1));
8558 if (TREE_CODE (arg0) == COMPLEX_CST)
8559 return fold_convert (type, TREE_REALPART (arg0));
8560 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8561 {
8562 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8563 tem = fold_build2 (TREE_CODE (arg0), itype,
8564 fold_build1 (REALPART_EXPR, itype,
8565 TREE_OPERAND (arg0, 0)),
8566 fold_build1 (REALPART_EXPR, itype,
8567 TREE_OPERAND (arg0, 1)));
8568 return fold_convert (type, tem);
8569 }
8570 if (TREE_CODE (arg0) == CONJ_EXPR)
8571 {
8572 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8573 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8574 return fold_convert (type, tem);
8575 }
8576 if (TREE_CODE (arg0) == CALL_EXPR)
8577 {
8578 tree fn = get_callee_fndecl (arg0);
8579 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8580 switch (DECL_FUNCTION_CODE (fn))
8581 {
8582 CASE_FLT_FN (BUILT_IN_CEXPI):
8583 fn = mathfn_built_in (type, BUILT_IN_COS);
8584 if (fn)
8585 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8586 break;
8587
8588 default:
8589 break;
8590 }
8591 }
8592 return NULL_TREE;
8593
8594 case IMAGPART_EXPR:
8595 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8596 return fold_convert (type, integer_zero_node);
8597 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8598 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8599 TREE_OPERAND (arg0, 0));
8600 if (TREE_CODE (arg0) == COMPLEX_CST)
8601 return fold_convert (type, TREE_IMAGPART (arg0));
8602 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8603 {
8604 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8605 tem = fold_build2 (TREE_CODE (arg0), itype,
8606 fold_build1 (IMAGPART_EXPR, itype,
8607 TREE_OPERAND (arg0, 0)),
8608 fold_build1 (IMAGPART_EXPR, itype,
8609 TREE_OPERAND (arg0, 1)));
8610 return fold_convert (type, tem);
8611 }
8612 if (TREE_CODE (arg0) == CONJ_EXPR)
8613 {
8614 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8615 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8616 return fold_convert (type, negate_expr (tem));
8617 }
8618 if (TREE_CODE (arg0) == CALL_EXPR)
8619 {
8620 tree fn = get_callee_fndecl (arg0);
8621 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8622 switch (DECL_FUNCTION_CODE (fn))
8623 {
8624 CASE_FLT_FN (BUILT_IN_CEXPI):
8625 fn = mathfn_built_in (type, BUILT_IN_SIN);
8626 if (fn)
8627 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8628 break;
8629
8630 default:
8631 break;
8632 }
8633 }
8634 return NULL_TREE;
8635
8636 default:
8637 return NULL_TREE;
8638 } /* switch (code) */
8639 }
8640
8641
8642 /* If the operation was a conversion, do _not_ mark a resulting constant
8643 with TREE_OVERFLOW if the original constant was not. These conversions
8644 have implementation defined behavior and retaining the TREE_OVERFLOW
8645 flag here would confuse later passes such as VRP. */
8646 tree
8647 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8648 {
8649 tree res = fold_unary (code, type, op0);
8650 if (res
8651 && TREE_CODE (res) == INTEGER_CST
8652 && TREE_CODE (op0) == INTEGER_CST
8653 && CONVERT_EXPR_CODE_P (code))
8654 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8655
8656 return res;
8657 }
8658
8659 /* Fold a binary expression of code CODE and type TYPE with operands
8660 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8661 Return the folded expression if folding is successful. Otherwise,
8662 return NULL_TREE. */
8663
8664 static tree
8665 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8666 {
8667 enum tree_code compl_code;
8668
8669 if (code == MIN_EXPR)
8670 compl_code = MAX_EXPR;
8671 else if (code == MAX_EXPR)
8672 compl_code = MIN_EXPR;
8673 else
8674 gcc_unreachable ();
8675
8676 /* MIN (MAX (a, b), b) == b. */
8677 if (TREE_CODE (op0) == compl_code
8678 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8679 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8680
8681 /* MIN (MAX (b, a), b) == b. */
8682 if (TREE_CODE (op0) == compl_code
8683 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8684 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8685 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8686
8687 /* MIN (a, MAX (a, b)) == a. */
8688 if (TREE_CODE (op1) == compl_code
8689 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8690 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8691 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8692
8693 /* MIN (a, MAX (b, a)) == a. */
8694 if (TREE_CODE (op1) == compl_code
8695 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8696 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8697 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8698
8699 return NULL_TREE;
8700 }
8701
8702 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8703 by changing CODE to reduce the magnitude of constants involved in
8704 ARG0 of the comparison.
8705 Returns a canonicalized comparison tree if a simplification was
8706 possible, otherwise returns NULL_TREE.
8707 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8708 valid if signed overflow is undefined. */
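/* E.g. x + 10 > y becomes x + 9 >= y (valid only when signed
overflow is undefined), and the constant-first form 3 <= y becomes
y > 2 via the swapped comparison. */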
8709
8710 static tree
8711 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8712 tree arg0, tree arg1,
8713 bool *strict_overflow_p)
8714 {
8715 enum tree_code code0 = TREE_CODE (arg0);
8716 tree t, cst0 = NULL_TREE;
8717 int sgn0;
8718 bool swap = false;
8719
8720 /* Match A +- CST code arg1 and CST code arg1. We can change the
8721 first form only if overflow is undefined. */
8722 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8723 /* In principle pointers also have undefined overflow behavior,
8724 but that causes problems elsewhere. */
8725 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8726 && (code0 == MINUS_EXPR
8727 || code0 == PLUS_EXPR)
8728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8729 || code0 == INTEGER_CST))
8730 return NULL_TREE;
8731
8732 /* Identify the constant in arg0 and its sign. */
8733 if (code0 == INTEGER_CST)
8734 cst0 = arg0;
8735 else
8736 cst0 = TREE_OPERAND (arg0, 1);
8737 sgn0 = tree_int_cst_sgn (cst0);
8738
8739 /* Overflowed constants and zero will cause problems. */
8740 if (integer_zerop (cst0)
8741 || TREE_OVERFLOW (cst0))
8742 return NULL_TREE;
8743
8744 /* See if we can reduce the magnitude of the constant in
8745 arg0 by changing the comparison code. */
8746 if (code0 == INTEGER_CST)
8747 {
8748 /* CST <= arg1 -> CST-1 < arg1. */
8749 if (code == LE_EXPR && sgn0 == 1)
8750 code = LT_EXPR;
8751 /* -CST < arg1 -> -CST-1 <= arg1. */
8752 else if (code == LT_EXPR && sgn0 == -1)
8753 code = LE_EXPR;
8754 /* CST > arg1 -> CST-1 >= arg1. */
8755 else if (code == GT_EXPR && sgn0 == 1)
8756 code = GE_EXPR;
8757 /* -CST >= arg1 -> -CST-1 > arg1. */
8758 else if (code == GE_EXPR && sgn0 == -1)
8759 code = GT_EXPR;
8760 else
8761 return NULL_TREE;
8762 /* arg1 code' CST' might be more canonical. */
8763 swap = true;
8764 }
8765 else
8766 {
8767 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8768 if (code == LT_EXPR
8769 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8770 code = LE_EXPR;
8771 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8772 else if (code == GT_EXPR
8773 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8774 code = GE_EXPR;
8775 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8776 else if (code == LE_EXPR
8777 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8778 code = LT_EXPR;
8779 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8780 else if (code == GE_EXPR
8781 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8782 code = GT_EXPR;
8783 else
8784 return NULL_TREE;
8785 *strict_overflow_p = true;
8786 }
8787
8788 /* Now build the constant reduced in magnitude. But not if that
8789 would produce one outside of its type's range. */
8790 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8791 && ((sgn0 == 1
8792 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8793 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8794 || (sgn0 == -1
8795 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8796 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8797 /* We cannot swap the comparison here as that would cause us to
8798 endlessly recurse. */
8799 return NULL_TREE;
8800
8801 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8802 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8803 if (code0 != INTEGER_CST)
8804 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8805
8806 /* If swapping might yield a more canonical form, do so. */
8807 if (swap)
8808 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8809 else
8810 return fold_build2 (code, type, t, arg1);
8811 }
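/* Worked instances of the rewrite above, assuming signed overflow is
   undefined (e.g. under -fstrict-overflow):

     x + 2 > y  ->  x + 1 >= y    (A + CST > arg1 -> A + CST-1 >= arg1)
     5 <= x     ->  4 < x  ->  x > 4    (constant form, then swapped)

   Each step strictly reduces the magnitude of the constant, and the
   early exits for zero and for the type's extreme values guarantee
   that the recursion through fold_build2 terminates.  */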
8812
8813 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8814 overflow further. Try to decrease the magnitude of constants involved
8815 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8816 and put sole constants at the second argument position.
8817 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8818
8819 static tree
8820 maybe_canonicalize_comparison (enum tree_code code, tree type,
8821 tree arg0, tree arg1)
8822 {
8823 tree t;
8824 bool strict_overflow_p;
8825 const char * const warnmsg = G_("assuming signed overflow does not occur "
8826 "when reducing constant in comparison");
8827
8828 /* Try canonicalization by simplifying arg0. */
8829 strict_overflow_p = false;
8830 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8831 &strict_overflow_p);
8832 if (t)
8833 {
8834 if (strict_overflow_p)
8835 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8836 return t;
8837 }
8838
8839 /* Try canonicalization by simplifying arg1 using the swapped
8840 comparison. */
8841 code = swap_tree_comparison (code);
8842 strict_overflow_p = false;
8843 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8844 &strict_overflow_p);
8845 if (t && strict_overflow_p)
8846 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8847 return t;
8848 }
8849
8850 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8851 space. This is used to avoid issuing overflow warnings for
8852 expressions like &p->x which cannot wrap. */
8853
8854 static bool
8855 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8856 {
8857 unsigned HOST_WIDE_INT offset_low, total_low;
8858 HOST_WIDE_INT size, offset_high, total_high;
8859
8860 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8861 return true;
8862
8863 if (bitpos < 0)
8864 return true;
8865
8866 if (offset == NULL_TREE)
8867 {
8868 offset_low = 0;
8869 offset_high = 0;
8870 }
8871 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8872 return true;
8873 else
8874 {
8875 offset_low = TREE_INT_CST_LOW (offset);
8876 offset_high = TREE_INT_CST_HIGH (offset);
8877 }
8878
8879 if (add_double_with_sign (offset_low, offset_high,
8880 bitpos / BITS_PER_UNIT, 0,
8881 &total_low, &total_high,
8882 true))
8883 return true;
8884
8885 if (total_high != 0)
8886 return true;
8887
8888 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8889 if (size <= 0)
8890 return true;
8891
8892 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8893 array. */
8894 if (TREE_CODE (base) == ADDR_EXPR)
8895 {
8896 HOST_WIDE_INT base_size;
8897
8898 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8899 if (base_size > 0 && size < base_size)
8900 size = base_size;
8901 }
8902
8903 return total_low > (unsigned HOST_WIDE_INT) size;
8904 }
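/* A standalone sketch of the test above in plain C, assuming 8-bit
   units and using unsigned long long in place of the double-width
   add_double_with_sign arithmetic:

     static int
     may_wrap (unsigned long long offset, long long bitpos,
               long long size)
     {
       unsigned long long total = offset + bitpos / 8;
       if (total < offset)      // the addition itself wrapped
         return 1;
       return total > (unsigned long long) size;
     }

   The real routine additionally prefers the size of the whole object
   behind an ADDR_EXPR base when that is larger than the pointed-to
   type.  */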
8905
8906 /* Subroutine of fold_binary. This routine performs all of the
8907 transformations that are common to the equality/inequality
8908 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8909 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8910 fold_binary itself should call fold_binary instead. Fold a comparison with
8911 tree code CODE and type TYPE with operands OP0 and OP1. Return
8912 the folded comparison or NULL_TREE. */
8913
8914 static tree
8915 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8916 {
8917 tree arg0, arg1, tem;
8918
8919 arg0 = op0;
8920 arg1 = op1;
8921
8922 STRIP_SIGN_NOPS (arg0);
8923 STRIP_SIGN_NOPS (arg1);
8924
8925 tem = fold_relational_const (code, type, arg0, arg1);
8926 if (tem != NULL_TREE)
8927 return tem;
8928
8929 /* If one arg is a real or integer constant, put it last. */
8930 if (tree_swap_operands_p (arg0, arg1, true))
8931 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8932
8933 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8934 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8935 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8936 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8937 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8938 && (TREE_CODE (arg1) == INTEGER_CST
8939 && !TREE_OVERFLOW (arg1)))
8940 {
8941 tree const1 = TREE_OPERAND (arg0, 1);
8942 tree const2 = arg1;
8943 tree variable = TREE_OPERAND (arg0, 0);
8944 tree lhs;
8945 int lhs_add;
8946 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8947
8948 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8949 TREE_TYPE (arg1), const2, const1);
8950
8951 /* If the constant operation overflowed this can be
8952 simplified as a comparison against INT_MAX/INT_MIN. */
8953 if (TREE_CODE (lhs) == INTEGER_CST
8954 && TREE_OVERFLOW (lhs))
8955 {
8956 int const1_sgn = tree_int_cst_sgn (const1);
8957 enum tree_code code2 = code;
8958
8959 /* Get the sign of the constant on the lhs if the
8960 operation were VARIABLE + CONST1. */
8961 if (TREE_CODE (arg0) == MINUS_EXPR)
8962 const1_sgn = -const1_sgn;
8963
8964 /* The sign of the constant determines if we overflowed
8965 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8966 Canonicalize to the INT_MIN overflow by swapping the comparison
8967 if necessary. */
8968 if (const1_sgn == -1)
8969 code2 = swap_tree_comparison (code);
8970
8971 /* We now can look at the canonicalized case
8972 VARIABLE + 1 CODE2 INT_MIN
8973 and decide on the result. */
8974 if (code2 == LT_EXPR
8975 || code2 == LE_EXPR
8976 || code2 == EQ_EXPR)
8977 return omit_one_operand (type, boolean_false_node, variable);
8978 else if (code2 == NE_EXPR
8979 || code2 == GE_EXPR
8980 || code2 == GT_EXPR)
8981 return omit_one_operand (type, boolean_true_node, variable);
8982 }
8983
8984 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8985 && (TREE_CODE (lhs) != INTEGER_CST
8986 || !TREE_OVERFLOW (lhs)))
8987 {
8988 fold_overflow_warning (("assuming signed overflow does not occur "
8989 "when changing X +- C1 cmp C2 to "
8990 "X cmp C1 +- C2"),
8991 WARN_STRICT_OVERFLOW_COMPARISON);
8992 return fold_build2 (code, type, variable, lhs);
8993 }
8994 }
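/* Worked instances of the transformation above for signed int x:

     x + 10 < 20      ->  x < 10     (20 - 10 is representable)
     x - 1 < INT_MAX  ->  true       (INT_MAX + 1 overflows, so the
                                      comparison is decided outright)

   The second case is the INT_MIN/INT_MAX canonicalization: with
   undefined overflow, x - 1 could only reach INT_MAX if x exceeded
   INT_MAX, which is impossible.  */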
8995
8996 /* For comparisons of pointers we can decompose it to a compile time
8997 comparison of the base objects and the offsets into the object.
8998 This requires at least one operand being an ADDR_EXPR or a
8999 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9000 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9001 && (TREE_CODE (arg0) == ADDR_EXPR
9002 || TREE_CODE (arg1) == ADDR_EXPR
9003 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9004 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9005 {
9006 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9007 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9008 enum machine_mode mode;
9009 int volatilep, unsignedp;
9010 bool indirect_base0 = false, indirect_base1 = false;
9011
9012 /* Get base and offset for the access. Strip ADDR_EXPR for
9013 get_inner_reference, but put it back by stripping INDIRECT_REF
9014 off the base object if possible. indirect_baseN will be true
9015 if baseN is not an address but refers to the object itself. */
9016 base0 = arg0;
9017 if (TREE_CODE (arg0) == ADDR_EXPR)
9018 {
9019 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9020 &bitsize, &bitpos0, &offset0, &mode,
9021 &unsignedp, &volatilep, false);
9022 if (TREE_CODE (base0) == INDIRECT_REF)
9023 base0 = TREE_OPERAND (base0, 0);
9024 else
9025 indirect_base0 = true;
9026 }
9027 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9028 {
9029 base0 = TREE_OPERAND (arg0, 0);
9030 offset0 = TREE_OPERAND (arg0, 1);
9031 }
9032
9033 base1 = arg1;
9034 if (TREE_CODE (arg1) == ADDR_EXPR)
9035 {
9036 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9037 &bitsize, &bitpos1, &offset1, &mode,
9038 &unsignedp, &volatilep, false);
9039 if (TREE_CODE (base1) == INDIRECT_REF)
9040 base1 = TREE_OPERAND (base1, 0);
9041 else
9042 indirect_base1 = true;
9043 }
9044 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9045 {
9046 base1 = TREE_OPERAND (arg1, 0);
9047 offset1 = TREE_OPERAND (arg1, 1);
9048 }
9049
9050 /* If we have equivalent bases we might be able to simplify. */
9051 if (indirect_base0 == indirect_base1
9052 && operand_equal_p (base0, base1, 0))
9053 {
9054 /* We can fold this expression to a constant if the non-constant
9055 offset parts are equal. */
9056 if ((offset0 == offset1
9057 || (offset0 && offset1
9058 && operand_equal_p (offset0, offset1, 0)))
9059 && (code == EQ_EXPR
9060 || code == NE_EXPR
9061 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9062
9063 {
9064 if (code != EQ_EXPR
9065 && code != NE_EXPR
9066 && bitpos0 != bitpos1
9067 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9068 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9069 fold_overflow_warning (("assuming pointer wraparound does not "
9070 "occur when comparing P +- C1 with "
9071 "P +- C2"),
9072 WARN_STRICT_OVERFLOW_CONDITIONAL);
9073
9074 switch (code)
9075 {
9076 case EQ_EXPR:
9077 return constant_boolean_node (bitpos0 == bitpos1, type);
9078 case NE_EXPR:
9079 return constant_boolean_node (bitpos0 != bitpos1, type);
9080 case LT_EXPR:
9081 return constant_boolean_node (bitpos0 < bitpos1, type);
9082 case LE_EXPR:
9083 return constant_boolean_node (bitpos0 <= bitpos1, type);
9084 case GE_EXPR:
9085 return constant_boolean_node (bitpos0 >= bitpos1, type);
9086 case GT_EXPR:
9087 return constant_boolean_node (bitpos0 > bitpos1, type);
9088 default:;
9089 }
9090 }
9091 /* We can simplify the comparison to a comparison of the variable
9092 offset parts if the constant offset parts are equal.
9093 Be careful to use signed size type here because otherwise we
9094 mess with array offsets in the wrong way. This is possible
9095 because pointer arithmetic is restricted to remain within an
9096 object and overflow on pointer differences is undefined as of
9097 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9098 else if (bitpos0 == bitpos1
9099 && ((code == EQ_EXPR || code == NE_EXPR)
9100 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9101 {
9102 tree signed_size_type_node;
9103 signed_size_type_node = signed_type_for (size_type_node);
9104
9105 /* By converting to signed size type we cover middle-end pointer
9106 arithmetic which operates on unsigned pointer types of size
9107 type size and ARRAY_REF offsets which are properly sign or
9108 zero extended from their type in case it is narrower than
9109 size type. */
9110 if (offset0 == NULL_TREE)
9111 offset0 = build_int_cst (signed_size_type_node, 0);
9112 else
9113 offset0 = fold_convert (signed_size_type_node, offset0);
9114 if (offset1 == NULL_TREE)
9115 offset1 = build_int_cst (signed_size_type_node, 0);
9116 else
9117 offset1 = fold_convert (signed_size_type_node, offset1);
9118
9119 if (code != EQ_EXPR
9120 && code != NE_EXPR
9121 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9122 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9123 fold_overflow_warning (("assuming pointer wraparound does not "
9124 "occur when comparing P +- C1 with "
9125 "P +- C2"),
9126 WARN_STRICT_OVERFLOW_COMPARISON);
9127
9128 return fold_build2 (code, type, offset0, offset1);
9129 }
9130 }
9131 /* For non-equal bases we can simplify if they are addresses
9132 of local binding decls or constants. */
9133 else if (indirect_base0 && indirect_base1
9134 /* We know that !operand_equal_p (base0, base1, 0)
9135 because the if condition was false. But make
9136 sure two decls are not the same. */
9137 && base0 != base1
9138 && TREE_CODE (arg0) == ADDR_EXPR
9139 && TREE_CODE (arg1) == ADDR_EXPR
9140 && (((TREE_CODE (base0) == VAR_DECL
9141 || TREE_CODE (base0) == PARM_DECL)
9142 && (targetm.binds_local_p (base0)
9143 || CONSTANT_CLASS_P (base1)))
9144 || CONSTANT_CLASS_P (base0))
9145 && (((TREE_CODE (base1) == VAR_DECL
9146 || TREE_CODE (base1) == PARM_DECL)
9147 && (targetm.binds_local_p (base1)
9148 || CONSTANT_CLASS_P (base0)))
9149 || CONSTANT_CLASS_P (base1)))
9150 {
9151 if (code == EQ_EXPR)
9152 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9153 else if (code == NE_EXPR)
9154 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9155 }
9156 /* For equal offsets we can simplify to a comparison of the
9157 base addresses. */
9158 else if (bitpos0 == bitpos1
9159 && (indirect_base0
9160 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9161 && (indirect_base1
9162 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9163 && ((offset0 == offset1)
9164 || (offset0 && offset1
9165 && operand_equal_p (offset0, offset1, 0))))
9166 {
9167 if (indirect_base0)
9168 base0 = build_fold_addr_expr (base0);
9169 if (indirect_base1)
9170 base1 = build_fold_addr_expr (base1);
9171 return fold_build2 (code, type, base0, base1);
9172 }
9173 }
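/* Examples of the address folds above, for local "int a[8], b;"
   and 4-byte int:

     &a[2] == &a[2]  ->  1      (equal bases, equal constant offsets)
     &a[2] <  &a[5]  ->  1      (equal bases, bitpos0 < bitpos1)
     &a[i] == &a[j]  ->  comparison of the converted offsets
     &a    == &b     ->  0      (distinct local decls)

   The variable-offset case goes through the signed sizetype
   conversion so that negative array indexes keep their sign.  */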
9174
9175 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9176 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9177 the resulting offset is smaller in absolute value than the
9178 original one. */
9179 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9180 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9181 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9182 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9183 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9184 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9185 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9186 {
9187 tree const1 = TREE_OPERAND (arg0, 1);
9188 tree const2 = TREE_OPERAND (arg1, 1);
9189 tree variable1 = TREE_OPERAND (arg0, 0);
9190 tree variable2 = TREE_OPERAND (arg1, 0);
9191 tree cst;
9192 const char * const warnmsg = G_("assuming signed overflow does not "
9193 "occur when combining constants around "
9194 "a comparison");
9195
9196 /* Put the constant on the side where it doesn't overflow and is
9197 of lower absolute value than before. */
9198 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9199 ? MINUS_EXPR : PLUS_EXPR,
9200 const2, const1, 0);
9201 if (!TREE_OVERFLOW (cst)
9202 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9203 {
9204 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9205 return fold_build2 (code, type,
9206 variable1,
9207 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9208 variable2, cst));
9209 }
9210
9211 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9212 ? MINUS_EXPR : PLUS_EXPR,
9213 const1, const2, 0);
9214 if (!TREE_OVERFLOW (cst)
9215 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9216 {
9217 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9218 return fold_build2 (code, type,
9219 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9220 variable1, cst),
9221 variable2);
9222 }
9223 }
9224
9225 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9226 signed arithmetic case. That form is created by the compiler
9227 often enough for folding it to be of value. One example is in
9228 computing loop trip counts after Operator Strength Reduction. */
9229 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9230 && TREE_CODE (arg0) == MULT_EXPR
9231 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9232 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9233 && integer_zerop (arg1))
9234 {
9235 tree const1 = TREE_OPERAND (arg0, 1);
9236 tree const2 = arg1; /* zero */
9237 tree variable1 = TREE_OPERAND (arg0, 0);
9238 enum tree_code cmp_code = code;
9239
9240 gcc_assert (!integer_zerop (const1));
9241
9242 fold_overflow_warning (("assuming signed overflow does not occur when "
9243 "eliminating multiplication in comparison "
9244 "with zero"),
9245 WARN_STRICT_OVERFLOW_COMPARISON);
9246
9247 /* If const1 is negative we swap the sense of the comparison. */
9248 if (tree_int_cst_sgn (const1) < 0)
9249 cmp_code = swap_tree_comparison (cmp_code);
9250
9251 return fold_build2 (cmp_code, type, variable1, const2);
9252 }
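/* Worked instances of the transformation above for signed int x
   (overflow assumed undefined):

     x * 4  > 0  ->  x > 0     (positive constant keeps the sense)
     x * -2 > 0  ->  x < 0     (negative constant swaps the sense)

   With wrapping overflow this would be wrong: for 32-bit x,
   0x40000000 * 4 wraps to 0.  */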
9253
9254 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9255 if (tem)
9256 return tem;
9257
9258 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9259 {
9260 tree targ0 = strip_float_extensions (arg0);
9261 tree targ1 = strip_float_extensions (arg1);
9262 tree newtype = TREE_TYPE (targ0);
9263
9264 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9265 newtype = TREE_TYPE (targ1);
9266
9267 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9268 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9269 return fold_build2 (code, type, fold_convert (newtype, targ0),
9270 fold_convert (newtype, targ1));
9271
9272 /* (-a) CMP (-b) -> b CMP a */
9273 if (TREE_CODE (arg0) == NEGATE_EXPR
9274 && TREE_CODE (arg1) == NEGATE_EXPR)
9275 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9276 TREE_OPERAND (arg0, 0));
9277
9278 if (TREE_CODE (arg1) == REAL_CST)
9279 {
9280 REAL_VALUE_TYPE cst;
9281 cst = TREE_REAL_CST (arg1);
9282
9283 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9284 if (TREE_CODE (arg0) == NEGATE_EXPR)
9285 return fold_build2 (swap_tree_comparison (code), type,
9286 TREE_OPERAND (arg0, 0),
9287 build_real (TREE_TYPE (arg1),
9288 REAL_VALUE_NEGATE (cst)));
9289
9290 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9291 /* a CMP (-0) -> a CMP 0 */
9292 if (REAL_VALUE_MINUS_ZERO (cst))
9293 return fold_build2 (code, type, arg0,
9294 build_real (TREE_TYPE (arg1), dconst0));
9295
9296 /* x != NaN is always true, other ops are always false. */
9297 if (REAL_VALUE_ISNAN (cst)
9298 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9299 {
9300 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9301 return omit_one_operand (type, tem, arg0);
9302 }
9303
9304 /* Fold comparisons against infinity. */
9305 if (REAL_VALUE_ISINF (cst)
9306 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9307 {
9308 tem = fold_inf_compare (code, type, arg0, arg1);
9309 if (tem != NULL_TREE)
9310 return tem;
9311 }
9312 }
9313
9314 /* If this is a comparison of a real constant with a PLUS_EXPR
9315 or a MINUS_EXPR of a real constant, we can convert it into a
9316 comparison with a revised real constant as long as no overflow
9317 occurs when unsafe_math_optimizations are enabled. */
9318 if (flag_unsafe_math_optimizations
9319 && TREE_CODE (arg1) == REAL_CST
9320 && (TREE_CODE (arg0) == PLUS_EXPR
9321 || TREE_CODE (arg0) == MINUS_EXPR)
9322 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9323 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9324 ? MINUS_EXPR : PLUS_EXPR,
9325 arg1, TREE_OPERAND (arg0, 1), 0))
9326 && !TREE_OVERFLOW (tem))
9327 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9328
9329 /* Likewise, we can simplify a comparison of a real constant with
9330 a MINUS_EXPR whose first operand is also a real constant, i.e.
9331 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9332 floating-point types only if -fassociative-math is set. */
9333 if (flag_associative_math
9334 && TREE_CODE (arg1) == REAL_CST
9335 && TREE_CODE (arg0) == MINUS_EXPR
9336 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9337 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9338 arg1, 0))
9339 && !TREE_OVERFLOW (tem))
9340 return fold_build2 (swap_tree_comparison (code), type,
9341 TREE_OPERAND (arg0, 1), tem);
9342
9343 /* Fold comparisons against built-in math functions. */
9344 if (TREE_CODE (arg1) == REAL_CST
9345 && flag_unsafe_math_optimizations
9346 && ! flag_errno_math)
9347 {
9348 enum built_in_function fcode = builtin_mathfn_code (arg0);
9349
9350 if (fcode != END_BUILTINS)
9351 {
9352 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9353 if (tem != NULL_TREE)
9354 return tem;
9355 }
9356 }
9357 }
9358
9359 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9360 && CONVERT_EXPR_P (arg0))
9361 {
9362 /* If we are widening one operand of an integer comparison,
9363 see if the other operand is similarly being widened. Perhaps we
9364 can do the comparison in the narrower type. */
9365 tem = fold_widened_comparison (code, type, arg0, arg1);
9366 if (tem)
9367 return tem;
9368
9369 /* Or if we are changing signedness. */
9370 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9371 if (tem)
9372 return tem;
9373 }
9374
9375 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9376 constant, we can simplify it. */
9377 if (TREE_CODE (arg1) == INTEGER_CST
9378 && (TREE_CODE (arg0) == MIN_EXPR
9379 || TREE_CODE (arg0) == MAX_EXPR)
9380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9381 {
9382 tem = optimize_minmax_comparison (code, type, op0, op1);
9383 if (tem)
9384 return tem;
9385 }
9386
9387 /* Simplify comparison of something with itself. (For IEEE
9388 floating-point, we can only do some of these simplifications.) */
9389 if (operand_equal_p (arg0, arg1, 0))
9390 {
9391 switch (code)
9392 {
9393 case EQ_EXPR:
9394 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9395 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9396 return constant_boolean_node (1, type);
9397 break;
9398
9399 case GE_EXPR:
9400 case LE_EXPR:
9401 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9402 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9403 return constant_boolean_node (1, type);
9404 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9405
9406 case NE_EXPR:
9407 /* For NE, we can only do this simplification if the type is
9408 integral or we don't honor IEEE floating point NaNs. */
9409 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9410 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9411 break;
9412 /* ... fall through ... */
9413 case GT_EXPR:
9414 case LT_EXPR:
9415 return constant_boolean_node (0, type);
9416 default:
9417 gcc_unreachable ();
9418 }
9419 }
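/* The cases above, tabulated for operand_equal_p (x, x):

     integral x:           x == x, x >= x, x <= x  ->  1
                           x != x, x >  x, x <  x  ->  0
     IEEE x, NaNs honored: x >  x, x <  x          ->  0
                           x >= x, x <= x          ->  x == x
                           x == x, x != x          ->  not folded (both
                                                       depend on whether
                                                       x is a NaN)
   */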
9420
9421 /* If we are comparing an expression that just has comparisons
9422 of two integer values, arithmetic expressions of those comparisons,
9423 and constants, we can simplify it. There are only three cases
9424 to check: the two values can either be equal, the first can be
9425 greater, or the second can be greater. Fold the expression for
9426 those three values. Since each value must be 0 or 1, we have
9427 eight possibilities, each of which corresponds to the constant 0
9428 or 1 or one of the six possible comparisons.
9429
9430 This handles common cases like (a > b) == 0 but also handles
9431 expressions like ((x > y) - (y > x)) > 0, which supposedly
9432 occur in macroized code. */
9433
9434 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9435 {
9436 tree cval1 = 0, cval2 = 0;
9437 int save_p = 0;
9438
9439 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9440 /* Don't handle degenerate cases here; they should already
9441 have been handled anyway. */
9442 && cval1 != 0 && cval2 != 0
9443 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9444 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9445 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9446 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9447 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9448 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9449 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9450 {
9451 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9452 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9453
9454 /* We can't just pass T to eval_subst in case cval1 or cval2
9455 was the same as ARG1. */
9456
9457 tree high_result
9458 = fold_build2 (code, type,
9459 eval_subst (arg0, cval1, maxval,
9460 cval2, minval),
9461 arg1);
9462 tree equal_result
9463 = fold_build2 (code, type,
9464 eval_subst (arg0, cval1, maxval,
9465 cval2, maxval),
9466 arg1);
9467 tree low_result
9468 = fold_build2 (code, type,
9469 eval_subst (arg0, cval1, minval,
9470 cval2, maxval),
9471 arg1);
9472
9473 /* All three of these results should be 0 or 1. Confirm they are.
9474 Then use those values to select the proper code to use. */
9475
9476 if (TREE_CODE (high_result) == INTEGER_CST
9477 && TREE_CODE (equal_result) == INTEGER_CST
9478 && TREE_CODE (low_result) == INTEGER_CST)
9479 {
9480 /* Make a 3-bit mask with the high-order bit being the
9481 value for `>', the next for `=', and the low for `<'. */
9482 switch ((integer_onep (high_result) * 4)
9483 + (integer_onep (equal_result) * 2)
9484 + integer_onep (low_result))
9485 {
9486 case 0:
9487 /* Always false. */
9488 return omit_one_operand (type, integer_zero_node, arg0);
9489 case 1:
9490 code = LT_EXPR;
9491 break;
9492 case 2:
9493 code = EQ_EXPR;
9494 break;
9495 case 3:
9496 code = LE_EXPR;
9497 break;
9498 case 4:
9499 code = GT_EXPR;
9500 break;
9501 case 5:
9502 code = NE_EXPR;
9503 break;
9504 case 6:
9505 code = GE_EXPR;
9506 break;
9507 case 7:
9508 /* Always true. */
9509 return omit_one_operand (type, integer_one_node, arg0);
9510 }
9511
9512 if (save_p)
9513 return save_expr (build2 (code, type, cval1, cval2));
9514 return fold_build2 (code, type, cval1, cval2);
9515 }
9516 }
9517 }
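/* A worked example of the dispatch above for (a > b) == 0, i.e.
   arg0 = (a > b), arg1 = 0, cval1 = a, cval2 = b:

     high_result  = ((max > min) == 0) = 0     (case a > b)
     equal_result = ((max > max) == 0) = 1     (case a == b)
     low_result   = ((min > max) == 0) = 1     (case a < b)

   The mask is 0*4 + 1*2 + 1 = 3, selecting LE_EXPR, so the whole
   expression folds to a <= b.  */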
9518
9519 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9520 into a single range test. */
9521 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9522 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9523 && TREE_CODE (arg1) == INTEGER_CST
9524 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9525 && !integer_zerop (TREE_OPERAND (arg0, 1))
9526 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9527 && !TREE_OVERFLOW (arg1))
9528 {
9529 tem = fold_div_compare (code, type, arg0, arg1);
9530 if (tem != NULL_TREE)
9531 return tem;
9532 }
9533
9534 /* Fold ~X op ~Y as Y op X. */
9535 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9536 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9537 {
9538 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9539 return fold_build2 (code, type,
9540 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9541 TREE_OPERAND (arg0, 0));
9542 }
9543
9544 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9545 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9546 && TREE_CODE (arg1) == INTEGER_CST)
9547 {
9548 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9549 return fold_build2 (swap_tree_comparison (code), type,
9550 TREE_OPERAND (arg0, 0),
9551 fold_build1 (BIT_NOT_EXPR, cmp_type,
9552 fold_convert (cmp_type, arg1)));
9553 }
9554
9555 return NULL_TREE;
9556 }
9557
9558
9559 /* Subroutine of fold_binary. Optimize complex multiplications of the
9560 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9561 argument EXPR represents the expression "z" of type TYPE. */
9562
9563 static tree
9564 fold_mult_zconjz (tree type, tree expr)
9565 {
9566 tree itype = TREE_TYPE (type);
9567 tree rpart, ipart, tem;
9568
9569 if (TREE_CODE (expr) == COMPLEX_EXPR)
9570 {
9571 rpart = TREE_OPERAND (expr, 0);
9572 ipart = TREE_OPERAND (expr, 1);
9573 }
9574 else if (TREE_CODE (expr) == COMPLEX_CST)
9575 {
9576 rpart = TREE_REALPART (expr);
9577 ipart = TREE_IMAGPART (expr);
9578 }
9579 else
9580 {
9581 expr = save_expr (expr);
9582 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9583 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9584 }
9585
9586 rpart = save_expr (rpart);
9587 ipart = save_expr (ipart);
9588 tem = fold_build2 (PLUS_EXPR, itype,
9589 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9590 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9591 return fold_build2 (COMPLEX_EXPR, type, tem,
9592 fold_convert (itype, integer_zero_node));
9593 }
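/* The identity behind fold_mult_zconjz, as a standalone C99 sketch
   (hypothetical function, not part of GCC):

     #include <complex.h>

     static double complex
     zconjz (double complex z)
     {
       double r = creal (z), i = cimag (z);
       return r * r + i * i;    // z * conj (z); imaginary part is 0
     }

   Algebraically (a + bi) * (a - bi) = a*a + b*b + 0i.  save_expr
   above ensures the real and imaginary parts are evaluated only
   once.  */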
9594
9595
9596 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9597 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9598 guarantees that P and N have the same least significant log2(M) bits.
9599 N is not otherwise constrained. In particular, N is not normalized to
9600 0 <= N < M as is common. In general, the precise value of P is unknown.
9601 M is chosen as large as possible such that constant N can be determined.
9602
9603 Returns M and sets *RESIDUE to N.
9604
9605 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9606 account. This is not always possible due to PR 35705.
9607 */
9608
9609 static unsigned HOST_WIDE_INT
9610 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9611 bool allow_func_align)
9612 {
9613 enum tree_code code;
9614
9615 *residue = 0;
9616
9617 code = TREE_CODE (expr);
9618 if (code == ADDR_EXPR)
9619 {
9620 expr = TREE_OPERAND (expr, 0);
9621 if (handled_component_p (expr))
9622 {
9623 HOST_WIDE_INT bitsize, bitpos;
9624 tree offset;
9625 enum machine_mode mode;
9626 int unsignedp, volatilep;
9627
9628 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9629 &mode, &unsignedp, &volatilep, false);
9630 *residue = bitpos / BITS_PER_UNIT;
9631 if (offset)
9632 {
9633 if (TREE_CODE (offset) == INTEGER_CST)
9634 *residue += TREE_INT_CST_LOW (offset);
9635 else
9636 /* We don't handle more complicated offset expressions. */
9637 return 1;
9638 }
9639 }
9640
9641 if (DECL_P (expr)
9642 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9643 return DECL_ALIGN_UNIT (expr);
9644 }
9645 else if (code == POINTER_PLUS_EXPR)
9646 {
9647 tree op0, op1;
9648 unsigned HOST_WIDE_INT modulus;
9649 enum tree_code inner_code;
9650
9651 op0 = TREE_OPERAND (expr, 0);
9652 STRIP_NOPS (op0);
9653 modulus = get_pointer_modulus_and_residue (op0, residue,
9654 allow_func_align);
9655
9656 op1 = TREE_OPERAND (expr, 1);
9657 STRIP_NOPS (op1);
9658 inner_code = TREE_CODE (op1);
9659 if (inner_code == INTEGER_CST)
9660 {
9661 *residue += TREE_INT_CST_LOW (op1);
9662 return modulus;
9663 }
9664 else if (inner_code == MULT_EXPR)
9665 {
9666 op1 = TREE_OPERAND (op1, 1);
9667 if (TREE_CODE (op1) == INTEGER_CST)
9668 {
9669 unsigned HOST_WIDE_INT align;
9670
9671 /* Compute the greatest power-of-2 divisor of op1. */
9672 align = TREE_INT_CST_LOW (op1);
9673 align &= -align;
9674
9675 /* If align is non-zero and less than modulus, replace
9676 modulus with align. If align is 0, then either op1 is 0
9677 or the greatest power-of-2 divisor of op1 doesn't fit in an
9678 unsigned HOST_WIDE_INT. In either case, no additional
9679 constraint is imposed. */
9680 if (align)
9681 modulus = MIN (modulus, align);
9682
9683 return modulus;
9684 }
9685 }
9686 }
9687
9688 /* If we get here, we were unable to determine anything useful about the
9689 expression. */
9690 return 1;
9691 }
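/* A worked example of the computation above, for

     (char *) &v + 4 + i * 8    parsed as ((&v p+ 4) p+ i*8)

   with v 16-byte aligned: the ADDR_EXPR yields modulus 16, residue 0;
   the INTEGER_CST 4 bumps the residue to 4; the MULT_EXPR by 8 clamps
   the modulus to MIN (16, 8) = 8.  Result: the pointer is congruent
   to 4 modulo 8 whatever the value of i.  */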
9692
9693
9694 /* Fold a binary expression of code CODE and type TYPE with operands
9695 OP0 and OP1. Return the folded expression if folding is
9696 successful. Otherwise, return NULL_TREE. */
9697
9698 tree
9699 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9700 {
9701 enum tree_code_class kind = TREE_CODE_CLASS (code);
9702 tree arg0, arg1, tem;
9703 tree t1 = NULL_TREE;
9704 bool strict_overflow_p;
9705
9706 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9707 && TREE_CODE_LENGTH (code) == 2
9708 && op0 != NULL_TREE
9709 && op1 != NULL_TREE);
9710
9711 arg0 = op0;
9712 arg1 = op1;
9713
9714 /* Strip any conversions that don't change the mode. This is
9715 safe for every expression, except for a comparison expression
9716 because its signedness is derived from its operands. So, in
9717 the latter case, only strip conversions that don't change the
9718 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9719 preserved.
9720
9721 Note that this is done as an internal manipulation within the
9722 constant folder, in order to find the simplest representation
9723 of the arguments so that their form can be studied. In any
9724 case, the appropriate type conversions should be put back in
9725 the tree that will get out of the constant folder. */
9726
9727 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9728 {
9729 STRIP_SIGN_NOPS (arg0);
9730 STRIP_SIGN_NOPS (arg1);
9731 }
9732 else
9733 {
9734 STRIP_NOPS (arg0);
9735 STRIP_NOPS (arg1);
9736 }
9737
9738 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9739 constant but we can't do arithmetic on them. */
9740 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9741 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9742 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9743 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9744 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9745 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9746 {
9747 if (kind == tcc_binary)
9748 {
9749 /* Make sure type and arg0 have the same saturating flag. */
9750 gcc_assert (TYPE_SATURATING (type)
9751 == TYPE_SATURATING (TREE_TYPE (arg0)));
9752 tem = const_binop (code, arg0, arg1, 0);
9753 }
9754 else if (kind == tcc_comparison)
9755 tem = fold_relational_const (code, type, arg0, arg1);
9756 else
9757 tem = NULL_TREE;
9758
9759 if (tem != NULL_TREE)
9760 {
9761 if (TREE_TYPE (tem) != type)
9762 tem = fold_convert (type, tem);
9763 return tem;
9764 }
9765 }
9766
9767 /* If this is a commutative operation, and ARG0 is a constant, move it
9768 to ARG1 to reduce the number of tests below. */
9769 if (commutative_tree_code (code)
9770 && tree_swap_operands_p (arg0, arg1, true))
9771 return fold_build2 (code, type, op1, op0);
9772
9773 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9774
9775 First check for cases where an arithmetic operation is applied to a
9776 compound, conditional, or comparison operation. Push the arithmetic
9777 operation inside the compound or conditional to see if any folding
9778 can then be done. Convert comparison to conditional for this purpose.
9779 This also optimizes non-constant cases that used to be done in
9780 expand_expr.
9781
9782 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9783 where one of the operands is a comparison and the other is a comparison, a
9784 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9785 code below would make the expression more complex. Change it to a
9786 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9787 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9788
9789 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9790 || code == EQ_EXPR || code == NE_EXPR)
9791 && ((truth_value_p (TREE_CODE (arg0))
9792 && (truth_value_p (TREE_CODE (arg1))
9793 || (TREE_CODE (arg1) == BIT_AND_EXPR
9794 && integer_onep (TREE_OPERAND (arg1, 1)))))
9795 || (truth_value_p (TREE_CODE (arg1))
9796 && (truth_value_p (TREE_CODE (arg0))
9797 || (TREE_CODE (arg0) == BIT_AND_EXPR
9798 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9799 {
9800 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9801 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9802 : TRUTH_XOR_EXPR,
9803 boolean_type_node,
9804 fold_convert (boolean_type_node, arg0),
9805 fold_convert (boolean_type_node, arg1));
9806
9807 if (code == EQ_EXPR)
9808 tem = invert_truthvalue (tem);
9809
9810 return fold_convert (type, tem);
9811 }
9812
9813 if (TREE_CODE_CLASS (code) == tcc_binary
9814 || TREE_CODE_CLASS (code) == tcc_comparison)
9815 {
9816 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9817 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9818 fold_build2 (code, type,
9819 fold_convert (TREE_TYPE (op0),
9820 TREE_OPERAND (arg0, 1)),
9821 op1));
9822 if (TREE_CODE (arg1) == COMPOUND_EXPR
9823 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9824 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9825 fold_build2 (code, type, op0,
9826 fold_convert (TREE_TYPE (op1),
9827 TREE_OPERAND (arg1, 1))));
9828
9829 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9830 {
9831 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9832 arg0, arg1,
9833 /*cond_first_p=*/1);
9834 if (tem != NULL_TREE)
9835 return tem;
9836 }
9837
9838 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9839 {
9840 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9841 arg1, arg0,
9842 /*cond_first_p=*/0);
9843 if (tem != NULL_TREE)
9844 return tem;
9845 }
9846 }
9847
9848 switch (code)
9849 {
9850 case POINTER_PLUS_EXPR:
9851 /* 0 +p index -> (type)index */
9852 if (integer_zerop (arg0))
9853 return non_lvalue (fold_convert (type, arg1));
9854
9855 /* PTR +p 0 -> PTR */
9856 if (integer_zerop (arg1))
9857 return non_lvalue (fold_convert (type, arg0));
9858
9859 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9860 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9861 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9862 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9863 fold_convert (sizetype, arg1),
9864 fold_convert (sizetype, arg0)));
9865
9866 /* index +p PTR -> PTR +p index */
9867 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9868 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9869 return fold_build2 (POINTER_PLUS_EXPR, type,
9870 fold_convert (type, arg1),
9871 fold_convert (sizetype, arg0));
9872
9873 /* (PTR +p B) +p A -> PTR +p (B + A) */
9874 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9875 {
9876 tree inner;
9877 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9878 tree arg00 = TREE_OPERAND (arg0, 0);
9879 inner = fold_build2 (PLUS_EXPR, sizetype,
9880 arg01, fold_convert (sizetype, arg1));
9881 return fold_convert (type,
9882 fold_build2 (POINTER_PLUS_EXPR,
9883 TREE_TYPE (arg00), arg00, inner));
9884 }
9885
9886 /* PTR_CST +p CST -> CST1 */
9887 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9888 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9889
9890 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9891 of the array. The loop optimizer sometimes produces this type of
9892 expression. */
9893 if (TREE_CODE (arg0) == ADDR_EXPR)
9894 {
9895 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9896 if (tem)
9897 return fold_convert (type, tem);
9898 }
9899
9900 return NULL_TREE;
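/* Examples of the POINTER_PLUS_EXPR folds above:

     (char *) 0 p+ i   ->  (char *) i
     p p+ 0            ->  p
     i p+ p            ->  p p+ i
     (p p+ 4) p+ i     ->  p p+ (4 + i)
     &a[1] p+ 4 * i    ->  &a[1 + i]    (when 4 is the element size)
   */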
9901
9902 case PLUS_EXPR:
9903 /* A + (-B) -> A - B */
9904 if (TREE_CODE (arg1) == NEGATE_EXPR)
9905 return fold_build2 (MINUS_EXPR, type,
9906 fold_convert (type, arg0),
9907 fold_convert (type, TREE_OPERAND (arg1, 0)));
9908 /* (-A) + B -> B - A */
9909 if (TREE_CODE (arg0) == NEGATE_EXPR
9910 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9911 return fold_build2 (MINUS_EXPR, type,
9912 fold_convert (type, arg1),
9913 fold_convert (type, TREE_OPERAND (arg0, 0)));
9914
9915 if (INTEGRAL_TYPE_P (type))
9916 {
9917 /* Convert ~A + 1 to -A. */
9918 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9919 && integer_onep (arg1))
9920 return fold_build1 (NEGATE_EXPR, type,
9921 fold_convert (type, TREE_OPERAND (arg0, 0)));
9922
9923 /* ~X + X is -1. */
9924 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9925 && !TYPE_OVERFLOW_TRAPS (type))
9926 {
9927 tree tem = TREE_OPERAND (arg0, 0);
9928
9929 STRIP_NOPS (tem);
9930 if (operand_equal_p (tem, arg1, 0))
9931 {
9932 t1 = build_int_cst_type (type, -1);
9933 return omit_one_operand (type, t1, arg1);
9934 }
9935 }
9936
9937 /* X + ~X is -1. */
9938 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9939 && !TYPE_OVERFLOW_TRAPS (type))
9940 {
9941 tree tem = TREE_OPERAND (arg1, 0);
9942
9943 STRIP_NOPS (tem);
9944 if (operand_equal_p (arg0, tem, 0))
9945 {
9946 t1 = build_int_cst_type (type, -1);
9947 return omit_one_operand (type, t1, arg0);
9948 }
9949 }
9950
9951 /* X + (X / CST) * -CST is X % CST. */
9952 if (TREE_CODE (arg1) == MULT_EXPR
9953 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9954 && operand_equal_p (arg0,
9955 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9956 {
9957 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9958 tree cst1 = TREE_OPERAND (arg1, 1);
9959 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9960 if (sum && integer_zerop (sum))
9961 return fold_convert (type,
9962 fold_build2 (TRUNC_MOD_EXPR,
9963 TREE_TYPE (arg0), arg0, cst0));
9964 }
9965 }
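/* The two's complement identities used above:

     ~a + 1 == -a      (definition of negation)
     ~a + a == -1      (each bit position sums to 1)

   and the division identity behind the last pattern, for truncating
   division:

     x + (x / c) * -c == x % c     e.g. 7 + 2 * -3 == 1 == 7 % 3

   The multiplier is matched as CST1 with CST0 + CST1 folding to 0.  */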
9966
9967 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9968 same or one. Make sure type is not saturating.
9969 fold_plusminus_mult_expr will re-associate. */
9970 if ((TREE_CODE (arg0) == MULT_EXPR
9971 || TREE_CODE (arg1) == MULT_EXPR)
9972 && !TYPE_SATURATING (type)
9973 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9974 {
9975 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9976 if (tem)
9977 return tem;
9978 }
9979
9980 if (! FLOAT_TYPE_P (type))
9981 {
9982 if (integer_zerop (arg1))
9983 return non_lvalue (fold_convert (type, arg0));
9984
9985 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9986 with a constant, and the two constants have no bits in common,
9987 we should treat this as a BIT_IOR_EXPR since this may produce more
9988 simplifications. */
9989 if (TREE_CODE (arg0) == BIT_AND_EXPR
9990 && TREE_CODE (arg1) == BIT_AND_EXPR
9991 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9992 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9993 && integer_zerop (const_binop (BIT_AND_EXPR,
9994 TREE_OPERAND (arg0, 1),
9995 TREE_OPERAND (arg1, 1), 0)))
9996 {
9997 code = BIT_IOR_EXPR;
9998 goto bit_ior;
9999 }
10000
10001 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10002 (plus (plus (mult) (mult)) (foo)) so that we can
10003 take advantage of the factoring cases below. */
10004 if (((TREE_CODE (arg0) == PLUS_EXPR
10005 || TREE_CODE (arg0) == MINUS_EXPR)
10006 && TREE_CODE (arg1) == MULT_EXPR)
10007 || ((TREE_CODE (arg1) == PLUS_EXPR
10008 || TREE_CODE (arg1) == MINUS_EXPR)
10009 && TREE_CODE (arg0) == MULT_EXPR))
10010 {
10011 tree parg0, parg1, parg, marg;
10012 enum tree_code pcode;
10013
10014 if (TREE_CODE (arg1) == MULT_EXPR)
10015 parg = arg0, marg = arg1;
10016 else
10017 parg = arg1, marg = arg0;
10018 pcode = TREE_CODE (parg);
10019 parg0 = TREE_OPERAND (parg, 0);
10020 parg1 = TREE_OPERAND (parg, 1);
10021 STRIP_NOPS (parg0);
10022 STRIP_NOPS (parg1);
10023
10024 if (TREE_CODE (parg0) == MULT_EXPR
10025 && TREE_CODE (parg1) != MULT_EXPR)
10026 return fold_build2 (pcode, type,
10027 fold_build2 (PLUS_EXPR, type,
10028 fold_convert (type, parg0),
10029 fold_convert (type, marg)),
10030 fold_convert (type, parg1));
10031 if (TREE_CODE (parg0) != MULT_EXPR
10032 && TREE_CODE (parg1) == MULT_EXPR)
10033 return fold_build2 (PLUS_EXPR, type,
10034 fold_convert (type, parg0),
10035 fold_build2 (pcode, type,
10036 fold_convert (type, marg),
10037 fold_convert (type,
10038 parg1)));
10039 }
10040 }
10041 else
10042 {
10043 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10044 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10045 return non_lvalue (fold_convert (type, arg0));
10046
10047 /* Likewise if the operands are reversed. */
10048 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10049 return non_lvalue (fold_convert (type, arg1));
10050
10051 /* Convert X + -C into X - C. */
10052 if (TREE_CODE (arg1) == REAL_CST
10053 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10054 {
10055 tem = fold_negate_const (arg1, type);
10056 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10057 return fold_build2 (MINUS_EXPR, type,
10058 fold_convert (type, arg0),
10059 fold_convert (type, tem));
10060 }
10061
10062 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10063 to __complex__ ( x, y ). This is not the same for SNaNs or
10064 if signed zeros are involved. */
10065 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10066 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10067 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10068 {
10069 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10070 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10071 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10072 bool arg0rz = false, arg0iz = false;
10073 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10074 || (arg0i && (arg0iz = real_zerop (arg0i))))
10075 {
10076 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10077 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10078 if (arg0rz && arg1i && real_zerop (arg1i))
10079 {
10080 tree rp = arg1r ? arg1r
10081 : build1 (REALPART_EXPR, rtype, arg1);
10082 tree ip = arg0i ? arg0i
10083 : build1 (IMAGPART_EXPR, rtype, arg0);
10084 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10085 }
10086 else if (arg0iz && arg1r && real_zerop (arg1r))
10087 {
10088 tree rp = arg0r ? arg0r
10089 : build1 (REALPART_EXPR, rtype, arg0);
10090 tree ip = arg1i ? arg1i
10091 : build1 (IMAGPART_EXPR, rtype, arg1);
10092 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10093 }
10094 }
10095 }
10096
10097 if (flag_unsafe_math_optimizations
10098 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10099 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10100 && (tem = distribute_real_division (code, type, arg0, arg1)))
10101 return tem;
10102
10103 /* Convert x+x into x*2.0. */
10104 if (operand_equal_p (arg0, arg1, 0)
10105 && SCALAR_FLOAT_TYPE_P (type))
10106 return fold_build2 (MULT_EXPR, type, arg0,
10107 build_real (type, dconst2));
10108
10109 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10110 We associate floats only if the user has specified
10111 -fassociative-math. */
10112 if (flag_associative_math
10113 && TREE_CODE (arg1) == PLUS_EXPR
10114 && TREE_CODE (arg0) != MULT_EXPR)
10115 {
10116 tree tree10 = TREE_OPERAND (arg1, 0);
10117 tree tree11 = TREE_OPERAND (arg1, 1);
10118 if (TREE_CODE (tree11) == MULT_EXPR
10119 && TREE_CODE (tree10) == MULT_EXPR)
10120 {
10121 tree tree0;
10122 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10123 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10124 }
10125 }
10126 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10127 We associate floats only if the user has specified
10128 -fassociative-math. */
10129 if (flag_associative_math
10130 && TREE_CODE (arg0) == PLUS_EXPR
10131 && TREE_CODE (arg1) != MULT_EXPR)
10132 {
10133 tree tree00 = TREE_OPERAND (arg0, 0);
10134 tree tree01 = TREE_OPERAND (arg0, 1);
10135 if (TREE_CODE (tree01) == MULT_EXPR
10136 && TREE_CODE (tree00) == MULT_EXPR)
10137 {
10138 tree tree0;
10139 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10140 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10141 }
10142 }
10143 }
10144
10145 bit_rotate:
10146 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10147 is a rotate of A by C1 bits. */
10148 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10149 is a rotate of A by B bits. */
10150 {
10151 enum tree_code code0, code1;
10152 tree rtype;
10153 code0 = TREE_CODE (arg0);
10154 code1 = TREE_CODE (arg1);
10155 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10156 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10157 && operand_equal_p (TREE_OPERAND (arg0, 0),
10158 TREE_OPERAND (arg1, 0), 0)
10159 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10160 TYPE_UNSIGNED (rtype))
10161 /* Only create rotates in complete modes. Other cases are not
10162 expanded properly. */
10163 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10164 {
10165 tree tree01, tree11;
10166 enum tree_code code01, code11;
10167
10168 tree01 = TREE_OPERAND (arg0, 1);
10169 tree11 = TREE_OPERAND (arg1, 1);
10170 STRIP_NOPS (tree01);
10171 STRIP_NOPS (tree11);
10172 code01 = TREE_CODE (tree01);
10173 code11 = TREE_CODE (tree11);
10174 if (code01 == INTEGER_CST
10175 && code11 == INTEGER_CST
10176 && TREE_INT_CST_HIGH (tree01) == 0
10177 && TREE_INT_CST_HIGH (tree11) == 0
10178 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10179 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10180 return fold_convert (type,
10181 build2 (LROTATE_EXPR,
10182 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10183 TREE_OPERAND (arg0, 0),
10184 code0 == LSHIFT_EXPR
10185 ? tree01 : tree11));
10186 else if (code11 == MINUS_EXPR)
10187 {
10188 tree tree110, tree111;
10189 tree110 = TREE_OPERAND (tree11, 0);
10190 tree111 = TREE_OPERAND (tree11, 1);
10191 STRIP_NOPS (tree110);
10192 STRIP_NOPS (tree111);
10193 if (TREE_CODE (tree110) == INTEGER_CST
10194 && 0 == compare_tree_int (tree110,
10195 TYPE_PRECISION
10196 (TREE_TYPE (TREE_OPERAND
10197 (arg0, 0))))
10198 && operand_equal_p (tree01, tree111, 0))
10199 return fold_convert (type,
10200 build2 ((code0 == LSHIFT_EXPR
10201 ? LROTATE_EXPR
10202 : RROTATE_EXPR),
10203 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10204 TREE_OPERAND (arg0, 0), tree01));
10205 }
10206 else if (code01 == MINUS_EXPR)
10207 {
10208 tree tree010, tree011;
10209 tree010 = TREE_OPERAND (tree01, 0);
10210 tree011 = TREE_OPERAND (tree01, 1);
10211 STRIP_NOPS (tree010);
10212 STRIP_NOPS (tree011);
10213 if (TREE_CODE (tree010) == INTEGER_CST
10214 && 0 == compare_tree_int (tree010,
10215 TYPE_PRECISION
10216 (TREE_TYPE (TREE_OPERAND
10217 (arg0, 0))))
10218 && operand_equal_p (tree11, tree011, 0))
10219 return fold_convert (type,
10220 build2 ((code0 != LSHIFT_EXPR
10221 ? LROTATE_EXPR
10222 : RROTATE_EXPR),
10223 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10224 TREE_OPERAND (arg0, 0), tree11));
10225 }
10226 }
10227 }
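/* The shift pairs recognized above, as they appear in source for a
   32-bit unsigned x:

     (x << n) + (x >> (32 - n))  ->  x lrotate n
     (x >> n) + (x << (32 - n))  ->  x rrotate n
     (x << 3) + (x >> 29)        ->  x lrotate 3

   The bit_rotate label is shared with the bitwise-OR form of the
   idiom.  At the C level the symbolic forms require 0 < n < 32,
   since a shift by the full width is undefined.  */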
10228
10229 associate:
10230 /* In most languages, we can't associate operations on floats through
10231 parentheses. Rather than remember where the parentheses were, we
10232 don't associate floats at all, unless the user has specified
10233 -fassociative-math.
10234 And, we need to make sure type is not saturating. */
10235
10236 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10237 && !TYPE_SATURATING (type))
10238 {
10239 tree var0, con0, lit0, minus_lit0;
10240 tree var1, con1, lit1, minus_lit1;
10241 bool ok = true;
10242
10243 /* Split both trees into variables, constants, and literals. Then
10244 associate each group together, the constants with literals,
10245 then the result with variables. This increases the chances of
10246 literals being recombined later and of generating relocatable
10247 expressions for the sum of a constant and literal. */
10248 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10249 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10250 code == MINUS_EXPR);
10251
10252 /* With undefined overflow we can only associate constants
10253 with one variable. */
10254 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10255 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10256 && var0 && var1)
10257 {
10258 tree tmp0 = var0;
10259 tree tmp1 = var1;
10260
10261 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10262 tmp0 = TREE_OPERAND (tmp0, 0);
10263 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10264 tmp1 = TREE_OPERAND (tmp1, 0);
10265 /* The only case we can still associate with two variables
10266 is if they are the same, modulo negation. */
10267 if (!operand_equal_p (tmp0, tmp1, 0))
10268 ok = false;
10269 }
10270
10271 /* Only do something if we found more than two objects. Otherwise,
10272 nothing has changed and we risk infinite recursion. */
10273 if (ok
10274 && (2 < ((var0 != 0) + (var1 != 0)
10275 + (con0 != 0) + (con1 != 0)
10276 + (lit0 != 0) + (lit1 != 0)
10277 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10278 {
10279 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10280 if (code == MINUS_EXPR)
10281 code = PLUS_EXPR;
10282
10283 var0 = associate_trees (var0, var1, code, type);
10284 con0 = associate_trees (con0, con1, code, type);
10285 lit0 = associate_trees (lit0, lit1, code, type);
10286 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10287
10288 /* Preserve the MINUS_EXPR if the negative part of the literal is
10289 greater than the positive part. Otherwise, the multiplicative
10290 folding code (i.e. extract_muldiv) may be fooled when
10291 unsigned constants are subtracted, like in the following
10292 example: ((X*2 + 4) - 8U)/2. */
10293 if (minus_lit0 && lit0)
10294 {
10295 if (TREE_CODE (lit0) == INTEGER_CST
10296 && TREE_CODE (minus_lit0) == INTEGER_CST
10297 && tree_int_cst_lt (lit0, minus_lit0))
10298 {
10299 minus_lit0 = associate_trees (minus_lit0, lit0,
10300 MINUS_EXPR, type);
10301 lit0 = 0;
10302 }
10303 else
10304 {
10305 lit0 = associate_trees (lit0, minus_lit0,
10306 MINUS_EXPR, type);
10307 minus_lit0 = 0;
10308 }
10309 }
10310 if (minus_lit0)
10311 {
10312 if (con0 == 0)
10313 return fold_convert (type,
10314 associate_trees (var0, minus_lit0,
10315 MINUS_EXPR, type));
10316 else
10317 {
10318 con0 = associate_trees (con0, minus_lit0,
10319 MINUS_EXPR, type);
10320 return fold_convert (type,
10321 associate_trees (var0, con0,
10322 PLUS_EXPR, type));
10323 }
10324 }
10325
10326 con0 = associate_trees (con0, lit0, code, type);
10327 return fold_convert (type, associate_trees (var0, con0,
10328 code, type));
10329 }
10330 }
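/* A worked example of the split-and-reassociate step above, for
   unsigned x and y (so reassociation is safe even with wrapping):

     (x + 1) + (y + 2):  var0 = x, lit0 = 1, var1 = y, lit1 = 2
       ->  (x + y) + 3     (four objects, so folding proceeds)

   and of the MINUS_EXPR preservation, from the comment's own example:
   in ((X*2 + 4) - 8U) / 2 we get lit0 = 4, minus_lit0 = 8; since
   8 > 4 the literal part stays subtractive, giving (X*2 - 4U), which
   the division code can still simplify to X - 2.  */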
10331
10332 return NULL_TREE;
10333
10334 case MINUS_EXPR:
10335 /* Pointer simplifications for subtraction, simple reassociations. */
10336 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10337 {
10338 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10339 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10340 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10341 {
10342 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10343 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10344 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10345 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10346 return fold_build2 (PLUS_EXPR, type,
10347 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10348 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10349 }
10350 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10351 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10352 {
10353 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10354 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10355 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10356 if (tmp)
10357 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10358 }
10359 }
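/* Examples of the pointer-difference folds above, for char *p, *q:

     (p p+ a) - (q p+ b)  ->  (p - q) + (a - b)
     (p p+ a) - q         ->  (p - q) + a    (if p - q folds further)
   */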
10360 /* A - (-B) -> A + B */
10361 if (TREE_CODE (arg1) == NEGATE_EXPR)
10362 return fold_build2 (PLUS_EXPR, type, op0,
10363 fold_convert (type, TREE_OPERAND (arg1, 0)));
10364 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10365 if (TREE_CODE (arg0) == NEGATE_EXPR
10366 && (FLOAT_TYPE_P (type)
10367 || INTEGRAL_TYPE_P (type))
10368 && negate_expr_p (arg1)
10369 && reorder_operands_p (arg0, arg1))
10370 return fold_build2 (MINUS_EXPR, type,
10371 fold_convert (type, negate_expr (arg1)),
10372 fold_convert (type, TREE_OPERAND (arg0, 0)));
10373 /* Convert -A - 1 to ~A. */
10374 if (INTEGRAL_TYPE_P (type)
10375 && TREE_CODE (arg0) == NEGATE_EXPR
10376 && integer_onep (arg1)
10377 && !TYPE_OVERFLOW_TRAPS (type))
10378 return fold_build1 (BIT_NOT_EXPR, type,
10379 fold_convert (type, TREE_OPERAND (arg0, 0)));
10380
10381 /* Convert -1 - A to ~A. */
10382 if (INTEGRAL_TYPE_P (type)
10383 && integer_all_onesp (arg0))
10384 return fold_build1 (BIT_NOT_EXPR, type, op1);
10385
10386
10387 /* X - (X / CST) * CST is X % CST. */
10388 if (INTEGRAL_TYPE_P (type)
10389 && TREE_CODE (arg1) == MULT_EXPR
10390 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10391 && operand_equal_p (arg0,
10392 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10393 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10394 TREE_OPERAND (arg1, 1), 0))
10395 return fold_convert (type,
10396 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10397 arg0, TREE_OPERAND (arg1, 1)));
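      /* For illustration: with X = 17 and CST = 5,
	 17 - (17 / 5) * 5 = 17 - 3 * 5 = 2, which is 17 % 5.  */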
10398
10399 if (! FLOAT_TYPE_P (type))
10400 {
10401 if (integer_zerop (arg0))
10402 return negate_expr (fold_convert (type, arg1));
10403 if (integer_zerop (arg1))
10404 return non_lvalue (fold_convert (type, arg0));
10405
10406 /* Fold A - (A & B) into ~B & A. */
10407 if (!TREE_SIDE_EFFECTS (arg0)
10408 && TREE_CODE (arg1) == BIT_AND_EXPR)
10409 {
10410 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10411 {
10412 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10413 return fold_build2 (BIT_AND_EXPR, type,
10414 fold_build1 (BIT_NOT_EXPR, type, arg10),
10415 fold_convert (type, arg0));
10416 }
10417 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10418 {
10419 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10420 return fold_build2 (BIT_AND_EXPR, type,
10421 fold_build1 (BIT_NOT_EXPR, type, arg11),
10422 fold_convert (type, arg0));
10423 }
10424 }
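	  /* For illustration: A & B only keeps bits already set in A, so
	     the subtraction cannot borrow. With A = 0b1101 and B = 0b0110:
	     A - (A & B) = 13 - 4 = 9 = 0b1001 = ~B & A.  */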
10425
10426 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10427 any power of 2 minus 1. */
10428 if (TREE_CODE (arg0) == BIT_AND_EXPR
10429 && TREE_CODE (arg1) == BIT_AND_EXPR
10430 && operand_equal_p (TREE_OPERAND (arg0, 0),
10431 TREE_OPERAND (arg1, 0), 0))
10432 {
10433 tree mask0 = TREE_OPERAND (arg0, 1);
10434 tree mask1 = TREE_OPERAND (arg1, 1);
10435 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10436
10437 if (operand_equal_p (tem, mask1, 0))
10438 {
10439 tem = fold_build2 (BIT_XOR_EXPR, type,
10440 TREE_OPERAND (arg0, 0), mask1);
10441 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10442 }
10443 }
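	  /* For illustration: with A = 0b1010 and the mask B = 0b0011
	     (so mask0 = ~B): (A & ~B) - (A & B) = 8 - 2 = 6, and
	     (A ^ B) - B = 0b1001 - 0b0011 = 9 - 3 = 6.  */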
10444 }
10445
10446 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10447 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10448 return non_lvalue (fold_convert (type, arg0));
10449
10450 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10451 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10452 (-ARG1 + ARG0) reduces to -ARG1. */
10453 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10454 return negate_expr (fold_convert (type, arg1));
10455
10456 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10457 __complex__ ( x, -y ). This is not the same for SNaNs or if
10458 signed zeros are involved. */
10459 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10461 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10462 {
10463 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10464 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10465 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10466 bool arg0rz = false, arg0iz = false;
10467 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10468 || (arg0i && (arg0iz = real_zerop (arg0i))))
10469 {
10470 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10471 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10472 if (arg0rz && arg1i && real_zerop (arg1i))
10473 {
10474 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10475 arg1r ? arg1r
10476 : build1 (REALPART_EXPR, rtype, arg1));
10477 tree ip = arg0i ? arg0i
10478 : build1 (IMAGPART_EXPR, rtype, arg0);
10479 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10480 }
10481 else if (arg0iz && arg1r && real_zerop (arg1r))
10482 {
10483 tree rp = arg0r ? arg0r
10484 : build1 (REALPART_EXPR, rtype, arg0);
10485 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10486 arg1i ? arg1i
10487 : build1 (IMAGPART_EXPR, rtype, arg1));
10488 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10489 }
10490 }
10491 }
10492
10493 /* Fold &x - &x. This can happen from &x.foo - &x.
10494 This is unsafe for certain floats even in non-IEEE formats.
10495 In IEEE, it is unsafe because it does wrong for NaNs.
10496 Also note that operand_equal_p is always false if an operand
10497 is volatile. */
10498
10499 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10500 && operand_equal_p (arg0, arg1, 0))
10501 return fold_convert (type, integer_zero_node);
10502
10503 /* A - B -> A + (-B) if B is easily negatable. */
10504 if (negate_expr_p (arg1)
10505 && ((FLOAT_TYPE_P (type)
10506 /* Avoid this transformation if B is a positive REAL_CST. */
10507 && (TREE_CODE (arg1) != REAL_CST
10508 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10509 || INTEGRAL_TYPE_P (type)))
10510 return fold_build2 (PLUS_EXPR, type,
10511 fold_convert (type, arg0),
10512 fold_convert (type, negate_expr (arg1)));
10513
10514 /* Try folding difference of addresses. */
10515 {
10516 HOST_WIDE_INT diff;
10517
10518 if ((TREE_CODE (arg0) == ADDR_EXPR
10519 || TREE_CODE (arg1) == ADDR_EXPR)
10520 && ptr_difference_const (arg0, arg1, &diff))
10521 return build_int_cst_type (type, diff);
10522 }
10523
10524       /* Fold &a[i] - &a[j] to (i-j) scaled by the element size.  */
10525 if (TREE_CODE (arg0) == ADDR_EXPR
10526 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10527 && TREE_CODE (arg1) == ADDR_EXPR
10528 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10529 {
10530 tree aref0 = TREE_OPERAND (arg0, 0);
10531 tree aref1 = TREE_OPERAND (arg1, 0);
10532 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10533 TREE_OPERAND (aref1, 0), 0))
10534 {
10535 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10536 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10537 tree esz = array_ref_element_size (aref0);
10538 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10539 return fold_build2 (MULT_EXPR, type, diff,
10540 fold_convert (type, esz));
10541
10542 }
10543 }
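      /* For illustration: &a[3] - &a[1] folds to (3 - 1) * sizeof (a[0]);
	 for an array of 4-byte ints that is 2 * 4 = 8, the byte
	 difference between the two addresses.  */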
10544
10545 if (FLOAT_TYPE_P (type)
10546 && flag_unsafe_math_optimizations
10547 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10548 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10549 && (tem = distribute_real_division (code, type, arg0, arg1)))
10550 return tem;
10551
10552 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10553 same or one. Make sure type is not saturating.
10554 fold_plusminus_mult_expr will re-associate. */
10555 if ((TREE_CODE (arg0) == MULT_EXPR
10556 || TREE_CODE (arg1) == MULT_EXPR)
10557 && !TYPE_SATURATING (type)
10558 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10559 {
10560 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10561 if (tem)
10562 return tem;
10563 }
10564
10565 goto associate;
10566
10567 case MULT_EXPR:
10568 /* (-A) * (-B) -> A * B */
10569 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10570 return fold_build2 (MULT_EXPR, type,
10571 fold_convert (type, TREE_OPERAND (arg0, 0)),
10572 fold_convert (type, negate_expr (arg1)));
10573 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10574 return fold_build2 (MULT_EXPR, type,
10575 fold_convert (type, negate_expr (arg0)),
10576 fold_convert (type, TREE_OPERAND (arg1, 0)));
10577
10578 if (! FLOAT_TYPE_P (type))
10579 {
10580 if (integer_zerop (arg1))
10581 return omit_one_operand (type, arg1, arg0);
10582 if (integer_onep (arg1))
10583 return non_lvalue (fold_convert (type, arg0));
10584 /* Transform x * -1 into -x. Make sure to do the negation
10585 on the original operand with conversions not stripped
10586 because we can only strip non-sign-changing conversions. */
10587 if (integer_all_onesp (arg1))
10588 return fold_convert (type, negate_expr (op0));
10589 /* Transform x * -C into -x * C if x is easily negatable. */
10590 if (TREE_CODE (arg1) == INTEGER_CST
10591 && tree_int_cst_sgn (arg1) == -1
10592 && negate_expr_p (arg0)
10593 && (tem = negate_expr (arg1)) != arg1
10594 && !TREE_OVERFLOW (tem))
10595 return fold_build2 (MULT_EXPR, type,
10596 fold_convert (type, negate_expr (arg0)), tem);
10597
10598 /* (a * (1 << b)) is (a << b) */
10599 if (TREE_CODE (arg1) == LSHIFT_EXPR
10600 && integer_onep (TREE_OPERAND (arg1, 0)))
10601 return fold_build2 (LSHIFT_EXPR, type, op0,
10602 TREE_OPERAND (arg1, 1));
10603 if (TREE_CODE (arg0) == LSHIFT_EXPR
10604 && integer_onep (TREE_OPERAND (arg0, 0)))
10605 return fold_build2 (LSHIFT_EXPR, type, op1,
10606 TREE_OPERAND (arg0, 1));
10607
10608 /* (A + A) * C -> A * 2 * C */
10609 if (TREE_CODE (arg0) == PLUS_EXPR
10610 && TREE_CODE (arg1) == INTEGER_CST
10611 && operand_equal_p (TREE_OPERAND (arg0, 0),
10612 TREE_OPERAND (arg0, 1), 0))
10613 return fold_build2 (MULT_EXPR, type,
10614 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10615 TREE_OPERAND (arg0, 1)),
10616 fold_build2 (MULT_EXPR, type,
10617 				 build_int_cst (type, 2), arg1));
10618
10619 strict_overflow_p = false;
10620 if (TREE_CODE (arg1) == INTEGER_CST
10621 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10622 &strict_overflow_p)))
10623 {
10624 if (strict_overflow_p)
10625 fold_overflow_warning (("assuming signed overflow does not "
10626 "occur when simplifying "
10627 "multiplication"),
10628 WARN_STRICT_OVERFLOW_MISC);
10629 return fold_convert (type, tem);
10630 }
10631
10632 /* Optimize z * conj(z) for integer complex numbers. */
10633 if (TREE_CODE (arg0) == CONJ_EXPR
10634 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10635 return fold_mult_zconjz (type, arg1);
10636 if (TREE_CODE (arg1) == CONJ_EXPR
10637 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10638 return fold_mult_zconjz (type, arg0);
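	  /* For illustration: z * conj(z) = (a + bi)(a - bi) = a*a + b*b;
	     e.g. z = 3 + 4i gives 9 + 16 = 25, with zero imaginary
	     part.  */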
10639 }
10640 else
10641 {
10642 /* Maybe fold x * 0 to 0. The expressions aren't the same
10643 when x is NaN, since x * 0 is also NaN. Nor are they the
10644 same in modes with signed zeros, since multiplying a
10645 negative value by 0 gives -0, not +0. */
10646 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10647 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10648 && real_zerop (arg1))
10649 return omit_one_operand (type, arg1, arg0);
10650 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10651 Likewise for complex arithmetic with signed zeros. */
10652 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10653 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10654 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10655 && real_onep (arg1))
10656 return non_lvalue (fold_convert (type, arg0));
10657
10658 /* Transform x * -1.0 into -x. */
10659 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10660 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10661 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10662 && real_minus_onep (arg1))
10663 return fold_convert (type, negate_expr (arg0));
10664
10665 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10666 	     the result for floating point types due to rounding, so it is applied
10667 	     only if -fassociative-math was specified.  */
10668 if (flag_associative_math
10669 && TREE_CODE (arg0) == RDIV_EXPR
10670 && TREE_CODE (arg1) == REAL_CST
10671 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10672 {
10673 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10674 arg1, 0);
10675 if (tem)
10676 return fold_build2 (RDIV_EXPR, type, tem,
10677 TREE_OPERAND (arg0, 1));
10678 }
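	  /* For illustration: with -fassociative-math, (2.0 / x) * 3.0
	     becomes 6.0 / x; computing 2.0 / x first and then multiplying
	     by 3.0 may round differently, hence the flag.  */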
10679
10680 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10681 if (operand_equal_p (arg0, arg1, 0))
10682 {
10683 tree tem = fold_strip_sign_ops (arg0);
10684 if (tem != NULL_TREE)
10685 {
10686 tem = fold_convert (type, tem);
10687 return fold_build2 (MULT_EXPR, type, tem, tem);
10688 }
10689 }
10690
10691 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10692 This is not the same for NaNs or if signed zeros are
10693 involved. */
10694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10695 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10696 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10697 && TREE_CODE (arg1) == COMPLEX_CST
10698 && real_zerop (TREE_REALPART (arg1)))
10699 {
10700 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10701 if (real_onep (TREE_IMAGPART (arg1)))
10702 return fold_build2 (COMPLEX_EXPR, type,
10703 negate_expr (fold_build1 (IMAGPART_EXPR,
10704 rtype, arg0)),
10705 fold_build1 (REALPART_EXPR, rtype, arg0));
10706 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10707 return fold_build2 (COMPLEX_EXPR, type,
10708 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10709 negate_expr (fold_build1 (REALPART_EXPR,
10710 rtype, arg0)));
10711 }
10712
10713 /* Optimize z * conj(z) for floating point complex numbers.
10714 Guarded by flag_unsafe_math_optimizations as non-finite
10715 imaginary components don't produce scalar results. */
10716 if (flag_unsafe_math_optimizations
10717 && TREE_CODE (arg0) == CONJ_EXPR
10718 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10719 return fold_mult_zconjz (type, arg1);
10720 if (flag_unsafe_math_optimizations
10721 && TREE_CODE (arg1) == CONJ_EXPR
10722 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10723 return fold_mult_zconjz (type, arg0);
10724
10725 if (flag_unsafe_math_optimizations)
10726 {
10727 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10728 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10729
10730 /* Optimizations of root(...)*root(...). */
10731 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10732 {
10733 tree rootfn, arg;
10734 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10735 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10736
10737 /* Optimize sqrt(x)*sqrt(x) as x. */
10738 if (BUILTIN_SQRT_P (fcode0)
10739 && operand_equal_p (arg00, arg10, 0)
10740 && ! HONOR_SNANS (TYPE_MODE (type)))
10741 return arg00;
10742
10743 /* Optimize root(x)*root(y) as root(x*y). */
10744 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10745 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10746 return build_call_expr (rootfn, 1, arg);
10747 }
10748
10749 /* Optimize expN(x)*expN(y) as expN(x+y). */
10750 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10751 {
10752 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10753 tree arg = fold_build2 (PLUS_EXPR, type,
10754 CALL_EXPR_ARG (arg0, 0),
10755 CALL_EXPR_ARG (arg1, 0));
10756 return build_call_expr (expfn, 1, arg);
10757 }
10758
10759 /* Optimizations of pow(...)*pow(...). */
10760 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10761 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10762 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10763 {
10764 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10765 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10766 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10767 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10768
10769 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10770 if (operand_equal_p (arg01, arg11, 0))
10771 {
10772 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10773 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10774 return build_call_expr (powfn, 2, arg, arg01);
10775 }
10776
10777 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10778 if (operand_equal_p (arg00, arg10, 0))
10779 {
10780 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10781 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10782 return build_call_expr (powfn, 2, arg00, arg);
10783 }
10784 }
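	      /* For illustration: pow (x, 2.0) * pow (y, 2.0) becomes
		 pow (x*y, 2.0), and pow (x, 2.0) * pow (x, 3.0) becomes
		 pow (x, 5.0).  */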
10785
10786 /* Optimize tan(x)*cos(x) as sin(x). */
10787 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10788 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10789 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10790 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10791 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10792 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10793 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10794 CALL_EXPR_ARG (arg1, 0), 0))
10795 {
10796 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10797
10798 if (sinfn != NULL_TREE)
10799 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10800 }
10801
10802 /* Optimize x*pow(x,c) as pow(x,c+1). */
10803 if (fcode1 == BUILT_IN_POW
10804 || fcode1 == BUILT_IN_POWF
10805 || fcode1 == BUILT_IN_POWL)
10806 {
10807 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10808 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10809 if (TREE_CODE (arg11) == REAL_CST
10810 && !TREE_OVERFLOW (arg11)
10811 && operand_equal_p (arg0, arg10, 0))
10812 {
10813 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10814 REAL_VALUE_TYPE c;
10815 tree arg;
10816
10817 c = TREE_REAL_CST (arg11);
10818 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10819 arg = build_real (type, c);
10820 return build_call_expr (powfn, 2, arg0, arg);
10821 }
10822 }
10823
10824 /* Optimize pow(x,c)*x as pow(x,c+1). */
10825 if (fcode0 == BUILT_IN_POW
10826 || fcode0 == BUILT_IN_POWF
10827 || fcode0 == BUILT_IN_POWL)
10828 {
10829 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10830 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10831 if (TREE_CODE (arg01) == REAL_CST
10832 && !TREE_OVERFLOW (arg01)
10833 && operand_equal_p (arg1, arg00, 0))
10834 {
10835 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10836 REAL_VALUE_TYPE c;
10837 tree arg;
10838
10839 c = TREE_REAL_CST (arg01);
10840 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10841 arg = build_real (type, c);
10842 return build_call_expr (powfn, 2, arg1, arg);
10843 }
10844 }
10845
10846 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10847 if (optimize_function_for_speed_p (cfun)
10848 && operand_equal_p (arg0, arg1, 0))
10849 {
10850 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10851
10852 if (powfn)
10853 {
10854 tree arg = build_real (type, dconst2);
10855 return build_call_expr (powfn, 2, arg0, arg);
10856 }
10857 }
10858 }
10859 }
10860 goto associate;
10861
10862 case BIT_IOR_EXPR:
10863 bit_ior:
10864 if (integer_all_onesp (arg1))
10865 return omit_one_operand (type, arg1, arg0);
10866 if (integer_zerop (arg1))
10867 return non_lvalue (fold_convert (type, arg0));
10868 if (operand_equal_p (arg0, arg1, 0))
10869 return non_lvalue (fold_convert (type, arg0));
10870
10871 /* ~X | X is -1. */
10872 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10873 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10874 {
10875 t1 = fold_convert (type, integer_zero_node);
10876 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10877 return omit_one_operand (type, t1, arg1);
10878 }
10879
10880 /* X | ~X is -1. */
10881 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10882 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10883 {
10884 t1 = fold_convert (type, integer_zero_node);
10885 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10886 return omit_one_operand (type, t1, arg0);
10887 }
10888
10889 /* Canonicalize (X & C1) | C2. */
10890 if (TREE_CODE (arg0) == BIT_AND_EXPR
10891 && TREE_CODE (arg1) == INTEGER_CST
10892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10893 {
10894 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10895 int width = TYPE_PRECISION (type), w;
10896 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10897 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10898 hi2 = TREE_INT_CST_HIGH (arg1);
10899 lo2 = TREE_INT_CST_LOW (arg1);
10900
10901 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10902 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10903 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10904
10905 if (width > HOST_BITS_PER_WIDE_INT)
10906 {
10907 mhi = (unsigned HOST_WIDE_INT) -1
10908 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10909 mlo = -1;
10910 }
10911 else
10912 {
10913 mhi = 0;
10914 mlo = (unsigned HOST_WIDE_INT) -1
10915 >> (HOST_BITS_PER_WIDE_INT - width);
10916 }
10917
10918 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10919 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10920 return fold_build2 (BIT_IOR_EXPR, type,
10921 TREE_OPERAND (arg0, 0), arg1);
10922
10923 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10924 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10925 mode which allows further optimizations. */
10926 hi1 &= mhi;
10927 lo1 &= mlo;
10928 hi2 &= mhi;
10929 lo2 &= mlo;
10930 hi3 = hi1 & ~hi2;
10931 lo3 = lo1 & ~lo2;
10932 for (w = BITS_PER_UNIT;
10933 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10934 w <<= 1)
10935 {
10936 unsigned HOST_WIDE_INT mask
10937 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10938 if (((lo1 | lo2) & mask) == mask
10939 && (lo1 & ~mask) == 0 && hi1 == 0)
10940 {
10941 hi3 = 0;
10942 lo3 = mask;
10943 break;
10944 }
10945 }
10946 if (hi3 != hi1 || lo3 != lo1)
10947 return fold_build2 (BIT_IOR_EXPR, type,
10948 fold_build2 (BIT_AND_EXPR, type,
10949 TREE_OPERAND (arg0, 0),
10950 build_int_cst_wide (type,
10951 lo3, hi3)),
10952 arg1);
10953 }
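      /* For illustration: (X & 0x0F) | 0x03 becomes (X & 0x0C) | 0x03
	 (C1 := C1 & ~C2), while (X & 0xF0) | 0x0F becomes
	 (X & 0xFF) | 0x0F, since 0xFF is a mode mask that may enable
	 further folds.  */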
10954
10955 /* (X & Y) | Y is (X, Y). */
10956 if (TREE_CODE (arg0) == BIT_AND_EXPR
10957 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10958 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10959 /* (X & Y) | X is (Y, X). */
10960 if (TREE_CODE (arg0) == BIT_AND_EXPR
10961 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10962 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10963 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10964 /* X | (X & Y) is (Y, X). */
10965 if (TREE_CODE (arg1) == BIT_AND_EXPR
10966 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10967 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10968 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10969 /* X | (Y & X) is (Y, X). */
10970 if (TREE_CODE (arg1) == BIT_AND_EXPR
10971 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10972 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10973 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10974
10975 t1 = distribute_bit_expr (code, type, arg0, arg1);
10976 if (t1 != NULL_TREE)
10977 return t1;
10978
10979 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10980
10981 This results in more efficient code for machines without a NAND
10982 instruction. Combine will canonicalize to the first form
10983 which will allow use of NAND instructions provided by the
10984 backend if they exist. */
10985 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10986 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10987 {
10988 return fold_build1 (BIT_NOT_EXPR, type,
10989 build2 (BIT_AND_EXPR, type,
10990 fold_convert (type,
10991 TREE_OPERAND (arg0, 0)),
10992 fold_convert (type,
10993 TREE_OPERAND (arg1, 0))));
10994 }
10995
10996 /* See if this can be simplified into a rotate first. If that
10997 is unsuccessful continue in the association code. */
10998 goto bit_rotate;
10999
11000 case BIT_XOR_EXPR:
11001 if (integer_zerop (arg1))
11002 return non_lvalue (fold_convert (type, arg0));
11003 if (integer_all_onesp (arg1))
11004 return fold_build1 (BIT_NOT_EXPR, type, op0);
11005 if (operand_equal_p (arg0, arg1, 0))
11006 return omit_one_operand (type, integer_zero_node, arg0);
11007
11008 /* ~X ^ X is -1. */
11009 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11010 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11011 {
11012 t1 = fold_convert (type, integer_zero_node);
11013 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11014 return omit_one_operand (type, t1, arg1);
11015 }
11016
11017 /* X ^ ~X is -1. */
11018 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11019 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11020 {
11021 t1 = fold_convert (type, integer_zero_node);
11022 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11023 return omit_one_operand (type, t1, arg0);
11024 }
11025
11026 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11027 with a constant, and the two constants have no bits in common,
11028 we should treat this as a BIT_IOR_EXPR since this may produce more
11029 simplifications. */
11030 if (TREE_CODE (arg0) == BIT_AND_EXPR
11031 && TREE_CODE (arg1) == BIT_AND_EXPR
11032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11033 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11034 && integer_zerop (const_binop (BIT_AND_EXPR,
11035 TREE_OPERAND (arg0, 1),
11036 TREE_OPERAND (arg1, 1), 0)))
11037 {
11038 code = BIT_IOR_EXPR;
11039 goto bit_ior;
11040 }
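      /* For illustration: (X & 0x0F) ^ (Y & 0xF0) has no common bits, so
	 it is treated as (X & 0x0F) | (Y & 0xF0); e.g.
	 0x0A ^ 0x50 == 0x0A | 0x50 == 0x5A.  */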
11041
11042       /* (X | Y) ^ X -> Y & ~X.  */
11043 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11044 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11045 {
11046 tree t2 = TREE_OPERAND (arg0, 1);
11047 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11048 arg1);
11049 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11050 fold_convert (type, t1));
11051 return t1;
11052 }
11053
11054       /* (Y | X) ^ X -> Y & ~X.  */
11055 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11056 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11057 {
11058 tree t2 = TREE_OPERAND (arg0, 0);
11059 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11060 arg1);
11061 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11062 fold_convert (type, t1));
11063 return t1;
11064 }
11065
11066       /* X ^ (X | Y) -> Y & ~X.  */
11067 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11068 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11069 {
11070 tree t2 = TREE_OPERAND (arg1, 1);
11071 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11072 arg0);
11073 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11074 fold_convert (type, t1));
11075 return t1;
11076 }
11077
11078       /* X ^ (Y | X) -> Y & ~X.  */
11079 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11080 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11081 {
11082 tree t2 = TREE_OPERAND (arg1, 0);
11083 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11084 arg0);
11085 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11086 fold_convert (type, t1));
11087 return t1;
11088 }
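      /* For illustration of the four folds above: with X = 0b0011 and
	 Y = 0b0101, (X | Y) ^ X = 0b0111 ^ 0b0011 = 0b0100, and
	 Y & ~X = 0b0101 & 0b1100 = 0b0100.  */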
11089
11090 /* Convert ~X ^ ~Y to X ^ Y. */
11091 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11092 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11093 return fold_build2 (code, type,
11094 fold_convert (type, TREE_OPERAND (arg0, 0)),
11095 fold_convert (type, TREE_OPERAND (arg1, 0)));
11096
11097 /* Convert ~X ^ C to X ^ ~C. */
11098 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11099 && TREE_CODE (arg1) == INTEGER_CST)
11100 return fold_build2 (code, type,
11101 fold_convert (type, TREE_OPERAND (arg0, 0)),
11102 fold_build1 (BIT_NOT_EXPR, type, arg1));
11103
11104 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11105 if (TREE_CODE (arg0) == BIT_AND_EXPR
11106 && integer_onep (TREE_OPERAND (arg0, 1))
11107 && integer_onep (arg1))
11108 return fold_build2 (EQ_EXPR, type, arg0,
11109 build_int_cst (TREE_TYPE (arg0), 0));
11110
11111 /* Fold (X & Y) ^ Y as ~X & Y. */
11112 if (TREE_CODE (arg0) == BIT_AND_EXPR
11113 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11114 {
11115 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11116 return fold_build2 (BIT_AND_EXPR, type,
11117 fold_build1 (BIT_NOT_EXPR, type, tem),
11118 fold_convert (type, arg1));
11119 }
11120 /* Fold (X & Y) ^ X as ~Y & X. */
11121 if (TREE_CODE (arg0) == BIT_AND_EXPR
11122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11123 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11124 {
11125 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11126 return fold_build2 (BIT_AND_EXPR, type,
11127 fold_build1 (BIT_NOT_EXPR, type, tem),
11128 fold_convert (type, arg1));
11129 }
11130 /* Fold X ^ (X & Y) as X & ~Y. */
11131 if (TREE_CODE (arg1) == BIT_AND_EXPR
11132 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11133 {
11134 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11135 return fold_build2 (BIT_AND_EXPR, type,
11136 fold_convert (type, arg0),
11137 fold_build1 (BIT_NOT_EXPR, type, tem));
11138 }
11139 /* Fold X ^ (Y & X) as ~Y & X. */
11140 if (TREE_CODE (arg1) == BIT_AND_EXPR
11141 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11142 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11143 {
11144 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11145 return fold_build2 (BIT_AND_EXPR, type,
11146 fold_build1 (BIT_NOT_EXPR, type, tem),
11147 fold_convert (type, arg0));
11148 }
11149
11150 /* See if this can be simplified into a rotate first. If that
11151 is unsuccessful continue in the association code. */
11152 goto bit_rotate;
11153
11154 case BIT_AND_EXPR:
11155 if (integer_all_onesp (arg1))
11156 return non_lvalue (fold_convert (type, arg0));
11157 if (integer_zerop (arg1))
11158 return omit_one_operand (type, arg1, arg0);
11159 if (operand_equal_p (arg0, arg1, 0))
11160 return non_lvalue (fold_convert (type, arg0));
11161
11162 /* ~X & X is always zero. */
11163 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11164 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11165 return omit_one_operand (type, integer_zero_node, arg1);
11166
11167 /* X & ~X is always zero. */
11168 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11169 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11170 return omit_one_operand (type, integer_zero_node, arg0);
11171
11172 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11173 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11174 && TREE_CODE (arg1) == INTEGER_CST
11175 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11176 {
11177 tree tmp1 = fold_convert (type, arg1);
11178 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11179 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11180 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11181 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11182 return fold_convert (type,
11183 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11184 }
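      /* For illustration: (X | 0x0F) & 0xFC = (X & 0xFC) | (0x0F & 0xFC)
	 = (X & 0xFC) | 0x0C, by distributing the AND over the OR.  */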
11185
11186 /* (X | Y) & Y is (X, Y). */
11187 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11188 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11189 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11190 /* (X | Y) & X is (Y, X). */
11191 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11192 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11193 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11194 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11195 /* X & (X | Y) is (Y, X). */
11196 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11197 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11198 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11199 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11200 /* X & (Y | X) is (Y, X). */
11201 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11202 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11203 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11204 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11205
11206 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11207 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11208 && integer_onep (TREE_OPERAND (arg0, 1))
11209 && integer_onep (arg1))
11210 {
11211 tem = TREE_OPERAND (arg0, 0);
11212 return fold_build2 (EQ_EXPR, type,
11213 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11214 build_int_cst (TREE_TYPE (tem), 1)),
11215 build_int_cst (TREE_TYPE (tem), 0));
11216 }
11217 /* Fold ~X & 1 as (X & 1) == 0. */
11218 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11219 && integer_onep (arg1))
11220 {
11221 tem = TREE_OPERAND (arg0, 0);
11222 return fold_build2 (EQ_EXPR, type,
11223 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11224 build_int_cst (TREE_TYPE (tem), 1)),
11225 build_int_cst (TREE_TYPE (tem), 0));
11226 }
11227
11228 /* Fold (X ^ Y) & Y as ~X & Y. */
11229 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11230 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11231 {
11232 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11233 return fold_build2 (BIT_AND_EXPR, type,
11234 fold_build1 (BIT_NOT_EXPR, type, tem),
11235 fold_convert (type, arg1));
11236 }
11237 /* Fold (X ^ Y) & X as ~Y & X. */
11238 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11239 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11240 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11241 {
11242 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11243 return fold_build2 (BIT_AND_EXPR, type,
11244 fold_build1 (BIT_NOT_EXPR, type, tem),
11245 fold_convert (type, arg1));
11246 }
11247 /* Fold X & (X ^ Y) as X & ~Y. */
11248 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11249 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11250 {
11251 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11252 return fold_build2 (BIT_AND_EXPR, type,
11253 fold_convert (type, arg0),
11254 fold_build1 (BIT_NOT_EXPR, type, tem));
11255 }
11256 /* Fold X & (Y ^ X) as ~Y & X. */
11257 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11258 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11259 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11260 {
11261 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11262 return fold_build2 (BIT_AND_EXPR, type,
11263 fold_build1 (BIT_NOT_EXPR, type, tem),
11264 fold_convert (type, arg0));
11265 }
11266
11267 t1 = distribute_bit_expr (code, type, arg0, arg1);
11268 if (t1 != NULL_TREE)
11269 return t1;
11270 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11271 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11272 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11273 {
11274 unsigned int prec
11275 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11276
11277 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11278 && (~TREE_INT_CST_LOW (arg1)
11279 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11280 return fold_convert (type, TREE_OPERAND (arg0, 0));
11281 }
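      /* For illustration: for unsigned char c, (int) c is already in
	 [0, 255], so (int) c & 0377 keeps every bit and folds to
	 (int) c.  */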
11282
11283 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11284
11285 This results in more efficient code for machines without a NOR
11286 instruction. Combine will canonicalize to the first form
11287 which will allow use of NOR instructions provided by the
11288 backend if they exist. */
11289 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11290 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11291 {
11292 return fold_build1 (BIT_NOT_EXPR, type,
11293 build2 (BIT_IOR_EXPR, type,
11294 fold_convert (type,
11295 TREE_OPERAND (arg0, 0)),
11296 fold_convert (type,
11297 TREE_OPERAND (arg1, 0))));
11298 }
11299
11300 /* If arg0 is derived from the address of an object or function, we may
11301 be able to fold this expression using the object or function's
11302 alignment. */
11303 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11304 {
11305 unsigned HOST_WIDE_INT modulus, residue;
11306 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11307
11308 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11309 integer_onep (arg1));
11310
11311 /* This works because modulus is a power of 2. If this weren't the
11312 case, we'd have to replace it by its greatest power-of-2
11313 divisor: modulus & -modulus. */
11314 if (low < modulus)
11315 return build_int_cst (type, residue & low);
11316 }
11317
11318 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11319 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11320 if the new mask might be further optimized. */
11321 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11322 || TREE_CODE (arg0) == RSHIFT_EXPR)
11323 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11324 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11325 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11326 < TYPE_PRECISION (TREE_TYPE (arg0))
11327 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11328 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11329 {
11330 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11331 unsigned HOST_WIDE_INT mask
11332 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11333 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11334 tree shift_type = TREE_TYPE (arg0);
11335
11336 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11337 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11338 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11339 && TYPE_PRECISION (TREE_TYPE (arg0))
11340 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11341 {
11342 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11343 tree arg00 = TREE_OPERAND (arg0, 0);
11344 /* See if more bits can be proven as zero because of
11345 zero extension. */
11346 if (TREE_CODE (arg00) == NOP_EXPR
11347 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11348 {
11349 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11350 if (TYPE_PRECISION (inner_type)
11351 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11352 && TYPE_PRECISION (inner_type) < prec)
11353 {
11354 prec = TYPE_PRECISION (inner_type);
11355 /* See if we can shorten the right shift. */
11356 if (shiftc < prec)
11357 shift_type = inner_type;
11358 }
11359 }
11360 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11361 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11362 zerobits <<= prec - shiftc;
11363 	      /* For an arithmetic shift, if the sign bit could be set, zerobits
11364 		 can actually contain sign bits, so no transformation is
11365 		 possible, unless MASK masks them all away.  In that
11366 		 case the shift needs to be converted into a logical shift.  */
11367 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11368 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11369 {
11370 if ((mask & zerobits) == 0)
11371 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11372 else
11373 zerobits = 0;
11374 }
11375 }
11376
11377 /* ((X << 16) & 0xff00) is (X, 0). */
11378 if ((mask & zerobits) == mask)
11379 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11380
11381 newmask = mask | zerobits;
11382 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11383 {
11384 unsigned int prec;
11385
11386 /* Only do the transformation if NEWMASK is some integer
11387 mode's mask. */
11388 for (prec = BITS_PER_UNIT;
11389 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11390 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11391 break;
11392 if (prec < HOST_BITS_PER_WIDE_INT
11393 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11394 {
11395 tree newmaskt;
11396
11397 if (shift_type != TREE_TYPE (arg0))
11398 {
11399 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11400 fold_convert (shift_type,
11401 TREE_OPERAND (arg0, 0)),
11402 TREE_OPERAND (arg0, 1));
11403 tem = fold_convert (type, tem);
11404 }
11405 else
11406 tem = op0;
11407 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11408 if (!tree_int_cst_equal (newmaskt, arg1))
11409 return fold_build2 (BIT_AND_EXPR, type, tem, newmaskt);
11410 }
11411 }
11412 }
11413
11414 goto associate;
11415
11416 case RDIV_EXPR:
11417 /* Don't touch a floating-point divide by zero unless the mode
11418 of the constant can represent infinity. */
11419 if (TREE_CODE (arg1) == REAL_CST
11420 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11421 && real_zerop (arg1))
11422 return NULL_TREE;
11423
11424 /* Optimize A / A to 1.0 if we don't care about
11425 NaNs or Infinities. Skip the transformation
11426 for non-real operands. */
11427 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11428 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11429 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11430 && operand_equal_p (arg0, arg1, 0))
11431 {
11432 tree r = build_real (TREE_TYPE (arg0), dconst1);
11433
11434 return omit_two_operands (type, r, arg0, arg1);
11435 }
11436
11437 /* The complex version of the above A / A optimization. */
11438 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11439 && operand_equal_p (arg0, arg1, 0))
11440 {
11441 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11442 if (! HONOR_NANS (TYPE_MODE (elem_type))
11443 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11444 {
11445 tree r = build_real (elem_type, dconst1);
11446 /* omit_two_operands will call fold_convert for us. */
11447 return omit_two_operands (type, r, arg0, arg1);
11448 }
11449 }
11450
11451 /* (-A) / (-B) -> A / B */
11452 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11453 return fold_build2 (RDIV_EXPR, type,
11454 TREE_OPERAND (arg0, 0),
11455 negate_expr (arg1));
11456 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11457 return fold_build2 (RDIV_EXPR, type,
11458 negate_expr (arg0),
11459 TREE_OPERAND (arg1, 0));
11460
11461 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11462 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11463 && real_onep (arg1))
11464 return non_lvalue (fold_convert (type, arg0));
11465
11466 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11467 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11468 && real_minus_onep (arg1))
11469 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11470
11471 /* If ARG1 is a constant, we can convert this to a multiply by the
11472 reciprocal. This does not have the same rounding properties,
11473 so only do this if -freciprocal-math. We can actually
11474 always safely do it if ARG1 is a power of two, but it's hard to
11475 tell if it is or not in a portable manner. */
11476 if (TREE_CODE (arg1) == REAL_CST)
11477 {
11478 if (flag_reciprocal_math
11479 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11480 arg1, 0)))
11481 return fold_build2 (MULT_EXPR, type, arg0, tem);
11482 /* Find the reciprocal if optimizing and the result is exact. */
11483 if (optimize)
11484 {
11485 REAL_VALUE_TYPE r;
11486 r = TREE_REAL_CST (arg1);
11487 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11488 {
11489 tem = build_real (type, r);
11490 return fold_build2 (MULT_EXPR, type,
11491 fold_convert (type, arg0), tem);
11492 }
11493 }
11494 }
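      /* For illustration: x / 4.0 becomes x * 0.25, which is exact since
	 4.0 is a power of two; x / 3.0 => x * (1.0/3.0) can round
	 differently and so is only done under -freciprocal-math.  */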
11495 /* Convert A/B/C to A/(B*C). */
11496 if (flag_reciprocal_math
11497 && TREE_CODE (arg0) == RDIV_EXPR)
11498 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11499 fold_build2 (MULT_EXPR, type,
11500 TREE_OPERAND (arg0, 1), arg1));
11501
11502 /* Convert A/(B/C) to (A/B)*C. */
11503 if (flag_reciprocal_math
11504 && TREE_CODE (arg1) == RDIV_EXPR)
11505 return fold_build2 (MULT_EXPR, type,
11506 fold_build2 (RDIV_EXPR, type, arg0,
11507 TREE_OPERAND (arg1, 0)),
11508 TREE_OPERAND (arg1, 1));
11509
11510 /* Convert C1/(X*C2) into (C1/C2)/X. */
11511 if (flag_reciprocal_math
11512 && TREE_CODE (arg1) == MULT_EXPR
11513 && TREE_CODE (arg0) == REAL_CST
11514 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11515 {
11516 tree tem = const_binop (RDIV_EXPR, arg0,
11517 TREE_OPERAND (arg1, 1), 0);
11518 if (tem)
11519 return fold_build2 (RDIV_EXPR, type, tem,
11520 TREE_OPERAND (arg1, 0));
11521 }
11522
11523 if (flag_unsafe_math_optimizations)
11524 {
11525 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11526 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11527
11528 /* Optimize sin(x)/cos(x) as tan(x). */
11529 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11530 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11531 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11532 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11533 CALL_EXPR_ARG (arg1, 0), 0))
11534 {
11535 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11536
11537 if (tanfn != NULL_TREE)
11538 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11539 }
11540
11541 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11542 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11543 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11544 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11545 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11546 CALL_EXPR_ARG (arg1, 0), 0))
11547 {
11548 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11549
11550 if (tanfn != NULL_TREE)
11551 {
11552 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11553 return fold_build2 (RDIV_EXPR, type,
11554 build_real (type, dconst1), tmp);
11555 }
11556 }
11557
11558 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11559 NaNs or Infinities. */
11560 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11561 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11562 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11563 {
11564 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11565 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11566
11567 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11568 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11569 && operand_equal_p (arg00, arg01, 0))
11570 {
11571 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11572
11573 if (cosfn != NULL_TREE)
11574 return build_call_expr (cosfn, 1, arg00);
11575 }
11576 }
11577
11578 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11579 NaNs or Infinities. */
11580 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11581 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11582 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11583 {
11584 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11585 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11586
11587 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11588 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11589 && operand_equal_p (arg00, arg01, 0))
11590 {
11591 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11592
11593 if (cosfn != NULL_TREE)
11594 {
11595 tree tmp = build_call_expr (cosfn, 1, arg00);
11596 return fold_build2 (RDIV_EXPR, type,
11597 build_real (type, dconst1),
11598 tmp);
11599 }
11600 }
11601 }
11602
11603 /* Optimize pow(x,c)/x as pow(x,c-1). */
11604 if (fcode0 == BUILT_IN_POW
11605 || fcode0 == BUILT_IN_POWF
11606 || fcode0 == BUILT_IN_POWL)
11607 {
11608 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11609 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11610 if (TREE_CODE (arg01) == REAL_CST
11611 && !TREE_OVERFLOW (arg01)
11612 && operand_equal_p (arg1, arg00, 0))
11613 {
11614 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11615 REAL_VALUE_TYPE c;
11616 tree arg;
11617
11618 c = TREE_REAL_CST (arg01);
11619 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11620 arg = build_real (type, c);
11621 return build_call_expr (powfn, 2, arg1, arg);
11622 }
11623 }
11624
11625 /* Optimize a/root(b/c) into a*root(c/b). */
11626 if (BUILTIN_ROOT_P (fcode1))
11627 {
11628 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11629
11630 if (TREE_CODE (rootarg) == RDIV_EXPR)
11631 {
11632 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11633 tree b = TREE_OPERAND (rootarg, 0);
11634 tree c = TREE_OPERAND (rootarg, 1);
11635
11636 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11637
11638 tmp = build_call_expr (rootfn, 1, tmp);
11639 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11640 }
11641 }
11642
11643 /* Optimize x/expN(y) into x*expN(-y). */
11644 if (BUILTIN_EXPONENT_P (fcode1))
11645 {
11646 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11647 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11648 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11649 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11650 }
11651
11652 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11653 if (fcode1 == BUILT_IN_POW
11654 || fcode1 == BUILT_IN_POWF
11655 || fcode1 == BUILT_IN_POWL)
11656 {
11657 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11658 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11659 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11660 tree neg11 = fold_convert (type, negate_expr (arg11));
11661 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11662 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11663 }
11664 }
11665 return NULL_TREE;
11666
11667 case TRUNC_DIV_EXPR:
11668 case FLOOR_DIV_EXPR:
11669 /* Simplify A / (B << N) where A and B are positive and B is
11670 a power of 2, to A >> (N + log2(B)). */
11671 strict_overflow_p = false;
11672 if (TREE_CODE (arg1) == LSHIFT_EXPR
11673 && (TYPE_UNSIGNED (type)
11674 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11675 {
11676 tree sval = TREE_OPERAND (arg1, 0);
11677 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11678 {
11679 tree sh_cnt = TREE_OPERAND (arg1, 1);
11680 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11681
11682 if (strict_overflow_p)
11683 fold_overflow_warning (("assuming signed overflow does not "
11684 "occur when simplifying A / (B << N)"),
11685 WARN_STRICT_OVERFLOW_MISC);
11686
11687 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11688 sh_cnt, build_int_cst (NULL_TREE, pow2));
11689 return fold_build2 (RSHIFT_EXPR, type,
11690 fold_convert (type, arg0), sh_cnt);
11691 }
11692 }
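      /* For illustration: with A >= 0, A / (2 << 3) = A / 16 =
	 A >> (3 + log2 (2)) = A >> 4.  */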
11693
11694 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11695 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11696 if (INTEGRAL_TYPE_P (type)
11697 && TYPE_UNSIGNED (type)
11698 && code == FLOOR_DIV_EXPR)
11699 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11700
11701       /* Fall through.  */
11702
11703 case ROUND_DIV_EXPR:
11704 case CEIL_DIV_EXPR:
11705 case EXACT_DIV_EXPR:
11706 if (integer_onep (arg1))
11707 return non_lvalue (fold_convert (type, arg0));
11708 if (integer_zerop (arg1))
11709 return NULL_TREE;
11710 /* X / -1 is -X. */
11711 if (!TYPE_UNSIGNED (type)
11712 && TREE_CODE (arg1) == INTEGER_CST
11713 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11714 && TREE_INT_CST_HIGH (arg1) == -1)
11715 return fold_convert (type, negate_expr (arg0));
11716
11717 /* Convert -A / -B to A / B when the type is signed and overflow is
11718 undefined. */
11719 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11720 && TREE_CODE (arg0) == NEGATE_EXPR
11721 && negate_expr_p (arg1))
11722 {
11723 if (INTEGRAL_TYPE_P (type))
11724 fold_overflow_warning (("assuming signed overflow does not occur "
11725 "when distributing negation across "
11726 "division"),
11727 WARN_STRICT_OVERFLOW_MISC);
11728 return fold_build2 (code, type,
11729 fold_convert (type, TREE_OPERAND (arg0, 0)),
11730 fold_convert (type, negate_expr (arg1)));
11731 }
11732 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11733 && TREE_CODE (arg1) == NEGATE_EXPR
11734 && negate_expr_p (arg0))
11735 {
11736 if (INTEGRAL_TYPE_P (type))
11737 fold_overflow_warning (("assuming signed overflow does not occur "
11738 "when distributing negation across "
11739 "division"),
11740 WARN_STRICT_OVERFLOW_MISC);
11741 return fold_build2 (code, type,
11742 fold_convert (type, negate_expr (arg0)),
11743 fold_convert (type, TREE_OPERAND (arg1, 0)));
11744 }
11745
11746 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11747 operation, EXACT_DIV_EXPR.
11748
11749 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11750 	 At one time others generated faster code, but it's not clear if they do
11751 	 after the last round of changes to the DIV code in expmed.c.  */
11752 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11753 && multiple_of_p (type, arg0, arg1))
11754 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11755
11756 strict_overflow_p = false;
11757 if (TREE_CODE (arg1) == INTEGER_CST
11758 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11759 &strict_overflow_p)))
11760 {
11761 if (strict_overflow_p)
11762 fold_overflow_warning (("assuming signed overflow does not occur "
11763 "when simplifying division"),
11764 WARN_STRICT_OVERFLOW_MISC);
11765 return fold_convert (type, tem);
11766 }
11767
11768 return NULL_TREE;
11769
11770 case CEIL_MOD_EXPR:
11771 case FLOOR_MOD_EXPR:
11772 case ROUND_MOD_EXPR:
11773 case TRUNC_MOD_EXPR:
11774 /* X % 1 is always zero, but be sure to preserve any side
11775 effects in X. */
11776 if (integer_onep (arg1))
11777 return omit_one_operand (type, integer_zero_node, arg0);
11778
11779       /* For X % 0, return X % 0 unchanged so that we can get the
11780 proper warnings and errors. */
11781 if (integer_zerop (arg1))
11782 return NULL_TREE;
11783
11784 /* 0 % X is always zero, but be sure to preserve any side
11785 effects in X. Place this after checking for X == 0. */
11786 if (integer_zerop (arg0))
11787 return omit_one_operand (type, integer_zero_node, arg1);
11788
11789 /* X % -1 is zero. */
11790 if (!TYPE_UNSIGNED (type)
11791 && TREE_CODE (arg1) == INTEGER_CST
11792 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11793 && TREE_INT_CST_HIGH (arg1) == -1)
11794 return omit_one_operand (type, integer_zero_node, arg0);
11795
11796 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11797 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11798 strict_overflow_p = false;
11799 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11800 && (TYPE_UNSIGNED (type)
11801 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11802 {
11803 tree c = arg1;
11804 /* Also optimize A % (C << N) where C is a power of 2,
11805 to A & ((C << N) - 1). */
11806 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11807 c = TREE_OPERAND (arg1, 0);
11808
11809 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11810 {
11811 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11812 build_int_cst (TREE_TYPE (arg1), 1));
11813 if (strict_overflow_p)
11814 fold_overflow_warning (("assuming signed overflow does not "
11815 "occur when simplifying "
11816 "X % (power of two)"),
11817 WARN_STRICT_OVERFLOW_MISC);
11818 return fold_build2 (BIT_AND_EXPR, type,
11819 fold_convert (type, arg0),
11820 fold_convert (type, mask));
11821 }
11822 }
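      /* For illustration: for X >= 0, X % 8 = X & 7; e.g. 29 % 8 = 5
	 and 29 & 7 = 0b11101 & 0b00111 = 0b101 = 5.  Likewise
	 X % (2 << 3) = X & 15.  */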
11823
11824 /* X % -C is the same as X % C. */
11825 if (code == TRUNC_MOD_EXPR
11826 && !TYPE_UNSIGNED (type)
11827 && TREE_CODE (arg1) == INTEGER_CST
11828 && !TREE_OVERFLOW (arg1)
11829 && TREE_INT_CST_HIGH (arg1) < 0
11830 && !TYPE_OVERFLOW_TRAPS (type)
11831 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11832 && !sign_bit_p (arg1, arg1))
11833 return fold_build2 (code, type, fold_convert (type, arg0),
11834 fold_convert (type, negate_expr (arg1)));
11835
11836 /* X % -Y is the same as X % Y. */
11837 if (code == TRUNC_MOD_EXPR
11838 && !TYPE_UNSIGNED (type)
11839 && TREE_CODE (arg1) == NEGATE_EXPR
11840 && !TYPE_OVERFLOW_TRAPS (type))
11841 return fold_build2 (code, type, fold_convert (type, arg0),
11842 fold_convert (type, TREE_OPERAND (arg1, 0)));
11843
11844 if (TREE_CODE (arg1) == INTEGER_CST
11845 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11846 &strict_overflow_p)))
11847 {
11848 if (strict_overflow_p)
11849 fold_overflow_warning (("assuming signed overflow does not occur "
11850 "when simplifying modulus"),
11851 WARN_STRICT_OVERFLOW_MISC);
11852 return fold_convert (type, tem);
11853 }
11854
11855 return NULL_TREE;
11856
11857 case LROTATE_EXPR:
11858 case RROTATE_EXPR:
11859 if (integer_all_onesp (arg0))
11860 return omit_one_operand (type, arg0, arg1);
11861 goto shift;
11862
11863 case RSHIFT_EXPR:
11864 /* Optimize -1 >> x for arithmetic right shifts. */
11865 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11866 && tree_expr_nonnegative_p (arg1))
11867 return omit_one_operand (type, arg0, arg1);
11868 /* ... fall through ... */
11869
11870 case LSHIFT_EXPR:
11871 shift:
11872 if (integer_zerop (arg1))
11873 return non_lvalue (fold_convert (type, arg0));
11874 if (integer_zerop (arg0))
11875 return omit_one_operand (type, arg0, arg1);
11876
11877       /* Since a negative shift count is not well-defined,
11878 don't try to compute it in the compiler. */
11879 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11880 return NULL_TREE;
11881
11882 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11883 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11884 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11885 && host_integerp (TREE_OPERAND (arg0, 1), false)
11886 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11887 {
11888 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11889 + TREE_INT_CST_LOW (arg1));
11890
11891 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11892 being well defined. */
11893 if (low >= TYPE_PRECISION (type))
11894 {
11895 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11896 low = low % TYPE_PRECISION (type);
11897 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11898 return omit_one_operand (type, build_int_cst (type, 0),
11899 TREE_OPERAND (arg0, 0));
11900 else
11901 low = TYPE_PRECISION (type) - 1;
11902 }
11903
11904 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11905 build_int_cst (type, low));
11906 }
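      /* For illustration: (x >> 2) >> 3 folds to x >> 5, and for a
	 32-bit x, (x << 20) << 15 folds to 0 since the combined count
	 reaches the precision.  */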
11907
11908 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11909 into x & ((unsigned)-1 >> c) for unsigned types. */
11910 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11911 || (TYPE_UNSIGNED (type)
11912 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11913 && host_integerp (arg1, false)
11914 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11915 && host_integerp (TREE_OPERAND (arg0, 1), false)
11916 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11917 {
11918 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11919 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11920 tree lshift;
11921 tree arg00;
11922
11923 if (low0 == low1)
11924 {
11925 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11926
11927 lshift = build_int_cst (type, -1);
11928 lshift = int_const_binop (code, lshift, arg1, 0);
11929
11930 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11931 }
11932 }
11933
11934 /* Rewrite an LROTATE_EXPR by a constant into an
11935 RROTATE_EXPR by a new constant. */
11936 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11937 {
11938 tree tem = build_int_cst (TREE_TYPE (arg1),
11939 TYPE_PRECISION (type));
11940 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11941 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11942 }
11943
11944 /* If we have a rotate of a bit operation with the rotate count and
11945 the second operand of the bit operation both constant,
11946 permute the two operations. */
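/* Illustrative example, assuming a 32-bit type:
   (x & 0xff00ff00) rrotate 8 becomes (x rrotate 8) & 0x00ff00ff;
   rotating both operands of the bitwise operation commutes with
   it, and the rotated mask folds to a constant.  */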
11947 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11948 && (TREE_CODE (arg0) == BIT_AND_EXPR
11949 || TREE_CODE (arg0) == BIT_IOR_EXPR
11950 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11951 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11952 return fold_build2 (TREE_CODE (arg0), type,
11953 fold_build2 (code, type,
11954 TREE_OPERAND (arg0, 0), arg1),
11955 fold_build2 (code, type,
11956 TREE_OPERAND (arg0, 1), arg1));
11957
11958 /* Two consecutive rotates adding up to the precision of the
11959 type can be ignored. */
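/* Illustrative example, assuming a 32-bit type:
   (x rrotate 10) rrotate 22 folds back to plain x,
   since 10 + 22 == 32, the precision of the type.  */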
11960 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11961 && TREE_CODE (arg0) == RROTATE_EXPR
11962 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11963 && TREE_INT_CST_HIGH (arg1) == 0
11964 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11965 && ((TREE_INT_CST_LOW (arg1)
11966 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11967 == (unsigned int) TYPE_PRECISION (type)))
11968 return TREE_OPERAND (arg0, 0);
11969
11970 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11971 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11972 if the latter can be further optimized. */
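/* Illustrative example: (x & 0x10) >> 4 can become (x >> 4) & 0x1.
   The rewrite is only performed when the resulting BIT_AND_EXPR
   actually folds, as checked below via fold_binary.  */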
11973 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11974 && TREE_CODE (arg0) == BIT_AND_EXPR
11975 && TREE_CODE (arg1) == INTEGER_CST
11976 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11977 {
11978 tree mask = fold_build2 (code, type,
11979 fold_convert (type, TREE_OPERAND (arg0, 1)),
11980 arg1);
11981 tree shift = fold_build2 (code, type,
11982 fold_convert (type, TREE_OPERAND (arg0, 0)),
11983 arg1);
11984 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11985 if (tem)
11986 return tem;
11987 }
11988
11989 return NULL_TREE;
11990
11991 case MIN_EXPR:
11992 if (operand_equal_p (arg0, arg1, 0))
11993 return omit_one_operand (type, arg0, arg1);
11994 if (INTEGRAL_TYPE_P (type)
11995 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11996 return omit_one_operand (type, arg1, arg0);
11997 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11998 if (tem)
11999 return tem;
12000 goto associate;
12001
12002 case MAX_EXPR:
12003 if (operand_equal_p (arg0, arg1, 0))
12004 return omit_one_operand (type, arg0, arg1);
12005 if (INTEGRAL_TYPE_P (type)
12006 && TYPE_MAX_VALUE (type)
12007 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12008 return omit_one_operand (type, arg1, arg0);
12009 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
12010 if (tem)
12011 return tem;
12012 goto associate;
12013
12014 case TRUTH_ANDIF_EXPR:
12015 /* Note that the operands of this must be ints
12016 and their values must be 0 or 1.
12017 ("true" is a fixed value perhaps depending on the language.) */
12018 /* If first arg is constant zero, return it. */
12019 if (integer_zerop (arg0))
12020 return fold_convert (type, arg0);
12021 case TRUTH_AND_EXPR:
12022 /* If either arg is constant true, drop it. */
12023 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12024 return non_lvalue (fold_convert (type, arg1));
12025 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12026 /* Preserve sequence points. */
12027 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12028 return non_lvalue (fold_convert (type, arg0));
12029 /* If second arg is constant zero, result is zero, but first arg
12030 must be evaluated. */
12031 if (integer_zerop (arg1))
12032 return omit_one_operand (type, arg1, arg0);
12033 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12034 case will be handled here. */
12035 if (integer_zerop (arg0))
12036 return omit_one_operand (type, arg0, arg1);
12037
12038 /* !X && X is always false. */
12039 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12040 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12041 return omit_one_operand (type, integer_zero_node, arg1);
12042 /* X && !X is always false. */
12043 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12044 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12045 return omit_one_operand (type, integer_zero_node, arg0);
12046
12047 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12048 means A >= Y && A != MAX, but in this case we know that
12049 A < X <= MAX. */
12050
12051 if (!TREE_SIDE_EFFECTS (arg0)
12052 && !TREE_SIDE_EFFECTS (arg1))
12053 {
12054 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12055 if (tem && !operand_equal_p (tem, arg0, 0))
12056 return fold_build2 (code, type, tem, arg1);
12057
12058 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12059 if (tem && !operand_equal_p (tem, arg1, 0))
12060 return fold_build2 (code, type, arg0, tem);
12061 }
12062
12063 truth_andor:
12064 /* We only do these simplifications if we are optimizing. */
12065 if (!optimize)
12066 return NULL_TREE;
12067
12068 /* Check for things like (A || B) && (A || C). We can convert this
12069 to A || (B && C). Note that either operator can be any of the four
12070 truth and/or operations and the transformation will still be
12071 valid. Also note that we only care about order for the
12072 ANDIF and ORIF operators. If B contains side effects, this
12073 might change the truth-value of A. */
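/* Illustrative example: (a || b) && (a || c) becomes
   a || (b && c), and likewise (a && b) || (a && c) becomes
   a && (b || c).  The side-effect checks here and below keep the
   rewrite from duplicating or reordering an evaluation whose
   effects could be observed.  */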
12074 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12075 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12076 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12077 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12078 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12079 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12080 {
12081 tree a00 = TREE_OPERAND (arg0, 0);
12082 tree a01 = TREE_OPERAND (arg0, 1);
12083 tree a10 = TREE_OPERAND (arg1, 0);
12084 tree a11 = TREE_OPERAND (arg1, 1);
12085 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12086 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12087 && (code == TRUTH_AND_EXPR
12088 || code == TRUTH_OR_EXPR));
12089
12090 if (operand_equal_p (a00, a10, 0))
12091 return fold_build2 (TREE_CODE (arg0), type, a00,
12092 fold_build2 (code, type, a01, a11));
12093 else if (commutative && operand_equal_p (a00, a11, 0))
12094 return fold_build2 (TREE_CODE (arg0), type, a00,
12095 fold_build2 (code, type, a01, a10));
12096 else if (commutative && operand_equal_p (a01, a10, 0))
12097 return fold_build2 (TREE_CODE (arg0), type, a01,
12098 fold_build2 (code, type, a00, a11));
12099
12100 /* This case is tricky because we must either have commutative
12101 operators or else A10 must not have side-effects. */
12102
12103 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12104 && operand_equal_p (a01, a11, 0))
12105 return fold_build2 (TREE_CODE (arg0), type,
12106 fold_build2 (code, type, a00, a10),
12107 a01);
12108 }
12109
12110 /* See if we can build a range comparison. */
12111 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12112 return tem;
12113
12114 /* Check for the possibility of merging component references. If our
12115 lhs is another similar operation, try to merge its rhs with our
12116 rhs. Then try to merge our lhs and rhs. */
12117 if (TREE_CODE (arg0) == code
12118 && 0 != (tem = fold_truthop (code, type,
12119 TREE_OPERAND (arg0, 1), arg1)))
12120 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12121
12122 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12123 return tem;
12124
12125 return NULL_TREE;
12126
12127 case TRUTH_ORIF_EXPR:
12128 /* Note that the operands of this must be ints
12129 and their values must be 0 or 1.
12130 ("true" is a fixed value perhaps depending on the language.) */
12131 /* If first arg is constant true, return it. */
12132 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12133 return fold_convert (type, arg0);
12134 case TRUTH_OR_EXPR:
12135 /* If either arg is constant zero, drop it. */
12136 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12137 return non_lvalue (fold_convert (type, arg1));
12138 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12139 /* Preserve sequence points. */
12140 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12141 return non_lvalue (fold_convert (type, arg0));
12142 /* If second arg is constant true, result is true, but we must
12143 evaluate first arg. */
12144 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12145 return omit_one_operand (type, arg1, arg0);
12146 /* Likewise for first arg, but note this only occurs here for
12147 TRUTH_OR_EXPR. */
12148 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12149 return omit_one_operand (type, arg0, arg1);
12150
12151 /* !X || X is always true. */
12152 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12153 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12154 return omit_one_operand (type, integer_one_node, arg1);
12155 /* X || !X is always true. */
12156 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12157 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12158 return omit_one_operand (type, integer_one_node, arg0);
12159
12160 goto truth_andor;
12161
12162 case TRUTH_XOR_EXPR:
12163 /* If the second arg is constant zero, drop it. */
12164 if (integer_zerop (arg1))
12165 return non_lvalue (fold_convert (type, arg0));
12166 /* If the second arg is constant true, this is a logical inversion. */
12167 if (integer_onep (arg1))
12168 {
12169 /* Only call invert_truthvalue if operand is a truth value. */
12170 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12171 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12172 else
12173 tem = invert_truthvalue (arg0);
12174 return non_lvalue (fold_convert (type, tem));
12175 }
12176 /* Identical arguments cancel to zero. */
12177 if (operand_equal_p (arg0, arg1, 0))
12178 return omit_one_operand (type, integer_zero_node, arg0);
12179
12180 /* !X ^ X is always true. */
12181 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12182 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12183 return omit_one_operand (type, integer_one_node, arg1);
12184
12185 /* X ^ !X is always true. */
12186 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12187 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12188 return omit_one_operand (type, integer_one_node, arg0);
12189
12190 return NULL_TREE;
12191
12192 case EQ_EXPR:
12193 case NE_EXPR:
12194 tem = fold_comparison (code, type, op0, op1);
12195 if (tem != NULL_TREE)
12196 return tem;
12197
12198 /* bool_var != 0 becomes bool_var. */
12199 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12200 && code == NE_EXPR)
12201 return non_lvalue (fold_convert (type, arg0));
12202
12203 /* bool_var == 1 becomes bool_var. */
12204 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12205 && code == EQ_EXPR)
12206 return non_lvalue (fold_convert (type, arg0));
12207
12208 /* bool_var != 1 becomes !bool_var. */
12209 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12210 && code == NE_EXPR)
12211 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12212
12213 /* bool_var == 0 becomes !bool_var. */
12214 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12215 && code == EQ_EXPR)
12216 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12217
12218 /* If this is an equality comparison of the address of two non-weak,
12219 unaliased symbols neither of which is extern (since we do not
12220 have access to attributes for externs), then we know the result. */
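/* Illustrative example, with hypothetical declarations
   static int a, b;  here &a == &b folds to 0 and &a != &b folds
   to 1, since distinct non-weak, unaliased statics cannot share
   an address.  */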
12221 if (TREE_CODE (arg0) == ADDR_EXPR
12222 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12223 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12224 && ! lookup_attribute ("alias",
12225 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12226 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12227 && TREE_CODE (arg1) == ADDR_EXPR
12228 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12229 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12230 && ! lookup_attribute ("alias",
12231 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12232 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12233 {
12234 /* We know that we're looking at the address of two
12235 non-weak, unaliased, static _DECL nodes.
12236
12237 It is both wasteful and incorrect to call operand_equal_p
12238 to compare the two ADDR_EXPR nodes. It is wasteful in that
12239 all we need to do is test pointer equality for the arguments
12240 to the two ADDR_EXPR nodes. It is incorrect to use
12241 operand_equal_p as that function is NOT equivalent to a
12242 C equality test. It can in fact return false for two
12243 objects which would test as equal using the C equality
12244 operator. */
12245 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12246 return constant_boolean_node (equal
12247 ? code == EQ_EXPR : code != EQ_EXPR,
12248 type);
12249 }
12250
12251 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12252 a MINUS_EXPR of a constant, we can convert it into a comparison with
12253 a revised constant as long as no overflow occurs. */
12254 if (TREE_CODE (arg1) == INTEGER_CST
12255 && (TREE_CODE (arg0) == PLUS_EXPR
12256 || TREE_CODE (arg0) == MINUS_EXPR)
12257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12258 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12259 ? MINUS_EXPR : PLUS_EXPR,
12260 fold_convert (TREE_TYPE (arg0), arg1),
12261 TREE_OPERAND (arg0, 1), 0))
12262 && !TREE_OVERFLOW (tem))
12263 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12264
12265 /* Similarly for a NEGATE_EXPR. */
12266 if (TREE_CODE (arg0) == NEGATE_EXPR
12267 && TREE_CODE (arg1) == INTEGER_CST
12268 && 0 != (tem = negate_expr (arg1))
12269 && TREE_CODE (tem) == INTEGER_CST
12270 && !TREE_OVERFLOW (tem))
12271 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12272
12273 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
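/* Illustrative example: (x ^ 5) == 12 becomes x == 9,
   since 5 ^ 12 == 9.  */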
12274 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12275 && TREE_CODE (arg1) == INTEGER_CST
12276 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12277 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12278 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12279 fold_convert (TREE_TYPE (arg0), arg1),
12280 TREE_OPERAND (arg0, 1)));
12281
12282 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12283 if ((TREE_CODE (arg0) == PLUS_EXPR
12284 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12285 || TREE_CODE (arg0) == MINUS_EXPR)
12286 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12287 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12288 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12289 {
12290 tree val = TREE_OPERAND (arg0, 1);
12291 return omit_two_operands (type,
12292 fold_build2 (code, type,
12293 val,
12294 build_int_cst (TREE_TYPE (val),
12295 0)),
12296 TREE_OPERAND (arg0, 0), arg1);
12297 }
12298
12299 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
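/* Illustrative note: in modulo-2**N arithmetic, C - x == x would
   require 2*x == C, which has no solution when C is odd because
   2*x is always even.  So, e.g., 7 - x == x folds to false and
   7 - x != x folds to true.  */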
12300 if (TREE_CODE (arg0) == MINUS_EXPR
12301 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12302 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12303 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12304 {
12305 return omit_two_operands (type,
12306 code == NE_EXPR
12307 ? boolean_true_node : boolean_false_node,
12308 TREE_OPERAND (arg0, 1), arg1);
12309 }
12310
12311 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12312 for !=. Don't do this for ordered comparisons due to overflow. */
12313 if (TREE_CODE (arg0) == MINUS_EXPR
12314 && integer_zerop (arg1))
12315 return fold_build2 (code, type,
12316 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12317
12318 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12319 if (TREE_CODE (arg0) == ABS_EXPR
12320 && (integer_zerop (arg1) || real_zerop (arg1)))
12321 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12322
12323 /* If this is an EQ or NE comparison with zero and ARG0 is
12324 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12325 two operations, but the latter can be done in one less insn
12326 on machines that have only two-operand insns or on which a
12327 constant cannot be the first operand. */
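/* Illustrative example, with hypothetical operands flags and n:
   ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0,
   shifting the variable operand instead of the constant 1.  */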
12328 if (TREE_CODE (arg0) == BIT_AND_EXPR
12329 && integer_zerop (arg1))
12330 {
12331 tree arg00 = TREE_OPERAND (arg0, 0);
12332 tree arg01 = TREE_OPERAND (arg0, 1);
12333 if (TREE_CODE (arg00) == LSHIFT_EXPR
12334 && integer_onep (TREE_OPERAND (arg00, 0)))
12335 {
12336 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12337 arg01, TREE_OPERAND (arg00, 1));
12338 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12339 build_int_cst (TREE_TYPE (arg0), 1));
12340 return fold_build2 (code, type,
12341 fold_convert (TREE_TYPE (arg1), tem), arg1);
12342 }
12343 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12344 && integer_onep (TREE_OPERAND (arg01, 0)))
12345 {
12346 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12347 arg00, TREE_OPERAND (arg01, 1));
12348 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12349 build_int_cst (TREE_TYPE (arg0), 1));
12350 return fold_build2 (code, type,
12351 fold_convert (TREE_TYPE (arg1), tem), arg1);
12352 }
12353 }
12354
12355 /* If this is an NE or EQ comparison of zero against the result of a
12356 signed MOD operation whose second operand is a power of 2, make
12357 the MOD operation unsigned since it is simpler and equivalent. */
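/* Illustrative example, assuming 32-bit int x: x % 8 == 0 becomes
   (unsigned) x % 8U == 0.  Compared against zero the two agree:
   with truncating division, x % 8 is zero exactly when the low
   three bits of x are zero, whatever the sign of x.  */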
12358 if (integer_zerop (arg1)
12359 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12360 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12361 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12362 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12363 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12364 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12365 {
12366 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12367 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12368 fold_convert (newtype,
12369 TREE_OPERAND (arg0, 0)),
12370 fold_convert (newtype,
12371 TREE_OPERAND (arg0, 1)));
12372
12373 return fold_build2 (code, type, newmod,
12374 fold_convert (newtype, arg1));
12375 }
12376
12377 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12378 C1 is a valid shift constant, and C2 is a power of two, i.e.
12379 a single bit. */
12380 if (TREE_CODE (arg0) == BIT_AND_EXPR
12381 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12382 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12383 == INTEGER_CST
12384 && integer_pow2p (TREE_OPERAND (arg0, 1))
12385 && integer_zerop (arg1))
12386 {
12387 tree itype = TREE_TYPE (arg0);
12388 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12389 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12390
12391 /* Check for a valid shift count. */
12392 if (TREE_INT_CST_HIGH (arg001) == 0
12393 && TREE_INT_CST_LOW (arg001) < prec)
12394 {
12395 tree arg01 = TREE_OPERAND (arg0, 1);
12396 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12397 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12398 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12399 can be rewritten as (X & (C2 << C1)) != 0. */
12400 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12401 {
12402 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12403 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12404 return fold_build2 (code, type, tem, arg1);
12405 }
12406 /* Otherwise, for signed (arithmetic) shifts,
12407 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12408 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12409 else if (!TYPE_UNSIGNED (itype))
12410 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12411 arg000, build_int_cst (itype, 0));
12412 /* Otherwise, for unsigned (logical) shifts,
12413 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12414 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12415 else
12416 return omit_one_operand (type,
12417 code == EQ_EXPR ? integer_one_node
12418 : integer_zero_node,
12419 arg000);
12420 }
12421 }
12422
12423 /* If this is an NE comparison of zero with an AND of one, remove the
12424 comparison since the AND will give the correct value. */
12425 if (code == NE_EXPR
12426 && integer_zerop (arg1)
12427 && TREE_CODE (arg0) == BIT_AND_EXPR
12428 && integer_onep (TREE_OPERAND (arg0, 1)))
12429 return fold_convert (type, arg0);
12430
12431 /* If we have (A & C) == C where C is a power of 2, convert this into
12432 (A & C) != 0. Similarly for NE_EXPR. */
12433 if (TREE_CODE (arg0) == BIT_AND_EXPR
12434 && integer_pow2p (TREE_OPERAND (arg0, 1))
12435 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12436 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12437 arg0, fold_convert (TREE_TYPE (arg0),
12438 integer_zero_node));
12439
12440 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12441 bit, then fold the expression into A < 0 or A >= 0. */
12442 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12443 if (tem)
12444 return tem;
12445
12446 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12447 Similarly for NE_EXPR. */
12448 if (TREE_CODE (arg0) == BIT_AND_EXPR
12449 && TREE_CODE (arg1) == INTEGER_CST
12450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12451 {
12452 tree notc = fold_build1 (BIT_NOT_EXPR,
12453 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12454 TREE_OPERAND (arg0, 1));
12455 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12456 arg1, notc);
12457 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12458 if (integer_nonzerop (dandnotc))
12459 return omit_one_operand (type, rslt, arg0);
12460 }
12461
12462 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12463 Similarly for NE_EXPR. */
12464 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12465 && TREE_CODE (arg1) == INTEGER_CST
12466 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12467 {
12468 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12469 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12470 TREE_OPERAND (arg0, 1), notd);
12471 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12472 if (integer_nonzerop (candnotd))
12473 return omit_one_operand (type, rslt, arg0);
12474 }
12475
12476 /* If this is a comparison of a field, we may be able to simplify it. */
12477 if ((TREE_CODE (arg0) == COMPONENT_REF
12478 || TREE_CODE (arg0) == BIT_FIELD_REF)
12479 /* Handle the constant case even without -O
12480 to make sure the warnings are given. */
12481 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12482 {
12483 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12484 if (t1)
12485 return t1;
12486 }
12487
12488 /* Optimize comparisons of strlen vs zero to a compare of the
12489 first character of the string vs zero. To wit,
12490 strlen(ptr) == 0 => *ptr == 0
12491 strlen(ptr) != 0 => *ptr != 0
12492 Other cases should reduce to one of these two (or a constant)
12493 due to the return value of strlen being unsigned. */
12494 if (TREE_CODE (arg0) == CALL_EXPR
12495 && integer_zerop (arg1))
12496 {
12497 tree fndecl = get_callee_fndecl (arg0);
12498
12499 if (fndecl
12500 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12501 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12502 && call_expr_nargs (arg0) == 1
12503 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12504 {
12505 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12506 return fold_build2 (code, type, iref,
12507 build_int_cst (TREE_TYPE (iref), 0));
12508 }
12509 }
12510
12511 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12512 of X. Similarly fold (X >> C) == 0 into X >= 0. */
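/* Illustrative example, assuming 32-bit int x: (x >> 31) != 0
   becomes x < 0 and (x >> 31) == 0 becomes x >= 0, since a shift
   by width - 1 leaves only the sign bit.  For unsigned X the code
   below first converts to the corresponding signed type.  */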
12513 if (TREE_CODE (arg0) == RSHIFT_EXPR
12514 && integer_zerop (arg1)
12515 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12516 {
12517 tree arg00 = TREE_OPERAND (arg0, 0);
12518 tree arg01 = TREE_OPERAND (arg0, 1);
12519 tree itype = TREE_TYPE (arg00);
12520 if (TREE_INT_CST_HIGH (arg01) == 0
12521 && TREE_INT_CST_LOW (arg01)
12522 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12523 {
12524 if (TYPE_UNSIGNED (itype))
12525 {
12526 itype = signed_type_for (itype);
12527 arg00 = fold_convert (itype, arg00);
12528 }
12529 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12530 type, arg00, build_int_cst (itype, 0));
12531 }
12532 }
12533
12534 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12535 if (integer_zerop (arg1)
12536 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12537 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12538 TREE_OPERAND (arg0, 1));
12539
12540 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12541 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12542 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12543 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12544 build_int_cst (TREE_TYPE (arg1), 0));
12545 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12546 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12547 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12548 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12549 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12550 build_int_cst (TREE_TYPE (arg1), 0));
12551
12552 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12553 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12554 && TREE_CODE (arg1) == INTEGER_CST
12555 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12556 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12557 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12558 TREE_OPERAND (arg0, 1), arg1));
12559
12560 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12561 (X & C) == 0 when C is a single bit. */
12562 if (TREE_CODE (arg0) == BIT_AND_EXPR
12563 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12564 && integer_zerop (arg1)
12565 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12566 {
12567 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12568 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12569 TREE_OPERAND (arg0, 1));
12570 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12571 type, tem, arg1);
12572 }
12573
12574 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12575 constant C is a power of two, i.e. a single bit. */
12576 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12578 && integer_zerop (arg1)
12579 && integer_pow2p (TREE_OPERAND (arg0, 1))
12580 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12581 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12582 {
12583 tree arg00 = TREE_OPERAND (arg0, 0);
12584 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12585 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12586 }
12587
12588 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12589 when C is a power of two, i.e. a single bit. */
12590 if (TREE_CODE (arg0) == BIT_AND_EXPR
12591 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12592 && integer_zerop (arg1)
12593 && integer_pow2p (TREE_OPERAND (arg0, 1))
12594 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12595 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12596 {
12597 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12598 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12599 arg000, TREE_OPERAND (arg0, 1));
12600 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12601 tem, build_int_cst (TREE_TYPE (tem), 0));
12602 }
12603
12604 if (integer_zerop (arg1)
12605 && tree_expr_nonzero_p (arg0))
12606 {
12607 tree res = constant_boolean_node (code == NE_EXPR, type);
12608 return omit_one_operand (type, res, arg0);
12609 }
12610
12611 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12612 if (TREE_CODE (arg0) == NEGATE_EXPR
12613 && TREE_CODE (arg1) == NEGATE_EXPR)
12614 return fold_build2 (code, type,
12615 TREE_OPERAND (arg0, 0),
12616 TREE_OPERAND (arg1, 0));
12617
12618 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12619 if (TREE_CODE (arg0) == BIT_AND_EXPR
12620 && TREE_CODE (arg1) == BIT_AND_EXPR)
12621 {
12622 tree arg00 = TREE_OPERAND (arg0, 0);
12623 tree arg01 = TREE_OPERAND (arg0, 1);
12624 tree arg10 = TREE_OPERAND (arg1, 0);
12625 tree arg11 = TREE_OPERAND (arg1, 1);
12626 tree itype = TREE_TYPE (arg0);
12627
12628 if (operand_equal_p (arg01, arg11, 0))
12629 return fold_build2 (code, type,
12630 fold_build2 (BIT_AND_EXPR, itype,
12631 fold_build2 (BIT_XOR_EXPR, itype,
12632 arg00, arg10),
12633 arg01),
12634 build_int_cst (itype, 0));
12635
12636 if (operand_equal_p (arg01, arg10, 0))
12637 return fold_build2 (code, type,
12638 fold_build2 (BIT_AND_EXPR, itype,
12639 fold_build2 (BIT_XOR_EXPR, itype,
12640 arg00, arg11),
12641 arg01),
12642 build_int_cst (itype, 0));
12643
12644 if (operand_equal_p (arg00, arg11, 0))
12645 return fold_build2 (code, type,
12646 fold_build2 (BIT_AND_EXPR, itype,
12647 fold_build2 (BIT_XOR_EXPR, itype,
12648 arg01, arg10),
12649 arg00),
12650 build_int_cst (itype, 0));
12651
12652 if (operand_equal_p (arg00, arg10, 0))
12653 return fold_build2 (code, type,
12654 fold_build2 (BIT_AND_EXPR, itype,
12655 fold_build2 (BIT_XOR_EXPR, itype,
12656 arg01, arg11),
12657 arg00),
12658 build_int_cst (itype, 0));
12659 }
12660
12661 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12662 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12663 {
12664 tree arg00 = TREE_OPERAND (arg0, 0);
12665 tree arg01 = TREE_OPERAND (arg0, 1);
12666 tree arg10 = TREE_OPERAND (arg1, 0);
12667 tree arg11 = TREE_OPERAND (arg1, 1);
12668 tree itype = TREE_TYPE (arg0);
12669
12670 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12671 operand_equal_p guarantees no side-effects so we don't need
12672 to use omit_one_operand on Z. */
12673 if (operand_equal_p (arg01, arg11, 0))
12674 return fold_build2 (code, type, arg00, arg10);
12675 if (operand_equal_p (arg01, arg10, 0))
12676 return fold_build2 (code, type, arg00, arg11);
12677 if (operand_equal_p (arg00, arg11, 0))
12678 return fold_build2 (code, type, arg01, arg10);
12679 if (operand_equal_p (arg00, arg10, 0))
12680 return fold_build2 (code, type, arg01, arg11);
12681
12682 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12683 if (TREE_CODE (arg01) == INTEGER_CST
12684 && TREE_CODE (arg11) == INTEGER_CST)
12685 return fold_build2 (code, type,
12686 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12687 fold_build2 (BIT_XOR_EXPR, itype,
12688 arg01, arg11)),
12689 arg10);
12690 }
12691
12692 /* Attempt to simplify equality/inequality comparisons of complex
12693 values. Only lower the comparison if the result is known or
12694 can be simplified to a single scalar comparison. */
12695 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12696 || TREE_CODE (arg0) == COMPLEX_CST)
12697 && (TREE_CODE (arg1) == COMPLEX_EXPR
12698 || TREE_CODE (arg1) == COMPLEX_CST))
12699 {
12700 tree real0, imag0, real1, imag1;
12701 tree rcond, icond;
12702
12703 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12704 {
12705 real0 = TREE_OPERAND (arg0, 0);
12706 imag0 = TREE_OPERAND (arg0, 1);
12707 }
12708 else
12709 {
12710 real0 = TREE_REALPART (arg0);
12711 imag0 = TREE_IMAGPART (arg0);
12712 }
12713
12714 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12715 {
12716 real1 = TREE_OPERAND (arg1, 0);
12717 imag1 = TREE_OPERAND (arg1, 1);
12718 }
12719 else
12720 {
12721 real1 = TREE_REALPART (arg1);
12722 imag1 = TREE_IMAGPART (arg1);
12723 }
12724
12725 rcond = fold_binary (code, type, real0, real1);
12726 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12727 {
12728 if (integer_zerop (rcond))
12729 {
12730 if (code == EQ_EXPR)
12731 return omit_two_operands (type, boolean_false_node,
12732 imag0, imag1);
12733 return fold_build2 (NE_EXPR, type, imag0, imag1);
12734 }
12735 else
12736 {
12737 if (code == NE_EXPR)
12738 return omit_two_operands (type, boolean_true_node,
12739 imag0, imag1);
12740 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12741 }
12742 }
12743
12744 icond = fold_binary (code, type, imag0, imag1);
12745 if (icond && TREE_CODE (icond) == INTEGER_CST)
12746 {
12747 if (integer_zerop (icond))
12748 {
12749 if (code == EQ_EXPR)
12750 return omit_two_operands (type, boolean_false_node,
12751 real0, real1);
12752 return fold_build2 (NE_EXPR, type, real0, real1);
12753 }
12754 else
12755 {
12756 if (code == NE_EXPR)
12757 return omit_two_operands (type, boolean_true_node,
12758 real0, real1);
12759 return fold_build2 (EQ_EXPR, type, real0, real1);
12760 }
12761 }
12762 }
12763
12764 return NULL_TREE;
12765
12766 case LT_EXPR:
12767 case GT_EXPR:
12768 case LE_EXPR:
12769 case GE_EXPR:
12770 tem = fold_comparison (code, type, op0, op1);
12771 if (tem != NULL_TREE)
12772 return tem;
12773
12774 /* Transform comparisons of the form X +- C CMP X. */
12775 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12776 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12777 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12778 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12779 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12780 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12781 {
12782 tree arg01 = TREE_OPERAND (arg0, 1);
12783 enum tree_code code0 = TREE_CODE (arg0);
12784 int is_positive;
12785
12786 if (TREE_CODE (arg01) == REAL_CST)
12787 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12788 else
12789 is_positive = tree_int_cst_sgn (arg01);
12790
12791 /* (X - c) > X becomes false. */
12792 if (code == GT_EXPR
12793 && ((code0 == MINUS_EXPR && is_positive >= 0)
12794 || (code0 == PLUS_EXPR && is_positive <= 0)))
12795 {
12796 if (TREE_CODE (arg01) == INTEGER_CST
12797 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12798 fold_overflow_warning (("assuming signed overflow does not "
12799 "occur when assuming that (X - c) > X "
12800 "is always false"),
12801 WARN_STRICT_OVERFLOW_ALL);
12802 return constant_boolean_node (0, type);
12803 }
12804
12805 /* Likewise (X + c) < X becomes false. */
12806 if (code == LT_EXPR
12807 && ((code0 == PLUS_EXPR && is_positive >= 0)
12808 || (code0 == MINUS_EXPR && is_positive <= 0)))
12809 {
12810 if (TREE_CODE (arg01) == INTEGER_CST
12811 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12812 fold_overflow_warning (("assuming signed overflow does not "
12813 "occur when assuming that "
12814 "(X + c) < X is always false"),
12815 WARN_STRICT_OVERFLOW_ALL);
12816 return constant_boolean_node (0, type);
12817 }
12818
12819 /* Convert (X - c) <= X to true. */
12820 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12821 && code == LE_EXPR
12822 && ((code0 == MINUS_EXPR && is_positive >= 0)
12823 || (code0 == PLUS_EXPR && is_positive <= 0)))
12824 {
12825 if (TREE_CODE (arg01) == INTEGER_CST
12826 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12827 fold_overflow_warning (("assuming signed overflow does not "
12828 "occur when assuming that "
12829 "(X - c) <= X is always true"),
12830 WARN_STRICT_OVERFLOW_ALL);
12831 return constant_boolean_node (1, type);
12832 }
12833
12834 /* Convert (X + c) >= X to true. */
12835 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12836 && code == GE_EXPR
12837 && ((code0 == PLUS_EXPR && is_positive >= 0)
12838 || (code0 == MINUS_EXPR && is_positive <= 0)))
12839 {
12840 if (TREE_CODE (arg01) == INTEGER_CST
12841 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12842 fold_overflow_warning (("assuming signed overflow does not "
12843 "occur when assuming that "
12844 "(X + c) >= X is always true"),
12845 WARN_STRICT_OVERFLOW_ALL);
12846 return constant_boolean_node (1, type);
12847 }
12848
12849 if (TREE_CODE (arg01) == INTEGER_CST)
12850 {
12851 /* Convert X + c > X and X - c < X to true for integers. */
12852 if (code == GT_EXPR
12853 && ((code0 == PLUS_EXPR && is_positive > 0)
12854 || (code0 == MINUS_EXPR && is_positive < 0)))
12855 {
12856 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12857 fold_overflow_warning (("assuming signed overflow does "
12858 "not occur when assuming that "
12859 "(X + c) > X is always true"),
12860 WARN_STRICT_OVERFLOW_ALL);
12861 return constant_boolean_node (1, type);
12862 }
12863
12864 if (code == LT_EXPR
12865 && ((code0 == MINUS_EXPR && is_positive > 0)
12866 || (code0 == PLUS_EXPR && is_positive < 0)))
12867 {
12868 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12869 fold_overflow_warning (("assuming signed overflow does "
12870 "not occur when assuming that "
12871 "(X - c) < X is always true"),
12872 WARN_STRICT_OVERFLOW_ALL);
12873 return constant_boolean_node (1, type);
12874 }
12875
12876 /* Convert X + c <= X and X - c >= X to false for integers. */
12877 if (code == LE_EXPR
12878 && ((code0 == PLUS_EXPR && is_positive > 0)
12879 || (code0 == MINUS_EXPR && is_positive < 0)))
12880 {
12881 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12882 fold_overflow_warning (("assuming signed overflow does "
12883 "not occur when assuming that "
12884 "(X + c) <= X is always false"),
12885 WARN_STRICT_OVERFLOW_ALL);
12886 return constant_boolean_node (0, type);
12887 }
12888
12889 if (code == GE_EXPR
12890 && ((code0 == MINUS_EXPR && is_positive > 0)
12891 || (code0 == PLUS_EXPR && is_positive < 0)))
12892 {
12893 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12894 fold_overflow_warning (("assuming signed overflow does "
12895 "not occur when assuming that "
12896 "(X - c) >= X is always false"),
12897 WARN_STRICT_OVERFLOW_ALL);
12898 return constant_boolean_node (0, type);
12899 }
12900 }
12901 }
12902
12903 /* Comparisons with the highest or lowest possible integer of
12904 the specified precision will have known values. */
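/* Illustrative examples, assuming 32-bit types: for unsigned x,
   x > 0xffffffffu folds to 0 and x <= 0xffffffffu folds to 1;
   for signed x, x > INT_MAX and x < INT_MIN fold to 0, while
   x >= INT_MIN folds to 1.  */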
12905 {
12906 tree arg1_type = TREE_TYPE (arg1);
12907 unsigned int width = TYPE_PRECISION (arg1_type);
12908
12909 if (TREE_CODE (arg1) == INTEGER_CST
12910 && width <= 2 * HOST_BITS_PER_WIDE_INT
12911 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12912 {
12913 HOST_WIDE_INT signed_max_hi;
12914 unsigned HOST_WIDE_INT signed_max_lo;
12915 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12916
12917 if (width <= HOST_BITS_PER_WIDE_INT)
12918 {
12919 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12920 - 1;
12921 signed_max_hi = 0;
12922 max_hi = 0;
12923
12924 if (TYPE_UNSIGNED (arg1_type))
12925 {
12926 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12927 min_lo = 0;
12928 min_hi = 0;
12929 }
12930 else
12931 {
12932 max_lo = signed_max_lo;
12933 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12934 min_hi = -1;
12935 }
12936 }
12937 else
12938 {
12939 width -= HOST_BITS_PER_WIDE_INT;
12940 signed_max_lo = -1;
12941 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12942 - 1;
12943 max_lo = -1;
12944 min_lo = 0;
12945
12946 if (TYPE_UNSIGNED (arg1_type))
12947 {
12948 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12949 min_hi = 0;
12950 }
12951 else
12952 {
12953 max_hi = signed_max_hi;
12954 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12955 }
12956 }
12957
12958 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12959 && TREE_INT_CST_LOW (arg1) == max_lo)
12960 switch (code)
12961 {
12962 case GT_EXPR:
12963 return omit_one_operand (type, integer_zero_node, arg0);
12964
12965 case GE_EXPR:
12966 return fold_build2 (EQ_EXPR, type, op0, op1);
12967
12968 case LE_EXPR:
12969 return omit_one_operand (type, integer_one_node, arg0);
12970
12971 case LT_EXPR:
12972 return fold_build2 (NE_EXPR, type, op0, op1);
12973
12974 /* The GE_EXPR and LT_EXPR cases above are not normally
12975 reached because of previous transformations. */
12976
12977 default:
12978 break;
12979 }
12980 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12981 == max_hi
12982 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12983 switch (code)
12984 {
12985 case GT_EXPR:
12986 arg1 = const_binop (PLUS_EXPR, arg1,
12987 build_int_cst (TREE_TYPE (arg1), 1), 0);
12988 return fold_build2 (EQ_EXPR, type,
12989 fold_convert (TREE_TYPE (arg1), arg0),
12990 arg1);
12991 case LE_EXPR:
12992 arg1 = const_binop (PLUS_EXPR, arg1,
12993 build_int_cst (TREE_TYPE (arg1), 1), 0);
12994 return fold_build2 (NE_EXPR, type,
12995 fold_convert (TREE_TYPE (arg1), arg0),
12996 arg1);
12997 default:
12998 break;
12999 }
13000 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13001 == min_hi
13002 && TREE_INT_CST_LOW (arg1) == min_lo)
13003 switch (code)
13004 {
13005 case LT_EXPR:
13006 return omit_one_operand (type, integer_zero_node, arg0);
13007
13008 case LE_EXPR:
13009 return fold_build2 (EQ_EXPR, type, op0, op1);
13010
13011 case GE_EXPR:
13012 return omit_one_operand (type, integer_one_node, arg0);
13013
13014 case GT_EXPR:
13015 return fold_build2 (NE_EXPR, type, op0, op1);
13016
13017 default:
13018 break;
13019 }
13020 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13021 == min_hi
13022 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13023 switch (code)
13024 {
13025 case GE_EXPR:
13026 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13027 return fold_build2 (NE_EXPR, type,
13028 fold_convert (TREE_TYPE (arg1), arg0),
13029 arg1);
13030 case LT_EXPR:
13031 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13032 return fold_build2 (EQ_EXPR, type,
13033 fold_convert (TREE_TYPE (arg1), arg0),
13034 arg1);
13035 default:
13036 break;
13037 }
13038
13039 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13040 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13041 && TYPE_UNSIGNED (arg1_type)
13042 /* We will flip the signedness of the comparison operator
13043 associated with the mode of arg1, so the sign bit is
13044 specified by this mode. Check that arg1 is the signed
13045 max associated with this sign bit. */
13046 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13047 /* signed_type does not work on pointer types. */
13048 && INTEGRAL_TYPE_P (arg1_type))
13049 {
13050 /* The following case also applies to X < signed_max+1
13051 and X >= signed_max+1 because of previous transformations. */
13052 if (code == LE_EXPR || code == GT_EXPR)
13053 {
13054 tree st;
13055 st = signed_type_for (TREE_TYPE (arg1));
13056 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13057 type, fold_convert (st, arg0),
13058 build_int_cst (st, 0));
13059 }
13060 }
13061 }
13062 }
13063
13064 /* If we are comparing an ABS_EXPR with a constant, we can
13065 convert all the cases into explicit comparisons, but they may
13066 well not be faster than doing the ABS and one comparison.
13067 But ABS (X) <= C is a range comparison, which becomes a subtraction
13068 and a comparison, and is probably faster. */
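/* Illustrative example: abs (x) <= 5 becomes
   x >= -5 && x <= 5, built below as a TRUTH_ANDIF_EXPR of the
   two comparisons.  */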
13069 if (code == LE_EXPR
13070 && TREE_CODE (arg1) == INTEGER_CST
13071 && TREE_CODE (arg0) == ABS_EXPR
13072 && ! TREE_SIDE_EFFECTS (arg0)
13073 && (0 != (tem = negate_expr (arg1)))
13074 && TREE_CODE (tem) == INTEGER_CST
13075 && !TREE_OVERFLOW (tem))
13076 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13077 build2 (GE_EXPR, type,
13078 TREE_OPERAND (arg0, 0), tem),
13079 build2 (LE_EXPR, type,
13080 TREE_OPERAND (arg0, 0), arg1));
13081
13082 /* Convert ABS_EXPR<x> >= 0 to true. */
13083 strict_overflow_p = false;
13084 if (code == GE_EXPR
13085 && (integer_zerop (arg1)
13086 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13087 && real_zerop (arg1)))
13088 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13089 {
13090 if (strict_overflow_p)
13091 fold_overflow_warning (("assuming signed overflow does not occur "
13092 "when simplifying comparison of "
13093 "absolute value and zero"),
13094 WARN_STRICT_OVERFLOW_CONDITIONAL);
13095 return omit_one_operand (type, integer_one_node, arg0);
13096 }
13097
13098 /* Convert ABS_EXPR<x> < 0 to false. */
13099 strict_overflow_p = false;
13100 if (code == LT_EXPR
13101 && (integer_zerop (arg1) || real_zerop (arg1))
13102 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13103 {
13104 if (strict_overflow_p)
13105 fold_overflow_warning (("assuming signed overflow does not occur "
13106 "when simplifying comparison of "
13107 "absolute value and zero"),
13108 WARN_STRICT_OVERFLOW_CONDITIONAL);
13109 return omit_one_operand (type, integer_zero_node, arg0);
13110 }
13111
13112 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13113 and similarly for >= into !=. */
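/* Illustrative example, for unsigned x: x < (1 << y) becomes
   (x >> y) == 0 and x >= (1 << y) becomes (x >> y) != 0;
   x is below 2**y exactly when discarding its y low bits
   leaves nothing.  */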
13114 if ((code == LT_EXPR || code == GE_EXPR)
13115 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13116 && TREE_CODE (arg1) == LSHIFT_EXPR
13117 && integer_onep (TREE_OPERAND (arg1, 0)))
13118 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13119 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13120 TREE_OPERAND (arg1, 1)),
13121 build_int_cst (TREE_TYPE (arg0), 0));
13122
13123 if ((code == LT_EXPR || code == GE_EXPR)
13124 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13125 && CONVERT_EXPR_P (arg1)
13126 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13127 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13128 return
13129 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13130 fold_convert (TREE_TYPE (arg0),
13131 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13132 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13133 1))),
13134 build_int_cst (TREE_TYPE (arg0), 0));
13135
13136 return NULL_TREE;
13137
13138 case UNORDERED_EXPR:
13139 case ORDERED_EXPR:
13140 case UNLT_EXPR:
13141 case UNLE_EXPR:
13142 case UNGT_EXPR:
13143 case UNGE_EXPR:
13144 case UNEQ_EXPR:
13145 case LTGT_EXPR:
13146 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13147 {
13148 t1 = fold_relational_const (code, type, arg0, arg1);
13149 if (t1 != NULL_TREE)
13150 return t1;
13151 }
13152
13153 /* If the first operand is NaN, the result is constant. */
13154 if (TREE_CODE (arg0) == REAL_CST
13155 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13156 && (code != LTGT_EXPR || ! flag_trapping_math))
13157 {
13158 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13159 ? integer_zero_node
13160 : integer_one_node;
13161 return omit_one_operand (type, t1, arg1);
13162 }
13163
13164 /* If the second operand is NaN, the result is constant. */
13165 if (TREE_CODE (arg1) == REAL_CST
13166 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13167 && (code != LTGT_EXPR || ! flag_trapping_math))
13168 {
13169 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13170 ? integer_zero_node
13171 : integer_one_node;
13172 return omit_one_operand (type, t1, arg0);
13173 }
13174
13175 /* Simplify unordered comparison of something with itself. */
13176 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13177 && operand_equal_p (arg0, arg1, 0))
13178 return constant_boolean_node (1, type);
13179
13180 if (code == LTGT_EXPR
13181 && !flag_trapping_math
13182 && operand_equal_p (arg0, arg1, 0))
13183 return constant_boolean_node (0, type);
13184
13185 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13186 {
13187 tree targ0 = strip_float_extensions (arg0);
13188 tree targ1 = strip_float_extensions (arg1);
13189 tree newtype = TREE_TYPE (targ0);
13190
13191 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13192 newtype = TREE_TYPE (targ1);
13193
13194 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13195 return fold_build2 (code, type, fold_convert (newtype, targ0),
13196 fold_convert (newtype, targ1));
13197 }
13198
13199 return NULL_TREE;
13200
13201 case COMPOUND_EXPR:
13202 /* When pedantic, a compound expression can be neither an lvalue
13203 nor an integer constant expression. */
13204 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13205 return NULL_TREE;
13206 /* Don't let (0, 0) be a null pointer constant. */
13207 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13208 : fold_convert (type, arg1);
13209 return pedantic_non_lvalue (tem);
13210
13211 case COMPLEX_EXPR:
13212 if ((TREE_CODE (arg0) == REAL_CST
13213 && TREE_CODE (arg1) == REAL_CST)
13214 || (TREE_CODE (arg0) == INTEGER_CST
13215 && TREE_CODE (arg1) == INTEGER_CST))
13216 return build_complex (type, arg0, arg1);
13217 return NULL_TREE;
13218
13219 case ASSERT_EXPR:
13220 /* An ASSERT_EXPR should never be passed to fold_binary. */
13221 gcc_unreachable ();
13222
13223 default:
13224 return NULL_TREE;
13225 } /* switch (code) */
13226 }
13227
13228 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13229 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13230 of GOTO_EXPR. */
13231
13232 static tree
13233 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13234 {
13235 switch (TREE_CODE (*tp))
13236 {
13237 case LABEL_EXPR:
13238 return *tp;
13239
13240 case GOTO_EXPR:
13241 *walk_subtrees = 0;
13242
13243 /* ... fall through ... */
13244
13245 default:
13246 return NULL_TREE;
13247 }
13248 }
13249
13250 /* Return whether the sub-tree ST contains a label which is accessible from
13251 outside the sub-tree. */
13252
13253 static bool
13254 contains_label_p (tree st)
13255 {
13256 return
13257 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13258 }
13259
13260 /* Fold a ternary expression of code CODE and type TYPE with operands
13261 OP0, OP1, and OP2. Return the folded expression if folding is
13262 successful. Otherwise, return NULL_TREE. */
13263
13264 tree
13265 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13266 {
13267 tree tem;
13268 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13269 enum tree_code_class kind = TREE_CODE_CLASS (code);
13270
13271 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13272 && TREE_CODE_LENGTH (code) == 3);
13273
13274 /* Strip any conversions that don't change the mode. This is safe
13275 for every expression, except for a comparison expression because
13276 its signedness is derived from its operands. So, in the latter
13277 case, only strip conversions that don't change the signedness.
13278
13279 Note that this is done as an internal manipulation within the
13280 constant folder, in order to find the simplest representation of
13281 the arguments so that their form can be studied. In any case,
13282 the appropriate type conversions should be put back in the tree
13283 that will get out of the constant folder. */
13284 if (op0)
13285 {
13286 arg0 = op0;
13287 STRIP_NOPS (arg0);
13288 }
13289
13290 if (op1)
13291 {
13292 arg1 = op1;
13293 STRIP_NOPS (arg1);
13294 }
13295
13296 switch (code)
13297 {
13298 case COMPONENT_REF:
13299 if (TREE_CODE (arg0) == CONSTRUCTOR
13300 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13301 {
13302 unsigned HOST_WIDE_INT idx;
13303 tree field, value;
13304 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13305 if (field == arg1)
13306 return value;
13307 }
13308 return NULL_TREE;
13309
13310 case COND_EXPR:
13311 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13312 so all simple results must be passed through pedantic_non_lvalue. */
13313 if (TREE_CODE (arg0) == INTEGER_CST)
13314 {
13315 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13316 tem = integer_zerop (arg0) ? op2 : op1;
13317 /* Only optimize constant conditions when the selected branch
13318 has the same type as the COND_EXPR. This avoids optimizing
13319 away "c ? x : throw", where the throw has a void type.
13320 Also avoid throwing away an operand that contains a label. */
13321 if ((!TREE_SIDE_EFFECTS (unused_op)
13322 || !contains_label_p (unused_op))
13323 && (! VOID_TYPE_P (TREE_TYPE (tem))
13324 || VOID_TYPE_P (type)))
13325 return pedantic_non_lvalue (tem);
13326 return NULL_TREE;
13327 }
13328 if (operand_equal_p (arg1, op2, 0))
13329 return pedantic_omit_one_operand (type, arg1, arg0);
13330
13331 /* If we have A op B ? A : C, we may be able to convert this to a
13332 simpler expression, depending on the operation and the values
13333 of B and C. Signed zeros prevent all of these transformations,
13334 for reasons given above each one.
13335
13336 Also try swapping the arguments and inverting the conditional. */
13337 if (COMPARISON_CLASS_P (arg0)
13338 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13339 arg1, TREE_OPERAND (arg0, 1))
13340 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13341 {
13342 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13343 if (tem)
13344 return tem;
13345 }
13346
13347 if (COMPARISON_CLASS_P (arg0)
13348 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13349 op2,
13350 TREE_OPERAND (arg0, 1))
13351 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13352 {
13353 tem = fold_truth_not_expr (arg0);
13354 if (tem && COMPARISON_CLASS_P (tem))
13355 {
13356 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13357 if (tem)
13358 return tem;
13359 }
13360 }
13361
13362 /* If the second operand is simpler than the third, swap them
13363 since that produces better jump optimization results. */
13364 if (truth_value_p (TREE_CODE (arg0))
13365 && tree_swap_operands_p (op1, op2, false))
13366 {
13367 /* See if this can be inverted. If it can't, possibly because
13368 it was a floating-point inequality comparison, don't do
13369 anything. */
13370 tem = fold_truth_not_expr (arg0);
13371 if (tem)
13372 return fold_build3 (code, type, tem, op2, op1);
13373 }
13374
13375 /* Convert A ? 1 : 0 to simply A. */
13376 if (integer_onep (op1)
13377 && integer_zerop (op2)
13378 /* If we try to convert OP0 to our type, the
13379 call to fold will try to move the conversion inside
13380 a COND, which will recurse. In that case, the COND_EXPR
13381 is probably the best choice, so leave it alone. */
13382 && type == TREE_TYPE (arg0))
13383 return pedantic_non_lvalue (arg0);
13384
13385 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13386 over COND_EXPR in cases such as floating point comparisons. */
13387 if (integer_zerop (op1)
13388 && integer_onep (op2)
13389 && truth_value_p (TREE_CODE (arg0)))
13390 return pedantic_non_lvalue (fold_convert (type,
13391 invert_truthvalue (arg0)));
13392
13393 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
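/* Illustrative example, assuming 32-bit int x:
   x < 0 ? INT_MIN : 0 folds to x & INT_MIN, INT_MIN being
   exactly the sign bit of x.  */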
13394 if (TREE_CODE (arg0) == LT_EXPR
13395 && integer_zerop (TREE_OPERAND (arg0, 1))
13396 && integer_zerop (op2)
13397 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13398 {
13399 /* sign_bit_p only checks ARG1 bits within A's precision.
13400 If <sign bit of A> has wider type than A, bits outside
13401 of A's precision in <sign bit of A> need to be checked.
13402 If they are all 0, this optimization needs to be done
13403 in unsigned A's type; if they are all 1, in signed A's type;
13404 otherwise this can't be done. */
13405 if (TYPE_PRECISION (TREE_TYPE (tem))
13406 < TYPE_PRECISION (TREE_TYPE (arg1))
13407 && TYPE_PRECISION (TREE_TYPE (tem))
13408 < TYPE_PRECISION (type))
13409 {
13410 unsigned HOST_WIDE_INT mask_lo;
13411 HOST_WIDE_INT mask_hi;
13412 int inner_width, outer_width;
13413 tree tem_type;
13414
13415 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13416 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13417 if (outer_width > TYPE_PRECISION (type))
13418 outer_width = TYPE_PRECISION (type);
13419
13420 if (outer_width > HOST_BITS_PER_WIDE_INT)
13421 {
13422 mask_hi = ((unsigned HOST_WIDE_INT) -1
13423 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13424 mask_lo = -1;
13425 }
13426 else
13427 {
13428 mask_hi = 0;
13429 mask_lo = ((unsigned HOST_WIDE_INT) -1
13430 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13431 }
13432 if (inner_width > HOST_BITS_PER_WIDE_INT)
13433 {
13434 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13435 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13436 mask_lo = 0;
13437 }
13438 else
13439 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13440 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13441
13442 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13443 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13444 {
13445 tem_type = signed_type_for (TREE_TYPE (tem));
13446 tem = fold_convert (tem_type, tem);
13447 }
13448 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13449 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13450 {
13451 tem_type = unsigned_type_for (TREE_TYPE (tem));
13452 tem = fold_convert (tem_type, tem);
13453 }
13454 else
13455 tem = NULL;
13456 }
13457
13458 if (tem)
13459 return fold_convert (type,
13460 fold_build2 (BIT_AND_EXPR,
13461 TREE_TYPE (tem), tem,
13462 fold_convert (TREE_TYPE (tem),
13463 arg1)));
13464 }
13465
13466 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13467 already handled above. */
13468 if (TREE_CODE (arg0) == BIT_AND_EXPR
13469 && integer_onep (TREE_OPERAND (arg0, 1))
13470 && integer_zerop (op2)
13471 && integer_pow2p (arg1))
13472 {
13473 tree tem = TREE_OPERAND (arg0, 0);
13474 STRIP_NOPS (tem);
13475 if (TREE_CODE (tem) == RSHIFT_EXPR
13476 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13477 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13478 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13479 return fold_build2 (BIT_AND_EXPR, type,
13480 TREE_OPERAND (tem, 0), arg1);
13481 }
13482
13483 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13484 is probably obsolete because the first operand should be a
13485 truth value (that's why we have the two cases above), but let's
13486 leave it in until we can confirm this for all front-ends. */
13487 if (integer_zerop (op2)
13488 && TREE_CODE (arg0) == NE_EXPR
13489 && integer_zerop (TREE_OPERAND (arg0, 1))
13490 && integer_pow2p (arg1)
13491 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13492 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13493 arg1, OEP_ONLY_CONST))
13494 return pedantic_non_lvalue (fold_convert (type,
13495 TREE_OPERAND (arg0, 0)));
13496
13497 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13498 if (integer_zerop (op2)
13499 && truth_value_p (TREE_CODE (arg0))
13500 && truth_value_p (TREE_CODE (arg1)))
13501 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13502 fold_convert (type, arg0),
13503 arg1);
13504
13505 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13506 if (integer_onep (op2)
13507 && truth_value_p (TREE_CODE (arg0))
13508 && truth_value_p (TREE_CODE (arg1)))
13509 {
13510 /* Only perform transformation if ARG0 is easily inverted. */
13511 tem = fold_truth_not_expr (arg0);
13512 if (tem)
13513 return fold_build2 (TRUTH_ORIF_EXPR, type,
13514 fold_convert (type, tem),
13515 arg1);
13516 }
13517
13518 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13519 if (integer_zerop (arg1)
13520 && truth_value_p (TREE_CODE (arg0))
13521 && truth_value_p (TREE_CODE (op2)))
13522 {
13523 /* Only perform transformation if ARG0 is easily inverted. */
13524 tem = fold_truth_not_expr (arg0);
13525 if (tem)
13526 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13527 fold_convert (type, tem),
13528 op2);
13529 }
13530
13531 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13532 if (integer_onep (arg1)
13533 && truth_value_p (TREE_CODE (arg0))
13534 && truth_value_p (TREE_CODE (op2)))
13535 return fold_build2 (TRUTH_ORIF_EXPR, type,
13536 fold_convert (type, arg0),
13537 op2);
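
/* Taken together, the four transformations above canonicalize C
   idioms such as "p ? q : 0" into "p && q" and "p ? 1 : q" into
   "p || q". For instance (illustrative only, assuming P and Q are
   themselves comparisons or other truth-valued trees):

     fold_build3 (COND_EXPR, boolean_type_node, p, q,
                  boolean_false_node)

   yields TRUTH_ANDIF_EXPR <p, q> instead of a COND_EXPR, so later
   passes only need to reason about the short-circuit forms. */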
13538
13539 return NULL_TREE;
13540
13541 case CALL_EXPR:
13542 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13543 of fold_ternary on them. */
13544 gcc_unreachable ();
13545
13546 case BIT_FIELD_REF:
13547 if ((TREE_CODE (arg0) == VECTOR_CST
13548 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13549 && type == TREE_TYPE (TREE_TYPE (arg0)))
13550 {
13551 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13552 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13553
13554 if (width != 0
13555 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13556 && (idx % width) == 0
13557 && (idx = idx / width)
13558 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13559 {
13560 tree elements = NULL_TREE;
13561
13562 if (TREE_CODE (arg0) == VECTOR_CST)
13563 elements = TREE_VECTOR_CST_ELTS (arg0);
13564 else
13565 {
13566 unsigned HOST_WIDE_INT idx;
13567 tree value;
13568
13569 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13570 elements = tree_cons (NULL_TREE, value, elements);
13571 }
13572 while (idx-- > 0 && elements)
13573 elements = TREE_CHAIN (elements);
13574 if (elements)
13575 return TREE_VALUE (elements);
13576 else
13577 return fold_convert (type, integer_zero_node);
13578 }
13579 }
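
/* For instance (illustrative only): extracting bits [64,96) from a
   V4SI VECTOR_CST {1, 2, 3, 4} gives width == 32 and idx == 2 after
   the division, so the element walk above returns the third
   element, the INTEGER_CST 3, directly. */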
13580
13581 /* A bit-field-ref that referenced the full argument can be stripped. */
13582 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13583 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13584 && integer_zerop (op2))
13585 return fold_convert (type, arg0);
13586
13587 return NULL_TREE;
13588
13589 default:
13590 return NULL_TREE;
13591 } /* switch (code) */
13592 }
13593
13594 /* Perform constant folding and related simplification of EXPR.
13595 The related simplifications include x*1 => x, x*0 => 0, etc.,
13596 and application of the associative law.
13597 NOP_EXPR conversions may be removed freely (as long as we
13598 are careful not to change the type of the overall expression).
13599 We cannot simplify through a CONVERT_EXPR, FIX_TRUNC_EXPR or FLOAT_EXPR,
13600 but we can constant-fold them if they have constant operands. */
13601
13602 #ifdef ENABLE_FOLD_CHECKING
13603 # define fold(x) fold_1 (x)
13604 static tree fold_1 (tree);
13605 static
13606 #endif
13607 tree
13608 fold (tree expr)
13609 {
13610 const tree t = expr;
13611 enum tree_code code = TREE_CODE (t);
13612 enum tree_code_class kind = TREE_CODE_CLASS (code);
13613 tree tem;
13614
13615 /* Return right away if a constant. */
13616 if (kind == tcc_constant)
13617 return t;
13618
13619 /* CALL_EXPR-like objects with variable numbers of operands are
13620 treated specially. */
13621 if (kind == tcc_vl_exp)
13622 {
13623 if (code == CALL_EXPR)
13624 {
13625 tem = fold_call_expr (expr, false);
13626 return tem ? tem : expr;
13627 }
13628 return expr;
13629 }
13630
13631 if (IS_EXPR_CODE_CLASS (kind))
13632 {
13633 tree type = TREE_TYPE (t);
13634 tree op0, op1, op2;
13635
13636 switch (TREE_CODE_LENGTH (code))
13637 {
13638 case 1:
13639 op0 = TREE_OPERAND (t, 0);
13640 tem = fold_unary (code, type, op0);
13641 return tem ? tem : expr;
13642 case 2:
13643 op0 = TREE_OPERAND (t, 0);
13644 op1 = TREE_OPERAND (t, 1);
13645 tem = fold_binary (code, type, op0, op1);
13646 return tem ? tem : expr;
13647 case 3:
13648 op0 = TREE_OPERAND (t, 0);
13649 op1 = TREE_OPERAND (t, 1);
13650 op2 = TREE_OPERAND (t, 2);
13651 tem = fold_ternary (code, type, op0, op1, op2);
13652 return tem ? tem : expr;
13653 default:
13654 break;
13655 }
13656 }
13657
13658 switch (code)
13659 {
13660 case ARRAY_REF:
13661 {
13662 tree op0 = TREE_OPERAND (t, 0);
13663 tree op1 = TREE_OPERAND (t, 1);
13664
13665 if (TREE_CODE (op1) == INTEGER_CST
13666 && TREE_CODE (op0) == CONSTRUCTOR
13667 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13668 {
13669 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13670 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13671 unsigned HOST_WIDE_INT begin = 0;
13672
13673 /* Find a matching index by means of a binary search. */
13674 while (begin != end)
13675 {
13676 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13677 tree index = VEC_index (constructor_elt, elts, middle)->index;
13678
13679 if (TREE_CODE (index) == INTEGER_CST
13680 && tree_int_cst_lt (index, op1))
13681 begin = middle + 1;
13682 else if (TREE_CODE (index) == INTEGER_CST
13683 && tree_int_cst_lt (op1, index))
13684 end = middle;
13685 else if (TREE_CODE (index) == RANGE_EXPR
13686 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13687 begin = middle + 1;
13688 else if (TREE_CODE (index) == RANGE_EXPR
13689 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13690 end = middle;
13691 else
13692 return VEC_index (constructor_elt, elts, middle)->value;
13693 }
13694 }
13695
13696 return t;
13697 }
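
/* For instance (illustrative only): indexing the constant
   CONSTRUCTOR behind "static const int a[] = {10, 20, 30}" with the
   INTEGER_CST 1 binary-searches the element vector and hands back
   the INTEGER_CST 20 without any memory access. */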
13698
13699 case CONST_DECL:
13700 return fold (DECL_INITIAL (t));
13701
13702 default:
13703 return t;
13704 } /* switch (code) */
13705 }
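
/* A minimal usage sketch (illustrative only, never called): fold
   dispatches on arity to fold_unary/fold_binary/fold_ternary, so
   folding an explicitly built "X + 0" hands back X itself. */
#if 0
static tree
example_fold_plus_zero (tree x)
{
  tree zero = build_int_cst (TREE_TYPE (x), 0);
  /* fold sees a 2-operand PLUS_EXPR and calls fold_binary, which
     folds X + 0 down to X for integral types. */
  return fold (build2 (PLUS_EXPR, TREE_TYPE (x), x, zero));
}
#endif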
13706
13707 #ifdef ENABLE_FOLD_CHECKING
13708 #undef fold
13709
13710 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13711 static void fold_check_failed (const_tree, const_tree);
13712 void print_fold_checksum (const_tree);
13713
13714 /* When --enable-checking=fold, compute a digest of EXPR before
13715 and after the actual fold call, to verify that fold did not
13716 accidentally change the original expr. */
13717
13718 tree
13719 fold (tree expr)
13720 {
13721 tree ret;
13722 struct md5_ctx ctx;
13723 unsigned char checksum_before[16], checksum_after[16];
13724 htab_t ht;
13725
13726 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13727 md5_init_ctx (&ctx);
13728 fold_checksum_tree (expr, &ctx, ht);
13729 md5_finish_ctx (&ctx, checksum_before);
13730 htab_empty (ht);
13731
13732 ret = fold_1 (expr);
13733
13734 md5_init_ctx (&ctx);
13735 fold_checksum_tree (expr, &ctx, ht);
13736 md5_finish_ctx (&ctx, checksum_after);
13737 htab_delete (ht);
13738
13739 if (memcmp (checksum_before, checksum_after, 16))
13740 fold_check_failed (expr, ret);
13741
13742 return ret;
13743 }
13744
13745 void
13746 print_fold_checksum (const_tree expr)
13747 {
13748 struct md5_ctx ctx;
13749 unsigned char checksum[16], cnt;
13750 htab_t ht;
13751
13752 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13753 md5_init_ctx (&ctx);
13754 fold_checksum_tree (expr, &ctx, ht);
13755 md5_finish_ctx (&ctx, checksum);
13756 htab_delete (ht);
13757 for (cnt = 0; cnt < 16; ++cnt)
13758 fprintf (stderr, "%02x", checksum[cnt]);
13759 putc ('\n', stderr);
13760 }
13761
13762 static void
13763 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13764 {
13765 internal_error ("fold check: original tree changed by fold");
13766 }
13767
13768 static void
13769 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13770 {
13771 const void **slot;
13772 enum tree_code code;
13773 union tree_node buf;
13774 int i, len;
13775
13776 recursive_label:
13777
13778 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13779 <= sizeof (struct tree_function_decl))
13780 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13781 if (expr == NULL)
13782 return;
13783 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13784 if (*slot != NULL)
13785 return;
13786 *slot = expr;
13787 code = TREE_CODE (expr);
13788 if (TREE_CODE_CLASS (code) == tcc_declaration
13789 && DECL_ASSEMBLER_NAME_SET_P (expr))
13790 {
13791 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13792 memcpy ((char *) &buf, expr, tree_size (expr));
13793 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13794 expr = (tree) &buf;
13795 }
13796 else if (TREE_CODE_CLASS (code) == tcc_type
13797 && (TYPE_POINTER_TO (expr)
13798 || TYPE_REFERENCE_TO (expr)
13799 || TYPE_CACHED_VALUES_P (expr)
13800 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13801 || TYPE_NEXT_VARIANT (expr)))
13802 {
13803 /* Allow these fields to be modified. */
13804 tree tmp;
13805 memcpy ((char *) &buf, expr, tree_size (expr));
13806 expr = tmp = (tree) &buf;
13807 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13808 TYPE_POINTER_TO (tmp) = NULL;
13809 TYPE_REFERENCE_TO (tmp) = NULL;
13810 TYPE_NEXT_VARIANT (tmp) = NULL;
13811 if (TYPE_CACHED_VALUES_P (tmp))
13812 {
13813 TYPE_CACHED_VALUES_P (tmp) = 0;
13814 TYPE_CACHED_VALUES (tmp) = NULL;
13815 }
13816 }
13817 md5_process_bytes (expr, tree_size (expr), ctx);
13818 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13819 if (TREE_CODE_CLASS (code) != tcc_type
13820 && TREE_CODE_CLASS (code) != tcc_declaration
13821 && code != TREE_LIST
13822 && code != SSA_NAME)
13823 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13824 switch (TREE_CODE_CLASS (code))
13825 {
13826 case tcc_constant:
13827 switch (code)
13828 {
13829 case STRING_CST:
13830 md5_process_bytes (TREE_STRING_POINTER (expr),
13831 TREE_STRING_LENGTH (expr), ctx);
13832 break;
13833 case COMPLEX_CST:
13834 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13835 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13836 break;
13837 case VECTOR_CST:
13838 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13839 break;
13840 default:
13841 break;
13842 }
13843 break;
13844 case tcc_exceptional:
13845 switch (code)
13846 {
13847 case TREE_LIST:
13848 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13849 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13850 expr = TREE_CHAIN (expr);
13851 goto recursive_label;
13852 break;
13853 case TREE_VEC:
13854 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13855 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13856 break;
13857 default:
13858 break;
13859 }
13860 break;
13861 case tcc_expression:
13862 case tcc_reference:
13863 case tcc_comparison:
13864 case tcc_unary:
13865 case tcc_binary:
13866 case tcc_statement:
13867 case tcc_vl_exp:
13868 len = TREE_OPERAND_LENGTH (expr);
13869 for (i = 0; i < len; ++i)
13870 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13871 break;
13872 case tcc_declaration:
13873 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13874 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13875 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13876 {
13877 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13878 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13879 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13880 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13881 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13882 }
13883 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13884 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13885
13886 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13887 {
13888 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13889 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13890 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13891 }
13892 break;
13893 case tcc_type:
13894 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13895 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13896 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13897 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13898 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13899 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13900 if (INTEGRAL_TYPE_P (expr)
13901 || SCALAR_FLOAT_TYPE_P (expr))
13902 {
13903 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13904 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13905 }
13906 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13907 if (TREE_CODE (expr) == RECORD_TYPE
13908 || TREE_CODE (expr) == UNION_TYPE
13909 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13910 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13911 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13912 break;
13913 default:
13914 break;
13915 }
13916 }
13917
13918 /* Helper function for outputting the checksum of a tree T. When
13919 debugging with gdb, you can "define mynext" to be "next" followed
13920 by "call debug_fold_checksum (op0)", then just trace down until the
13921 outputs differ. */
13922
13923 void
13924 debug_fold_checksum (const_tree t)
13925 {
13926 int i;
13927 unsigned char checksum[16];
13928 struct md5_ctx ctx;
13929 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13930
13931 md5_init_ctx (&ctx);
13932 fold_checksum_tree (t, &ctx, ht);
13933 md5_finish_ctx (&ctx, checksum);
13934 htab_empty (ht);
13935
13936 for (i = 0; i < 16; i++)
13937 fprintf (stderr, "%d ", checksum[i]);
13938
13939 fprintf (stderr, "\n");
13940 }
13941
13942 #endif
13943
13944 /* Fold a unary tree expression with code CODE of type TYPE with an
13945 operand OP0. Return a folded expression if successful. Otherwise,
13946 return a tree expression with code CODE of type TYPE with an
13947 operand OP0. */
13948
13949 tree
13950 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13951 {
13952 tree tem;
13953 #ifdef ENABLE_FOLD_CHECKING
13954 unsigned char checksum_before[16], checksum_after[16];
13955 struct md5_ctx ctx;
13956 htab_t ht;
13957
13958 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13959 md5_init_ctx (&ctx);
13960 fold_checksum_tree (op0, &ctx, ht);
13961 md5_finish_ctx (&ctx, checksum_before);
13962 htab_empty (ht);
13963 #endif
13964
13965 tem = fold_unary (code, type, op0);
13966 if (!tem)
13967 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13968
13969 #ifdef ENABLE_FOLD_CHECKING
13970 md5_init_ctx (&ctx);
13971 fold_checksum_tree (op0, &ctx, ht);
13972 md5_finish_ctx (&ctx, checksum_after);
13973 htab_delete (ht);
13974
13975 if (memcmp (checksum_before, checksum_after, 16))
13976 fold_check_failed (op0, tem);
13977 #endif
13978 return tem;
13979 }
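
/* A minimal usage sketch (illustrative only, never called): the
   fold_build1 macro wrapping fold_build1_stat combines construction
   and folding, so negating an INTEGER_CST never allocates a
   NEGATE_EXPR node. */
#if 0
static tree
example_fold_build1_negate (void)
{
  /* Folds straight to the INTEGER_CST -7 via fold_negate_const. */
  return fold_build1 (NEGATE_EXPR, integer_type_node,
                      build_int_cst (integer_type_node, 7));
}
#endif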
13980
13981 /* Fold a binary tree expression with code CODE of type TYPE with
13982 operands OP0 and OP1. Return a folded expression if successful.
13983 Otherwise, return a tree expression with code CODE of type TYPE
13984 with operands OP0 and OP1. */
13985
13986 tree
13987 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13988 MEM_STAT_DECL)
13989 {
13990 tree tem;
13991 #ifdef ENABLE_FOLD_CHECKING
13992 unsigned char checksum_before_op0[16],
13993 checksum_before_op1[16],
13994 checksum_after_op0[16],
13995 checksum_after_op1[16];
13996 struct md5_ctx ctx;
13997 htab_t ht;
13998
13999 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14000 md5_init_ctx (&ctx);
14001 fold_checksum_tree (op0, &ctx, ht);
14002 md5_finish_ctx (&ctx, checksum_before_op0);
14003 htab_empty (ht);
14004
14005 md5_init_ctx (&ctx);
14006 fold_checksum_tree (op1, &ctx, ht);
14007 md5_finish_ctx (&ctx, checksum_before_op1);
14008 htab_empty (ht);
14009 #endif
14010
14011 tem = fold_binary (code, type, op0, op1);
14012 if (!tem)
14013 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14014
14015 #ifdef ENABLE_FOLD_CHECKING
14016 md5_init_ctx (&ctx);
14017 fold_checksum_tree (op0, &ctx, ht);
14018 md5_finish_ctx (&ctx, checksum_after_op0);
14019 htab_empty (ht);
14020
14021 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14022 fold_check_failed (op0, tem);
14023
14024 md5_init_ctx (&ctx);
14025 fold_checksum_tree (op1, &ctx, ht);
14026 md5_finish_ctx (&ctx, checksum_after_op1);
14027 htab_delete (ht);
14028
14029 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14030 fold_check_failed (op1, tem);
14031 #endif
14032 return tem;
14033 }
14034
14035 /* Fold a ternary tree expression with code CODE of type TYPE with
14036 operands OP0, OP1, and OP2. Return a folded expression if
14037 successful. Otherwise, return a tree expression with code CODE of
14038 type TYPE with operands OP0, OP1, and OP2. */
14039
14040 tree
14041 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
14042 MEM_STAT_DECL)
14043 {
14044 tree tem;
14045 #ifdef ENABLE_FOLD_CHECKING
14046 unsigned char checksum_before_op0[16],
14047 checksum_before_op1[16],
14048 checksum_before_op2[16],
14049 checksum_after_op0[16],
14050 checksum_after_op1[16],
14051 checksum_after_op2[16];
14052 struct md5_ctx ctx;
14053 htab_t ht;
14054
14055 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14056 md5_init_ctx (&ctx);
14057 fold_checksum_tree (op0, &ctx, ht);
14058 md5_finish_ctx (&ctx, checksum_before_op0);
14059 htab_empty (ht);
14060
14061 md5_init_ctx (&ctx);
14062 fold_checksum_tree (op1, &ctx, ht);
14063 md5_finish_ctx (&ctx, checksum_before_op1);
14064 htab_empty (ht);
14065
14066 md5_init_ctx (&ctx);
14067 fold_checksum_tree (op2, &ctx, ht);
14068 md5_finish_ctx (&ctx, checksum_before_op2);
14069 htab_empty (ht);
14070 #endif
14071
14072 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14073 tem = fold_ternary (code, type, op0, op1, op2);
14074 if (!tem)
14075 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14076
14077 #ifdef ENABLE_FOLD_CHECKING
14078 md5_init_ctx (&ctx);
14079 fold_checksum_tree (op0, &ctx, ht);
14080 md5_finish_ctx (&ctx, checksum_after_op0);
14081 htab_empty (ht);
14082
14083 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14084 fold_check_failed (op0, tem);
14085
14086 md5_init_ctx (&ctx);
14087 fold_checksum_tree (op1, &ctx, ht);
14088 md5_finish_ctx (&ctx, checksum_after_op1);
14089 htab_empty (ht);
14090
14091 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14092 fold_check_failed (op1, tem);
14093
14094 md5_init_ctx (&ctx);
14095 fold_checksum_tree (op2, &ctx, ht);
14096 md5_finish_ctx (&ctx, checksum_after_op2);
14097 htab_delete (ht);
14098
14099 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14100 fold_check_failed (op2, tem);
14101 #endif
14102 return tem;
14103 }
14104
14105 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
14106 the NARGS arguments in ARGARRAY, and a null static chain.
14107 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14108 of type TYPE from the given operands as constructed by build_call_array. */
14109
14110 tree
14111 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14112 {
14113 tree tem;
14114 #ifdef ENABLE_FOLD_CHECKING
14115 unsigned char checksum_before_fn[16],
14116 checksum_before_arglist[16],
14117 checksum_after_fn[16],
14118 checksum_after_arglist[16];
14119 struct md5_ctx ctx;
14120 htab_t ht;
14121 int i;
14122
14123 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14124 md5_init_ctx (&ctx);
14125 fold_checksum_tree (fn, &ctx, ht);
14126 md5_finish_ctx (&ctx, checksum_before_fn);
14127 htab_empty (ht);
14128
14129 md5_init_ctx (&ctx);
14130 for (i = 0; i < nargs; i++)
14131 fold_checksum_tree (argarray[i], &ctx, ht);
14132 md5_finish_ctx (&ctx, checksum_before_arglist);
14133 htab_empty (ht);
14134 #endif
14135
14136 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14137
14138 #ifdef ENABLE_FOLD_CHECKING
14139 md5_init_ctx (&ctx);
14140 fold_checksum_tree (fn, &ctx, ht);
14141 md5_finish_ctx (&ctx, checksum_after_fn);
14142 htab_empty (ht);
14143
14144 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14145 fold_check_failed (fn, tem);
14146
14147 md5_init_ctx (&ctx);
14148 for (i = 0; i < nargs; i++)
14149 fold_checksum_tree (argarray[i], &ctx, ht);
14150 md5_finish_ctx (&ctx, checksum_after_arglist);
14151 htab_delete (ht);
14152
14153 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14154 fold_check_failed (NULL_TREE, tem);
14155 #endif
14156 return tem;
14157 }
14158
14159 /* Perform constant folding and related simplification of initializer
14160 expression EXPR. These behave identically to "fold_buildN" but ignore
14161 potential run-time traps and exceptions that fold must preserve. */
14162
14163 #define START_FOLD_INIT \
14164 int saved_signaling_nans = flag_signaling_nans;\
14165 int saved_trapping_math = flag_trapping_math;\
14166 int saved_rounding_math = flag_rounding_math;\
14167 int saved_trapv = flag_trapv;\
14168 int saved_folding_initializer = folding_initializer;\
14169 flag_signaling_nans = 0;\
14170 flag_trapping_math = 0;\
14171 flag_rounding_math = 0;\
14172 flag_trapv = 0;\
14173 folding_initializer = 1;
14174
14175 #define END_FOLD_INIT \
14176 flag_signaling_nans = saved_signaling_nans;\
14177 flag_trapping_math = saved_trapping_math;\
14178 flag_rounding_math = saved_rounding_math;\
14179 flag_trapv = saved_trapv;\
14180 folding_initializer = saved_folding_initializer;
14181
14182 tree
14183 fold_build1_initializer (enum tree_code code, tree type, tree op)
14184 {
14185 tree result;
14186 START_FOLD_INIT;
14187
14188 result = fold_build1 (code, type, op);
14189
14190 END_FOLD_INIT;
14191 return result;
14192 }
14193
14194 tree
14195 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14196 {
14197 tree result;
14198 START_FOLD_INIT;
14199
14200 result = fold_build2 (code, type, op0, op1);
14201
14202 END_FOLD_INIT;
14203 return result;
14204 }
14205
14206 tree
14207 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14208 tree op2)
14209 {
14210 tree result;
14211 START_FOLD_INIT;
14212
14213 result = fold_build3 (code, type, op0, op1, op2);
14214
14215 END_FOLD_INIT;
14216 return result;
14217 }
14218
14219 tree
14220 fold_build_call_array_initializer (tree type, tree fn,
14221 int nargs, tree *argarray)
14222 {
14223 tree result;
14224 START_FOLD_INIT;
14225
14226 result = fold_build_call_array (type, fn, nargs, argarray);
14227
14228 END_FOLD_INIT;
14229 return result;
14230 }
14231
14232 #undef START_FOLD_INIT
14233 #undef END_FOLD_INIT
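
/* A minimal usage sketch (illustrative only, never called): inside a
   static initializer a trap can never fire at run time, so these
   wrappers clear flag_trapv and friends around the fold. */
#if 0
static tree
example_fold_initializer_mult (tree a, tree b)
{
  /* With -ftrapv in effect, a plain fold_build2 might decline some
     constant foldings that could overflow; the _initializer variant
     performs them. */
  return fold_build2_initializer (MULT_EXPR, integer_type_node, a, b);
}
#endif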
14234
14235 /* Determine if first argument is a multiple of second argument. Return 0 if
14236 it is not, or if we cannot easily determine it to be.
14237
14238 An example of the sort of thing we care about (at this point; this routine
14239 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14240 fold cases do now) is discovering that
14241
14242 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14243
14244 is a multiple of
14245
14246 SAVE_EXPR (J * 8)
14247
14248 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14249
14250 This code also handles discovering that
14251
14252 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14253
14254 is a multiple of 8 so we don't have to worry about dealing with a
14255 possible remainder.
14256
14257 Note that we *look* inside a SAVE_EXPR only to determine how it was
14258 calculated; it is not safe for fold to do much of anything else with the
14259 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14260 at run time. For example, the latter example above *cannot* be implemented
14261 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14262 evaluation time of the original SAVE_EXPR is not necessarily the same at
14263 the time the new expression is evaluated. The only optimization of this
14264 sort that would be valid is changing
14265
14266 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14267
14268 divided by 8 to
14269
14270 SAVE_EXPR (I) * SAVE_EXPR (J)
14271
14272 (where the same SAVE_EXPR (J) is used in the original and the
14273 transformed version). */
14274
14275 int
14276 multiple_of_p (tree type, const_tree top, const_tree bottom)
14277 {
14278 if (operand_equal_p (top, bottom, 0))
14279 return 1;
14280
14281 if (TREE_CODE (type) != INTEGER_TYPE)
14282 return 0;
14283
14284 switch (TREE_CODE (top))
14285 {
14286 case BIT_AND_EXPR:
14287 /* Bitwise and provides a power of two multiple. If the mask is
14288 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14289 if (!integer_pow2p (bottom))
14290 return 0;
14291 /* FALLTHRU */
14292
14293 case MULT_EXPR:
14294 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14295 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14296
14297 case PLUS_EXPR:
14298 case MINUS_EXPR:
14299 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14300 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14301
14302 case LSHIFT_EXPR:
14303 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14304 {
14305 tree op1, t1;
14306
14307 op1 = TREE_OPERAND (top, 1);
14308 /* const_binop may not detect overflow correctly,
14309 so check for it explicitly here. */
14310 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14311 > TREE_INT_CST_LOW (op1)
14312 && TREE_INT_CST_HIGH (op1) == 0
14313 && 0 != (t1 = fold_convert (type,
14314 const_binop (LSHIFT_EXPR,
14315 size_one_node,
14316 op1, 0)))
14317 && !TREE_OVERFLOW (t1))
14318 return multiple_of_p (type, t1, bottom);
14319 }
14320 return 0;
14321
14322 case NOP_EXPR:
14323 /* Can't handle conversions from non-integral or wider integral type. */
14324 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14325 || (TYPE_PRECISION (type)
14326 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14327 return 0;
14328
14329 /* ... fall through ... */
14330
14331 case SAVE_EXPR:
14332 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14333
14334 case INTEGER_CST:
14335 if (TREE_CODE (bottom) != INTEGER_CST
14336 || integer_zerop (bottom)
14337 || (TYPE_UNSIGNED (type)
14338 && (tree_int_cst_sgn (top) < 0
14339 || tree_int_cst_sgn (bottom) < 0)))
14340 return 0;
14341 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14342 top, bottom, 0));
14343
14344 default:
14345 return 0;
14346 }
14347 }
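
/* A minimal usage sketch (illustrative only, never called): the
   MULT_EXPR and INTEGER_CST cases compose, so J * 8 is recognized as
   a multiple of 4 without knowing anything about J. */
#if 0
static int
example_multiple_of_p (tree j)
{
  tree top = fold_build2 (MULT_EXPR, sizetype, j, size_int (8));
  /* Returns 1: the MULT_EXPR case succeeds on the constant factor,
     since 8 % 4 == 0 in the INTEGER_CST case. */
  return multiple_of_p (sizetype, top, size_int (4));
}
#endif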
14348
14349 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
14350
14351 static bool
14352 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14353 {
14354 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14355 && truth_value_p (code))
14356 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14357 have a signed:1 type (where the values are -1 and 0). */
14358 return true;
14359 return false;
14360 }
14361
14362 /* Return true if (CODE OP0) is known to be non-negative. If the return
14363 value is based on the assumption that signed overflow is undefined,
14364 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14365 *STRICT_OVERFLOW_P. */
14366
14367 bool
14368 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14369 bool *strict_overflow_p)
14370 {
14371 if (TYPE_UNSIGNED (type))
14372 return true;
14373
14374 switch (code)
14375 {
14376 case ABS_EXPR:
14377 /* We can't return 1 if flag_wrapv is set because
14378 ABS_EXPR<INT_MIN> = INT_MIN. */
14379 if (!INTEGRAL_TYPE_P (type))
14380 return true;
14381 if (TYPE_OVERFLOW_UNDEFINED (type))
14382 {
14383 *strict_overflow_p = true;
14384 return true;
14385 }
14386 break;
14387
14388 case NON_LVALUE_EXPR:
14389 case FLOAT_EXPR:
14390 case FIX_TRUNC_EXPR:
14391 return tree_expr_nonnegative_warnv_p (op0,
14392 strict_overflow_p);
14393
14394 case NOP_EXPR:
14395 {
14396 tree inner_type = TREE_TYPE (op0);
14397 tree outer_type = type;
14398
14399 if (TREE_CODE (outer_type) == REAL_TYPE)
14400 {
14401 if (TREE_CODE (inner_type) == REAL_TYPE)
14402 return tree_expr_nonnegative_warnv_p (op0,
14403 strict_overflow_p);
14404 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14405 {
14406 if (TYPE_UNSIGNED (inner_type))
14407 return true;
14408 return tree_expr_nonnegative_warnv_p (op0,
14409 strict_overflow_p);
14410 }
14411 }
14412 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14413 {
14414 if (TREE_CODE (inner_type) == REAL_TYPE)
14415 return tree_expr_nonnegative_warnv_p (op0,
14416 strict_overflow_p);
14417 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14418 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14419 && TYPE_UNSIGNED (inner_type);
14420 }
14421 }
14422 break;
14423
14424 default:
14425 return tree_simple_nonnegative_warnv_p (code, type);
14426 }
14427
14428 /* We don't know sign of `t', so be conservative and return false. */
14429 return false;
14430 }
14431
14432 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14433 value is based on the assumption that signed overflow is undefined,
14434 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14435 *STRICT_OVERFLOW_P. */
14436
14437 bool
14438 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14439 tree op1, bool *strict_overflow_p)
14440 {
14441 if (TYPE_UNSIGNED (type))
14442 return true;
14443
14444 switch (code)
14445 {
14446 case POINTER_PLUS_EXPR:
14447 case PLUS_EXPR:
14448 if (FLOAT_TYPE_P (type))
14449 return (tree_expr_nonnegative_warnv_p (op0,
14450 strict_overflow_p)
14451 && tree_expr_nonnegative_warnv_p (op1,
14452 strict_overflow_p));
14453
14454 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14455 both unsigned and at least 2 bits shorter than the result. */
14456 if (TREE_CODE (type) == INTEGER_TYPE
14457 && TREE_CODE (op0) == NOP_EXPR
14458 && TREE_CODE (op1) == NOP_EXPR)
14459 {
14460 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14461 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14462 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14463 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14464 {
14465 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14466 TYPE_PRECISION (inner2)) + 1;
14467 return prec < TYPE_PRECISION (type);
14468 }
14469 }
14470 break;
14471
14472 case MULT_EXPR:
14473 if (FLOAT_TYPE_P (type))
14474 {
14475 /* x * x for floating point x is always non-negative. */
14476 if (operand_equal_p (op0, op1, 0))
14477 return true;
14478 return (tree_expr_nonnegative_warnv_p (op0,
14479 strict_overflow_p)
14480 && tree_expr_nonnegative_warnv_p (op1,
14481 strict_overflow_p));
14482 }
14483
14484 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14485 both unsigned and the sum of their precisions is less than that of the result. */
14486 if (TREE_CODE (type) == INTEGER_TYPE
14487 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14488 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14489 {
14490 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14491 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14492 : TREE_TYPE (op0);
14493 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14494 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14495 : TREE_TYPE (op1);
14496
14497 bool unsigned0 = TYPE_UNSIGNED (inner0);
14498 bool unsigned1 = TYPE_UNSIGNED (inner1);
14499
14500 if (TREE_CODE (op0) == INTEGER_CST)
14501 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14502
14503 if (TREE_CODE (op1) == INTEGER_CST)
14504 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14505
14506 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14507 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14508 {
14509 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14510 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14511 : TYPE_PRECISION (inner0);
14512
14513 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14514 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14515 : TYPE_PRECISION (inner1);
14516
14517 return precision0 + precision1 < TYPE_PRECISION (type);
14518 }
14519 }
14520 return false;
14521
14522 case BIT_AND_EXPR:
14523 case MAX_EXPR:
14524 return (tree_expr_nonnegative_warnv_p (op0,
14525 strict_overflow_p)
14526 || tree_expr_nonnegative_warnv_p (op1,
14527 strict_overflow_p));
14528
14529 case BIT_IOR_EXPR:
14530 case BIT_XOR_EXPR:
14531 case MIN_EXPR:
14532 case RDIV_EXPR:
14533 case TRUNC_DIV_EXPR:
14534 case CEIL_DIV_EXPR:
14535 case FLOOR_DIV_EXPR:
14536 case ROUND_DIV_EXPR:
14537 return (tree_expr_nonnegative_warnv_p (op0,
14538 strict_overflow_p)
14539 && tree_expr_nonnegative_warnv_p (op1,
14540 strict_overflow_p));
14541
14542 case TRUNC_MOD_EXPR:
14543 case CEIL_MOD_EXPR:
14544 case FLOOR_MOD_EXPR:
14545 case ROUND_MOD_EXPR:
14546 return tree_expr_nonnegative_warnv_p (op0,
14547 strict_overflow_p);
14548 default:
14549 return tree_simple_nonnegative_warnv_p (code, type);
14550 }
14551
14552 /* We don't know sign of `t', so be conservative and return false. */
14553 return false;
14554 }
14555
14556 /* Return true if T is known to be non-negative. If the return
14557 value is based on the assumption that signed overflow is undefined,
14558 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14559 *STRICT_OVERFLOW_P. */
14560
14561 bool
14562 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14563 {
14564 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14565 return true;
14566
14567 switch (TREE_CODE (t))
14568 {
14569 case INTEGER_CST:
14570 return tree_int_cst_sgn (t) >= 0;
14571
14572 case REAL_CST:
14573 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14574
14575 case FIXED_CST:
14576 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14577
14578 case COND_EXPR:
14579 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14580 strict_overflow_p)
14581 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14582 strict_overflow_p));
14583 default:
14584 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14585 TREE_TYPE (t));
14586 }
14587 /* We don't know sign of `t', so be conservative and return false. */
14588 return false;
14589 }
14590
14591 /* Return true if a call to FNDECL with arguments ARG0 and ARG1 is known to be non-negative. If the return
14592 value is based on the assumption that signed overflow is undefined,
14593 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14594 *STRICT_OVERFLOW_P. */
14595
14596 bool
14597 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14598 tree arg0, tree arg1, bool *strict_overflow_p)
14599 {
14600 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14601 switch (DECL_FUNCTION_CODE (fndecl))
14602 {
14603 CASE_FLT_FN (BUILT_IN_ACOS):
14604 CASE_FLT_FN (BUILT_IN_ACOSH):
14605 CASE_FLT_FN (BUILT_IN_CABS):
14606 CASE_FLT_FN (BUILT_IN_COSH):
14607 CASE_FLT_FN (BUILT_IN_ERFC):
14608 CASE_FLT_FN (BUILT_IN_EXP):
14609 CASE_FLT_FN (BUILT_IN_EXP10):
14610 CASE_FLT_FN (BUILT_IN_EXP2):
14611 CASE_FLT_FN (BUILT_IN_FABS):
14612 CASE_FLT_FN (BUILT_IN_FDIM):
14613 CASE_FLT_FN (BUILT_IN_HYPOT):
14614 CASE_FLT_FN (BUILT_IN_POW10):
14615 CASE_INT_FN (BUILT_IN_FFS):
14616 CASE_INT_FN (BUILT_IN_PARITY):
14617 CASE_INT_FN (BUILT_IN_POPCOUNT):
14618 case BUILT_IN_BSWAP32:
14619 case BUILT_IN_BSWAP64:
14620 /* Always true. */
14621 return true;
14622
14623 CASE_FLT_FN (BUILT_IN_SQRT):
14624 /* sqrt(-0.0) is -0.0. */
14625 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14626 return true;
14627 return tree_expr_nonnegative_warnv_p (arg0,
14628 strict_overflow_p);
14629
14630 CASE_FLT_FN (BUILT_IN_ASINH):
14631 CASE_FLT_FN (BUILT_IN_ATAN):
14632 CASE_FLT_FN (BUILT_IN_ATANH):
14633 CASE_FLT_FN (BUILT_IN_CBRT):
14634 CASE_FLT_FN (BUILT_IN_CEIL):
14635 CASE_FLT_FN (BUILT_IN_ERF):
14636 CASE_FLT_FN (BUILT_IN_EXPM1):
14637 CASE_FLT_FN (BUILT_IN_FLOOR):
14638 CASE_FLT_FN (BUILT_IN_FMOD):
14639 CASE_FLT_FN (BUILT_IN_FREXP):
14640 CASE_FLT_FN (BUILT_IN_LCEIL):
14641 CASE_FLT_FN (BUILT_IN_LDEXP):
14642 CASE_FLT_FN (BUILT_IN_LFLOOR):
14643 CASE_FLT_FN (BUILT_IN_LLCEIL):
14644 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14645 CASE_FLT_FN (BUILT_IN_LLRINT):
14646 CASE_FLT_FN (BUILT_IN_LLROUND):
14647 CASE_FLT_FN (BUILT_IN_LRINT):
14648 CASE_FLT_FN (BUILT_IN_LROUND):
14649 CASE_FLT_FN (BUILT_IN_MODF):
14650 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14651 CASE_FLT_FN (BUILT_IN_RINT):
14652 CASE_FLT_FN (BUILT_IN_ROUND):
14653 CASE_FLT_FN (BUILT_IN_SCALB):
14654 CASE_FLT_FN (BUILT_IN_SCALBLN):
14655 CASE_FLT_FN (BUILT_IN_SCALBN):
14656 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14657 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14658 CASE_FLT_FN (BUILT_IN_SINH):
14659 CASE_FLT_FN (BUILT_IN_TANH):
14660 CASE_FLT_FN (BUILT_IN_TRUNC):
14661 /* True if the 1st argument is nonnegative. */
14662 return tree_expr_nonnegative_warnv_p (arg0,
14663 strict_overflow_p);
14664
14665 CASE_FLT_FN (BUILT_IN_FMAX):
14666 /* True if the 1st OR 2nd arguments are nonnegative. */
14667 return (tree_expr_nonnegative_warnv_p (arg0,
14668 strict_overflow_p)
14669 || (tree_expr_nonnegative_warnv_p (arg1,
14670 strict_overflow_p)));
14671
14672 CASE_FLT_FN (BUILT_IN_FMIN):
14673 /* True if the 1st AND 2nd arguments are nonnegative. */
14674 return (tree_expr_nonnegative_warnv_p (arg0,
14675 strict_overflow_p)
14676 && (tree_expr_nonnegative_warnv_p (arg1,
14677 strict_overflow_p)));
14678
14679 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14680 /* True if the 2nd argument is nonnegative. */
14681 return tree_expr_nonnegative_warnv_p (arg1,
14682 strict_overflow_p);
14683
14684 CASE_FLT_FN (BUILT_IN_POWI):
14685 /* True if the 1st argument is nonnegative or the second
14686 argument is an even integer. */
14687 if (TREE_CODE (arg1) == INTEGER_CST
14688 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14689 return true;
14690 return tree_expr_nonnegative_warnv_p (arg0,
14691 strict_overflow_p);
14692
14693 CASE_FLT_FN (BUILT_IN_POW):
14694 /* True if the 1st argument is nonnegative or the second
14695 argument is an even integer valued real. */
14696 if (TREE_CODE (arg1) == REAL_CST)
14697 {
14698 REAL_VALUE_TYPE c;
14699 HOST_WIDE_INT n;
14700
14701 c = TREE_REAL_CST (arg1);
14702 n = real_to_integer (&c);
14703 if ((n & 1) == 0)
14704 {
14705 REAL_VALUE_TYPE cint;
14706 real_from_integer (&cint, VOIDmode, n,
14707 n < 0 ? -1 : 0, 0);
14708 if (real_identical (&c, &cint))
14709 return true;
14710 }
14711 }
14712 return tree_expr_nonnegative_warnv_p (arg0,
14713 strict_overflow_p);
14714
14715 default:
14716 break;
14717 }
14718 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14719 type);
14720 }
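
/* For instance (illustrative only): with the table above,
   "pow (x, 2.0)" is known non-negative for any x because 2.0 is an
   even integer valued REAL_CST, while "sqrt (x)" is non-negative
   either when signed zeros need not be honored or when X itself is
   known non-negative. */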
14721
14722 /* Return true if T is known to be non-negative. If the return
14723 value is based on the assumption that signed overflow is undefined,
14724 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14725 *STRICT_OVERFLOW_P. */
14726
14727 bool
14728 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14729 {
14730 enum tree_code code = TREE_CODE (t);
14731 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14732 return true;
14733
14734 switch (code)
14735 {
14736 case TARGET_EXPR:
14737 {
14738 tree temp = TARGET_EXPR_SLOT (t);
14739 t = TARGET_EXPR_INITIAL (t);
14740
14741 /* If the initializer is non-void, then it's a normal expression
14742 that will be assigned to the slot. */
14743 if (!VOID_TYPE_P (t))
14744 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14745
14746 /* Otherwise, the initializer sets the slot in some way. One common
14747 way is an assignment statement at the end of the initializer. */
14748 while (1)
14749 {
14750 if (TREE_CODE (t) == BIND_EXPR)
14751 t = expr_last (BIND_EXPR_BODY (t));
14752 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14753 || TREE_CODE (t) == TRY_CATCH_EXPR)
14754 t = expr_last (TREE_OPERAND (t, 0));
14755 else if (TREE_CODE (t) == STATEMENT_LIST)
14756 t = expr_last (t);
14757 else
14758 break;
14759 }
14760 if (TREE_CODE (t) == MODIFY_EXPR
14761 && TREE_OPERAND (t, 0) == temp)
14762 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14763 strict_overflow_p);
14764
14765 return false;
14766 }
14767
14768 case CALL_EXPR:
14769 {
14770 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14771 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14772
14773 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14774 get_callee_fndecl (t),
14775 arg0,
14776 arg1,
14777 strict_overflow_p);
14778 }
14779 case COMPOUND_EXPR:
14780 case MODIFY_EXPR:
14781 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14782 strict_overflow_p);
14783 case BIND_EXPR:
14784 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14785 strict_overflow_p);
14786 case SAVE_EXPR:
14787 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14788 strict_overflow_p);
14789
14790 default:
14791 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14792 TREE_TYPE (t));
14793 }
14794
14795 /* We don't know sign of `t', so be conservative and return false. */
14796 return false;
14797 }
14798
14799 /* Return true if T is known to be non-negative. If the return
14800 value is based on the assumption that signed overflow is undefined,
14801 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14802 *STRICT_OVERFLOW_P. */
14803
14804 bool
14805 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14806 {
14807 enum tree_code code;
14808 if (t == error_mark_node)
14809 return false;
14810
14811 code = TREE_CODE (t);
14812 switch (TREE_CODE_CLASS (code))
14813 {
14814 case tcc_binary:
14815 case tcc_comparison:
14816 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14817 TREE_TYPE (t),
14818 TREE_OPERAND (t, 0),
14819 TREE_OPERAND (t, 1),
14820 strict_overflow_p);
14821
14822 case tcc_unary:
14823 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14824 TREE_TYPE (t),
14825 TREE_OPERAND (t, 0),
14826 strict_overflow_p);
14827
14828 case tcc_constant:
14829 case tcc_declaration:
14830 case tcc_reference:
14831 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14832
14833 default:
14834 break;
14835 }
14836
14837 switch (code)
14838 {
14839 case TRUTH_AND_EXPR:
14840 case TRUTH_OR_EXPR:
14841 case TRUTH_XOR_EXPR:
14842 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14843 TREE_TYPE (t),
14844 TREE_OPERAND (t, 0),
14845 TREE_OPERAND (t, 1),
14846 strict_overflow_p);
14847 case TRUTH_NOT_EXPR:
14848 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14849 TREE_TYPE (t),
14850 TREE_OPERAND (t, 0),
14851 strict_overflow_p);
14852
14853 case COND_EXPR:
14854 case CONSTRUCTOR:
14855 case OBJ_TYPE_REF:
14856 case ASSERT_EXPR:
14857 case ADDR_EXPR:
14858 case WITH_SIZE_EXPR:
14859 case EXC_PTR_EXPR:
14860 case SSA_NAME:
14861 case FILTER_EXPR:
14862 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14863
14864 default:
14865 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14866 }
14867 }
14868
14869 /* Return true if `t' is known to be non-negative. Handle warnings
14870 about undefined signed overflow. */
14871
14872 bool
14873 tree_expr_nonnegative_p (tree t)
14874 {
14875 bool ret, strict_overflow_p;
14876
14877 strict_overflow_p = false;
14878 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14879 if (strict_overflow_p)
14880 fold_overflow_warning (("assuming signed overflow does not occur when "
14881 "determining that expression is always "
14882 "non-negative"),
14883 WARN_STRICT_OVERFLOW_MISC);
14884 return ret;
14885 }
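
/* A minimal usage sketch (illustrative only, never called): for a
   signed integral X, ABS_EXPR <X> is non-negative only because
   signed overflow is assumed undefined (ABS_EXPR <INT_MIN> wraps
   back to INT_MIN), so this query returns true and may also emit a
   -Wstrict-overflow note via fold_overflow_warning. */
#if 0
static bool
example_abs_nonnegative (tree x)
{
  return tree_expr_nonnegative_p (build1 (ABS_EXPR, TREE_TYPE (x), x));
}
#endif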
14886
14887
14888 /* Return true when the expression (CODE OP0) is known to be nonzero.
14889 For floating point we further ensure that the value is not denormal.
14890 Similar logic is present in nonzero_address_p in rtlanal.c.
14891
14892 If the return value is based on the assumption that signed overflow
14893 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14894 change *STRICT_OVERFLOW_P. */
14895
14896 bool
14897 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14898 bool *strict_overflow_p)
14899 {
14900 switch (code)
14901 {
14902 case ABS_EXPR:
14903 return tree_expr_nonzero_warnv_p (op0,
14904 strict_overflow_p);
14905
14906 case NOP_EXPR:
14907 {
14908 tree inner_type = TREE_TYPE (op0);
14909 tree outer_type = type;
14910
14911 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14912 && tree_expr_nonzero_warnv_p (op0,
14913 strict_overflow_p));
14914 }
14915 break;
14916
14917 case NON_LVALUE_EXPR:
14918 return tree_expr_nonzero_warnv_p (op0,
14919 strict_overflow_p);
14920
14921 default:
14922 break;
14923 }
14924
14925 return false;
14926 }
14927
14928 /* Return true when the expression (CODE OP0 OP1) is known to be nonzero.
14929 For floating point we further ensure that the value is not denormal.
14930 Similar logic is present in nonzero_address_p in rtlanal.c.
14931
14932 If the return value is based on the assumption that signed overflow
14933 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14934 change *STRICT_OVERFLOW_P. */
14935
14936 bool
14937 tree_binary_nonzero_warnv_p (enum tree_code code,
14938 tree type,
14939 tree op0,
14940 tree op1, bool *strict_overflow_p)
14941 {
14942 bool sub_strict_overflow_p;
14943 switch (code)
14944 {
14945 case POINTER_PLUS_EXPR:
14946 case PLUS_EXPR:
14947 if (TYPE_OVERFLOW_UNDEFINED (type))
14948 {
14949 /* In the presence of negative values it is hard
14950 to say anything definite. */
14951 sub_strict_overflow_p = false;
14952 if (!tree_expr_nonnegative_warnv_p (op0,
14953 &sub_strict_overflow_p)
14954 || !tree_expr_nonnegative_warnv_p (op1,
14955 &sub_strict_overflow_p))
14956 return false;
14957 /* One of the operands must be positive and the other non-negative. */
14958 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14959 overflows, on a twos-complement machine the sum of two
14960 nonnegative numbers can never be zero. */
14961 return (tree_expr_nonzero_warnv_p (op0,
14962 strict_overflow_p)
14963 || tree_expr_nonzero_warnv_p (op1,
14964 strict_overflow_p));
14965 }
14966 break;
14967
14968 case MULT_EXPR:
14969 if (TYPE_OVERFLOW_UNDEFINED (type))
14970 {
14971 if (tree_expr_nonzero_warnv_p (op0,
14972 strict_overflow_p)
14973 && tree_expr_nonzero_warnv_p (op1,
14974 strict_overflow_p))
14975 {
14976 *strict_overflow_p = true;
14977 return true;
14978 }
14979 }
14980 break;
14981
14982 case MIN_EXPR:
14983 sub_strict_overflow_p = false;
14984 if (tree_expr_nonzero_warnv_p (op0,
14985 &sub_strict_overflow_p)
14986 && tree_expr_nonzero_warnv_p (op1,
14987 &sub_strict_overflow_p))
14988 {
14989 if (sub_strict_overflow_p)
14990 *strict_overflow_p = true;
/* When both operands are nonzero, then MIN must be too. */
return true;
14991 }
14992 break;
14993
14994 case MAX_EXPR:
14995 sub_strict_overflow_p = false;
14996 if (tree_expr_nonzero_warnv_p (op0,
14997 &sub_strict_overflow_p))
14998 {
14999 if (sub_strict_overflow_p)
15000 *strict_overflow_p = true;
15001
15002 /* When both operands are nonzero, then MAX must be too. */
15003 if (tree_expr_nonzero_warnv_p (op1,
15004 strict_overflow_p))
15005 return true;
15006
15007 /* MAX where operand 0 is positive is positive. */
15008 return tree_expr_nonnegative_warnv_p (op0,
15009 strict_overflow_p);
15010 }
15011 /* MAX where operand 1 is positive is positive. */
15012 else if (tree_expr_nonzero_warnv_p (op1,
15013 &sub_strict_overflow_p)
15014 && tree_expr_nonnegative_warnv_p (op1,
15015 &sub_strict_overflow_p))
15016 {
15017 if (sub_strict_overflow_p)
15018 *strict_overflow_p = true;
15019 return true;
15020 }
15021 break;
15022
15023 case BIT_IOR_EXPR:
15024 return (tree_expr_nonzero_warnv_p (op1,
15025 strict_overflow_p)
15026 || tree_expr_nonzero_warnv_p (op0,
15027 strict_overflow_p));
15028
15029 default:
15030 break;
15031 }
15032
15033 return false;
15034 }
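
/* For instance (illustrative only): for signed int operands known
   non-negative, "x + 1" is nonzero under the PLUS_EXPR rule above,
   while "x * y" with both factors nonzero is nonzero only by
   assuming signed overflow is undefined, which is why the MULT_EXPR
   case sets *STRICT_OVERFLOW_P. */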
15035
15036 /* Return true when T is an address and is known to be nonzero.
15037 For floating point we further ensure that T is not denormal.
15038 Similar logic is present in nonzero_address_p in rtlanal.c.
15039
15040 If the return value is based on the assumption that signed overflow
15041 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15042 change *STRICT_OVERFLOW_P. */
15043
15044 bool
15045 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15046 {
15047 bool sub_strict_overflow_p;
15048 switch (TREE_CODE (t))
15049 {
15050 case INTEGER_CST:
15051 return !integer_zerop (t);
15052
15053 case ADDR_EXPR:
15054 {
15055 tree base = get_base_address (TREE_OPERAND (t, 0));
15056
15057 if (!base)
15058 return false;
15059
15060 /* Weak declarations may link to NULL. Other things may also resolve
15061 to NULL, so protect those cases with -fdelete-null-pointer-checks;
15062 variables allocated on the stack, however, can never be NULL. */
15063 if (DECL_P (base)
15064 && (flag_delete_null_pointer_checks
15065 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15066 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15067
15068 /* Constants are never weak. */
15069 if (CONSTANT_CLASS_P (base))
15070 return true;
15071
15072 return false;
15073 }
15074
15075 case COND_EXPR:
15076 sub_strict_overflow_p = false;
15077 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15078 &sub_strict_overflow_p)
15079 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15080 &sub_strict_overflow_p))
15081 {
15082 if (sub_strict_overflow_p)
15083 *strict_overflow_p = true;
15084 return true;
15085 }
15086 break;
15087
15088 default:
15089 break;
15090 }
15091 return false;
15092 }
15093
15094 /* Return true when T is an address and is known to be nonzero.
15095 For floating point we further ensure that T is not denormal.
15096 Similar logic is present in nonzero_address_p in rtlanal.c.
15097
15098 If the return value is based on the assumption that signed overflow
15099 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15100 change *STRICT_OVERFLOW_P. */
15101
15102 bool
15103 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15104 {
15105 tree type = TREE_TYPE (t);
15106 enum tree_code code;
15107
15108 /* Doing something useful for floating point would need more work. */
15109 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15110 return false;
15111
15112 code = TREE_CODE (t);
15113 switch (TREE_CODE_CLASS (code))
15114 {
15115 case tcc_unary:
15116 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15117 strict_overflow_p);
15118 case tcc_binary:
15119 case tcc_comparison:
15120 return tree_binary_nonzero_warnv_p (code, type,
15121 TREE_OPERAND (t, 0),
15122 TREE_OPERAND (t, 1),
15123 strict_overflow_p);
15124 case tcc_constant:
15125 case tcc_declaration:
15126 case tcc_reference:
15127 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15128
15129 default:
15130 break;
15131 }
15132
15133 switch (code)
15134 {
15135 case TRUTH_NOT_EXPR:
15136 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15137 strict_overflow_p);
15138
15139 case TRUTH_AND_EXPR:
15140 case TRUTH_OR_EXPR:
15141 case TRUTH_XOR_EXPR:
15142 return tree_binary_nonzero_warnv_p (code, type,
15143 TREE_OPERAND (t, 0),
15144 TREE_OPERAND (t, 1),
15145 strict_overflow_p);
15146
15147 case COND_EXPR:
15148 case CONSTRUCTOR:
15149 case OBJ_TYPE_REF:
15150 case ASSERT_EXPR:
15151 case ADDR_EXPR:
15152 case WITH_SIZE_EXPR:
15153 case EXC_PTR_EXPR:
15154 case SSA_NAME:
15155 case FILTER_EXPR:
15156 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15157
15158 case COMPOUND_EXPR:
15159 case MODIFY_EXPR:
15160 case BIND_EXPR:
15161 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15162 strict_overflow_p);
15163
15164 case SAVE_EXPR:
15165 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15166 strict_overflow_p);
15167
15168 case CALL_EXPR:
15169 return alloca_call_p (t);
15170
15171 default:
15172 break;
15173 }
15174 return false;
15175 }
15176
15177 /* Return true when T is an address and is known to be nonzero.
15178 Handle warnings about undefined signed overflow. */
15179
15180 bool
15181 tree_expr_nonzero_p (tree t)
15182 {
15183 bool ret, strict_overflow_p;
15184
15185 strict_overflow_p = false;
15186 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15187 if (strict_overflow_p)
15188 fold_overflow_warning (("assuming signed overflow does not occur when "
15189 "determining that expression is always "
15190 "non-zero"),
15191 WARN_STRICT_OVERFLOW_MISC);
15192 return ret;
15193 }
15194
15195 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15196 attempt to fold the expression to a constant without modifying TYPE,
15197 OP0 or OP1.
15198
15199 If the expression could be simplified to a constant, then return
15200 the constant. If the expression would not be simplified to a
15201 constant, then return NULL_TREE. */
15202
15203 tree
15204 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15205 {
15206 tree tem = fold_binary (code, type, op0, op1);
15207 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15208 }
15209
15210 /* Given the components of a unary expression CODE, TYPE and OP0,
15211 attempt to fold the expression to a constant without modifying
15212 TYPE or OP0.
15213
15214 If the expression could be simplified to a constant, then return
15215 the constant. If the expression would not be simplified to a
15216 constant, then return NULL_TREE. */
15217
15218 tree
15219 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15220 {
15221 tree tem = fold_unary (code, type, op0);
15222 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15223 }
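
/* A minimal usage sketch (illustrative only, never called): these
   helpers answer "does this simplify all the way down to a
   constant?" without modifying the operands. */
#if 0
static tree
example_unary_to_constant (void)
{
  /* Returns the INTEGER_CST -42; with a non-constant operand the
     result would be NULL_TREE, not a NEGATE_EXPR node. */
  return fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                                 build_int_cst (integer_type_node, 42));
}
#endif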
15224
15225 /* If EXP represents referencing an element in a constant string
15226 (either via pointer arithmetic or array indexing), return the
15227 tree representing the value accessed, otherwise return NULL. */
15228
15229 tree
15230 fold_read_from_constant_string (tree exp)
15231 {
15232 if ((TREE_CODE (exp) == INDIRECT_REF
15233 || TREE_CODE (exp) == ARRAY_REF)
15234 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15235 {
15236 tree exp1 = TREE_OPERAND (exp, 0);
15237 tree index;
15238 tree string;
15239
15240 if (TREE_CODE (exp) == INDIRECT_REF)
15241 string = string_constant (exp1, &index);
15242 else
15243 {
15244 tree low_bound = array_ref_low_bound (exp);
15245 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15246
15247 /* Optimize the special-case of a zero lower bound.
15248
15249 We convert the low_bound to sizetype to avoid some problems
15250 with constant folding. (E.g. suppose the lower bound is 1,
15251 and its mode is QI. Without the conversion, (ARRAY
15252 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15253 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15254 if (! integer_zerop (low_bound))
15255 index = size_diffop (index, fold_convert (sizetype, low_bound));
15256
15257 string = exp1;
15258 }
15259
15260 if (string
15261 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15262 && TREE_CODE (string) == STRING_CST
15263 && TREE_CODE (index) == INTEGER_CST
15264 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15265 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15266 == MODE_INT)
15267 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15268 return build_int_cst_type (TREE_TYPE (exp),
15269 (TREE_STRING_POINTER (string)
15270 [TREE_INT_CST_LOW (index)]));
15271 }
15272 return NULL;
15273 }
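
/* For instance (illustrative only): for the C expression "abc"[1],
   EXP is an ARRAY_REF of a STRING_CST with index 1, so the function
   returns the character constant 'b' via build_int_cst_type; an
   out-of-range or non-constant index makes it return NULL instead. */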
15274
15275 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15276 an integer constant, real, or fixed-point constant.
15277
15278 TYPE is the type of the result. */
15279
15280 static tree
15281 fold_negate_const (tree arg0, tree type)
15282 {
15283 tree t = NULL_TREE;
15284
15285 switch (TREE_CODE (arg0))
15286 {
15287 case INTEGER_CST:
15288 {
15289 unsigned HOST_WIDE_INT low;
15290 HOST_WIDE_INT high;
15291 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15292 TREE_INT_CST_HIGH (arg0),
15293 &low, &high);
15294 t = force_fit_type_double (type, low, high, 1,
15295 (overflow | TREE_OVERFLOW (arg0))
15296 && !TYPE_UNSIGNED (type));
15297 break;
15298 }
15299
15300 case REAL_CST:
15301 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15302 break;
15303
15304 case FIXED_CST:
15305 {
15306 FIXED_VALUE_TYPE f;
15307 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15308 &(TREE_FIXED_CST (arg0)), NULL,
15309 TYPE_SATURATING (type));
15310 t = build_fixed (type, f);
15311 /* Propagate overflow flags. */
15312 if (overflow_p | TREE_OVERFLOW (arg0))
15313 TREE_OVERFLOW (t) = 1;
15314 break;
15315 }
15316
15317 default:
15318 gcc_unreachable ();
15319 }
15320
15321 return t;
15322 }
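
/* As a worked example of the INTEGER_CST case: negating -128 in a
   signed 8-bit type, neg_double computes +128 with no double-word
   overflow, but force_fit_type_double finds that +128 does not fit
   the type, so the result is the wrapped constant -128 with
   TREE_OVERFLOW set.  */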
15323
15324 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15325 an integer constant or real constant.
15326
15327 TYPE is the type of the result. */
15328
15329 tree
15330 fold_abs_const (tree arg0, tree type)
15331 {
15332 tree t = NULL_TREE;
15333
15334 switch (TREE_CODE (arg0))
15335 {
15336 case INTEGER_CST:
15337 /* If the value is unsigned, then the absolute value is
15338 the same as the ordinary value. */
15339 if (TYPE_UNSIGNED (type))
15340 t = arg0;
15341 /* Similarly, if the value is non-negative. */
15342 else if (INT_CST_LT (integer_minus_one_node, arg0))
15343 t = arg0;
15344 /* If the value is negative, then the absolute value is
15345 its negation. */
15346 else
15347 {
15348 unsigned HOST_WIDE_INT low;
15349 HOST_WIDE_INT high;
15350 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15351 TREE_INT_CST_HIGH (arg0),
15352 &low, &high);
15353 t = force_fit_type_double (type, low, high, -1,
15354 overflow | TREE_OVERFLOW (arg0));
15355 }
15356 break;
15357
15358 case REAL_CST:
15359 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15360 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15361 else
15362 t = arg0;
15363 break;
15364
15365 default:
15366 gcc_unreachable ();
15367 }
15368
15369 return t;
15370 }
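
/* A small sketch of the integer case, with illustrative locals:

     tree m5 = build_int_cst (integer_type_node, -5);
     tree t = fold_abs_const (m5, integer_type_node);

   T is the INTEGER_CST 5, computed through the negation arm above.
   For the most negative value of the type the negation wraps, so
   the value comes back unchanged but with TREE_OVERFLOW set.  */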
15371
15372 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15373 constant. TYPE is the type of the result. */
15374
15375 static tree
15376 fold_not_const (tree arg0, tree type)
15377 {
15378 tree t = NULL_TREE;
15379
15380 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15381
15382 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15383 ~TREE_INT_CST_HIGH (arg0), 0,
15384 TREE_OVERFLOW (arg0));
15385
15386 return t;
15387 }
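
/* For instance, folding ~5 in a 32-bit signed type: the double-word
   complement of (high, low) = (0, 5) is (~0, ~5), which
   force_fit_type_double turns into the INTEGER_CST -6.  */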
15388
15389 /* Given CODE, a relational operator, the target type, TYPE and two
15390 constant operands OP0 and OP1, return the result of the
15391 relational operation. If the result is not a compile time
15392 constant, then return NULL_TREE. */
15393
15394 static tree
15395 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15396 {
15397 int result, invert;
15398
15399 /* From here on, the only cases we handle are when the result is
15400 known to be a constant. */
15401
15402 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15403 {
15404 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15405 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15406
15407 /* Handle the cases where either operand is a NaN. */
15408 if (real_isnan (c0) || real_isnan (c1))
15409 {
15410 switch (code)
15411 {
15412 case EQ_EXPR:
15413 case ORDERED_EXPR:
15414 result = 0;
15415 break;
15416
15417 case NE_EXPR:
15418 case UNORDERED_EXPR:
15419 case UNLT_EXPR:
15420 case UNLE_EXPR:
15421 case UNGT_EXPR:
15422 case UNGE_EXPR:
15423 case UNEQ_EXPR:
15424 result = 1;
15425 break;
15426
15427 case LT_EXPR:
15428 case LE_EXPR:
15429 case GT_EXPR:
15430 case GE_EXPR:
15431 case LTGT_EXPR:
15432 if (flag_trapping_math)
15433 return NULL_TREE;
15434 result = 0;
15435 break;
15436
15437 default:
15438 gcc_unreachable ();
15439 }
15440
15441 return constant_boolean_node (result, type);
15442 }
15443
15444 return constant_boolean_node (real_compare (code, c0, c1), type);
15445 }
15446
15447 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15448 {
15449 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15450 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15451 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15452 }
15453
15454 /* Handle equality/inequality of complex constants. */
15455 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15456 {
15457 tree rcond = fold_relational_const (code, type,
15458 TREE_REALPART (op0),
15459 TREE_REALPART (op1));
15460 tree icond = fold_relational_const (code, type,
15461 TREE_IMAGPART (op0),
15462 TREE_IMAGPART (op1));
15463 if (code == EQ_EXPR)
15464 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15465 else if (code == NE_EXPR)
15466 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15467 else
15468 return NULL_TREE;
15469 }
15470
15471 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15472
15473 To compute GT, swap the arguments and do LT.
15474 To compute GE, do LT and invert the result.
15475 To compute LE, swap the arguments, do LT and invert the result.
15476 To compute NE, do EQ and invert the result.
15477
15478 Therefore, the code below must handle only EQ and LT. */
15479
15480 if (code == LE_EXPR || code == GT_EXPR)
15481 {
15482 tree tem = op0;
15483 op0 = op1;
15484 op1 = tem;
15485 code = swap_tree_comparison (code);
15486 }
15487
15488 /* Note that it is safe to invert for real values here because we
15489 have already handled the one case where it matters. */
15490
15491 invert = 0;
15492 if (code == NE_EXPR || code == GE_EXPR)
15493 {
15494 invert = 1;
15495 code = invert_tree_comparison (code, false);
15496 }
15497
15498 /* Compute a result for LT or EQ if the arguments permit;
15499 otherwise return NULL_TREE. */
15500 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15501 {
15502 if (code == EQ_EXPR)
15503 result = tree_int_cst_equal (op0, op1);
15504 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15505 result = INT_CST_LT_UNSIGNED (op0, op1);
15506 else
15507 result = INT_CST_LT (op0, op1);
15508 }
15509 else
15510 return NULL_TREE;
15511
15512 if (invert)
15513 result ^= 1;
15514 return constant_boolean_node (result, type);
15515 }
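
/* To illustrate the NaN handling above, with C0 a NaN:

     C0 <  1.0  folds to false only if !flag_trapping_math, since a
		signaling comparison on a NaN must otherwise be kept;
     C0 != 1.0  folds to true unconditionally, NE being quiet;
     C0 == C0   folds to false.

   For integer operands, e.g. OP0 = 2 and OP1 = 3 with LT_EXPR, the
   code reduces to INT_CST_LT and returns constant_boolean_node
   (1, TYPE).  */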
15516
15517 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15518 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15519 itself. */
15520
15521 tree
15522 fold_build_cleanup_point_expr (tree type, tree expr)
15523 {
15524 /* If the expression does not have side effects then we don't have to wrap
15525 it with a cleanup point expression. */
15526 if (!TREE_SIDE_EFFECTS (expr))
15527 return expr;
15528
15529 /* If the expression is a return, check whether the expression inside
15530 the return, or the right-hand side of the modify expression inside
15531 the return, is free of side effects.  If either is, we do not need
15532 to wrap the expression in a cleanup point expression.  Note we don't
15533 check the left-hand side of the modify; it should always be the return decl. */
15534 if (TREE_CODE (expr) == RETURN_EXPR)
15535 {
15536 tree op = TREE_OPERAND (expr, 0);
15537 if (!op || !TREE_SIDE_EFFECTS (op))
15538 return expr;
15539 op = TREE_OPERAND (op, 1);
15540 if (!TREE_SIDE_EFFECTS (op))
15541 return expr;
15542 }
15543
15544 return build1 (CLEANUP_POINT_EXPR, type, expr);
15545 }
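
/* For instance, RETURN_EXPR <MODIFY_EXPR <retval, a>> needs no
   cleanup point, because the right-hand side A has no side effects,
   whereas RETURN_EXPR <MODIFY_EXPR <retval, f ()>> is wrapped, since
   the call may require cleanups to run after the full expression.  */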
15546
15547 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15548 of an indirection through OP0, or NULL_TREE if no simplification is
15549 possible. */
15550
15551 tree
15552 fold_indirect_ref_1 (tree type, tree op0)
15553 {
15554 tree sub = op0;
15555 tree subtype;
15556
15557 STRIP_NOPS (sub);
15558 subtype = TREE_TYPE (sub);
15559 if (!POINTER_TYPE_P (subtype))
15560 return NULL_TREE;
15561
15562 if (TREE_CODE (sub) == ADDR_EXPR)
15563 {
15564 tree op = TREE_OPERAND (sub, 0);
15565 tree optype = TREE_TYPE (op);
15566 /* *&CONST_DECL -> the value of the const decl. */
15567 if (TREE_CODE (op) == CONST_DECL)
15568 return DECL_INITIAL (op);
15569 /* *&p => p; make sure to handle *&"str"[cst] here. */
15570 if (type == optype)
15571 {
15572 tree fop = fold_read_from_constant_string (op);
15573 if (fop)
15574 return fop;
15575 else
15576 return op;
15577 }
15578 /* *(foo *)&fooarray => fooarray[0] */
15579 else if (TREE_CODE (optype) == ARRAY_TYPE
15580 && type == TREE_TYPE (optype))
15581 {
15582 tree type_domain = TYPE_DOMAIN (optype);
15583 tree min_val = size_zero_node;
15584 if (type_domain && TYPE_MIN_VALUE (type_domain))
15585 min_val = TYPE_MIN_VALUE (type_domain);
15586 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15587 }
15588 /* *(foo *)&complexfoo => __real__ complexfoo */
15589 else if (TREE_CODE (optype) == COMPLEX_TYPE
15590 && type == TREE_TYPE (optype))
15591 return fold_build1 (REALPART_EXPR, type, op);
15592 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15593 else if (TREE_CODE (optype) == VECTOR_TYPE
15594 && type == TREE_TYPE (optype))
15595 {
15596 tree part_width = TYPE_SIZE (type);
15597 tree index = bitsize_int (0);
15598 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15599 }
15600 }
15601
15602 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15603 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15604 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15605 {
15606 tree op00 = TREE_OPERAND (sub, 0);
15607 tree op01 = TREE_OPERAND (sub, 1);
15608 tree op00type;
15609
15610 STRIP_NOPS (op00);
15611 op00type = TREE_TYPE (op00);
15612 if (TREE_CODE (op00) == ADDR_EXPR
15613 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15614 && type == TREE_TYPE (TREE_TYPE (op00type)))
15615 {
15616 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15617 tree part_width = TYPE_SIZE (type);
15618 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
15619 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15620 tree index = bitsize_int (indexi);
15621
15622 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15623 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15624 part_width, index);
15625
15626 }
15627 }
15628
15629
15630 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15631 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15632 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15633 {
15634 tree op00 = TREE_OPERAND (sub, 0);
15635 tree op01 = TREE_OPERAND (sub, 1);
15636 tree op00type;
15637
15638 STRIP_NOPS (op00);
15639 op00type = TREE_TYPE (op00);
15640 if (TREE_CODE (op00) == ADDR_EXPR
15641 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15642 && type == TREE_TYPE (TREE_TYPE (op00type)))
15643 {
15644 tree size = TYPE_SIZE_UNIT (type);
15645 if (tree_int_cst_equal (size, op01))
15646 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15647 }
15648 }
15649
15650 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15651 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15652 && type == TREE_TYPE (TREE_TYPE (subtype)))
15653 {
15654 tree type_domain;
15655 tree min_val = size_zero_node;
15656 sub = build_fold_indirect_ref (sub);
15657 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15658 if (type_domain && TYPE_MIN_VALUE (type_domain))
15659 min_val = TYPE_MIN_VALUE (type_domain);
15660 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15661 }
15662
15663 return NULL_TREE;
15664 }
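
/* Some concrete instances of the transformations above, with IARR an
   array of int and Z a _Complex double (names illustrative):

     *(int *)&iarr	   => iarr[0]
     *(double *)&z	   => __real__ z
     *((double *)&z + 1)   => __imag__ z

   The last is the POINTER_PLUS_EXPR form, where the constant byte
   offset must equal TYPE_SIZE_UNIT (double).  Each case fires only
   when TYPE matches the referenced component type exactly.  */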
15665
15666 /* Builds an expression for an indirection through T, simplifying some
15667 cases. */
15668
15669 tree
15670 build_fold_indirect_ref (tree t)
15671 {
15672 tree type = TREE_TYPE (TREE_TYPE (t));
15673 tree sub = fold_indirect_ref_1 (type, t);
15674
15675 if (sub)
15676 return sub;
15677 else
15678 return build1 (INDIRECT_REF, type, t);
15679 }
15680
15681 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15682
15683 tree
15684 fold_indirect_ref (tree t)
15685 {
15686 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15687
15688 if (sub)
15689 return sub;
15690 else
15691 return t;
15692 }
15693
15694 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15695 whose result is ignored. The type of the returned tree need not be
15696 the same as the original expression. */
15697
15698 tree
15699 fold_ignored_result (tree t)
15700 {
15701 if (!TREE_SIDE_EFFECTS (t))
15702 return integer_zero_node;
15703
15704 for (;;)
15705 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15706 {
15707 case tcc_unary:
15708 t = TREE_OPERAND (t, 0);
15709 break;
15710
15711 case tcc_binary:
15712 case tcc_comparison:
15713 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15714 t = TREE_OPERAND (t, 0);
15715 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15716 t = TREE_OPERAND (t, 1);
15717 else
15718 return t;
15719 break;
15720
15721 case tcc_expression:
15722 switch (TREE_CODE (t))
15723 {
15724 case COMPOUND_EXPR:
15725 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15726 return t;
15727 t = TREE_OPERAND (t, 0);
15728 break;
15729
15730 case COND_EXPR:
15731 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15732 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15733 return t;
15734 t = TREE_OPERAND (t, 0);
15735 break;
15736
15737 default:
15738 return t;
15739 }
15740 break;
15741
15742 default:
15743 return t;
15744 }
15745 }
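
/* For example, fold_ignored_result (x + y) is integer_zero_node,
   since the whole expression has no side effects, while
   fold_ignored_result (x++ + y) strips the addition and returns the
   x++ subtree via the tcc_binary rule above, which keeps whichever
   operand still has side effects.  */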
15746
15747 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15748 This can only be applied to objects of a sizetype. */
15749
15750 tree
15751 round_up (tree value, int divisor)
15752 {
15753 tree div = NULL_TREE;
15754
15755 gcc_assert (divisor > 0);
15756 if (divisor == 1)
15757 return value;
15758
15759 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15760 have to do anything.  Only do this when VALUE is not a constant,
15761 because for a constant this check is more expensive than just
15762 doing the rounding itself. */
15763 if (TREE_CODE (value) != INTEGER_CST)
15764 {
15765 div = build_int_cst (TREE_TYPE (value), divisor);
15766
15767 if (multiple_of_p (TREE_TYPE (value), value, div))
15768 return value;
15769 }
15770
15771 /* If divisor is a power of two, simplify this to bit manipulation. */
15772 if (divisor == (divisor & -divisor))
15773 {
15774 if (TREE_CODE (value) == INTEGER_CST)
15775 {
15776 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15777 unsigned HOST_WIDE_INT high;
15778 bool overflow_p;
15779
15780 if ((low & (divisor - 1)) == 0)
15781 return value;
15782
15783 overflow_p = TREE_OVERFLOW (value);
15784 high = TREE_INT_CST_HIGH (value);
15785 low &= ~(divisor - 1);
15786 low += divisor;
15787 if (low == 0)
15788 {
15789 high++;
15790 if (high == 0)
15791 overflow_p = true;
15792 }
15793
15794 return force_fit_type_double (TREE_TYPE (value), low, high,
15795 -1, overflow_p);
15796 }
15797 else
15798 {
15799 tree t;
15800
15801 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15802 value = size_binop (PLUS_EXPR, value, t);
15803 t = build_int_cst (TREE_TYPE (value), -divisor);
15804 value = size_binop (BIT_AND_EXPR, value, t);
15805 }
15806 }
15807 else
15808 {
15809 if (!div)
15810 div = build_int_cst (TREE_TYPE (value), divisor);
15811 value = size_binop (CEIL_DIV_EXPR, value, div);
15812 value = size_binop (MULT_EXPR, value, div);
15813 }
15814
15815 return value;
15816 }
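
/* With a power-of-two divisor the result is computed branch-free;
   for a non-constant VALUE, round_up (value, 8) builds

     (value + 7) & -8

   so 0 stays 0, 1 through 8 all map to 8, and 9 maps to 16.  */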
15817
15818 /* Likewise, but round down. */
15819
15820 tree
15821 round_down (tree value, int divisor)
15822 {
15823 tree div = NULL_TREE;
15824
15825 gcc_assert (divisor > 0);
15826 if (divisor == 1)
15827 return value;
15828
15829 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15830 have to do anything.  Only do this when VALUE is not a constant,
15831 because for a constant this check is more expensive than just
15832 doing the rounding itself. */
15833 if (TREE_CODE (value) != INTEGER_CST)
15834 {
15835 div = build_int_cst (TREE_TYPE (value), divisor);
15836
15837 if (multiple_of_p (TREE_TYPE (value), value, div))
15838 return value;
15839 }
15840
15841 /* If divisor is a power of two, simplify this to bit manipulation. */
15842 if (divisor == (divisor & -divisor))
15843 {
15844 tree t;
15845
15846 t = build_int_cst (TREE_TYPE (value), -divisor);
15847 value = size_binop (BIT_AND_EXPR, value, t);
15848 }
15849 else
15850 {
15851 if (!div)
15852 div = build_int_cst (TREE_TYPE (value), divisor);
15853 value = size_binop (FLOOR_DIV_EXPR, value, div);
15854 value = size_binop (MULT_EXPR, value, div);
15855 }
15856
15857 return value;
15858 }
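
/* Likewise, round_down (value, 8) builds simply

     value & -8

   mapping 0 through 7 to 0 and 8 through 15 to 8.  */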
15859
15860 /* Returns the pointer to the base of the object addressed by EXP and
15861 extracts the information about the offset of the access, storing it
15862 in *PBITPOS and *POFFSET. */
15863
15864 static tree
15865 split_address_to_core_and_offset (tree exp,
15866 HOST_WIDE_INT *pbitpos, tree *poffset)
15867 {
15868 tree core;
15869 enum machine_mode mode;
15870 int unsignedp, volatilep;
15871 HOST_WIDE_INT bitsize;
15872
15873 if (TREE_CODE (exp) == ADDR_EXPR)
15874 {
15875 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15876 poffset, &mode, &unsignedp, &volatilep,
15877 false);
15878 core = build_fold_addr_expr (core);
15879 }
15880 else
15881 {
15882 core = exp;
15883 *pbitpos = 0;
15884 *poffset = NULL_TREE;
15885 }
15886
15887 return core;
15888 }
15889
15890 /* Returns true if addresses of E1 and E2 differ by a constant, false
15891 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15892
15893 bool
15894 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15895 {
15896 tree core1, core2;
15897 HOST_WIDE_INT bitpos1, bitpos2;
15898 tree toffset1, toffset2, tdiff, type;
15899
15900 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15901 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15902
15903 if (bitpos1 % BITS_PER_UNIT != 0
15904 || bitpos2 % BITS_PER_UNIT != 0
15905 || !operand_equal_p (core1, core2, 0))
15906 return false;
15907
15908 if (toffset1 && toffset2)
15909 {
15910 type = TREE_TYPE (toffset1);
15911 if (type != TREE_TYPE (toffset2))
15912 toffset2 = fold_convert (type, toffset2);
15913
15914 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15915 if (!cst_and_fits_in_hwi (tdiff))
15916 return false;
15917
15918 *diff = int_cst_value (tdiff);
15919 }
15920 else if (toffset1 || toffset2)
15921 {
15922 /* If only one of the offsets is non-constant, the difference cannot
15923 be a constant. */
15924 return false;
15925 }
15926 else
15927 *diff = 0;
15928
15929 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15930 return true;
15931 }
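
/* As an example, for int A[10] the addresses &a[3] and &a[1] both
   split to the core &a, with constant bit positions 96 and 32; a
   call such as

     HOST_WIDE_INT diff;
     bool ok = ptr_difference_const (e1, e2, &diff);

   with E1 = &a[3] and E2 = &a[1] (illustrative trees) then sets OK
   to true and DIFF to 8, assuming 32-bit int.  */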
15932
15933 /* Simplify the floating point expression EXP when the sign of the
15934 result is not significant. Return NULL_TREE if no simplification
15935 is possible. */
15936
15937 tree
15938 fold_strip_sign_ops (tree exp)
15939 {
15940 tree arg0, arg1;
15941
15942 switch (TREE_CODE (exp))
15943 {
15944 case ABS_EXPR:
15945 case NEGATE_EXPR:
15946 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15947 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15948
15949 case MULT_EXPR:
15950 case RDIV_EXPR:
15951 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15952 return NULL_TREE;
15953 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15954 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15955 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15956 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15957 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15958 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15959 break;
15960
15961 case COMPOUND_EXPR:
15962 arg0 = TREE_OPERAND (exp, 0);
15963 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15964 if (arg1)
15965 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15966 break;
15967
15968 case COND_EXPR:
15969 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15970 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15971 if (arg0 || arg1)
15972 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15973 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15974 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15975 break;
15976
15977 case CALL_EXPR:
15978 {
15979 const enum built_in_function fcode = builtin_mathfn_code (exp);
15980 switch (fcode)
15981 {
15982 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15983 /* Strip copysign function call, return the 1st argument. */
15984 arg0 = CALL_EXPR_ARG (exp, 0);
15985 arg1 = CALL_EXPR_ARG (exp, 1);
15986 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15987
15988 default:
15989 /* Strip sign ops from the argument of "odd" math functions. */
15990 if (negate_mathfn_p (fcode))
15991 {
15992 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15993 if (arg0)
15994 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15995 }
15996 break;
15997 }
15998 }
15999 break;
16000
16001 default:
16002 break;
16003 }
16004 return NULL_TREE;
16005 }
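
/* For example, when only the magnitude of the result matters, as in
   the argument of a fabs call, fold_strip_sign_ops (-x * y) returns
   x * y (provided sign-dependent rounding need not be honored), and
   fold_strip_sign_ops (sin (-x)) returns sin (x), sin being odd in
   the negate_mathfn_p sense.  */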