Collections.java (UnmodifiableMap.toArray): Imported changes from Classpath.
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et. al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
30
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
33
34 fold takes a tree as argument and returns a simplified tree.
35
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
39
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
42
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
45 sets TREE_OVERFLOW.
46
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
50
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "tm.h"
55 #include "flags.h"
56 #include "tree.h"
57 #include "real.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "toplev.h"
62 #include "ggc.h"
63 #include "hashtab.h"
64 #include "langhooks.h"
65 #include "md5.h"
66
67 /* Non-zero if we are folding constants inside an initializer; zero
68 otherwise. */
69 int folding_initializer = 0;
70
71 /* The following constants represent a bit based encoding of GCC's
72 comparison operators. This encoding simplifies transformations
73 on relational comparison operators, such as AND and OR. */
74 enum comparison_code {
75 COMPCODE_FALSE = 0,
76 COMPCODE_LT = 1,
77 COMPCODE_EQ = 2,
78 COMPCODE_LE = 3,
79 COMPCODE_GT = 4,
80 COMPCODE_LTGT = 5,
81 COMPCODE_GE = 6,
82 COMPCODE_ORD = 7,
83 COMPCODE_UNORD = 8,
84 COMPCODE_UNLT = 9,
85 COMPCODE_UNEQ = 10,
86 COMPCODE_UNLE = 11,
87 COMPCODE_UNGT = 12,
88 COMPCODE_NE = 13,
89 COMPCODE_UNGE = 14,
90 COMPCODE_TRUE = 15
91 };
92
93 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
94 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
95 static bool negate_mathfn_p (enum built_in_function);
96 static bool negate_expr_p (tree);
97 static tree negate_expr (tree);
98 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
99 static tree associate_trees (tree, tree, enum tree_code, tree);
100 static tree const_binop (enum tree_code, tree, tree, int);
101 static enum comparison_code comparison_to_compcode (enum tree_code);
102 static enum tree_code compcode_to_comparison (enum comparison_code);
103 static tree combine_comparisons (enum tree_code, enum tree_code,
104 enum tree_code, tree, tree, tree);
105 static int truth_value_p (enum tree_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, int, int, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
115 tree *, tree *);
116 static int all_ones_mask_p (tree, int);
117 static tree sign_bit_p (tree, tree);
118 static int simple_operand_p (tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 tree);
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
133 static int multiple_of_p (tree, tree, tree);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
135 tree, tree,
136 tree, tree, int);
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static int native_encode_expr (tree, unsigned char *, int);
147 static tree native_interpret_expr (tree, unsigned char *, int);
148
149
150 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
151 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
152 and SUM1. Then this yields nonzero if overflow occurred during the
153 addition.
154
155 Overflow occurs if A and B have the same sign, but A and SUM differ in
156 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
157 sign. */
158 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
159 \f
160 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
161 We do that by representing the two-word integer in 4 words, with only
162 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
163 number. The value of the word is LOWPART + HIGHPART * BASE. */
164
165 #define LOWPART(x) \
166 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
167 #define HIGHPART(x) \
168 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
169 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
170
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
174
175 static void
176 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
177 {
178 words[0] = LOWPART (low);
179 words[1] = HIGHPART (low);
180 words[2] = LOWPART (hi);
181 words[3] = HIGHPART (hi);
182 }
183
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
187
188 static void
189 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
190 HOST_WIDE_INT *hi)
191 {
192 *low = words[0] + words[1] * BASE;
193 *hi = words[2] + words[3] * BASE;
194 }
195 \f
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
200
201 int
202 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
203 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
204 {
205 unsigned HOST_WIDE_INT low0 = l1;
206 HOST_WIDE_INT high0 = h1;
207 unsigned int prec;
208 int sign_extended_type;
209
210 if (POINTER_TYPE_P (type)
211 || TREE_CODE (type) == OFFSET_TYPE)
212 prec = POINTER_SIZE;
213 else
214 prec = TYPE_PRECISION (type);
215
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (type)
218 || (TREE_CODE (type) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type)));
220
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
223 ;
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 else
227 {
228 h1 = 0;
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
231 }
232
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
239 {
240 /* Sign extend top half? */
241 if (h1 & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
244 }
245 else if (prec == HOST_BITS_PER_WIDE_INT)
246 {
247 if ((HOST_WIDE_INT)l1 < 0)
248 h1 = -1;
249 }
250 else
251 {
252 /* Sign extend bottom half? */
253 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
254 {
255 h1 = -1;
256 l1 |= (HOST_WIDE_INT)(-1) << prec;
257 }
258 }
259
260 *lv = l1;
261 *hv = h1;
262
263 /* If the value didn't fit, signal overflow. */
264 return l1 != low0 || h1 != high0;
265 }
266
267 /* We force the double-int HIGH:LOW to the range of the type TYPE by
268 sign or zero extending it.
269 OVERFLOWABLE indicates if we are interested
270 in overflow of the value, when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. CONST_OVERFLOWED
273 indicates whether constant overflow has already occurred. We force
274 T's value to be within range of T's type (by setting to 0 or 1 all
275 the bits outside the type's range). We set TREE_OVERFLOWED if,
276 OVERFLOWED is nonzero,
277 or OVERFLOWABLE is >0 and signed overflow occurs
278 or OVERFLOWABLE is <0 and any overflow occurs
279 We return a new tree node for the extended double-int. The node
280 is shared if no overflow flags are set. */
281
282 tree
283 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
284 HOST_WIDE_INT high, int overflowable,
285 bool overflowed)
286 {
287 int sign_extended_type;
288 bool overflow;
289
290 /* Size types *are* sign extended. */
291 sign_extended_type = (!TYPE_UNSIGNED (type)
292 || (TREE_CODE (type) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type)));
294
295 overflow = fit_double_type (low, high, &low, &high, type);
296
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed || overflow)
299 {
300 if (overflowed
301 || overflowable < 0
302 || (overflowable > 0 && sign_extended_type))
303 {
304 tree t = make_node (INTEGER_CST);
305 TREE_INT_CST_LOW (t) = low;
306 TREE_INT_CST_HIGH (t) = high;
307 TREE_TYPE (t) = type;
308 TREE_OVERFLOW (t) = 1;
309 return t;
310 }
311 }
312
313 /* Else build a shared node. */
314 return build_int_cst_wide (type, low, high);
315 }
316 \f
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
322
323 int
324 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
325 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
326 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
327 bool unsigned_p)
328 {
329 unsigned HOST_WIDE_INT l;
330 HOST_WIDE_INT h;
331
332 l = l1 + l2;
333 h = h1 + h2 + (l < l1);
334
335 *lv = l;
336 *hv = h;
337
338 if (unsigned_p)
339 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
340 else
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
342 }
343
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
348
349 int
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
352 {
353 if (l1 == 0)
354 {
355 *lv = 0;
356 *hv = - h1;
357 return (*hv & h1) < 0;
358 }
359 else
360 {
361 *lv = -l1;
362 *hv = ~h1;
363 return 0;
364 }
365 }
366 \f
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
372
373 int
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
377 bool unsigned_p)
378 {
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
383 int i, j, k;
384 unsigned HOST_WIDE_INT toplow, neglow;
385 HOST_WIDE_INT tophigh, neghigh;
386
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
389
390 memset (prod, 0, sizeof prod);
391
392 for (i = 0; i < 4; i++)
393 {
394 carry = 0;
395 for (j = 0; j < 4; j++)
396 {
397 k = i + j;
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
401 carry += prod[k];
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
404 }
405 prod[i + 4] = carry;
406 }
407
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
410
411 /* Unsigned overflow is immediate. */
412 if (unsigned_p)
413 return (toplow | tophigh) != 0;
414
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
417 if (h1 < 0)
418 {
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
421 }
422 if (h2 < 0)
423 {
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
426 }
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
428 }
429 \f
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
435
436 void
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
440 {
441 unsigned HOST_WIDE_INT signmask;
442
443 if (count < 0)
444 {
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
446 return;
447 }
448
449 if (SHIFT_COUNT_TRUNCATED)
450 count %= prec;
451
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
453 {
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
456 *hv = 0;
457 *lv = 0;
458 }
459 else if (count >= HOST_BITS_PER_WIDE_INT)
460 {
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
462 *lv = 0;
463 }
464 else
465 {
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
468 *lv = l1 << count;
469 }
470
471 /* Sign extend all bits that are beyond the precision. */
472
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
477
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
479 ;
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
481 {
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
484 }
485 else
486 {
487 *hv = signmask;
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
490 }
491 }
492
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
497
498 void
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
502 int arith)
503 {
504 unsigned HOST_WIDE_INT signmask;
505
506 signmask = (arith
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
508 : 0);
509
510 if (SHIFT_COUNT_TRUNCATED)
511 count %= prec;
512
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
514 {
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
517 *hv = 0;
518 *lv = 0;
519 }
520 else if (count >= HOST_BITS_PER_WIDE_INT)
521 {
522 *hv = 0;
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
524 }
525 else
526 {
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
528 *lv = ((l1 >> count)
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
530 }
531
532 /* Zero / sign extend all bits that are beyond the precision. */
533
534 if (count >= (HOST_WIDE_INT)prec)
535 {
536 *hv = signmask;
537 *lv = signmask;
538 }
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
540 ;
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
542 {
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
545 }
546 else
547 {
548 *hv = signmask;
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
551 }
552 }
553 \f
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
558
559 void
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
563 {
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
566
567 count %= prec;
568 if (count < 0)
569 count += prec;
570
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573 *lv = s1l | s2l;
574 *hv = s1h | s2h;
575 }
576
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
580
581 void
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
585 {
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
588
589 count %= prec;
590 if (count < 0)
591 count += prec;
592
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
595 *lv = s1l | s2l;
596 *hv = s1h | s2h;
597 }
598 \f
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
603 or EXACT_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
607
608 int
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
616 HOST_WIDE_INT *hrem)
617 {
618 int quo_neg = 0;
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
621 int i, j;
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
628 int overflow = 0;
629
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
632
633 /* Calculate quotient sign and convert operands to unsigned. */
634 if (!uns)
635 {
636 if (hnum < 0)
637 {
638 quo_neg = ~ quo_neg;
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
642 overflow = 1;
643 }
644 if (hden < 0)
645 {
646 quo_neg = ~ quo_neg;
647 neg_double (lden, hden, &lden, &hden);
648 }
649 }
650
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
653 *hquo = *hrem = 0;
654 /* This unsigned division rounds toward zero. */
655 *lquo = lnum / lden;
656 goto finish_up;
657 }
658
659 if (hnum == 0)
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
662 *hquo = *lquo = 0;
663 *hrem = hnum;
664 *lrem = lnum;
665 goto finish_up;
666 }
667
668 memset (quo, 0, sizeof quo);
669
670 memset (num, 0, sizeof num); /* to zero 9th element */
671 memset (den, 0, sizeof den);
672
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
675
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
678 {
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
681 {
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
684 carry = work % lden;
685 }
686 }
687 else
688 {
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
693
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
696 if (den[i] != 0)
697 {
698 den_hi_sig = i;
699 break;
700 }
701
702 /* Insure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
704
705 scale = BASE / (den[den_hi_sig] + 1);
706 if (scale > 1)
707 { /* scale divisor and dividend */
708 carry = 0;
709 for (i = 0; i <= 4 - 1; i++)
710 {
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
714 }
715
716 num[4] = carry;
717 carry = 0;
718 for (i = 0; i <= 4 - 1; i++)
719 {
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
724 }
725 }
726
727 num_hi_sig = 4;
728
729 /* Main loop */
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
731 {
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
736
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
741 else
742 quo_est = BASE - 1;
743
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
746 if (tmp < BASE
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
749 quo_est--;
750
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
754
755 carry = 0;
756 for (j = 0; j <= den_hi_sig; j++)
757 {
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
763 }
764
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
768 {
769 quo_est--;
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
772 {
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
776 }
777
778 num [num_hi_sig] += carry;
779 }
780
781 /* Store the quotient digit. */
782 quo[i] = quo_est;
783 }
784 }
785
786 decode (quo, lquo, hquo);
787
788 finish_up:
789 /* If result is negative, make it so. */
790 if (quo_neg)
791 neg_double (*lquo, *hquo, lquo, hquo);
792
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
797
798 switch (code)
799 {
800 case TRUNC_DIV_EXPR:
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
803 return overflow;
804
805 case FLOOR_DIV_EXPR:
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
808 {
809 /* quo = quo - 1; */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
811 lquo, hquo);
812 }
813 else
814 return overflow;
815 break;
816
817 case CEIL_DIV_EXPR:
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
820 {
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
822 lquo, hquo);
823 }
824 else
825 return overflow;
826 break;
827
828 case ROUND_DIV_EXPR:
829 case ROUND_MOD_EXPR: /* round to closest integer */
830 {
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
835
836 /* Get absolute values. */
837 if (*hrem < 0)
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
839 if (hden < 0)
840 neg_double (lden, hden, &labs_den, &habs_den);
841
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, &ltwice, &htwice);
845
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den < ltwice)))
851 {
852 if (*hquo < 0)
853 /* quo = quo - 1; */
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
856 else
857 /* quo = quo + 1; */
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
859 lquo, hquo);
860 }
861 else
862 return overflow;
863 }
864 break;
865
866 default:
867 gcc_unreachable ();
868 }
869
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
874 return overflow;
875 }
876
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
880
881 static tree
882 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
883 {
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
888 tree type = TREE_TYPE (arg1);
889 int uns = TYPE_UNSIGNED (type);
890
891 int1l = TREE_INT_CST_LOW (arg1);
892 int1h = TREE_INT_CST_HIGH (arg1);
893 int2l = TREE_INT_CST_LOW (arg2);
894 int2h = TREE_INT_CST_HIGH (arg2);
895
896 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
897 &quol, &quoh, &reml, &remh);
898 if (remh != 0 || reml != 0)
899 return NULL_TREE;
900
901 return build_int_cst_wide (type, quol, quoh);
902 }
903 \f
904 /* Return true if the built-in mathematical function specified by CODE
905 is odd, i.e. -f(x) == f(-x). */
906
907 static bool
908 negate_mathfn_p (enum built_in_function code)
909 {
910 switch (code)
911 {
912 CASE_FLT_FN (BUILT_IN_ASIN):
913 CASE_FLT_FN (BUILT_IN_ASINH):
914 CASE_FLT_FN (BUILT_IN_ATAN):
915 CASE_FLT_FN (BUILT_IN_ATANH):
916 CASE_FLT_FN (BUILT_IN_CASIN):
917 CASE_FLT_FN (BUILT_IN_CASINH):
918 CASE_FLT_FN (BUILT_IN_CATAN):
919 CASE_FLT_FN (BUILT_IN_CATANH):
920 CASE_FLT_FN (BUILT_IN_CBRT):
921 CASE_FLT_FN (BUILT_IN_CPROJ):
922 CASE_FLT_FN (BUILT_IN_CSIN):
923 CASE_FLT_FN (BUILT_IN_CSINH):
924 CASE_FLT_FN (BUILT_IN_CTAN):
925 CASE_FLT_FN (BUILT_IN_CTANH):
926 CASE_FLT_FN (BUILT_IN_ERF):
927 CASE_FLT_FN (BUILT_IN_LLROUND):
928 CASE_FLT_FN (BUILT_IN_LROUND):
929 CASE_FLT_FN (BUILT_IN_ROUND):
930 CASE_FLT_FN (BUILT_IN_SIN):
931 CASE_FLT_FN (BUILT_IN_SINH):
932 CASE_FLT_FN (BUILT_IN_TAN):
933 CASE_FLT_FN (BUILT_IN_TANH):
934 CASE_FLT_FN (BUILT_IN_TRUNC):
935 return true;
936
937 CASE_FLT_FN (BUILT_IN_LLRINT):
938 CASE_FLT_FN (BUILT_IN_LRINT):
939 CASE_FLT_FN (BUILT_IN_NEARBYINT):
940 CASE_FLT_FN (BUILT_IN_RINT):
941 return !flag_rounding_math;
942
943 default:
944 break;
945 }
946 return false;
947 }
948
949 /* Check whether we may negate an integer constant T without causing
950 overflow. */
951
952 bool
953 may_negate_without_overflow_p (tree t)
954 {
955 unsigned HOST_WIDE_INT val;
956 unsigned int prec;
957 tree type;
958
959 gcc_assert (TREE_CODE (t) == INTEGER_CST);
960
961 type = TREE_TYPE (t);
962 if (TYPE_UNSIGNED (type))
963 return false;
964
965 prec = TYPE_PRECISION (type);
966 if (prec > HOST_BITS_PER_WIDE_INT)
967 {
968 if (TREE_INT_CST_LOW (t) != 0)
969 return true;
970 prec -= HOST_BITS_PER_WIDE_INT;
971 val = TREE_INT_CST_HIGH (t);
972 }
973 else
974 val = TREE_INT_CST_LOW (t);
975 if (prec < HOST_BITS_PER_WIDE_INT)
976 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
977 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
978 }
979
980 /* Determine whether an expression T can be cheaply negated using
981 the function negate_expr without introducing undefined overflow. */
982
983 static bool
984 negate_expr_p (tree t)
985 {
986 tree type;
987
988 if (t == 0)
989 return false;
990
991 type = TREE_TYPE (t);
992
993 STRIP_SIGN_NOPS (t);
994 switch (TREE_CODE (t))
995 {
996 case INTEGER_CST:
997 if (TYPE_OVERFLOW_WRAPS (type))
998 return true;
999
1000 /* Check that -CST will not overflow type. */
1001 return may_negate_without_overflow_p (t);
1002 case BIT_NOT_EXPR:
1003 return (INTEGRAL_TYPE_P (type)
1004 && TYPE_OVERFLOW_WRAPS (type));
1005
1006 case REAL_CST:
1007 case NEGATE_EXPR:
1008 return true;
1009
1010 case COMPLEX_CST:
1011 return negate_expr_p (TREE_REALPART (t))
1012 && negate_expr_p (TREE_IMAGPART (t));
1013
1014 case COMPLEX_EXPR:
1015 return negate_expr_p (TREE_OPERAND (t, 0))
1016 && negate_expr_p (TREE_OPERAND (t, 1));
1017
1018 case CONJ_EXPR:
1019 return negate_expr_p (TREE_OPERAND (t, 0));
1020
1021 case PLUS_EXPR:
1022 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1023 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1024 return false;
1025 /* -(A + B) -> (-B) - A. */
1026 if (negate_expr_p (TREE_OPERAND (t, 1))
1027 && reorder_operands_p (TREE_OPERAND (t, 0),
1028 TREE_OPERAND (t, 1)))
1029 return true;
1030 /* -(A + B) -> (-A) - B. */
1031 return negate_expr_p (TREE_OPERAND (t, 0));
1032
1033 case MINUS_EXPR:
1034 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1035 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1036 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1037 && reorder_operands_p (TREE_OPERAND (t, 0),
1038 TREE_OPERAND (t, 1));
1039
1040 case MULT_EXPR:
1041 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1042 break;
1043
1044 /* Fall through. */
1045
1046 case RDIV_EXPR:
1047 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1048 return negate_expr_p (TREE_OPERAND (t, 1))
1049 || negate_expr_p (TREE_OPERAND (t, 0));
1050 break;
1051
1052 case TRUNC_DIV_EXPR:
1053 case ROUND_DIV_EXPR:
1054 case FLOOR_DIV_EXPR:
1055 case CEIL_DIV_EXPR:
1056 case EXACT_DIV_EXPR:
1057 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1058 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1059 break;
1060 return negate_expr_p (TREE_OPERAND (t, 1))
1061 || negate_expr_p (TREE_OPERAND (t, 0));
1062
1063 case NOP_EXPR:
1064 /* Negate -((double)float) as (double)(-float). */
1065 if (TREE_CODE (type) == REAL_TYPE)
1066 {
1067 tree tem = strip_float_extensions (t);
1068 if (tem != t)
1069 return negate_expr_p (tem);
1070 }
1071 break;
1072
1073 case CALL_EXPR:
1074 /* Negate -f(x) as f(-x). */
1075 if (negate_mathfn_p (builtin_mathfn_code (t)))
1076 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1077 break;
1078
1079 case RSHIFT_EXPR:
1080 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1081 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1082 {
1083 tree op1 = TREE_OPERAND (t, 1);
1084 if (TREE_INT_CST_HIGH (op1) == 0
1085 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1086 == TREE_INT_CST_LOW (op1))
1087 return true;
1088 }
1089 break;
1090
1091 default:
1092 break;
1093 }
1094 return false;
1095 }
1096
1097 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1098 simplification is possible.
1099 If negate_expr_p would return true for T, NULL_TREE will never be
1100 returned. */
1101
1102 static tree
1103 fold_negate_expr (tree t)
1104 {
1105 tree type = TREE_TYPE (t);
1106 tree tem;
1107
1108 switch (TREE_CODE (t))
1109 {
1110 /* Convert - (~A) to A + 1. */
1111 case BIT_NOT_EXPR:
1112 if (INTEGRAL_TYPE_P (type))
1113 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1114 build_int_cst (type, 1));
1115 break;
1116
1117 case INTEGER_CST:
1118 tem = fold_negate_const (t, type);
1119 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1120 || !TYPE_OVERFLOW_TRAPS (type))
1121 return tem;
1122 break;
1123
1124 case REAL_CST:
1125 tem = fold_negate_const (t, type);
1126 /* Two's complement FP formats, such as c4x, may overflow. */
1127 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1128 return tem;
1129 break;
1130
1131 case COMPLEX_CST:
1132 {
1133 tree rpart = negate_expr (TREE_REALPART (t));
1134 tree ipart = negate_expr (TREE_IMAGPART (t));
1135
1136 if ((TREE_CODE (rpart) == REAL_CST
1137 && TREE_CODE (ipart) == REAL_CST)
1138 || (TREE_CODE (rpart) == INTEGER_CST
1139 && TREE_CODE (ipart) == INTEGER_CST))
1140 return build_complex (type, rpart, ipart);
1141 }
1142 break;
1143
1144 case COMPLEX_EXPR:
1145 if (negate_expr_p (t))
1146 return fold_build2 (COMPLEX_EXPR, type,
1147 fold_negate_expr (TREE_OPERAND (t, 0)),
1148 fold_negate_expr (TREE_OPERAND (t, 1)));
1149 break;
1150
1151 case CONJ_EXPR:
1152 if (negate_expr_p (t))
1153 return fold_build1 (CONJ_EXPR, type,
1154 fold_negate_expr (TREE_OPERAND (t, 0)));
1155 break;
1156
1157 case NEGATE_EXPR:
1158 return TREE_OPERAND (t, 0);
1159
1160 case PLUS_EXPR:
1161 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1163 {
1164 /* -(A + B) -> (-B) - A. */
1165 if (negate_expr_p (TREE_OPERAND (t, 1))
1166 && reorder_operands_p (TREE_OPERAND (t, 0),
1167 TREE_OPERAND (t, 1)))
1168 {
1169 tem = negate_expr (TREE_OPERAND (t, 1));
1170 return fold_build2 (MINUS_EXPR, type,
1171 tem, TREE_OPERAND (t, 0));
1172 }
1173
1174 /* -(A + B) -> (-A) - B. */
1175 if (negate_expr_p (TREE_OPERAND (t, 0)))
1176 {
1177 tem = negate_expr (TREE_OPERAND (t, 0));
1178 return fold_build2 (MINUS_EXPR, type,
1179 tem, TREE_OPERAND (t, 1));
1180 }
1181 }
1182 break;
1183
1184 case MINUS_EXPR:
1185 /* - (A - B) -> B - A */
1186 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1187 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1188 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1189 return fold_build2 (MINUS_EXPR, type,
1190 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1191 break;
1192
1193 case MULT_EXPR:
1194 if (TYPE_UNSIGNED (type))
1195 break;
1196
1197 /* Fall through. */
1198
1199 case RDIV_EXPR:
1200 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1201 {
1202 tem = TREE_OPERAND (t, 1);
1203 if (negate_expr_p (tem))
1204 return fold_build2 (TREE_CODE (t), type,
1205 TREE_OPERAND (t, 0), negate_expr (tem));
1206 tem = TREE_OPERAND (t, 0);
1207 if (negate_expr_p (tem))
1208 return fold_build2 (TREE_CODE (t), type,
1209 negate_expr (tem), TREE_OPERAND (t, 1));
1210 }
1211 break;
1212
1213 case TRUNC_DIV_EXPR:
1214 case ROUND_DIV_EXPR:
1215 case FLOOR_DIV_EXPR:
1216 case CEIL_DIV_EXPR:
1217 case EXACT_DIV_EXPR:
1218 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1219 {
1220 tem = TREE_OPERAND (t, 1);
1221 if (negate_expr_p (tem))
1222 return fold_build2 (TREE_CODE (t), type,
1223 TREE_OPERAND (t, 0), negate_expr (tem));
1224 tem = TREE_OPERAND (t, 0);
1225 if (negate_expr_p (tem))
1226 return fold_build2 (TREE_CODE (t), type,
1227 negate_expr (tem), TREE_OPERAND (t, 1));
1228 }
1229 break;
1230
1231 case NOP_EXPR:
1232 /* Convert -((double)float) into (double)(-float). */
1233 if (TREE_CODE (type) == REAL_TYPE)
1234 {
1235 tem = strip_float_extensions (t);
1236 if (tem != t && negate_expr_p (tem))
1237 return negate_expr (tem);
1238 }
1239 break;
1240
1241 case CALL_EXPR:
1242 /* Negate -f(x) as f(-x). */
1243 if (negate_mathfn_p (builtin_mathfn_code (t))
1244 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1245 {
1246 tree fndecl, arg, arglist;
1247
1248 fndecl = get_callee_fndecl (t);
1249 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1250 arglist = build_tree_list (NULL_TREE, arg);
1251 return build_function_call_expr (fndecl, arglist);
1252 }
1253 break;
1254
1255 case RSHIFT_EXPR:
1256 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1257 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1258 {
1259 tree op1 = TREE_OPERAND (t, 1);
1260 if (TREE_INT_CST_HIGH (op1) == 0
1261 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1262 == TREE_INT_CST_LOW (op1))
1263 {
1264 tree ntype = TYPE_UNSIGNED (type)
1265 ? lang_hooks.types.signed_type (type)
1266 : lang_hooks.types.unsigned_type (type);
1267 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1268 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1269 return fold_convert (type, temp);
1270 }
1271 }
1272 break;
1273
1274 default:
1275 break;
1276 }
1277
1278 return NULL_TREE;
1279 }
1280
1281 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1282 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1283 return NULL_TREE. */
1284
1285 static tree
1286 negate_expr (tree t)
1287 {
1288 tree type, tem;
1289
1290 if (t == NULL_TREE)
1291 return NULL_TREE;
1292
1293 type = TREE_TYPE (t);
1294 STRIP_SIGN_NOPS (t);
1295
1296 tem = fold_negate_expr (t);
1297 if (!tem)
1298 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1299 return fold_convert (type, tem);
1300 }
1301 \f
1302 /* Split a tree IN into a constant, literal and variable parts that could be
1303 combined with CODE to make IN. "constant" means an expression with
1304 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1305 commutative arithmetic operation. Store the constant part into *CONP,
1306 the literal in *LITP and return the variable part. If a part isn't
1307 present, set it to null. If the tree does not decompose in this way,
1308 return the entire tree as the variable part and the other parts as null.
1309
1310 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1311 case, we negate an operand that was subtracted. Except if it is a
1312 literal for which we use *MINUS_LITP instead.
1313
1314 If NEGATE_P is true, we are negating all of IN, again except a literal
1315 for which we use *MINUS_LITP instead.
1316
1317 If IN is itself a literal or constant, return it as appropriate.
1318
1319 Note that we do not guarantee that any of the three values will be the
1320 same type as IN, but they will have the same signedness and mode. */
1321
1322 static tree
1323 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1324 tree *minus_litp, int negate_p)
1325 {
1326 tree var = 0;
1327
1328 *conp = 0;
1329 *litp = 0;
1330 *minus_litp = 0;
1331
1332 /* Strip any conversions that don't change the machine mode or signedness. */
1333 STRIP_SIGN_NOPS (in);
1334
1335 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1336 *litp = in;
1337 else if (TREE_CODE (in) == code
1338 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1339 /* We can associate addition and subtraction together (even
1340 though the C standard doesn't say so) for integers because
1341 the value is not affected. For reals, the value might be
1342 affected, so we can't. */
1343 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1344 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1345 {
1346 tree op0 = TREE_OPERAND (in, 0);
1347 tree op1 = TREE_OPERAND (in, 1);
1348 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1349 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1350
1351 /* First see if either of the operands is a literal, then a constant. */
1352 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1353 *litp = op0, op0 = 0;
1354 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1355 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1356
1357 if (op0 != 0 && TREE_CONSTANT (op0))
1358 *conp = op0, op0 = 0;
1359 else if (op1 != 0 && TREE_CONSTANT (op1))
1360 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1361
1362 /* If we haven't dealt with either operand, this is not a case we can
1363 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1364 if (op0 != 0 && op1 != 0)
1365 var = in;
1366 else if (op0 != 0)
1367 var = op0;
1368 else
1369 var = op1, neg_var_p = neg1_p;
1370
1371 /* Now do any needed negations. */
1372 if (neg_litp_p)
1373 *minus_litp = *litp, *litp = 0;
1374 if (neg_conp_p)
1375 *conp = negate_expr (*conp);
1376 if (neg_var_p)
1377 var = negate_expr (var);
1378 }
1379 else if (TREE_CONSTANT (in))
1380 *conp = in;
1381 else
1382 var = in;
1383
1384 if (negate_p)
1385 {
1386 if (*litp)
1387 *minus_litp = *litp, *litp = 0;
1388 else if (*minus_litp)
1389 *litp = *minus_litp, *minus_litp = 0;
1390 *conp = negate_expr (*conp);
1391 var = negate_expr (var);
1392 }
1393
1394 return var;
1395 }
1396
1397 /* Re-associate trees split by the above function. T1 and T2 are either
1398 expressions to associate or null. Return the new expression, if any. If
1399 we build an operation, do it in TYPE and with CODE. */
1400
1401 static tree
1402 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1403 {
1404 if (t1 == 0)
1405 return t2;
1406 else if (t2 == 0)
1407 return t1;
1408
1409 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1410 try to fold this since we will have infinite recursion. But do
1411 deal with any NEGATE_EXPRs. */
1412 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1413 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1414 {
1415 if (code == PLUS_EXPR)
1416 {
1417 if (TREE_CODE (t1) == NEGATE_EXPR)
1418 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1419 fold_convert (type, TREE_OPERAND (t1, 0)));
1420 else if (TREE_CODE (t2) == NEGATE_EXPR)
1421 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1422 fold_convert (type, TREE_OPERAND (t2, 0)));
1423 else if (integer_zerop (t2))
1424 return fold_convert (type, t1);
1425 }
1426 else if (code == MINUS_EXPR)
1427 {
1428 if (integer_zerop (t2))
1429 return fold_convert (type, t1);
1430 }
1431
1432 return build2 (code, type, fold_convert (type, t1),
1433 fold_convert (type, t2));
1434 }
1435
1436 return fold_build2 (code, type, fold_convert (type, t1),
1437 fold_convert (type, t2));
1438 }
1439 \f
1440 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1441 for use in int_const_binop, size_binop and size_diffop. */
1442
1443 static bool
1444 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1445 {
1446 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1447 return false;
1448 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1449 return false;
1450
1451 switch (code)
1452 {
1453 case LSHIFT_EXPR:
1454 case RSHIFT_EXPR:
1455 case LROTATE_EXPR:
1456 case RROTATE_EXPR:
1457 return true;
1458
1459 default:
1460 break;
1461 }
1462
1463 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1464 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1465 && TYPE_MODE (type1) == TYPE_MODE (type2);
1466 }
1467
1468
1469 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1470 to produce a new constant. Return NULL_TREE if we don't know how
1471 to evaluate CODE at compile-time.
1472
1473 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1474
1475 tree
1476 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1477 {
1478 unsigned HOST_WIDE_INT int1l, int2l;
1479 HOST_WIDE_INT int1h, int2h;
1480 unsigned HOST_WIDE_INT low;
1481 HOST_WIDE_INT hi;
1482 unsigned HOST_WIDE_INT garbagel;
1483 HOST_WIDE_INT garbageh;
1484 tree t;
1485 tree type = TREE_TYPE (arg1);
1486 int uns = TYPE_UNSIGNED (type);
1487 int is_sizetype
1488 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1489 int overflow = 0;
1490
1491 int1l = TREE_INT_CST_LOW (arg1);
1492 int1h = TREE_INT_CST_HIGH (arg1);
1493 int2l = TREE_INT_CST_LOW (arg2);
1494 int2h = TREE_INT_CST_HIGH (arg2);
1495
1496 switch (code)
1497 {
1498 case BIT_IOR_EXPR:
1499 low = int1l | int2l, hi = int1h | int2h;
1500 break;
1501
1502 case BIT_XOR_EXPR:
1503 low = int1l ^ int2l, hi = int1h ^ int2h;
1504 break;
1505
1506 case BIT_AND_EXPR:
1507 low = int1l & int2l, hi = int1h & int2h;
1508 break;
1509
1510 case RSHIFT_EXPR:
1511 int2l = -int2l;
1512 case LSHIFT_EXPR:
1513 /* It's unclear from the C standard whether shifts can overflow.
1514 The following code ignores overflow; perhaps a C standard
1515 interpretation ruling is needed. */
1516 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1517 &low, &hi, !uns);
1518 break;
1519
1520 case RROTATE_EXPR:
1521 int2l = - int2l;
1522 case LROTATE_EXPR:
1523 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1524 &low, &hi);
1525 break;
1526
1527 case PLUS_EXPR:
1528 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1529 break;
1530
1531 case MINUS_EXPR:
1532 neg_double (int2l, int2h, &low, &hi);
1533 add_double (int1l, int1h, low, hi, &low, &hi);
1534 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1535 break;
1536
1537 case MULT_EXPR:
1538 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1539 break;
1540
1541 case TRUNC_DIV_EXPR:
1542 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1543 case EXACT_DIV_EXPR:
1544 /* This is a shortcut for a common special case. */
1545 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1546 && !TREE_OVERFLOW (arg1)
1547 && !TREE_OVERFLOW (arg2)
1548 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1549 {
1550 if (code == CEIL_DIV_EXPR)
1551 int1l += int2l - 1;
1552
1553 low = int1l / int2l, hi = 0;
1554 break;
1555 }
1556
1557 /* ... fall through ... */
1558
1559 case ROUND_DIV_EXPR:
1560 if (int2h == 0 && int2l == 0)
1561 return NULL_TREE;
1562 if (int2h == 0 && int2l == 1)
1563 {
1564 low = int1l, hi = int1h;
1565 break;
1566 }
1567 if (int1l == int2l && int1h == int2h
1568 && ! (int1l == 0 && int1h == 0))
1569 {
1570 low = 1, hi = 0;
1571 break;
1572 }
1573 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1574 &low, &hi, &garbagel, &garbageh);
1575 break;
1576
1577 case TRUNC_MOD_EXPR:
1578 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1579 /* This is a shortcut for a common special case. */
1580 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1581 && !TREE_OVERFLOW (arg1)
1582 && !TREE_OVERFLOW (arg2)
1583 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1584 {
1585 if (code == CEIL_MOD_EXPR)
1586 int1l += int2l - 1;
1587 low = int1l % int2l, hi = 0;
1588 break;
1589 }
1590
1591 /* ... fall through ... */
1592
1593 case ROUND_MOD_EXPR:
1594 if (int2h == 0 && int2l == 0)
1595 return NULL_TREE;
1596 overflow = div_and_round_double (code, uns,
1597 int1l, int1h, int2l, int2h,
1598 &garbagel, &garbageh, &low, &hi);
1599 break;
1600
1601 case MIN_EXPR:
1602 case MAX_EXPR:
1603 if (uns)
1604 low = (((unsigned HOST_WIDE_INT) int1h
1605 < (unsigned HOST_WIDE_INT) int2h)
1606 || (((unsigned HOST_WIDE_INT) int1h
1607 == (unsigned HOST_WIDE_INT) int2h)
1608 && int1l < int2l));
1609 else
1610 low = (int1h < int2h
1611 || (int1h == int2h && int1l < int2l));
1612
1613 if (low == (code == MIN_EXPR))
1614 low = int1l, hi = int1h;
1615 else
1616 low = int2l, hi = int2h;
1617 break;
1618
1619 default:
1620 return NULL_TREE;
1621 }
1622
1623 if (notrunc)
1624 {
1625 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1626
1627 /* Propagate overflow flags ourselves. */
1628 if (((!uns || is_sizetype) && overflow)
1629 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1630 {
1631 t = copy_node (t);
1632 TREE_OVERFLOW (t) = 1;
1633 }
1634 }
1635 else
1636 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1637 ((!uns || is_sizetype) && overflow)
1638 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1639
1640 return t;
1641 }
1642
1643 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1644 constant. We assume ARG1 and ARG2 have the same data type, or at least
1645 are the same kind of constant and the same machine mode. Return zero if
1646 combining the constants is not allowed in the current operating mode.
1647
1648 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1649
1650 static tree
1651 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1652 {
1653 /* Sanity check for the recursive cases. */
1654 if (!arg1 || !arg2)
1655 return NULL_TREE;
1656
1657 STRIP_NOPS (arg1);
1658 STRIP_NOPS (arg2);
1659
1660 if (TREE_CODE (arg1) == INTEGER_CST)
1661 return int_const_binop (code, arg1, arg2, notrunc);
1662
1663 if (TREE_CODE (arg1) == REAL_CST)
1664 {
1665 enum machine_mode mode;
1666 REAL_VALUE_TYPE d1;
1667 REAL_VALUE_TYPE d2;
1668 REAL_VALUE_TYPE value;
1669 REAL_VALUE_TYPE result;
1670 bool inexact;
1671 tree t, type;
1672
1673 /* The following codes are handled by real_arithmetic. */
1674 switch (code)
1675 {
1676 case PLUS_EXPR:
1677 case MINUS_EXPR:
1678 case MULT_EXPR:
1679 case RDIV_EXPR:
1680 case MIN_EXPR:
1681 case MAX_EXPR:
1682 break;
1683
1684 default:
1685 return NULL_TREE;
1686 }
1687
1688 d1 = TREE_REAL_CST (arg1);
1689 d2 = TREE_REAL_CST (arg2);
1690
1691 type = TREE_TYPE (arg1);
1692 mode = TYPE_MODE (type);
1693
1694 /* Don't perform operation if we honor signaling NaNs and
1695 either operand is a NaN. */
1696 if (HONOR_SNANS (mode)
1697 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1698 return NULL_TREE;
1699
1700 /* Don't perform operation if it would raise a division
1701 by zero exception. */
1702 if (code == RDIV_EXPR
1703 && REAL_VALUES_EQUAL (d2, dconst0)
1704 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1705 return NULL_TREE;
1706
1707 /* If either operand is a NaN, just return it. Otherwise, set up
1708 for floating-point trap; we return an overflow. */
1709 if (REAL_VALUE_ISNAN (d1))
1710 return arg1;
1711 else if (REAL_VALUE_ISNAN (d2))
1712 return arg2;
1713
1714 inexact = real_arithmetic (&value, code, &d1, &d2);
1715 real_convert (&result, mode, &value);
1716
1717 /* Don't constant fold this floating point operation if
1718 the result has overflowed and flag_trapping_math. */
1719 if (flag_trapping_math
1720 && MODE_HAS_INFINITIES (mode)
1721 && REAL_VALUE_ISINF (result)
1722 && !REAL_VALUE_ISINF (d1)
1723 && !REAL_VALUE_ISINF (d2))
1724 return NULL_TREE;
1725
1726 /* Don't constant fold this floating point operation if the
1727 result may dependent upon the run-time rounding mode and
1728 flag_rounding_math is set, or if GCC's software emulation
1729 is unable to accurately represent the result. */
1730 if ((flag_rounding_math
1731 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1732 && !flag_unsafe_math_optimizations))
1733 && (inexact || !real_identical (&result, &value)))
1734 return NULL_TREE;
1735
1736 t = build_real (type, result);
1737
1738 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1739 return t;
1740 }
1741
1742 if (TREE_CODE (arg1) == COMPLEX_CST)
1743 {
1744 tree type = TREE_TYPE (arg1);
1745 tree r1 = TREE_REALPART (arg1);
1746 tree i1 = TREE_IMAGPART (arg1);
1747 tree r2 = TREE_REALPART (arg2);
1748 tree i2 = TREE_IMAGPART (arg2);
1749 tree real, imag;
1750
1751 switch (code)
1752 {
1753 case PLUS_EXPR:
1754 case MINUS_EXPR:
1755 real = const_binop (code, r1, r2, notrunc);
1756 imag = const_binop (code, i1, i2, notrunc);
1757 break;
1758
1759 case MULT_EXPR:
1760 real = const_binop (MINUS_EXPR,
1761 const_binop (MULT_EXPR, r1, r2, notrunc),
1762 const_binop (MULT_EXPR, i1, i2, notrunc),
1763 notrunc);
1764 imag = const_binop (PLUS_EXPR,
1765 const_binop (MULT_EXPR, r1, i2, notrunc),
1766 const_binop (MULT_EXPR, i1, r2, notrunc),
1767 notrunc);
1768 break;
1769
1770 case RDIV_EXPR:
1771 {
1772 tree magsquared
1773 = const_binop (PLUS_EXPR,
1774 const_binop (MULT_EXPR, r2, r2, notrunc),
1775 const_binop (MULT_EXPR, i2, i2, notrunc),
1776 notrunc);
1777 tree t1
1778 = const_binop (PLUS_EXPR,
1779 const_binop (MULT_EXPR, r1, r2, notrunc),
1780 const_binop (MULT_EXPR, i1, i2, notrunc),
1781 notrunc);
1782 tree t2
1783 = const_binop (MINUS_EXPR,
1784 const_binop (MULT_EXPR, i1, r2, notrunc),
1785 const_binop (MULT_EXPR, r1, i2, notrunc),
1786 notrunc);
1787
1788 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1789 code = TRUNC_DIV_EXPR;
1790
1791 real = const_binop (code, t1, magsquared, notrunc);
1792 imag = const_binop (code, t2, magsquared, notrunc);
1793 }
1794 break;
1795
1796 default:
1797 return NULL_TREE;
1798 }
1799
1800 if (real && imag)
1801 return build_complex (type, real, imag);
1802 }
1803
1804 return NULL_TREE;
1805 }
1806
1807 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1808 indicates which particular sizetype to create. */
1809
1810 tree
1811 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1812 {
1813 return build_int_cst (sizetype_tab[(int) kind], number);
1814 }
1815 \f
1816 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1817 is a tree code. The type of the result is taken from the operands.
1818 Both must be equivalent integer types, ala int_binop_types_match_p.
1819 If the operands are constant, so is the result. */
1820
1821 tree
1822 size_binop (enum tree_code code, tree arg0, tree arg1)
1823 {
1824 tree type = TREE_TYPE (arg0);
1825
1826 if (arg0 == error_mark_node || arg1 == error_mark_node)
1827 return error_mark_node;
1828
1829 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1830 TREE_TYPE (arg1)));
1831
1832 /* Handle the special case of two integer constants faster. */
1833 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1834 {
1835 /* And some specific cases even faster than that. */
1836 if (code == PLUS_EXPR)
1837 {
1838 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1839 return arg1;
1840 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1841 return arg0;
1842 }
1843 else if (code == MINUS_EXPR)
1844 {
1845 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1846 return arg0;
1847 }
1848 else if (code == MULT_EXPR)
1849 {
1850 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1851 return arg1;
1852 }
1853
1854 /* Handle general case of two integer constants. */
1855 return int_const_binop (code, arg0, arg1, 0);
1856 }
1857
1858 return fold_build2 (code, type, arg0, arg1);
1859 }
1860
1861 /* Given two values, either both of sizetype or both of bitsizetype,
1862 compute the difference between the two values. Return the value
1863 in signed type corresponding to the type of the operands. */
1864
1865 tree
1866 size_diffop (tree arg0, tree arg1)
1867 {
1868 tree type = TREE_TYPE (arg0);
1869 tree ctype;
1870
1871 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1872 TREE_TYPE (arg1)));
1873
1874 /* If the type is already signed, just do the simple thing. */
1875 if (!TYPE_UNSIGNED (type))
1876 return size_binop (MINUS_EXPR, arg0, arg1);
1877
1878 if (type == sizetype)
1879 ctype = ssizetype;
1880 else if (type == bitsizetype)
1881 ctype = sbitsizetype;
1882 else
1883 ctype = lang_hooks.types.signed_type (type);
1884
1885 /* If either operand is not a constant, do the conversions to the signed
1886 type and subtract. The hardware will do the right thing with any
1887 overflow in the subtraction. */
1888 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1889 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1890 fold_convert (ctype, arg1));
1891
1892 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1893 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1894 overflow) and negate (which can't either). Special-case a result
1895 of zero while we're here. */
1896 if (tree_int_cst_equal (arg0, arg1))
1897 return build_int_cst (ctype, 0);
1898 else if (tree_int_cst_lt (arg1, arg0))
1899 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1900 else
1901 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1902 fold_convert (ctype, size_binop (MINUS_EXPR,
1903 arg1, arg0)));
1904 }
1905 \f
1906 /* A subroutine of fold_convert_const handling conversions of an
1907 INTEGER_CST to another integer type. */
1908
1909 static tree
1910 fold_convert_const_int_from_int (tree type, tree arg1)
1911 {
1912 tree t;
1913
1914 /* Given an integer constant, make new constant with new type,
1915 appropriately sign-extended or truncated. */
1916 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
1917 TREE_INT_CST_HIGH (arg1),
1918 /* Don't set the overflow when
1919 converting a pointer */
1920 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1921 (TREE_INT_CST_HIGH (arg1) < 0
1922 && (TYPE_UNSIGNED (type)
1923 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1924 | TREE_OVERFLOW (arg1));
1925
1926 return t;
1927 }
1928
1929 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1930 to an integer type. */
1931
1932 static tree
1933 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1934 {
1935 int overflow = 0;
1936 tree t;
1937
1938 /* The following code implements the floating point to integer
1939 conversion rules required by the Java Language Specification,
1940 that IEEE NaNs are mapped to zero and values that overflow
1941 the target precision saturate, i.e. values greater than
1942 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1943 are mapped to INT_MIN. These semantics are allowed by the
1944 C and C++ standards that simply state that the behavior of
1945 FP-to-integer conversion is unspecified upon overflow. */
1946
1947 HOST_WIDE_INT high, low;
1948 REAL_VALUE_TYPE r;
1949 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1950
1951 switch (code)
1952 {
1953 case FIX_TRUNC_EXPR:
1954 real_trunc (&r, VOIDmode, &x);
1955 break;
1956
1957 default:
1958 gcc_unreachable ();
1959 }
1960
1961 /* If R is NaN, return zero and show we have an overflow. */
1962 if (REAL_VALUE_ISNAN (r))
1963 {
1964 overflow = 1;
1965 high = 0;
1966 low = 0;
1967 }
1968
1969 /* See if R is less than the lower bound or greater than the
1970 upper bound. */
1971
1972 if (! overflow)
1973 {
1974 tree lt = TYPE_MIN_VALUE (type);
1975 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1976 if (REAL_VALUES_LESS (r, l))
1977 {
1978 overflow = 1;
1979 high = TREE_INT_CST_HIGH (lt);
1980 low = TREE_INT_CST_LOW (lt);
1981 }
1982 }
1983
1984 if (! overflow)
1985 {
1986 tree ut = TYPE_MAX_VALUE (type);
1987 if (ut)
1988 {
1989 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1990 if (REAL_VALUES_LESS (u, r))
1991 {
1992 overflow = 1;
1993 high = TREE_INT_CST_HIGH (ut);
1994 low = TREE_INT_CST_LOW (ut);
1995 }
1996 }
1997 }
1998
1999 if (! overflow)
2000 REAL_VALUE_TO_INT (&low, &high, r);
2001
2002 t = force_fit_type_double (type, low, high, -1,
2003 overflow | TREE_OVERFLOW (arg1));
2004 return t;
2005 }
2006
2007 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2008 to another floating point type. */
2009
2010 static tree
2011 fold_convert_const_real_from_real (tree type, tree arg1)
2012 {
2013 REAL_VALUE_TYPE value;
2014 tree t;
2015
2016 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2017 t = build_real (type, value);
2018
2019 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2020 return t;
2021 }
2022
2023 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2024 type TYPE. If no simplification can be done return NULL_TREE. */
2025
2026 static tree
2027 fold_convert_const (enum tree_code code, tree type, tree arg1)
2028 {
2029 if (TREE_TYPE (arg1) == type)
2030 return arg1;
2031
2032 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2033 {
2034 if (TREE_CODE (arg1) == INTEGER_CST)
2035 return fold_convert_const_int_from_int (type, arg1);
2036 else if (TREE_CODE (arg1) == REAL_CST)
2037 return fold_convert_const_int_from_real (code, type, arg1);
2038 }
2039 else if (TREE_CODE (type) == REAL_TYPE)
2040 {
2041 if (TREE_CODE (arg1) == INTEGER_CST)
2042 return build_real_from_int_cst (type, arg1);
2043 if (TREE_CODE (arg1) == REAL_CST)
2044 return fold_convert_const_real_from_real (type, arg1);
2045 }
2046 return NULL_TREE;
2047 }
2048
2049 /* Construct a vector of zero elements of vector type TYPE. */
2050
2051 static tree
2052 build_zero_vector (tree type)
2053 {
2054 tree elem, list;
2055 int i, units;
2056
2057 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2058 units = TYPE_VECTOR_SUBPARTS (type);
2059
2060 list = NULL_TREE;
2061 for (i = 0; i < units; i++)
2062 list = tree_cons (NULL_TREE, elem, list);
2063 return build_vector (type, list);
2064 }
2065
2066 /* Convert expression ARG to type TYPE. Used by the middle-end for
2067 simple conversions in preference to calling the front-end's convert. */
2068
2069 tree
2070 fold_convert (tree type, tree arg)
2071 {
2072 tree orig = TREE_TYPE (arg);
2073 tree tem;
2074
2075 if (type == orig)
2076 return arg;
2077
2078 if (TREE_CODE (arg) == ERROR_MARK
2079 || TREE_CODE (type) == ERROR_MARK
2080 || TREE_CODE (orig) == ERROR_MARK)
2081 return error_mark_node;
2082
2083 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2084 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2085 TYPE_MAIN_VARIANT (orig)))
2086 return fold_build1 (NOP_EXPR, type, arg);
2087
2088 switch (TREE_CODE (type))
2089 {
2090 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2091 case POINTER_TYPE: case REFERENCE_TYPE:
2092 case OFFSET_TYPE:
2093 if (TREE_CODE (arg) == INTEGER_CST)
2094 {
2095 tem = fold_convert_const (NOP_EXPR, type, arg);
2096 if (tem != NULL_TREE)
2097 return tem;
2098 }
2099 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2100 || TREE_CODE (orig) == OFFSET_TYPE)
2101 return fold_build1 (NOP_EXPR, type, arg);
2102 if (TREE_CODE (orig) == COMPLEX_TYPE)
2103 {
2104 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2105 return fold_convert (type, tem);
2106 }
2107 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2108 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2109 return fold_build1 (NOP_EXPR, type, arg);
2110
2111 case REAL_TYPE:
2112 if (TREE_CODE (arg) == INTEGER_CST)
2113 {
2114 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2115 if (tem != NULL_TREE)
2116 return tem;
2117 }
2118 else if (TREE_CODE (arg) == REAL_CST)
2119 {
2120 tem = fold_convert_const (NOP_EXPR, type, arg);
2121 if (tem != NULL_TREE)
2122 return tem;
2123 }
2124
2125 switch (TREE_CODE (orig))
2126 {
2127 case INTEGER_TYPE:
2128 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2129 case POINTER_TYPE: case REFERENCE_TYPE:
2130 return fold_build1 (FLOAT_EXPR, type, arg);
2131
2132 case REAL_TYPE:
2133 return fold_build1 (NOP_EXPR, type, arg);
2134
2135 case COMPLEX_TYPE:
2136 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2137 return fold_convert (type, tem);
2138
2139 default:
2140 gcc_unreachable ();
2141 }
2142
2143 case COMPLEX_TYPE:
2144 switch (TREE_CODE (orig))
2145 {
2146 case INTEGER_TYPE:
2147 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2148 case POINTER_TYPE: case REFERENCE_TYPE:
2149 case REAL_TYPE:
2150 return build2 (COMPLEX_EXPR, type,
2151 fold_convert (TREE_TYPE (type), arg),
2152 fold_convert (TREE_TYPE (type), integer_zero_node));
2153 case COMPLEX_TYPE:
2154 {
2155 tree rpart, ipart;
2156
2157 if (TREE_CODE (arg) == COMPLEX_EXPR)
2158 {
2159 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2160 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2161 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2162 }
2163
2164 arg = save_expr (arg);
2165 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2166 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2167 rpart = fold_convert (TREE_TYPE (type), rpart);
2168 ipart = fold_convert (TREE_TYPE (type), ipart);
2169 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2170 }
2171
2172 default:
2173 gcc_unreachable ();
2174 }
2175
2176 case VECTOR_TYPE:
2177 if (integer_zerop (arg))
2178 return build_zero_vector (type);
2179 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2180 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2181 || TREE_CODE (orig) == VECTOR_TYPE);
2182 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2183
2184 case VOID_TYPE:
2185 tem = fold_ignored_result (arg);
2186 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2187 return tem;
2188 return fold_build1 (NOP_EXPR, type, tem);
2189
2190 default:
2191 gcc_unreachable ();
2192 }
2193 }
2194 \f
2195 /* Return false if expr can be assumed not to be an lvalue, true
2196 otherwise. */
2197
2198 static bool
2199 maybe_lvalue_p (tree x)
2200 {
2201 /* We only need to wrap lvalue tree codes. */
2202 switch (TREE_CODE (x))
2203 {
2204 case VAR_DECL:
2205 case PARM_DECL:
2206 case RESULT_DECL:
2207 case LABEL_DECL:
2208 case FUNCTION_DECL:
2209 case SSA_NAME:
2210
2211 case COMPONENT_REF:
2212 case INDIRECT_REF:
2213 case ALIGN_INDIRECT_REF:
2214 case MISALIGNED_INDIRECT_REF:
2215 case ARRAY_REF:
2216 case ARRAY_RANGE_REF:
2217 case BIT_FIELD_REF:
2218 case OBJ_TYPE_REF:
2219
2220 case REALPART_EXPR:
2221 case IMAGPART_EXPR:
2222 case PREINCREMENT_EXPR:
2223 case PREDECREMENT_EXPR:
2224 case SAVE_EXPR:
2225 case TRY_CATCH_EXPR:
2226 case WITH_CLEANUP_EXPR:
2227 case COMPOUND_EXPR:
2228 case MODIFY_EXPR:
2229 case GIMPLE_MODIFY_STMT:
2230 case TARGET_EXPR:
2231 case COND_EXPR:
2232 case BIND_EXPR:
2233 case MIN_EXPR:
2234 case MAX_EXPR:
2235 break;
2236
2237 default:
2238 /* Assume the worst for front-end tree codes. */
2239 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2240 break;
2241 return false;
2242 }
2243
2244 return true;
2245 }
2246
2247 /* Return an expr equal to X but certainly not valid as an lvalue. */
2248
2249 tree
2250 non_lvalue (tree x)
2251 {
2252 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2253 us. */
2254 if (in_gimple_form)
2255 return x;
2256
2257 if (! maybe_lvalue_p (x))
2258 return x;
2259 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2260 }
2261
2262 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2263 Zero means allow extended lvalues. */
2264
2265 int pedantic_lvalues;
2266
2267 /* When pedantic, return an expr equal to X but certainly not valid as a
2268 pedantic lvalue. Otherwise, return X. */
2269
2270 static tree
2271 pedantic_non_lvalue (tree x)
2272 {
2273 if (pedantic_lvalues)
2274 return non_lvalue (x);
2275 else
2276 return x;
2277 }
2278 \f
2279 /* Given a tree comparison code, return the code that is the logical inverse
2280 of the given code. It is not safe to do this for floating-point
2281 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2282 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2283
2284 enum tree_code
2285 invert_tree_comparison (enum tree_code code, bool honor_nans)
2286 {
2287 if (honor_nans && flag_trapping_math)
2288 return ERROR_MARK;
2289
2290 switch (code)
2291 {
2292 case EQ_EXPR:
2293 return NE_EXPR;
2294 case NE_EXPR:
2295 return EQ_EXPR;
2296 case GT_EXPR:
2297 return honor_nans ? UNLE_EXPR : LE_EXPR;
2298 case GE_EXPR:
2299 return honor_nans ? UNLT_EXPR : LT_EXPR;
2300 case LT_EXPR:
2301 return honor_nans ? UNGE_EXPR : GE_EXPR;
2302 case LE_EXPR:
2303 return honor_nans ? UNGT_EXPR : GT_EXPR;
2304 case LTGT_EXPR:
2305 return UNEQ_EXPR;
2306 case UNEQ_EXPR:
2307 return LTGT_EXPR;
2308 case UNGT_EXPR:
2309 return LE_EXPR;
2310 case UNGE_EXPR:
2311 return LT_EXPR;
2312 case UNLT_EXPR:
2313 return GE_EXPR;
2314 case UNLE_EXPR:
2315 return GT_EXPR;
2316 case ORDERED_EXPR:
2317 return UNORDERED_EXPR;
2318 case UNORDERED_EXPR:
2319 return ORDERED_EXPR;
2320 default:
2321 gcc_unreachable ();
2322 }
2323 }
2324
2325 /* Similar, but return the comparison that results if the operands are
2326 swapped. This is safe for floating-point. */
2327
2328 enum tree_code
2329 swap_tree_comparison (enum tree_code code)
2330 {
2331 switch (code)
2332 {
2333 case EQ_EXPR:
2334 case NE_EXPR:
2335 case ORDERED_EXPR:
2336 case UNORDERED_EXPR:
2337 case LTGT_EXPR:
2338 case UNEQ_EXPR:
2339 return code;
2340 case GT_EXPR:
2341 return LT_EXPR;
2342 case GE_EXPR:
2343 return LE_EXPR;
2344 case LT_EXPR:
2345 return GT_EXPR;
2346 case LE_EXPR:
2347 return GE_EXPR;
2348 case UNGT_EXPR:
2349 return UNLT_EXPR;
2350 case UNGE_EXPR:
2351 return UNLE_EXPR;
2352 case UNLT_EXPR:
2353 return UNGT_EXPR;
2354 case UNLE_EXPR:
2355 return UNGE_EXPR;
2356 default:
2357 gcc_unreachable ();
2358 }
2359 }
2360
2361
2362 /* Convert a comparison tree code from an enum tree_code representation
2363 into a compcode bit-based encoding. This function is the inverse of
2364 compcode_to_comparison. */
2365
2366 static enum comparison_code
2367 comparison_to_compcode (enum tree_code code)
2368 {
2369 switch (code)
2370 {
2371 case LT_EXPR:
2372 return COMPCODE_LT;
2373 case EQ_EXPR:
2374 return COMPCODE_EQ;
2375 case LE_EXPR:
2376 return COMPCODE_LE;
2377 case GT_EXPR:
2378 return COMPCODE_GT;
2379 case NE_EXPR:
2380 return COMPCODE_NE;
2381 case GE_EXPR:
2382 return COMPCODE_GE;
2383 case ORDERED_EXPR:
2384 return COMPCODE_ORD;
2385 case UNORDERED_EXPR:
2386 return COMPCODE_UNORD;
2387 case UNLT_EXPR:
2388 return COMPCODE_UNLT;
2389 case UNEQ_EXPR:
2390 return COMPCODE_UNEQ;
2391 case UNLE_EXPR:
2392 return COMPCODE_UNLE;
2393 case UNGT_EXPR:
2394 return COMPCODE_UNGT;
2395 case LTGT_EXPR:
2396 return COMPCODE_LTGT;
2397 case UNGE_EXPR:
2398 return COMPCODE_UNGE;
2399 default:
2400 gcc_unreachable ();
2401 }
2402 }
2403
2404 /* Convert a compcode bit-based encoding of a comparison operator back
2405 to GCC's enum tree_code representation. This function is the
2406 inverse of comparison_to_compcode. */
2407
2408 static enum tree_code
2409 compcode_to_comparison (enum comparison_code code)
2410 {
2411 switch (code)
2412 {
2413 case COMPCODE_LT:
2414 return LT_EXPR;
2415 case COMPCODE_EQ:
2416 return EQ_EXPR;
2417 case COMPCODE_LE:
2418 return LE_EXPR;
2419 case COMPCODE_GT:
2420 return GT_EXPR;
2421 case COMPCODE_NE:
2422 return NE_EXPR;
2423 case COMPCODE_GE:
2424 return GE_EXPR;
2425 case COMPCODE_ORD:
2426 return ORDERED_EXPR;
2427 case COMPCODE_UNORD:
2428 return UNORDERED_EXPR;
2429 case COMPCODE_UNLT:
2430 return UNLT_EXPR;
2431 case COMPCODE_UNEQ:
2432 return UNEQ_EXPR;
2433 case COMPCODE_UNLE:
2434 return UNLE_EXPR;
2435 case COMPCODE_UNGT:
2436 return UNGT_EXPR;
2437 case COMPCODE_LTGT:
2438 return LTGT_EXPR;
2439 case COMPCODE_UNGE:
2440 return UNGE_EXPR;
2441 default:
2442 gcc_unreachable ();
2443 }
2444 }
2445
2446 /* Return a tree for the comparison which is the combination of
2447 doing the AND or OR (depending on CODE) of the two operations LCODE
2448 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2449 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2450 if this makes the transformation invalid. */
2451
2452 tree
2453 combine_comparisons (enum tree_code code, enum tree_code lcode,
2454 enum tree_code rcode, tree truth_type,
2455 tree ll_arg, tree lr_arg)
2456 {
2457 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2458 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2459 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2460 enum comparison_code compcode;
2461
2462 switch (code)
2463 {
2464 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2465 compcode = lcompcode & rcompcode;
2466 break;
2467
2468 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2469 compcode = lcompcode | rcompcode;
2470 break;
2471
2472 default:
2473 return NULL_TREE;
2474 }
2475
2476 if (!honor_nans)
2477 {
2478 /* Eliminate unordered comparisons, as well as LTGT and ORD
2479 which are not used unless the mode has NaNs. */
2480 compcode &= ~COMPCODE_UNORD;
2481 if (compcode == COMPCODE_LTGT)
2482 compcode = COMPCODE_NE;
2483 else if (compcode == COMPCODE_ORD)
2484 compcode = COMPCODE_TRUE;
2485 }
2486 else if (flag_trapping_math)
2487 {
2488 /* Check that the original operation and the optimized ones will trap
2489 under the same condition. */
2490 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2491 && (lcompcode != COMPCODE_EQ)
2492 && (lcompcode != COMPCODE_ORD);
2493 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2494 && (rcompcode != COMPCODE_EQ)
2495 && (rcompcode != COMPCODE_ORD);
2496 bool trap = (compcode & COMPCODE_UNORD) == 0
2497 && (compcode != COMPCODE_EQ)
2498 && (compcode != COMPCODE_ORD);
2499
2500 /* In a short-circuited boolean expression the LHS might be
2501 such that the RHS, if evaluated, will never trap. For
2502 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2503 if neither x nor y is NaN. (This is a mixed blessing: for
2504 example, the expression above will never trap, hence
2505 optimizing it to x < y would be invalid). */
2506 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2507 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2508 rtrap = false;
2509
2510 /* If the comparison was short-circuited, and only the RHS
2511 trapped, we may now generate a spurious trap. */
2512 if (rtrap && !ltrap
2513 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2514 return NULL_TREE;
2515
2516 /* If we changed the conditions that cause a trap, we lose. */
2517 if ((ltrap || rtrap) != trap)
2518 return NULL_TREE;
2519 }
2520
2521 if (compcode == COMPCODE_TRUE)
2522 return constant_boolean_node (true, truth_type);
2523 else if (compcode == COMPCODE_FALSE)
2524 return constant_boolean_node (false, truth_type);
2525 else
2526 return fold_build2 (compcode_to_comparison (compcode),
2527 truth_type, ll_arg, lr_arg);
2528 }
2529
2530 /* Return nonzero if CODE is a tree code that represents a truth value. */
2531
2532 static int
2533 truth_value_p (enum tree_code code)
2534 {
2535 return (TREE_CODE_CLASS (code) == tcc_comparison
2536 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2537 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2538 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2539 }
2540 \f
2541 /* Return nonzero if two operands (typically of the same tree node)
2542 are necessarily equal. If either argument has side-effects this
2543 function returns zero. FLAGS modifies behavior as follows:
2544
2545 If OEP_ONLY_CONST is set, only return nonzero for constants.
2546 This function tests whether the operands are indistinguishable;
2547 it does not test whether they are equal using C's == operation.
2548 The distinction is important for IEEE floating point, because
2549 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2550 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2551
2552 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2553 even though it may hold multiple values during a function.
2554 This is because a GCC tree node guarantees that nothing else is
2555 executed between the evaluation of its "operands" (which may often
2556 be evaluated in arbitrary order). Hence if the operands themselves
2557 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2558 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2559 unset means assuming isochronic (or instantaneous) tree equivalence.
2560 Unless comparing arbitrary expression trees, such as from different
2561 statements, this flag can usually be left unset.
2562
2563 If OEP_PURE_SAME is set, then pure functions with identical arguments
2564 are considered the same. It is used when the caller has other ways
2565 to ensure that global memory is unchanged in between. */
2566
2567 int
2568 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2569 {
2570 /* If either is ERROR_MARK, they aren't equal. */
2571 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2572 return 0;
2573
2574 /* If both types don't have the same signedness, then we can't consider
2575 them equal. We must check this before the STRIP_NOPS calls
2576 because they may change the signedness of the arguments. */
2577 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2578 return 0;
2579
2580 /* If both types don't have the same precision, then it is not safe
2581 to strip NOPs. */
2582 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2583 return 0;
2584
2585 STRIP_NOPS (arg0);
2586 STRIP_NOPS (arg1);
2587
2588 /* In case both args are comparisons but with different comparison
2589 code, try to swap the comparison operands of one arg to produce
2590 a match and compare that variant. */
2591 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2592 && COMPARISON_CLASS_P (arg0)
2593 && COMPARISON_CLASS_P (arg1))
2594 {
2595 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2596
2597 if (TREE_CODE (arg0) == swap_code)
2598 return operand_equal_p (TREE_OPERAND (arg0, 0),
2599 TREE_OPERAND (arg1, 1), flags)
2600 && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 TREE_OPERAND (arg1, 0), flags);
2602 }
2603
2604 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2605 /* This is needed for conversions and for COMPONENT_REF.
2606 Might as well play it safe and always test this. */
2607 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2608 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2609 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2610 return 0;
2611
2612 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2613 We don't care about side effects in that case because the SAVE_EXPR
2614 takes care of that for us. In all other cases, two expressions are
2615 equal if they have no side effects. If we have two identical
2616 expressions with side effects that should be treated the same due
2617 to the only side effects being identical SAVE_EXPR's, that will
2618 be detected in the recursive calls below. */
2619 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2620 && (TREE_CODE (arg0) == SAVE_EXPR
2621 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2622 return 1;
2623
2624 /* Next handle constant cases, those for which we can return 1 even
2625 if ONLY_CONST is set. */
2626 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2627 switch (TREE_CODE (arg0))
2628 {
2629 case INTEGER_CST:
2630 return tree_int_cst_equal (arg0, arg1);
2631
2632 case REAL_CST:
2633 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2634 TREE_REAL_CST (arg1)))
2635 return 1;
2636
2637
2638 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2639 {
2640 /* If we do not distinguish between signed and unsigned zero,
2641 consider them equal. */
2642 if (real_zerop (arg0) && real_zerop (arg1))
2643 return 1;
2644 }
2645 return 0;
2646
2647 case VECTOR_CST:
2648 {
2649 tree v1, v2;
2650
2651 v1 = TREE_VECTOR_CST_ELTS (arg0);
2652 v2 = TREE_VECTOR_CST_ELTS (arg1);
2653 while (v1 && v2)
2654 {
2655 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2656 flags))
2657 return 0;
2658 v1 = TREE_CHAIN (v1);
2659 v2 = TREE_CHAIN (v2);
2660 }
2661
2662 return v1 == v2;
2663 }
2664
2665 case COMPLEX_CST:
2666 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2667 flags)
2668 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2669 flags));
2670
2671 case STRING_CST:
2672 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2673 && ! memcmp (TREE_STRING_POINTER (arg0),
2674 TREE_STRING_POINTER (arg1),
2675 TREE_STRING_LENGTH (arg0)));
2676
2677 case ADDR_EXPR:
2678 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2679 0);
2680 default:
2681 break;
2682 }
2683
2684 if (flags & OEP_ONLY_CONST)
2685 return 0;
2686
2687 /* Define macros to test an operand from arg0 and arg1 for equality and a
2688 variant that allows null and views null as being different from any
2689 non-null value. In the latter case, if either is null, the both
2690 must be; otherwise, do the normal comparison. */
2691 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2692 TREE_OPERAND (arg1, N), flags)
2693
2694 #define OP_SAME_WITH_NULL(N) \
2695 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2696 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2697
2698 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2699 {
2700 case tcc_unary:
2701 /* Two conversions are equal only if signedness and modes match. */
2702 switch (TREE_CODE (arg0))
2703 {
2704 case NOP_EXPR:
2705 case CONVERT_EXPR:
2706 case FIX_TRUNC_EXPR:
2707 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2708 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2709 return 0;
2710 break;
2711 default:
2712 break;
2713 }
2714
2715 return OP_SAME (0);
2716
2717
2718 case tcc_comparison:
2719 case tcc_binary:
2720 if (OP_SAME (0) && OP_SAME (1))
2721 return 1;
2722
2723 /* For commutative ops, allow the other order. */
2724 return (commutative_tree_code (TREE_CODE (arg0))
2725 && operand_equal_p (TREE_OPERAND (arg0, 0),
2726 TREE_OPERAND (arg1, 1), flags)
2727 && operand_equal_p (TREE_OPERAND (arg0, 1),
2728 TREE_OPERAND (arg1, 0), flags));
2729
2730 case tcc_reference:
2731 /* If either of the pointer (or reference) expressions we are
2732 dereferencing contain a side effect, these cannot be equal. */
2733 if (TREE_SIDE_EFFECTS (arg0)
2734 || TREE_SIDE_EFFECTS (arg1))
2735 return 0;
2736
2737 switch (TREE_CODE (arg0))
2738 {
2739 case INDIRECT_REF:
2740 case ALIGN_INDIRECT_REF:
2741 case MISALIGNED_INDIRECT_REF:
2742 case REALPART_EXPR:
2743 case IMAGPART_EXPR:
2744 return OP_SAME (0);
2745
2746 case ARRAY_REF:
2747 case ARRAY_RANGE_REF:
2748 /* Operands 2 and 3 may be null. */
2749 return (OP_SAME (0)
2750 && OP_SAME (1)
2751 && OP_SAME_WITH_NULL (2)
2752 && OP_SAME_WITH_NULL (3));
2753
2754 case COMPONENT_REF:
2755 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2756 may be NULL when we're called to compare MEM_EXPRs. */
2757 return OP_SAME_WITH_NULL (0)
2758 && OP_SAME (1)
2759 && OP_SAME_WITH_NULL (2);
2760
2761 case BIT_FIELD_REF:
2762 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2763
2764 default:
2765 return 0;
2766 }
2767
2768 case tcc_expression:
2769 switch (TREE_CODE (arg0))
2770 {
2771 case ADDR_EXPR:
2772 case TRUTH_NOT_EXPR:
2773 return OP_SAME (0);
2774
2775 case TRUTH_ANDIF_EXPR:
2776 case TRUTH_ORIF_EXPR:
2777 return OP_SAME (0) && OP_SAME (1);
2778
2779 case TRUTH_AND_EXPR:
2780 case TRUTH_OR_EXPR:
2781 case TRUTH_XOR_EXPR:
2782 if (OP_SAME (0) && OP_SAME (1))
2783 return 1;
2784
2785 /* Otherwise take into account this is a commutative operation. */
2786 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2787 TREE_OPERAND (arg1, 1), flags)
2788 && operand_equal_p (TREE_OPERAND (arg0, 1),
2789 TREE_OPERAND (arg1, 0), flags));
2790
2791 case CALL_EXPR:
2792 /* If the CALL_EXPRs call different functions, then they
2793 clearly can not be equal. */
2794 if (!OP_SAME (0))
2795 return 0;
2796
2797 {
2798 unsigned int cef = call_expr_flags (arg0);
2799 if (flags & OEP_PURE_SAME)
2800 cef &= ECF_CONST | ECF_PURE;
2801 else
2802 cef &= ECF_CONST;
2803 if (!cef)
2804 return 0;
2805 }
2806
2807 /* Now see if all the arguments are the same. operand_equal_p
2808 does not handle TREE_LIST, so we walk the operands here
2809 feeding them to operand_equal_p. */
2810 arg0 = TREE_OPERAND (arg0, 1);
2811 arg1 = TREE_OPERAND (arg1, 1);
2812 while (arg0 && arg1)
2813 {
2814 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2815 flags))
2816 return 0;
2817
2818 arg0 = TREE_CHAIN (arg0);
2819 arg1 = TREE_CHAIN (arg1);
2820 }
2821
2822 /* If we get here and both argument lists are exhausted
2823 then the CALL_EXPRs are equal. */
2824 return ! (arg0 || arg1);
2825
2826 default:
2827 return 0;
2828 }
2829
2830 case tcc_declaration:
2831 /* Consider __builtin_sqrt equal to sqrt. */
2832 return (TREE_CODE (arg0) == FUNCTION_DECL
2833 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2834 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2835 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2836
2837 default:
2838 return 0;
2839 }
2840
2841 #undef OP_SAME
2842 #undef OP_SAME_WITH_NULL
2843 }
2844 \f
2845 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2846 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2847
2848 When in doubt, return 0. */
2849
2850 static int
2851 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2852 {
2853 int unsignedp1, unsignedpo;
2854 tree primarg0, primarg1, primother;
2855 unsigned int correct_width;
2856
2857 if (operand_equal_p (arg0, arg1, 0))
2858 return 1;
2859
2860 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2861 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2862 return 0;
2863
2864 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2865 and see if the inner values are the same. This removes any
2866 signedness comparison, which doesn't matter here. */
2867 primarg0 = arg0, primarg1 = arg1;
2868 STRIP_NOPS (primarg0);
2869 STRIP_NOPS (primarg1);
2870 if (operand_equal_p (primarg0, primarg1, 0))
2871 return 1;
2872
2873 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2874 actual comparison operand, ARG0.
2875
2876 First throw away any conversions to wider types
2877 already present in the operands. */
2878
2879 primarg1 = get_narrower (arg1, &unsignedp1);
2880 primother = get_narrower (other, &unsignedpo);
2881
2882 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2883 if (unsignedp1 == unsignedpo
2884 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2885 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2886 {
2887 tree type = TREE_TYPE (arg0);
2888
2889 /* Make sure shorter operand is extended the right way
2890 to match the longer operand. */
2891 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2892 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2893
2894 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2895 return 1;
2896 }
2897
2898 return 0;
2899 }
2900 \f
2901 /* See if ARG is an expression that is either a comparison or is performing
2902 arithmetic on comparisons. The comparisons must only be comparing
2903 two different values, which will be stored in *CVAL1 and *CVAL2; if
2904 they are nonzero it means that some operands have already been found.
2905 No variables may be used anywhere else in the expression except in the
2906 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2907 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2908
2909 If this is true, return 1. Otherwise, return zero. */
2910
2911 static int
2912 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2913 {
2914 enum tree_code code = TREE_CODE (arg);
2915 enum tree_code_class class = TREE_CODE_CLASS (code);
2916
2917 /* We can handle some of the tcc_expression cases here. */
2918 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2919 class = tcc_unary;
2920 else if (class == tcc_expression
2921 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2922 || code == COMPOUND_EXPR))
2923 class = tcc_binary;
2924
2925 else if (class == tcc_expression && code == SAVE_EXPR
2926 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2927 {
2928 /* If we've already found a CVAL1 or CVAL2, this expression is
2929 two complex to handle. */
2930 if (*cval1 || *cval2)
2931 return 0;
2932
2933 class = tcc_unary;
2934 *save_p = 1;
2935 }
2936
2937 switch (class)
2938 {
2939 case tcc_unary:
2940 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2941
2942 case tcc_binary:
2943 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2944 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2945 cval1, cval2, save_p));
2946
2947 case tcc_constant:
2948 return 1;
2949
2950 case tcc_expression:
2951 if (code == COND_EXPR)
2952 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2953 cval1, cval2, save_p)
2954 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2955 cval1, cval2, save_p)
2956 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2957 cval1, cval2, save_p));
2958 return 0;
2959
2960 case tcc_comparison:
2961 /* First see if we can handle the first operand, then the second. For
2962 the second operand, we know *CVAL1 can't be zero. It must be that
2963 one side of the comparison is each of the values; test for the
2964 case where this isn't true by failing if the two operands
2965 are the same. */
2966
2967 if (operand_equal_p (TREE_OPERAND (arg, 0),
2968 TREE_OPERAND (arg, 1), 0))
2969 return 0;
2970
2971 if (*cval1 == 0)
2972 *cval1 = TREE_OPERAND (arg, 0);
2973 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2974 ;
2975 else if (*cval2 == 0)
2976 *cval2 = TREE_OPERAND (arg, 0);
2977 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2978 ;
2979 else
2980 return 0;
2981
2982 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2983 ;
2984 else if (*cval2 == 0)
2985 *cval2 = TREE_OPERAND (arg, 1);
2986 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2987 ;
2988 else
2989 return 0;
2990
2991 return 1;
2992
2993 default:
2994 return 0;
2995 }
2996 }
2997 \f
2998 /* ARG is a tree that is known to contain just arithmetic operations and
2999 comparisons. Evaluate the operations in the tree substituting NEW0 for
3000 any occurrence of OLD0 as an operand of a comparison and likewise for
3001 NEW1 and OLD1. */
3002
3003 static tree
3004 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3005 {
3006 tree type = TREE_TYPE (arg);
3007 enum tree_code code = TREE_CODE (arg);
3008 enum tree_code_class class = TREE_CODE_CLASS (code);
3009
3010 /* We can handle some of the tcc_expression cases here. */
3011 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3012 class = tcc_unary;
3013 else if (class == tcc_expression
3014 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3015 class = tcc_binary;
3016
3017 switch (class)
3018 {
3019 case tcc_unary:
3020 return fold_build1 (code, type,
3021 eval_subst (TREE_OPERAND (arg, 0),
3022 old0, new0, old1, new1));
3023
3024 case tcc_binary:
3025 return fold_build2 (code, type,
3026 eval_subst (TREE_OPERAND (arg, 0),
3027 old0, new0, old1, new1),
3028 eval_subst (TREE_OPERAND (arg, 1),
3029 old0, new0, old1, new1));
3030
3031 case tcc_expression:
3032 switch (code)
3033 {
3034 case SAVE_EXPR:
3035 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3036
3037 case COMPOUND_EXPR:
3038 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3039
3040 case COND_EXPR:
3041 return fold_build3 (code, type,
3042 eval_subst (TREE_OPERAND (arg, 0),
3043 old0, new0, old1, new1),
3044 eval_subst (TREE_OPERAND (arg, 1),
3045 old0, new0, old1, new1),
3046 eval_subst (TREE_OPERAND (arg, 2),
3047 old0, new0, old1, new1));
3048 default:
3049 break;
3050 }
3051 /* Fall through - ??? */
3052
3053 case tcc_comparison:
3054 {
3055 tree arg0 = TREE_OPERAND (arg, 0);
3056 tree arg1 = TREE_OPERAND (arg, 1);
3057
3058 /* We need to check both for exact equality and tree equality. The
3059 former will be true if the operand has a side-effect. In that
3060 case, we know the operand occurred exactly once. */
3061
3062 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3063 arg0 = new0;
3064 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3065 arg0 = new1;
3066
3067 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3068 arg1 = new0;
3069 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3070 arg1 = new1;
3071
3072 return fold_build2 (code, type, arg0, arg1);
3073 }
3074
3075 default:
3076 return arg;
3077 }
3078 }
3079 \f
3080 /* Return a tree for the case when the result of an expression is RESULT
3081 converted to TYPE and OMITTED was previously an operand of the expression
3082 but is now not needed (e.g., we folded OMITTED * 0).
3083
3084 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3085 the conversion of RESULT to TYPE. */
3086
3087 tree
3088 omit_one_operand (tree type, tree result, tree omitted)
3089 {
3090 tree t = fold_convert (type, result);
3091
3092 if (TREE_SIDE_EFFECTS (omitted))
3093 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3094
3095 return non_lvalue (t);
3096 }
3097
3098 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3099
3100 static tree
3101 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3102 {
3103 tree t = fold_convert (type, result);
3104
3105 if (TREE_SIDE_EFFECTS (omitted))
3106 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3107
3108 return pedantic_non_lvalue (t);
3109 }
3110
3111 /* Return a tree for the case when the result of an expression is RESULT
3112 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3113 of the expression but are now not needed.
3114
3115 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3116 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3117 evaluated before OMITTED2. Otherwise, if neither has side effects,
3118 just do the conversion of RESULT to TYPE. */
3119
3120 tree
3121 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3122 {
3123 tree t = fold_convert (type, result);
3124
3125 if (TREE_SIDE_EFFECTS (omitted2))
3126 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3127 if (TREE_SIDE_EFFECTS (omitted1))
3128 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3129
3130 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3131 }
3132
3133 \f
3134 /* Return a simplified tree node for the truth-negation of ARG. This
3135 never alters ARG itself. We assume that ARG is an operation that
3136 returns a truth value (0 or 1).
3137
3138 FIXME: one would think we would fold the result, but it causes
3139 problems with the dominator optimizer. */
3140
3141 tree
3142 fold_truth_not_expr (tree arg)
3143 {
3144 tree type = TREE_TYPE (arg);
3145 enum tree_code code = TREE_CODE (arg);
3146
3147 /* If this is a comparison, we can simply invert it, except for
3148 floating-point non-equality comparisons, in which case we just
3149 enclose a TRUTH_NOT_EXPR around what we have. */
3150
3151 if (TREE_CODE_CLASS (code) == tcc_comparison)
3152 {
3153 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3154 if (FLOAT_TYPE_P (op_type)
3155 && flag_trapping_math
3156 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3157 && code != NE_EXPR && code != EQ_EXPR)
3158 return NULL_TREE;
3159 else
3160 {
3161 code = invert_tree_comparison (code,
3162 HONOR_NANS (TYPE_MODE (op_type)));
3163 if (code == ERROR_MARK)
3164 return NULL_TREE;
3165 else
3166 return build2 (code, type,
3167 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3168 }
3169 }
3170
3171 switch (code)
3172 {
3173 case INTEGER_CST:
3174 return constant_boolean_node (integer_zerop (arg), type);
3175
3176 case TRUTH_AND_EXPR:
3177 return build2 (TRUTH_OR_EXPR, type,
3178 invert_truthvalue (TREE_OPERAND (arg, 0)),
3179 invert_truthvalue (TREE_OPERAND (arg, 1)));
3180
3181 case TRUTH_OR_EXPR:
3182 return build2 (TRUTH_AND_EXPR, type,
3183 invert_truthvalue (TREE_OPERAND (arg, 0)),
3184 invert_truthvalue (TREE_OPERAND (arg, 1)));
3185
3186 case TRUTH_XOR_EXPR:
3187 /* Here we can invert either operand. We invert the first operand
3188 unless the second operand is a TRUTH_NOT_EXPR in which case our
3189 result is the XOR of the first operand with the inside of the
3190 negation of the second operand. */
3191
3192 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3193 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3194 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3195 else
3196 return build2 (TRUTH_XOR_EXPR, type,
3197 invert_truthvalue (TREE_OPERAND (arg, 0)),
3198 TREE_OPERAND (arg, 1));
3199
3200 case TRUTH_ANDIF_EXPR:
3201 return build2 (TRUTH_ORIF_EXPR, type,
3202 invert_truthvalue (TREE_OPERAND (arg, 0)),
3203 invert_truthvalue (TREE_OPERAND (arg, 1)));
3204
3205 case TRUTH_ORIF_EXPR:
3206 return build2 (TRUTH_ANDIF_EXPR, type,
3207 invert_truthvalue (TREE_OPERAND (arg, 0)),
3208 invert_truthvalue (TREE_OPERAND (arg, 1)));
3209
3210 case TRUTH_NOT_EXPR:
3211 return TREE_OPERAND (arg, 0);
3212
3213 case COND_EXPR:
3214 {
3215 tree arg1 = TREE_OPERAND (arg, 1);
3216 tree arg2 = TREE_OPERAND (arg, 2);
3217 /* A COND_EXPR may have a throw as one operand, which
3218 then has void type. Just leave void operands
3219 as they are. */
3220 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3221 VOID_TYPE_P (TREE_TYPE (arg1))
3222 ? arg1 : invert_truthvalue (arg1),
3223 VOID_TYPE_P (TREE_TYPE (arg2))
3224 ? arg2 : invert_truthvalue (arg2));
3225 }
3226
3227 case COMPOUND_EXPR:
3228 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3229 invert_truthvalue (TREE_OPERAND (arg, 1)));
3230
3231 case NON_LVALUE_EXPR:
3232 return invert_truthvalue (TREE_OPERAND (arg, 0));
3233
3234 case NOP_EXPR:
3235 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3236 return build1 (TRUTH_NOT_EXPR, type, arg);
3237
3238 case CONVERT_EXPR:
3239 case FLOAT_EXPR:
3240 return build1 (TREE_CODE (arg), type,
3241 invert_truthvalue (TREE_OPERAND (arg, 0)));
3242
3243 case BIT_AND_EXPR:
3244 if (!integer_onep (TREE_OPERAND (arg, 1)))
3245 break;
3246 return build2 (EQ_EXPR, type, arg,
3247 build_int_cst (type, 0));
3248
3249 case SAVE_EXPR:
3250 return build1 (TRUTH_NOT_EXPR, type, arg);
3251
3252 case CLEANUP_POINT_EXPR:
3253 return build1 (CLEANUP_POINT_EXPR, type,
3254 invert_truthvalue (TREE_OPERAND (arg, 0)));
3255
3256 default:
3257 break;
3258 }
3259
3260 return NULL_TREE;
3261 }
3262
3263 /* Return a simplified tree node for the truth-negation of ARG. This
3264 never alters ARG itself. We assume that ARG is an operation that
3265 returns a truth value (0 or 1).
3266
3267 FIXME: one would think we would fold the result, but it causes
3268 problems with the dominator optimizer. */
3269
3270 tree
3271 invert_truthvalue (tree arg)
3272 {
3273 tree tem;
3274
3275 if (TREE_CODE (arg) == ERROR_MARK)
3276 return arg;
3277
3278 tem = fold_truth_not_expr (arg);
3279 if (!tem)
3280 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3281
3282 return tem;
3283 }
3284
3285 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3286 operands are another bit-wise operation with a common input. If so,
3287 distribute the bit operations to save an operation and possibly two if
3288 constants are involved. For example, convert
3289 (A | B) & (A | C) into A | (B & C)
3290 Further simplification will occur if B and C are constants.
3291
3292 If this optimization cannot be done, 0 will be returned. */
3293
3294 static tree
3295 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3296 {
3297 tree common;
3298 tree left, right;
3299
3300 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3301 || TREE_CODE (arg0) == code
3302 || (TREE_CODE (arg0) != BIT_AND_EXPR
3303 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3304 return 0;
3305
3306 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3307 {
3308 common = TREE_OPERAND (arg0, 0);
3309 left = TREE_OPERAND (arg0, 1);
3310 right = TREE_OPERAND (arg1, 1);
3311 }
3312 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3313 {
3314 common = TREE_OPERAND (arg0, 0);
3315 left = TREE_OPERAND (arg0, 1);
3316 right = TREE_OPERAND (arg1, 0);
3317 }
3318 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3319 {
3320 common = TREE_OPERAND (arg0, 1);
3321 left = TREE_OPERAND (arg0, 0);
3322 right = TREE_OPERAND (arg1, 1);
3323 }
3324 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3325 {
3326 common = TREE_OPERAND (arg0, 1);
3327 left = TREE_OPERAND (arg0, 0);
3328 right = TREE_OPERAND (arg1, 0);
3329 }
3330 else
3331 return 0;
3332
3333 return fold_build2 (TREE_CODE (arg0), type, common,
3334 fold_build2 (code, type, left, right));
3335 }
3336
3337 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3338 with code CODE. This optimization is unsafe. */
3339 static tree
3340 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3341 {
3342 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3343 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3344
3345 /* (A / C) +- (B / C) -> (A +- B) / C. */
3346 if (mul0 == mul1
3347 && operand_equal_p (TREE_OPERAND (arg0, 1),
3348 TREE_OPERAND (arg1, 1), 0))
3349 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3350 fold_build2 (code, type,
3351 TREE_OPERAND (arg0, 0),
3352 TREE_OPERAND (arg1, 0)),
3353 TREE_OPERAND (arg0, 1));
3354
3355 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3356 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3357 TREE_OPERAND (arg1, 0), 0)
3358 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3359 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3360 {
3361 REAL_VALUE_TYPE r0, r1;
3362 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3363 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3364 if (!mul0)
3365 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3366 if (!mul1)
3367 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3368 real_arithmetic (&r0, code, &r0, &r1);
3369 return fold_build2 (MULT_EXPR, type,
3370 TREE_OPERAND (arg0, 0),
3371 build_real (type, r0));
3372 }
3373
3374 return NULL_TREE;
3375 }
3376 \f
3377 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3378 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3379
3380 static tree
3381 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3382 int unsignedp)
3383 {
3384 tree result;
3385
3386 if (bitpos == 0)
3387 {
3388 tree size = TYPE_SIZE (TREE_TYPE (inner));
3389 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3390 || POINTER_TYPE_P (TREE_TYPE (inner)))
3391 && host_integerp (size, 0)
3392 && tree_low_cst (size, 0) == bitsize)
3393 return fold_convert (type, inner);
3394 }
3395
3396 result = build3 (BIT_FIELD_REF, type, inner,
3397 size_int (bitsize), bitsize_int (bitpos));
3398
3399 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3400
3401 return result;
3402 }
3403
3404 /* Optimize a bit-field compare.
3405
3406 There are two cases: First is a compare against a constant and the
3407 second is a comparison of two items where the fields are at the same
3408 bit position relative to the start of a chunk (byte, halfword, word)
3409 large enough to contain it. In these cases we can avoid the shift
3410 implicit in bitfield extractions.
3411
3412 For constants, we emit a compare of the shifted constant with the
3413 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3414 compared. For two fields at the same position, we do the ANDs with the
3415 similar mask and compare the result of the ANDs.
3416
3417 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3418 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3419 are the left and right operands of the comparison, respectively.
3420
3421 If the optimization described above can be done, we return the resulting
3422 tree. Otherwise we return zero. */
3423
3424 static tree
3425 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3426 tree lhs, tree rhs)
3427 {
3428 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3429 tree type = TREE_TYPE (lhs);
3430 tree signed_type, unsigned_type;
3431 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3432 enum machine_mode lmode, rmode, nmode;
3433 int lunsignedp, runsignedp;
3434 int lvolatilep = 0, rvolatilep = 0;
3435 tree linner, rinner = NULL_TREE;
3436 tree mask;
3437 tree offset;
3438
3439 /* Get all the information about the extractions being done. If the bit size
3440 if the same as the size of the underlying object, we aren't doing an
3441 extraction at all and so can do nothing. We also don't want to
3442 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3443 then will no longer be able to replace it. */
3444 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3445 &lunsignedp, &lvolatilep, false);
3446 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3447 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3448 return 0;
3449
3450 if (!const_p)
3451 {
3452 /* If this is not a constant, we can only do something if bit positions,
3453 sizes, and signedness are the same. */
3454 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3455 &runsignedp, &rvolatilep, false);
3456
3457 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3458 || lunsignedp != runsignedp || offset != 0
3459 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3460 return 0;
3461 }
3462
3463 /* See if we can find a mode to refer to this field. We should be able to,
3464 but fail if we can't. */
3465 nmode = get_best_mode (lbitsize, lbitpos,
3466 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3467 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3468 TYPE_ALIGN (TREE_TYPE (rinner))),
3469 word_mode, lvolatilep || rvolatilep);
3470 if (nmode == VOIDmode)
3471 return 0;
3472
3473 /* Set signed and unsigned types of the precision of this mode for the
3474 shifts below. */
3475 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3476 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3477
3478 /* Compute the bit position and size for the new reference and our offset
3479 within it. If the new reference is the same size as the original, we
3480 won't optimize anything, so return zero. */
3481 nbitsize = GET_MODE_BITSIZE (nmode);
3482 nbitpos = lbitpos & ~ (nbitsize - 1);
3483 lbitpos -= nbitpos;
3484 if (nbitsize == lbitsize)
3485 return 0;
3486
3487 if (BYTES_BIG_ENDIAN)
3488 lbitpos = nbitsize - lbitsize - lbitpos;
3489
3490 /* Make the mask to be used against the extracted field. */
3491 mask = build_int_cst_type (unsigned_type, -1);
3492 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3493 mask = const_binop (RSHIFT_EXPR, mask,
3494 size_int (nbitsize - lbitsize - lbitpos), 0);
3495
3496 if (! const_p)
3497 /* If not comparing with constant, just rework the comparison
3498 and return. */
3499 return fold_build2 (code, compare_type,
3500 fold_build2 (BIT_AND_EXPR, unsigned_type,
3501 make_bit_field_ref (linner,
3502 unsigned_type,
3503 nbitsize, nbitpos,
3504 1),
3505 mask),
3506 fold_build2 (BIT_AND_EXPR, unsigned_type,
3507 make_bit_field_ref (rinner,
3508 unsigned_type,
3509 nbitsize, nbitpos,
3510 1),
3511 mask));
3512
3513 /* Otherwise, we are handling the constant case. See if the constant is too
3514 big for the field. Warn and return a tree of for 0 (false) if so. We do
3515 this not only for its own sake, but to avoid having to test for this
3516 error case below. If we didn't, we might generate wrong code.
3517
3518 For unsigned fields, the constant shifted right by the field length should
3519 be all zero. For signed fields, the high-order bits should agree with
3520 the sign bit. */
3521
3522 if (lunsignedp)
3523 {
3524 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3525 fold_convert (unsigned_type, rhs),
3526 size_int (lbitsize), 0)))
3527 {
3528 warning (0, "comparison is always %d due to width of bit-field",
3529 code == NE_EXPR);
3530 return constant_boolean_node (code == NE_EXPR, compare_type);
3531 }
3532 }
3533 else
3534 {
3535 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3536 size_int (lbitsize - 1), 0);
3537 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3538 {
3539 warning (0, "comparison is always %d due to width of bit-field",
3540 code == NE_EXPR);
3541 return constant_boolean_node (code == NE_EXPR, compare_type);
3542 }
3543 }
3544
3545 /* Single-bit compares should always be against zero. */
3546 if (lbitsize == 1 && ! integer_zerop (rhs))
3547 {
3548 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3549 rhs = build_int_cst (type, 0);
3550 }
3551
3552 /* Make a new bitfield reference, shift the constant over the
3553 appropriate number of bits and mask it with the computed mask
3554 (in case this was a signed field). If we changed it, make a new one. */
3555 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3556 if (lvolatilep)
3557 {
3558 TREE_SIDE_EFFECTS (lhs) = 1;
3559 TREE_THIS_VOLATILE (lhs) = 1;
3560 }
3561
3562 rhs = const_binop (BIT_AND_EXPR,
3563 const_binop (LSHIFT_EXPR,
3564 fold_convert (unsigned_type, rhs),
3565 size_int (lbitpos), 0),
3566 mask, 0);
3567
3568 return build2 (code, compare_type,
3569 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3570 rhs);
3571 }
3572 \f
3573 /* Subroutine for fold_truthop: decode a field reference.
3574
3575 If EXP is a comparison reference, we return the innermost reference.
3576
3577 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3578 set to the starting bit number.
3579
3580 If the innermost field can be completely contained in a mode-sized
3581 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3582
3583 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3584 otherwise it is not changed.
3585
3586 *PUNSIGNEDP is set to the signedness of the field.
3587
3588 *PMASK is set to the mask used. This is either contained in a
3589 BIT_AND_EXPR or derived from the width of the field.
3590
3591 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3592
3593 Return 0 if this is not a component reference or is one that we can't
3594 do anything with. */
3595
3596 static tree
3597 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3598 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3599 int *punsignedp, int *pvolatilep,
3600 tree *pmask, tree *pand_mask)
3601 {
3602 tree outer_type = 0;
3603 tree and_mask = 0;
3604 tree mask, inner, offset;
3605 tree unsigned_type;
3606 unsigned int precision;
3607
3608 /* All the optimizations using this function assume integer fields.
3609 There are problems with FP fields since the type_for_size call
3610 below can fail for, e.g., XFmode. */
3611 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3612 return 0;
3613
3614 /* We are interested in the bare arrangement of bits, so strip everything
3615 that doesn't affect the machine mode. However, record the type of the
3616 outermost expression if it may matter below. */
3617 if (TREE_CODE (exp) == NOP_EXPR
3618 || TREE_CODE (exp) == CONVERT_EXPR
3619 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3620 outer_type = TREE_TYPE (exp);
3621 STRIP_NOPS (exp);
3622
3623 if (TREE_CODE (exp) == BIT_AND_EXPR)
3624 {
3625 and_mask = TREE_OPERAND (exp, 1);
3626 exp = TREE_OPERAND (exp, 0);
3627 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3628 if (TREE_CODE (and_mask) != INTEGER_CST)
3629 return 0;
3630 }
3631
3632 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3633 punsignedp, pvolatilep, false);
3634 if ((inner == exp && and_mask == 0)
3635 || *pbitsize < 0 || offset != 0
3636 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3637 return 0;
3638
3639 /* If the number of bits in the reference is the same as the bitsize of
3640 the outer type, then the outer type gives the signedness. Otherwise
3641 (in case of a small bitfield) the signedness is unchanged. */
3642 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3643 *punsignedp = TYPE_UNSIGNED (outer_type);
3644
3645 /* Compute the mask to access the bitfield. */
3646 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3647 precision = TYPE_PRECISION (unsigned_type);
3648
3649 mask = build_int_cst_type (unsigned_type, -1);
3650
3651 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3652 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3653
3654 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3655 if (and_mask != 0)
3656 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3657 fold_convert (unsigned_type, and_mask), mask);
3658
3659 *pmask = mask;
3660 *pand_mask = and_mask;
3661 return inner;
3662 }
3663
3664 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3665 bit positions. */
3666
3667 static int
3668 all_ones_mask_p (tree mask, int size)
3669 {
3670 tree type = TREE_TYPE (mask);
3671 unsigned int precision = TYPE_PRECISION (type);
3672 tree tmask;
3673
3674 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3675
3676 return
3677 tree_int_cst_equal (mask,
3678 const_binop (RSHIFT_EXPR,
3679 const_binop (LSHIFT_EXPR, tmask,
3680 size_int (precision - size),
3681 0),
3682 size_int (precision - size), 0));
3683 }
3684
3685 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3686 represents the sign bit of EXP's type. If EXP represents a sign
3687 or zero extension, also test VAL against the unextended type.
3688 The return value is the (sub)expression whose sign bit is VAL,
3689 or NULL_TREE otherwise. */
3690
3691 static tree
3692 sign_bit_p (tree exp, tree val)
3693 {
3694 unsigned HOST_WIDE_INT mask_lo, lo;
3695 HOST_WIDE_INT mask_hi, hi;
3696 int width;
3697 tree t;
3698
3699 /* Tree EXP must have an integral type. */
3700 t = TREE_TYPE (exp);
3701 if (! INTEGRAL_TYPE_P (t))
3702 return NULL_TREE;
3703
3704 /* Tree VAL must be an integer constant. */
3705 if (TREE_CODE (val) != INTEGER_CST
3706 || TREE_OVERFLOW (val))
3707 return NULL_TREE;
3708
3709 width = TYPE_PRECISION (t);
3710 if (width > HOST_BITS_PER_WIDE_INT)
3711 {
3712 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3713 lo = 0;
3714
3715 mask_hi = ((unsigned HOST_WIDE_INT) -1
3716 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3717 mask_lo = -1;
3718 }
3719 else
3720 {
3721 hi = 0;
3722 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3723
3724 mask_hi = 0;
3725 mask_lo = ((unsigned HOST_WIDE_INT) -1
3726 >> (HOST_BITS_PER_WIDE_INT - width));
3727 }
3728
3729 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3730 treat VAL as if it were unsigned. */
3731 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3732 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3733 return exp;
3734
3735 /* Handle extension from a narrower type. */
3736 if (TREE_CODE (exp) == NOP_EXPR
3737 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3738 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3739
3740 return NULL_TREE;
3741 }
3742
3743 /* Subroutine for fold_truthop: determine if an operand is simple enough
3744 to be evaluated unconditionally. */
3745
3746 static int
3747 simple_operand_p (tree exp)
3748 {
3749 /* Strip any conversions that don't change the machine mode. */
3750 STRIP_NOPS (exp);
3751
3752 return (CONSTANT_CLASS_P (exp)
3753 || TREE_CODE (exp) == SSA_NAME
3754 || (DECL_P (exp)
3755 && ! TREE_ADDRESSABLE (exp)
3756 && ! TREE_THIS_VOLATILE (exp)
3757 && ! DECL_NONLOCAL (exp)
3758 /* Don't regard global variables as simple. They may be
3759 allocated in ways unknown to the compiler (shared memory,
3760 #pragma weak, etc). */
3761 && ! TREE_PUBLIC (exp)
3762 && ! DECL_EXTERNAL (exp)
3763 /* Loading a static variable is unduly expensive, but global
3764 registers aren't expensive. */
3765 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3766 }
3767 \f
3768 /* The following functions are subroutines to fold_range_test and allow it to
3769 try to change a logical combination of comparisons into a range test.
3770
3771 For example, both
3772 X == 2 || X == 3 || X == 4 || X == 5
3773 and
3774 X >= 2 && X <= 5
3775 are converted to
3776 (unsigned) (X - 2) <= 3
3777
3778 We describe each set of comparisons as being either inside or outside
3779 a range, using a variable named like IN_P, and then describe the
3780 range with a lower and upper bound. If one of the bounds is omitted,
3781 it represents either the highest or lowest value of the type.
3782
3783 In the comments below, we represent a range by two numbers in brackets
3784 preceded by a "+" to designate being inside that range, or a "-" to
3785 designate being outside that range, so the condition can be inverted by
3786 flipping the prefix. An omitted bound is represented by a "-". For
3787 example, "- [-, 10]" means being outside the range starting at the lowest
3788 possible value and ending at 10, in other words, being greater than 10.
3789 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3790 always false.
3791
3792 We set up things so that the missing bounds are handled in a consistent
3793 manner so neither a missing bound nor "true" and "false" need to be
3794 handled using a special case. */
3795
3796 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3797 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3798 and UPPER1_P are nonzero if the respective argument is an upper bound
3799 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3800 must be specified for a comparison. ARG1 will be converted to ARG0's
3801 type if both are specified. */
3802
3803 static tree
3804 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3805 tree arg1, int upper1_p)
3806 {
3807 tree tem;
3808 int result;
3809 int sgn0, sgn1;
3810
3811 /* If neither arg represents infinity, do the normal operation.
3812 Else, if not a comparison, return infinity. Else handle the special
3813 comparison rules. Note that most of the cases below won't occur, but
3814 are handled for consistency. */
3815
3816 if (arg0 != 0 && arg1 != 0)
3817 {
3818 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3819 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3820 STRIP_NOPS (tem);
3821 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3822 }
3823
3824 if (TREE_CODE_CLASS (code) != tcc_comparison)
3825 return 0;
3826
3827 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3828 for neither. In real maths, we cannot assume open ended ranges are
3829 the same. But, this is computer arithmetic, where numbers are finite.
3830 We can therefore make the transformation of any unbounded range with
3831 the value Z, Z being greater than any representable number. This permits
3832 us to treat unbounded ranges as equal. */
3833 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3834 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3835 switch (code)
3836 {
3837 case EQ_EXPR:
3838 result = sgn0 == sgn1;
3839 break;
3840 case NE_EXPR:
3841 result = sgn0 != sgn1;
3842 break;
3843 case LT_EXPR:
3844 result = sgn0 < sgn1;
3845 break;
3846 case LE_EXPR:
3847 result = sgn0 <= sgn1;
3848 break;
3849 case GT_EXPR:
3850 result = sgn0 > sgn1;
3851 break;
3852 case GE_EXPR:
3853 result = sgn0 >= sgn1;
3854 break;
3855 default:
3856 gcc_unreachable ();
3857 }
3858
3859 return constant_boolean_node (result, type);
3860 }
3861 \f
3862 /* Given EXP, a logical expression, set the range it is testing into
3863 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3864 actually being tested. *PLOW and *PHIGH will be made of the same type
3865 as the returned expression. If EXP is not a comparison, we will most
3866 likely not be returning a useful value and range. */
3867
3868 static tree
3869 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3870 {
3871 enum tree_code code;
3872 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3873 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3874 int in_p, n_in_p;
3875 tree low, high, n_low, n_high;
3876
3877 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3878 and see if we can refine the range. Some of the cases below may not
3879 happen, but it doesn't seem worth worrying about this. We "continue"
3880 the outer loop when we've changed something; otherwise we "break"
3881 the switch, which will "break" the while. */
3882
3883 in_p = 0;
3884 low = high = build_int_cst (TREE_TYPE (exp), 0);
3885
3886 while (1)
3887 {
3888 code = TREE_CODE (exp);
3889 exp_type = TREE_TYPE (exp);
3890
3891 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3892 {
3893 if (TREE_CODE_LENGTH (code) > 0)
3894 arg0 = TREE_OPERAND (exp, 0);
3895 if (TREE_CODE_CLASS (code) == tcc_comparison
3896 || TREE_CODE_CLASS (code) == tcc_unary
3897 || TREE_CODE_CLASS (code) == tcc_binary)
3898 arg0_type = TREE_TYPE (arg0);
3899 if (TREE_CODE_CLASS (code) == tcc_binary
3900 || TREE_CODE_CLASS (code) == tcc_comparison
3901 || (TREE_CODE_CLASS (code) == tcc_expression
3902 && TREE_CODE_LENGTH (code) > 1))
3903 arg1 = TREE_OPERAND (exp, 1);
3904 }
3905
3906 switch (code)
3907 {
3908 case TRUTH_NOT_EXPR:
3909 in_p = ! in_p, exp = arg0;
3910 continue;
3911
3912 case EQ_EXPR: case NE_EXPR:
3913 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3914 /* We can only do something if the range is testing for zero
3915 and if the second operand is an integer constant. Note that
3916 saying something is "in" the range we make is done by
3917 complementing IN_P since it will set in the initial case of
3918 being not equal to zero; "out" is leaving it alone. */
3919 if (low == 0 || high == 0
3920 || ! integer_zerop (low) || ! integer_zerop (high)
3921 || TREE_CODE (arg1) != INTEGER_CST)
3922 break;
3923
3924 switch (code)
3925 {
3926 case NE_EXPR: /* - [c, c] */
3927 low = high = arg1;
3928 break;
3929 case EQ_EXPR: /* + [c, c] */
3930 in_p = ! in_p, low = high = arg1;
3931 break;
3932 case GT_EXPR: /* - [-, c] */
3933 low = 0, high = arg1;
3934 break;
3935 case GE_EXPR: /* + [c, -] */
3936 in_p = ! in_p, low = arg1, high = 0;
3937 break;
3938 case LT_EXPR: /* - [c, -] */
3939 low = arg1, high = 0;
3940 break;
3941 case LE_EXPR: /* + [-, c] */
3942 in_p = ! in_p, low = 0, high = arg1;
3943 break;
3944 default:
3945 gcc_unreachable ();
3946 }
3947
3948 /* If this is an unsigned comparison, we also know that EXP is
3949 greater than or equal to zero. We base the range tests we make
3950 on that fact, so we record it here so we can parse existing
3951 range tests. We test arg0_type since often the return type
3952 of, e.g. EQ_EXPR, is boolean. */
3953 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3954 {
3955 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3956 in_p, low, high, 1,
3957 build_int_cst (arg0_type, 0),
3958 NULL_TREE))
3959 break;
3960
3961 in_p = n_in_p, low = n_low, high = n_high;
3962
3963 /* If the high bound is missing, but we have a nonzero low
3964 bound, reverse the range so it goes from zero to the low bound
3965 minus 1. */
3966 if (high == 0 && low && ! integer_zerop (low))
3967 {
3968 in_p = ! in_p;
3969 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3970 integer_one_node, 0);
3971 low = build_int_cst (arg0_type, 0);
3972 }
3973 }
3974
3975 exp = arg0;
3976 continue;
3977
3978 case NEGATE_EXPR:
3979 /* (-x) IN [a,b] -> x in [-b, -a] */
3980 n_low = range_binop (MINUS_EXPR, exp_type,
3981 build_int_cst (exp_type, 0),
3982 0, high, 1);
3983 n_high = range_binop (MINUS_EXPR, exp_type,
3984 build_int_cst (exp_type, 0),
3985 0, low, 0);
3986 low = n_low, high = n_high;
3987 exp = arg0;
3988 continue;
3989
3990 case BIT_NOT_EXPR:
3991 /* ~ X -> -X - 1 */
3992 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3993 build_int_cst (exp_type, 1));
3994 continue;
3995
3996 case PLUS_EXPR: case MINUS_EXPR:
3997 if (TREE_CODE (arg1) != INTEGER_CST)
3998 break;
3999
4000 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4001 move a constant to the other side. */
4002 if (!TYPE_UNSIGNED (arg0_type)
4003 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4004 break;
4005
4006 /* If EXP is signed, any overflow in the computation is undefined,
4007 so we don't worry about it so long as our computations on
4008 the bounds don't overflow. For unsigned, overflow is defined
4009 and this is exactly the right thing. */
4010 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4011 arg0_type, low, 0, arg1, 0);
4012 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4013 arg0_type, high, 1, arg1, 0);
4014 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4015 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4016 break;
4017
4018 /* Check for an unsigned range which has wrapped around the maximum
4019 value thus making n_high < n_low, and normalize it. */
4020 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4021 {
4022 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4023 integer_one_node, 0);
4024 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4025 integer_one_node, 0);
4026
4027 /* If the range is of the form +/- [ x+1, x ], we won't
4028 be able to normalize it. But then, it represents the
4029 whole range or the empty set, so make it
4030 +/- [ -, - ]. */
4031 if (tree_int_cst_equal (n_low, low)
4032 && tree_int_cst_equal (n_high, high))
4033 low = high = 0;
4034 else
4035 in_p = ! in_p;
4036 }
4037 else
4038 low = n_low, high = n_high;
4039
4040 exp = arg0;
4041 continue;
4042
4043 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4044 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4045 break;
4046
4047 if (! INTEGRAL_TYPE_P (arg0_type)
4048 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4049 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4050 break;
4051
4052 n_low = low, n_high = high;
4053
4054 if (n_low != 0)
4055 n_low = fold_convert (arg0_type, n_low);
4056
4057 if (n_high != 0)
4058 n_high = fold_convert (arg0_type, n_high);
4059
4060
4061 /* If we're converting arg0 from an unsigned type, to exp,
4062 a signed type, we will be doing the comparison as unsigned.
4063 The tests above have already verified that LOW and HIGH
4064 are both positive.
4065
4066 So we have to ensure that we will handle large unsigned
4067 values the same way that the current signed bounds treat
4068 negative values. */
4069
4070 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4071 {
4072 tree high_positive;
4073 tree equiv_type = lang_hooks.types.type_for_mode
4074 (TYPE_MODE (arg0_type), 1);
4075
4076 /* A range without an upper bound is, naturally, unbounded.
4077 Since convert would have cropped a very large value, use
4078 the max value for the destination type. */
4079 high_positive
4080 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4081 : TYPE_MAX_VALUE (arg0_type);
4082
4083 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4084 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4085 fold_convert (arg0_type,
4086 high_positive),
4087 build_int_cst (arg0_type, 1));
4088
4089 /* If the low bound is specified, "and" the range with the
4090 range for which the original unsigned value will be
4091 positive. */
4092 if (low != 0)
4093 {
4094 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4095 1, n_low, n_high, 1,
4096 fold_convert (arg0_type,
4097 integer_zero_node),
4098 high_positive))
4099 break;
4100
4101 in_p = (n_in_p == in_p);
4102 }
4103 else
4104 {
4105 /* Otherwise, "or" the range with the range of the input
4106 that will be interpreted as negative. */
4107 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4108 0, n_low, n_high, 1,
4109 fold_convert (arg0_type,
4110 integer_zero_node),
4111 high_positive))
4112 break;
4113
4114 in_p = (in_p != n_in_p);
4115 }
4116 }
4117
4118 exp = arg0;
4119 low = n_low, high = n_high;
4120 continue;
4121
4122 default:
4123 break;
4124 }
4125
4126 break;
4127 }
4128
4129 /* If EXP is a constant, we can evaluate whether this is true or false. */
4130 if (TREE_CODE (exp) == INTEGER_CST)
4131 {
4132 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4133 exp, 0, low, 0))
4134 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4135 exp, 1, high, 1)));
4136 low = high = 0;
4137 exp = 0;
4138 }
4139
4140 *pin_p = in_p, *plow = low, *phigh = high;
4141 return exp;
4142 }
4143 \f
4144 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4145 type, TYPE, return an expression to test if EXP is in (or out of, depending
4146 on IN_P) the range. Return 0 if the test couldn't be created. */
4147
4148 static tree
4149 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4150 {
4151 tree etype = TREE_TYPE (exp);
4152 tree value;
4153
4154 #ifdef HAVE_canonicalize_funcptr_for_compare
4155 /* Disable this optimization for function pointer expressions
4156 on targets that require function pointer canonicalization. */
4157 if (HAVE_canonicalize_funcptr_for_compare
4158 && TREE_CODE (etype) == POINTER_TYPE
4159 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4160 return NULL_TREE;
4161 #endif
4162
4163 if (! in_p)
4164 {
4165 value = build_range_check (type, exp, 1, low, high);
4166 if (value != 0)
4167 return invert_truthvalue (value);
4168
4169 return 0;
4170 }
4171
4172 if (low == 0 && high == 0)
4173 return build_int_cst (type, 1);
4174
4175 if (low == 0)
4176 return fold_build2 (LE_EXPR, type, exp,
4177 fold_convert (etype, high));
4178
4179 if (high == 0)
4180 return fold_build2 (GE_EXPR, type, exp,
4181 fold_convert (etype, low));
4182
4183 if (operand_equal_p (low, high, 0))
4184 return fold_build2 (EQ_EXPR, type, exp,
4185 fold_convert (etype, low));
4186
4187 if (integer_zerop (low))
4188 {
4189 if (! TYPE_UNSIGNED (etype))
4190 {
4191 etype = lang_hooks.types.unsigned_type (etype);
4192 high = fold_convert (etype, high);
4193 exp = fold_convert (etype, exp);
4194 }
4195 return build_range_check (type, exp, 1, 0, high);
4196 }
4197
4198 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4199 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4200 {
4201 unsigned HOST_WIDE_INT lo;
4202 HOST_WIDE_INT hi;
4203 int prec;
4204
4205 prec = TYPE_PRECISION (etype);
4206 if (prec <= HOST_BITS_PER_WIDE_INT)
4207 {
4208 hi = 0;
4209 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4210 }
4211 else
4212 {
4213 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4214 lo = (unsigned HOST_WIDE_INT) -1;
4215 }
4216
4217 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4218 {
4219 if (TYPE_UNSIGNED (etype))
4220 {
4221 etype = lang_hooks.types.signed_type (etype);
4222 exp = fold_convert (etype, exp);
4223 }
4224 return fold_build2 (GT_EXPR, type, exp,
4225 build_int_cst (etype, 0));
4226 }
4227 }
4228
4229 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4230 This requires wrap-around arithmetics for the type of the expression. */
4231 switch (TREE_CODE (etype))
4232 {
4233 case INTEGER_TYPE:
4234 /* There is no requirement that LOW be within the range of ETYPE
4235 if the latter is a subtype. It must, however, be within the base
4236 type of ETYPE. So be sure we do the subtraction in that type. */
4237 if (TREE_TYPE (etype))
4238 etype = TREE_TYPE (etype);
4239 break;
4240
4241 case ENUMERAL_TYPE:
4242 case BOOLEAN_TYPE:
4243 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4244 TYPE_UNSIGNED (etype));
4245 break;
4246
4247 default:
4248 break;
4249 }
4250
4251 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4252 if (TREE_CODE (etype) == INTEGER_TYPE
4253 && !TYPE_OVERFLOW_WRAPS (etype))
4254 {
4255 tree utype, minv, maxv;
4256
4257 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4258 for the type in question, as we rely on this here. */
4259 utype = lang_hooks.types.unsigned_type (etype);
4260 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4261 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4262 integer_one_node, 1);
4263 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4264
4265 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4266 minv, 1, maxv, 1)))
4267 etype = utype;
4268 else
4269 return 0;
4270 }
4271
4272 high = fold_convert (etype, high);
4273 low = fold_convert (etype, low);
4274 exp = fold_convert (etype, exp);
4275
4276 value = const_binop (MINUS_EXPR, high, low, 0);
4277
4278 if (value != 0 && !TREE_OVERFLOW (value))
4279 return build_range_check (type,
4280 fold_build2 (MINUS_EXPR, etype, exp, low),
4281 1, build_int_cst (etype, 0), value);
4282
4283 return 0;
4284 }
4285 \f
4286 /* Return the predecessor of VAL in its type, handling the infinite case. */
4287
4288 static tree
4289 range_predecessor (tree val)
4290 {
4291 tree type = TREE_TYPE (val);
4292
4293 if (INTEGRAL_TYPE_P (type)
4294 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4295 return 0;
4296 else
4297 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4298 }
4299
4300 /* Return the successor of VAL in its type, handling the infinite case. */
4301
4302 static tree
4303 range_successor (tree val)
4304 {
4305 tree type = TREE_TYPE (val);
4306
4307 if (INTEGRAL_TYPE_P (type)
4308 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4309 return 0;
4310 else
4311 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4312 }
4313
4314 /* Given two ranges, see if we can merge them into one. Return 1 if we
4315 can, 0 if we can't. Set the output range into the specified parameters. */
4316
4317 static int
4318 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4319 tree high0, int in1_p, tree low1, tree high1)
4320 {
4321 int no_overlap;
4322 int subset;
4323 int temp;
4324 tree tem;
4325 int in_p;
4326 tree low, high;
4327 int lowequal = ((low0 == 0 && low1 == 0)
4328 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4329 low0, 0, low1, 0)));
4330 int highequal = ((high0 == 0 && high1 == 0)
4331 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4332 high0, 1, high1, 1)));
4333
4334 /* Make range 0 be the range that starts first, or ends last if they
4335 start at the same value. Swap them if it isn't. */
4336 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4337 low0, 0, low1, 0))
4338 || (lowequal
4339 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4340 high1, 1, high0, 1))))
4341 {
4342 temp = in0_p, in0_p = in1_p, in1_p = temp;
4343 tem = low0, low0 = low1, low1 = tem;
4344 tem = high0, high0 = high1, high1 = tem;
4345 }
4346
4347 /* Now flag two cases, whether the ranges are disjoint or whether the
4348 second range is totally subsumed in the first. Note that the tests
4349 below are simplified by the ones above. */
4350 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4351 high0, 1, low1, 0));
4352 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4353 high1, 1, high0, 1));
4354
4355 /* We now have four cases, depending on whether we are including or
4356 excluding the two ranges. */
4357 if (in0_p && in1_p)
4358 {
4359 /* If they don't overlap, the result is false. If the second range
4360 is a subset it is the result. Otherwise, the range is from the start
4361 of the second to the end of the first. */
4362 if (no_overlap)
4363 in_p = 0, low = high = 0;
4364 else if (subset)
4365 in_p = 1, low = low1, high = high1;
4366 else
4367 in_p = 1, low = low1, high = high0;
4368 }
4369
4370 else if (in0_p && ! in1_p)
4371 {
4372 /* If they don't overlap, the result is the first range. If they are
4373 equal, the result is false. If the second range is a subset of the
4374 first, and the ranges begin at the same place, we go from just after
4375 the end of the second range to the end of the first. If the second
4376 range is not a subset of the first, or if it is a subset and both
4377 ranges end at the same place, the range starts at the start of the
4378 first range and ends just before the second range.
4379 Otherwise, we can't describe this as a single range. */
4380 if (no_overlap)
4381 in_p = 1, low = low0, high = high0;
4382 else if (lowequal && highequal)
4383 in_p = 0, low = high = 0;
4384 else if (subset && lowequal)
4385 {
4386 low = range_successor (high1);
4387 high = high0;
4388 in_p = (low != 0);
4389 }
4390 else if (! subset || highequal)
4391 {
4392 low = low0;
4393 high = range_predecessor (low1);
4394 in_p = (high != 0);
4395 }
4396 else
4397 return 0;
4398 }
4399
4400 else if (! in0_p && in1_p)
4401 {
4402 /* If they don't overlap, the result is the second range. If the second
4403 is a subset of the first, the result is false. Otherwise,
4404 the range starts just after the first range and ends at the
4405 end of the second. */
4406 if (no_overlap)
4407 in_p = 1, low = low1, high = high1;
4408 else if (subset || highequal)
4409 in_p = 0, low = high = 0;
4410 else
4411 {
4412 low = range_successor (high0);
4413 high = high1;
4414 in_p = (low != 0);
4415 }
4416 }
4417
4418 else
4419 {
4420 /* The case where we are excluding both ranges. Here the complex case
4421 is if they don't overlap. In that case, the only time we have a
4422 range is if they are adjacent. If the second is a subset of the
4423 first, the result is the first. Otherwise, the range to exclude
4424 starts at the beginning of the first range and ends at the end of the
4425 second. */
4426 if (no_overlap)
4427 {
4428 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4429 range_successor (high0),
4430 1, low1, 0)))
4431 in_p = 0, low = low0, high = high1;
4432 else
4433 {
4434 /* Canonicalize - [min, x] into - [-, x]. */
4435 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4436 switch (TREE_CODE (TREE_TYPE (low0)))
4437 {
4438 case ENUMERAL_TYPE:
4439 if (TYPE_PRECISION (TREE_TYPE (low0))
4440 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4441 break;
4442 /* FALLTHROUGH */
4443 case INTEGER_TYPE:
4444 if (tree_int_cst_equal (low0,
4445 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4446 low0 = 0;
4447 break;
4448 case POINTER_TYPE:
4449 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4450 && integer_zerop (low0))
4451 low0 = 0;
4452 break;
4453 default:
4454 break;
4455 }
4456
4457 /* Canonicalize - [x, max] into - [x, -]. */
4458 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4459 switch (TREE_CODE (TREE_TYPE (high1)))
4460 {
4461 case ENUMERAL_TYPE:
4462 if (TYPE_PRECISION (TREE_TYPE (high1))
4463 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4464 break;
4465 /* FALLTHROUGH */
4466 case INTEGER_TYPE:
4467 if (tree_int_cst_equal (high1,
4468 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4469 high1 = 0;
4470 break;
4471 case POINTER_TYPE:
4472 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4473 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4474 high1, 1,
4475 integer_one_node, 1)))
4476 high1 = 0;
4477 break;
4478 default:
4479 break;
4480 }
4481
4482 /* The ranges might be also adjacent between the maximum and
4483 minimum values of the given type. For
4484 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4485 return + [x + 1, y - 1]. */
4486 if (low0 == 0 && high1 == 0)
4487 {
4488 low = range_successor (high0);
4489 high = range_predecessor (low1);
4490 if (low == 0 || high == 0)
4491 return 0;
4492
4493 in_p = 1;
4494 }
4495 else
4496 return 0;
4497 }
4498 }
4499 else if (subset)
4500 in_p = 0, low = low0, high = high0;
4501 else
4502 in_p = 0, low = low0, high = high1;
4503 }
4504
4505 *pin_p = in_p, *plow = low, *phigh = high;
4506 return 1;
4507 }
4508 \f
4509
4510 /* Subroutine of fold, looking inside expressions of the form
4511 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4512 of the COND_EXPR. This function is being used also to optimize
4513 A op B ? C : A, by reversing the comparison first.
4514
4515 Return a folded expression whose code is not a COND_EXPR
4516 anymore, or NULL_TREE if no folding opportunity is found. */
4517
4518 static tree
4519 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4520 {
4521 enum tree_code comp_code = TREE_CODE (arg0);
4522 tree arg00 = TREE_OPERAND (arg0, 0);
4523 tree arg01 = TREE_OPERAND (arg0, 1);
4524 tree arg1_type = TREE_TYPE (arg1);
4525 tree tem;
4526
4527 STRIP_NOPS (arg1);
4528 STRIP_NOPS (arg2);
4529
4530 /* If we have A op 0 ? A : -A, consider applying the following
4531 transformations:
4532
4533 A == 0? A : -A same as -A
4534 A != 0? A : -A same as A
4535 A >= 0? A : -A same as abs (A)
4536 A > 0? A : -A same as abs (A)
4537 A <= 0? A : -A same as -abs (A)
4538 A < 0? A : -A same as -abs (A)
4539
4540 None of these transformations work for modes with signed
4541 zeros. If A is +/-0, the first two transformations will
4542 change the sign of the result (from +0 to -0, or vice
4543 versa). The last four will fix the sign of the result,
4544 even though the original expressions could be positive or
4545 negative, depending on the sign of A.
4546
4547 Note that all these transformations are correct if A is
4548 NaN, since the two alternatives (A and -A) are also NaNs. */
4549 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4550 ? real_zerop (arg01)
4551 : integer_zerop (arg01))
4552 && ((TREE_CODE (arg2) == NEGATE_EXPR
4553 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4554 /* In the case that A is of the form X-Y, '-A' (arg2) may
4555 have already been folded to Y-X, check for that. */
4556 || (TREE_CODE (arg1) == MINUS_EXPR
4557 && TREE_CODE (arg2) == MINUS_EXPR
4558 && operand_equal_p (TREE_OPERAND (arg1, 0),
4559 TREE_OPERAND (arg2, 1), 0)
4560 && operand_equal_p (TREE_OPERAND (arg1, 1),
4561 TREE_OPERAND (arg2, 0), 0))))
4562 switch (comp_code)
4563 {
4564 case EQ_EXPR:
4565 case UNEQ_EXPR:
4566 tem = fold_convert (arg1_type, arg1);
4567 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4568 case NE_EXPR:
4569 case LTGT_EXPR:
4570 return pedantic_non_lvalue (fold_convert (type, arg1));
4571 case UNGE_EXPR:
4572 case UNGT_EXPR:
4573 if (flag_trapping_math)
4574 break;
4575 /* Fall through. */
4576 case GE_EXPR:
4577 case GT_EXPR:
4578 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4579 arg1 = fold_convert (lang_hooks.types.signed_type
4580 (TREE_TYPE (arg1)), arg1);
4581 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4582 return pedantic_non_lvalue (fold_convert (type, tem));
4583 case UNLE_EXPR:
4584 case UNLT_EXPR:
4585 if (flag_trapping_math)
4586 break;
4587 case LE_EXPR:
4588 case LT_EXPR:
4589 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4590 arg1 = fold_convert (lang_hooks.types.signed_type
4591 (TREE_TYPE (arg1)), arg1);
4592 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4593 return negate_expr (fold_convert (type, tem));
4594 default:
4595 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4596 break;
4597 }
4598
4599 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4600 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4601 both transformations are correct when A is NaN: A != 0
4602 is then true, and A == 0 is false. */
4603
4604 if (integer_zerop (arg01) && integer_zerop (arg2))
4605 {
4606 if (comp_code == NE_EXPR)
4607 return pedantic_non_lvalue (fold_convert (type, arg1));
4608 else if (comp_code == EQ_EXPR)
4609 return build_int_cst (type, 0);
4610 }
4611
4612 /* Try some transformations of A op B ? A : B.
4613
4614 A == B? A : B same as B
4615 A != B? A : B same as A
4616 A >= B? A : B same as max (A, B)
4617 A > B? A : B same as max (B, A)
4618 A <= B? A : B same as min (A, B)
4619 A < B? A : B same as min (B, A)
4620
4621 As above, these transformations don't work in the presence
4622 of signed zeros. For example, if A and B are zeros of
4623 opposite sign, the first two transformations will change
4624 the sign of the result. In the last four, the original
4625 expressions give different results for (A=+0, B=-0) and
4626 (A=-0, B=+0), but the transformed expressions do not.
4627
4628 The first two transformations are correct if either A or B
4629 is a NaN. In the first transformation, the condition will
4630 be false, and B will indeed be chosen. In the case of the
4631 second transformation, the condition A != B will be true,
4632 and A will be chosen.
4633
4634 The conversions to max() and min() are not correct if B is
4635 a number and A is not. The conditions in the original
4636 expressions will be false, so all four give B. The min()
4637 and max() versions would give a NaN instead. */
4638 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4639 /* Avoid these transformations if the COND_EXPR may be used
4640 as an lvalue in the C++ front-end. PR c++/19199. */
4641 && (in_gimple_form
4642 || (strcmp (lang_hooks.name, "GNU C++") != 0
4643 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4644 || ! maybe_lvalue_p (arg1)
4645 || ! maybe_lvalue_p (arg2)))
4646 {
4647 tree comp_op0 = arg00;
4648 tree comp_op1 = arg01;
4649 tree comp_type = TREE_TYPE (comp_op0);
4650
4651 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4652 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4653 {
4654 comp_type = type;
4655 comp_op0 = arg1;
4656 comp_op1 = arg2;
4657 }
4658
4659 switch (comp_code)
4660 {
4661 case EQ_EXPR:
4662 return pedantic_non_lvalue (fold_convert (type, arg2));
4663 case NE_EXPR:
4664 return pedantic_non_lvalue (fold_convert (type, arg1));
4665 case LE_EXPR:
4666 case LT_EXPR:
4667 case UNLE_EXPR:
4668 case UNLT_EXPR:
4669 /* In C++ a ?: expression can be an lvalue, so put the
4670 operand which will be used if they are equal first
4671 so that we can convert this back to the
4672 corresponding COND_EXPR. */
4673 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4674 {
4675 comp_op0 = fold_convert (comp_type, comp_op0);
4676 comp_op1 = fold_convert (comp_type, comp_op1);
4677 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4678 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4679 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4680 return pedantic_non_lvalue (fold_convert (type, tem));
4681 }
4682 break;
4683 case GE_EXPR:
4684 case GT_EXPR:
4685 case UNGE_EXPR:
4686 case UNGT_EXPR:
4687 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4688 {
4689 comp_op0 = fold_convert (comp_type, comp_op0);
4690 comp_op1 = fold_convert (comp_type, comp_op1);
4691 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4692 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4693 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4694 return pedantic_non_lvalue (fold_convert (type, tem));
4695 }
4696 break;
4697 case UNEQ_EXPR:
4698 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4699 return pedantic_non_lvalue (fold_convert (type, arg2));
4700 break;
4701 case LTGT_EXPR:
4702 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4703 return pedantic_non_lvalue (fold_convert (type, arg1));
4704 break;
4705 default:
4706 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4707 break;
4708 }
4709 }
4710
4711 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4712 we might still be able to simplify this. For example,
4713 if C1 is one less or one more than C2, this might have started
4714 out as a MIN or MAX and been transformed by this function.
4715 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4716
4717 if (INTEGRAL_TYPE_P (type)
4718 && TREE_CODE (arg01) == INTEGER_CST
4719 && TREE_CODE (arg2) == INTEGER_CST)
4720 switch (comp_code)
4721 {
4722 case EQ_EXPR:
4723 /* We can replace A with C1 in this case. */
4724 arg1 = fold_convert (type, arg01);
4725 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4726
4727 case LT_EXPR:
4728 /* If C1 is C2 + 1, this is min(A, C2). */
4729 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4730 OEP_ONLY_CONST)
4731 && operand_equal_p (arg01,
4732 const_binop (PLUS_EXPR, arg2,
4733 build_int_cst (type, 1), 0),
4734 OEP_ONLY_CONST))
4735 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4736 type, arg1, arg2));
4737 break;
4738
4739 case LE_EXPR:
4740 /* If C1 is C2 - 1, this is min(A, C2). */
4741 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4742 OEP_ONLY_CONST)
4743 && operand_equal_p (arg01,
4744 const_binop (MINUS_EXPR, arg2,
4745 build_int_cst (type, 1), 0),
4746 OEP_ONLY_CONST))
4747 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4748 type, arg1, arg2));
4749 break;
4750
4751 case GT_EXPR:
4752 /* If C1 is C2 - 1, this is max(A, C2). */
4753 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4754 OEP_ONLY_CONST)
4755 && operand_equal_p (arg01,
4756 const_binop (MINUS_EXPR, arg2,
4757 build_int_cst (type, 1), 0),
4758 OEP_ONLY_CONST))
4759 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4760 type, arg1, arg2));
4761 break;
4762
4763 case GE_EXPR:
4764 /* If C1 is C2 + 1, this is max(A, C2). */
4765 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4766 OEP_ONLY_CONST)
4767 && operand_equal_p (arg01,
4768 const_binop (PLUS_EXPR, arg2,
4769 build_int_cst (type, 1), 0),
4770 OEP_ONLY_CONST))
4771 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4772 type, arg1, arg2));
4773 break;
4774 case NE_EXPR:
4775 break;
4776 default:
4777 gcc_unreachable ();
4778 }
4779
4780 return NULL_TREE;
4781 }
4782
4783
4784 \f
4785 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4786 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4787 #endif
4788
4789 /* EXP is some logical combination of boolean tests. See if we can
4790 merge it into some range test. Return the new tree if so. */
4791
4792 static tree
4793 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4794 {
4795 int or_op = (code == TRUTH_ORIF_EXPR
4796 || code == TRUTH_OR_EXPR);
4797 int in0_p, in1_p, in_p;
4798 tree low0, low1, low, high0, high1, high;
4799 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4800 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4801 tree tem;
4802
4803 /* If this is an OR operation, invert both sides; we will invert
4804 again at the end. */
4805 if (or_op)
4806 in0_p = ! in0_p, in1_p = ! in1_p;
4807
4808 /* If both expressions are the same, if we can merge the ranges, and we
4809 can build the range test, return it or it inverted. If one of the
4810 ranges is always true or always false, consider it to be the same
4811 expression as the other. */
4812 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4813 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4814 in1_p, low1, high1)
4815 && 0 != (tem = (build_range_check (type,
4816 lhs != 0 ? lhs
4817 : rhs != 0 ? rhs : integer_zero_node,
4818 in_p, low, high))))
4819 return or_op ? invert_truthvalue (tem) : tem;
4820
4821 /* On machines where the branch cost is expensive, if this is a
4822 short-circuited branch and the underlying object on both sides
4823 is the same, make a non-short-circuit operation. */
4824 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4825 && lhs != 0 && rhs != 0
4826 && (code == TRUTH_ANDIF_EXPR
4827 || code == TRUTH_ORIF_EXPR)
4828 && operand_equal_p (lhs, rhs, 0))
4829 {
4830 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4831 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4832 which cases we can't do this. */
4833 if (simple_operand_p (lhs))
4834 return build2 (code == TRUTH_ANDIF_EXPR
4835 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4836 type, op0, op1);
4837
4838 else if (lang_hooks.decls.global_bindings_p () == 0
4839 && ! CONTAINS_PLACEHOLDER_P (lhs))
4840 {
4841 tree common = save_expr (lhs);
4842
4843 if (0 != (lhs = build_range_check (type, common,
4844 or_op ? ! in0_p : in0_p,
4845 low0, high0))
4846 && (0 != (rhs = build_range_check (type, common,
4847 or_op ? ! in1_p : in1_p,
4848 low1, high1))))
4849 return build2 (code == TRUTH_ANDIF_EXPR
4850 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4851 type, lhs, rhs);
4852 }
4853 }
4854
4855 return 0;
4856 }
4857 \f
4858 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4859 bit value. Arrange things so the extra bits will be set to zero if and
4860 only if C is signed-extended to its full width. If MASK is nonzero,
4861 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4862
4863 static tree
4864 unextend (tree c, int p, int unsignedp, tree mask)
4865 {
4866 tree type = TREE_TYPE (c);
4867 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4868 tree temp;
4869
4870 if (p == modesize || unsignedp)
4871 return c;
4872
4873 /* We work by getting just the sign bit into the low-order bit, then
4874 into the high-order bit, then sign-extend. We then XOR that value
4875 with C. */
4876 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4877 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4878
4879 /* We must use a signed type in order to get an arithmetic right shift.
4880 However, we must also avoid introducing accidental overflows, so that
4881 a subsequent call to integer_zerop will work. Hence we must
4882 do the type conversion here. At this point, the constant is either
4883 zero or one, and the conversion to a signed type can never overflow.
4884 We could get an overflow if this conversion is done anywhere else. */
4885 if (TYPE_UNSIGNED (type))
4886 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4887
4888 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4889 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4890 if (mask != 0)
4891 temp = const_binop (BIT_AND_EXPR, temp,
4892 fold_convert (TREE_TYPE (c), mask), 0);
4893 /* If necessary, convert the type back to match the type of C. */
4894 if (TYPE_UNSIGNED (type))
4895 temp = fold_convert (type, temp);
4896
4897 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4898 }
4899 \f
4900 /* Find ways of folding logical expressions of LHS and RHS:
4901 Try to merge two comparisons to the same innermost item.
4902 Look for range tests like "ch >= '0' && ch <= '9'".
4903 Look for combinations of simple terms on machines with expensive branches
4904 and evaluate the RHS unconditionally.
4905
4906 For example, if we have p->a == 2 && p->b == 4 and we can make an
4907 object large enough to span both A and B, we can do this with a comparison
4908 against the object ANDed with the a mask.
4909
4910 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4911 operations to do this with one comparison.
4912
4913 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4914 function and the one above.
4915
4916 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4917 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4918
4919 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4920 two operands.
4921
4922 We return the simplified tree or 0 if no optimization is possible. */
4923
4924 static tree
4925 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4926 {
4927 /* If this is the "or" of two comparisons, we can do something if
4928 the comparisons are NE_EXPR. If this is the "and", we can do something
4929 if the comparisons are EQ_EXPR. I.e.,
4930 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4931
4932 WANTED_CODE is this operation code. For single bit fields, we can
4933 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4934 comparison for one-bit fields. */
4935
4936 enum tree_code wanted_code;
4937 enum tree_code lcode, rcode;
4938 tree ll_arg, lr_arg, rl_arg, rr_arg;
4939 tree ll_inner, lr_inner, rl_inner, rr_inner;
4940 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4941 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4942 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4943 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4944 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4945 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4946 enum machine_mode lnmode, rnmode;
4947 tree ll_mask, lr_mask, rl_mask, rr_mask;
4948 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4949 tree l_const, r_const;
4950 tree lntype, rntype, result;
4951 int first_bit, end_bit;
4952 int volatilep;
4953 tree orig_lhs = lhs, orig_rhs = rhs;
4954 enum tree_code orig_code = code;
4955
4956 /* Start by getting the comparison codes. Fail if anything is volatile.
4957 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4958 it were surrounded with a NE_EXPR. */
4959
4960 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4961 return 0;
4962
4963 lcode = TREE_CODE (lhs);
4964 rcode = TREE_CODE (rhs);
4965
4966 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4967 {
4968 lhs = build2 (NE_EXPR, truth_type, lhs,
4969 build_int_cst (TREE_TYPE (lhs), 0));
4970 lcode = NE_EXPR;
4971 }
4972
4973 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4974 {
4975 rhs = build2 (NE_EXPR, truth_type, rhs,
4976 build_int_cst (TREE_TYPE (rhs), 0));
4977 rcode = NE_EXPR;
4978 }
4979
4980 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4981 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4982 return 0;
4983
4984 ll_arg = TREE_OPERAND (lhs, 0);
4985 lr_arg = TREE_OPERAND (lhs, 1);
4986 rl_arg = TREE_OPERAND (rhs, 0);
4987 rr_arg = TREE_OPERAND (rhs, 1);
4988
4989 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4990 if (simple_operand_p (ll_arg)
4991 && simple_operand_p (lr_arg))
4992 {
4993 tree result;
4994 if (operand_equal_p (ll_arg, rl_arg, 0)
4995 && operand_equal_p (lr_arg, rr_arg, 0))
4996 {
4997 result = combine_comparisons (code, lcode, rcode,
4998 truth_type, ll_arg, lr_arg);
4999 if (result)
5000 return result;
5001 }
5002 else if (operand_equal_p (ll_arg, rr_arg, 0)
5003 && operand_equal_p (lr_arg, rl_arg, 0))
5004 {
5005 result = combine_comparisons (code, lcode,
5006 swap_tree_comparison (rcode),
5007 truth_type, ll_arg, lr_arg);
5008 if (result)
5009 return result;
5010 }
5011 }
5012
5013 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5014 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5015
5016 /* If the RHS can be evaluated unconditionally and its operands are
5017 simple, it wins to evaluate the RHS unconditionally on machines
5018 with expensive branches. In this case, this isn't a comparison
5019 that can be merged. Avoid doing this if the RHS is a floating-point
5020 comparison since those can trap. */
5021
5022 if (BRANCH_COST >= 2
5023 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5024 && simple_operand_p (rl_arg)
5025 && simple_operand_p (rr_arg))
5026 {
5027 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5028 if (code == TRUTH_OR_EXPR
5029 && lcode == NE_EXPR && integer_zerop (lr_arg)
5030 && rcode == NE_EXPR && integer_zerop (rr_arg)
5031 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5032 return build2 (NE_EXPR, truth_type,
5033 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5034 ll_arg, rl_arg),
5035 build_int_cst (TREE_TYPE (ll_arg), 0));
5036
5037 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5038 if (code == TRUTH_AND_EXPR
5039 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5040 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5041 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5042 return build2 (EQ_EXPR, truth_type,
5043 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5044 ll_arg, rl_arg),
5045 build_int_cst (TREE_TYPE (ll_arg), 0));
5046
5047 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5048 {
5049 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5050 return build2 (code, truth_type, lhs, rhs);
5051 return NULL_TREE;
5052 }
5053 }
5054
5055 /* See if the comparisons can be merged. Then get all the parameters for
5056 each side. */
5057
5058 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5059 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5060 return 0;
5061
5062 volatilep = 0;
5063 ll_inner = decode_field_reference (ll_arg,
5064 &ll_bitsize, &ll_bitpos, &ll_mode,
5065 &ll_unsignedp, &volatilep, &ll_mask,
5066 &ll_and_mask);
5067 lr_inner = decode_field_reference (lr_arg,
5068 &lr_bitsize, &lr_bitpos, &lr_mode,
5069 &lr_unsignedp, &volatilep, &lr_mask,
5070 &lr_and_mask);
5071 rl_inner = decode_field_reference (rl_arg,
5072 &rl_bitsize, &rl_bitpos, &rl_mode,
5073 &rl_unsignedp, &volatilep, &rl_mask,
5074 &rl_and_mask);
5075 rr_inner = decode_field_reference (rr_arg,
5076 &rr_bitsize, &rr_bitpos, &rr_mode,
5077 &rr_unsignedp, &volatilep, &rr_mask,
5078 &rr_and_mask);
5079
5080 /* It must be true that the inner operation on the lhs of each
5081 comparison must be the same if we are to be able to do anything.
5082 Then see if we have constants. If not, the same must be true for
5083 the rhs's. */
5084 if (volatilep || ll_inner == 0 || rl_inner == 0
5085 || ! operand_equal_p (ll_inner, rl_inner, 0))
5086 return 0;
5087
5088 if (TREE_CODE (lr_arg) == INTEGER_CST
5089 && TREE_CODE (rr_arg) == INTEGER_CST)
5090 l_const = lr_arg, r_const = rr_arg;
5091 else if (lr_inner == 0 || rr_inner == 0
5092 || ! operand_equal_p (lr_inner, rr_inner, 0))
5093 return 0;
5094 else
5095 l_const = r_const = 0;
5096
5097 /* If either comparison code is not correct for our logical operation,
5098 fail. However, we can convert a one-bit comparison against zero into
5099 the opposite comparison against that bit being set in the field. */
5100
5101 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5102 if (lcode != wanted_code)
5103 {
5104 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5105 {
5106 /* Make the left operand unsigned, since we are only interested
5107 in the value of one bit. Otherwise we are doing the wrong
5108 thing below. */
5109 ll_unsignedp = 1;
5110 l_const = ll_mask;
5111 }
5112 else
5113 return 0;
5114 }
5115
5116 /* This is analogous to the code for l_const above. */
5117 if (rcode != wanted_code)
5118 {
5119 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5120 {
5121 rl_unsignedp = 1;
5122 r_const = rl_mask;
5123 }
5124 else
5125 return 0;
5126 }
5127
5128 /* See if we can find a mode that contains both fields being compared on
5129 the left. If we can't, fail. Otherwise, update all constants and masks
5130 to be relative to a field of that size. */
5131 first_bit = MIN (ll_bitpos, rl_bitpos);
5132 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5133 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5134 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5135 volatilep);
5136 if (lnmode == VOIDmode)
5137 return 0;
5138
5139 lnbitsize = GET_MODE_BITSIZE (lnmode);
5140 lnbitpos = first_bit & ~ (lnbitsize - 1);
5141 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5142 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5143
5144 if (BYTES_BIG_ENDIAN)
5145 {
5146 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5147 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5148 }
5149
5150 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5151 size_int (xll_bitpos), 0);
5152 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5153 size_int (xrl_bitpos), 0);
5154
5155 if (l_const)
5156 {
5157 l_const = fold_convert (lntype, l_const);
5158 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5159 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5160 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5161 fold_build1 (BIT_NOT_EXPR,
5162 lntype, ll_mask),
5163 0)))
5164 {
5165 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5166
5167 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5168 }
5169 }
5170 if (r_const)
5171 {
5172 r_const = fold_convert (lntype, r_const);
5173 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5174 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5175 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5176 fold_build1 (BIT_NOT_EXPR,
5177 lntype, rl_mask),
5178 0)))
5179 {
5180 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5181
5182 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5183 }
5184 }
5185
5186 /* If the right sides are not constant, do the same for it. Also,
5187 disallow this optimization if a size or signedness mismatch occurs
5188 between the left and right sides. */
5189 if (l_const == 0)
5190 {
5191 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5192 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5193 /* Make sure the two fields on the right
5194 correspond to the left without being swapped. */
5195 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5196 return 0;
5197
5198 first_bit = MIN (lr_bitpos, rr_bitpos);
5199 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5200 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5201 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5202 volatilep);
5203 if (rnmode == VOIDmode)
5204 return 0;
5205
5206 rnbitsize = GET_MODE_BITSIZE (rnmode);
5207 rnbitpos = first_bit & ~ (rnbitsize - 1);
5208 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5209 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5210
5211 if (BYTES_BIG_ENDIAN)
5212 {
5213 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5214 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5215 }
5216
5217 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5218 size_int (xlr_bitpos), 0);
5219 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5220 size_int (xrr_bitpos), 0);
5221
5222 /* Make a mask that corresponds to both fields being compared.
5223 Do this for both items being compared. If the operands are the
5224 same size and the bits being compared are in the same position
5225 then we can do this by masking both and comparing the masked
5226 results. */
5227 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5228 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5229 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5230 {
5231 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5232 ll_unsignedp || rl_unsignedp);
5233 if (! all_ones_mask_p (ll_mask, lnbitsize))
5234 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5235
5236 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5237 lr_unsignedp || rr_unsignedp);
5238 if (! all_ones_mask_p (lr_mask, rnbitsize))
5239 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5240
5241 return build2 (wanted_code, truth_type, lhs, rhs);
5242 }
5243
5244 /* There is still another way we can do something: If both pairs of
5245 fields being compared are adjacent, we may be able to make a wider
5246 field containing them both.
5247
5248 Note that we still must mask the lhs/rhs expressions. Furthermore,
5249 the mask must be shifted to account for the shift done by
5250 make_bit_field_ref. */
5251 if ((ll_bitsize + ll_bitpos == rl_bitpos
5252 && lr_bitsize + lr_bitpos == rr_bitpos)
5253 || (ll_bitpos == rl_bitpos + rl_bitsize
5254 && lr_bitpos == rr_bitpos + rr_bitsize))
5255 {
5256 tree type;
5257
5258 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5259 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5260 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5261 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5262
5263 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5264 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5265 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5266 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5267
5268 /* Convert to the smaller type before masking out unwanted bits. */
5269 type = lntype;
5270 if (lntype != rntype)
5271 {
5272 if (lnbitsize > rnbitsize)
5273 {
5274 lhs = fold_convert (rntype, lhs);
5275 ll_mask = fold_convert (rntype, ll_mask);
5276 type = rntype;
5277 }
5278 else if (lnbitsize < rnbitsize)
5279 {
5280 rhs = fold_convert (lntype, rhs);
5281 lr_mask = fold_convert (lntype, lr_mask);
5282 type = lntype;
5283 }
5284 }
5285
5286 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5287 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5288
5289 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5290 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5291
5292 return build2 (wanted_code, truth_type, lhs, rhs);
5293 }
5294
5295 return 0;
5296 }
5297
5298 /* Handle the case of comparisons with constants. If there is something in
5299 common between the masks, those bits of the constants must be the same.
5300 If not, the condition is always false. Test for this to avoid generating
5301 incorrect code below. */
5302 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5303 if (! integer_zerop (result)
5304 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5305 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5306 {
5307 if (wanted_code == NE_EXPR)
5308 {
5309 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5310 return constant_boolean_node (true, truth_type);
5311 }
5312 else
5313 {
5314 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5315 return constant_boolean_node (false, truth_type);
5316 }
5317 }
5318
5319 /* Construct the expression we will return. First get the component
5320 reference we will make. Unless the mask is all ones the width of
5321 that field, perform the mask operation. Then compare with the
5322 merged constant. */
5323 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5324 ll_unsignedp || rl_unsignedp);
5325
5326 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5327 if (! all_ones_mask_p (ll_mask, lnbitsize))
5328 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5329
5330 return build2 (wanted_code, truth_type, result,
5331 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5332 }
5333 \f
5334 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5335 constant. */
5336
5337 static tree
5338 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5339 {
5340 tree arg0 = op0;
5341 enum tree_code op_code;
5342 tree comp_const = op1;
5343 tree minmax_const;
5344 int consts_equal, consts_lt;
5345 tree inner;
5346
5347 STRIP_SIGN_NOPS (arg0);
5348
5349 op_code = TREE_CODE (arg0);
5350 minmax_const = TREE_OPERAND (arg0, 1);
5351 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5352 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5353 inner = TREE_OPERAND (arg0, 0);
5354
5355 /* If something does not permit us to optimize, return the original tree. */
5356 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5357 || TREE_CODE (comp_const) != INTEGER_CST
5358 || TREE_OVERFLOW (comp_const)
5359 || TREE_CODE (minmax_const) != INTEGER_CST
5360 || TREE_OVERFLOW (minmax_const))
5361 return NULL_TREE;
5362
5363 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5364 and GT_EXPR, doing the rest with recursive calls using logical
5365 simplifications. */
5366 switch (code)
5367 {
5368 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5369 {
5370 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5371 type, op0, op1);
5372 if (tem)
5373 return invert_truthvalue (tem);
5374 return NULL_TREE;
5375 }
5376
5377 case GE_EXPR:
5378 return
5379 fold_build2 (TRUTH_ORIF_EXPR, type,
5380 optimize_minmax_comparison
5381 (EQ_EXPR, type, arg0, comp_const),
5382 optimize_minmax_comparison
5383 (GT_EXPR, type, arg0, comp_const));
5384
5385 case EQ_EXPR:
5386 if (op_code == MAX_EXPR && consts_equal)
5387 /* MAX (X, 0) == 0 -> X <= 0 */
5388 return fold_build2 (LE_EXPR, type, inner, comp_const);
5389
5390 else if (op_code == MAX_EXPR && consts_lt)
5391 /* MAX (X, 0) == 5 -> X == 5 */
5392 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5393
5394 else if (op_code == MAX_EXPR)
5395 /* MAX (X, 0) == -1 -> false */
5396 return omit_one_operand (type, integer_zero_node, inner);
5397
5398 else if (consts_equal)
5399 /* MIN (X, 0) == 0 -> X >= 0 */
5400 return fold_build2 (GE_EXPR, type, inner, comp_const);
5401
5402 else if (consts_lt)
5403 /* MIN (X, 0) == 5 -> false */
5404 return omit_one_operand (type, integer_zero_node, inner);
5405
5406 else
5407 /* MIN (X, 0) == -1 -> X == -1 */
5408 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5409
5410 case GT_EXPR:
5411 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5412 /* MAX (X, 0) > 0 -> X > 0
5413 MAX (X, 0) > 5 -> X > 5 */
5414 return fold_build2 (GT_EXPR, type, inner, comp_const);
5415
5416 else if (op_code == MAX_EXPR)
5417 /* MAX (X, 0) > -1 -> true */
5418 return omit_one_operand (type, integer_one_node, inner);
5419
5420 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5421 /* MIN (X, 0) > 0 -> false
5422 MIN (X, 0) > 5 -> false */
5423 return omit_one_operand (type, integer_zero_node, inner);
5424
5425 else
5426 /* MIN (X, 0) > -1 -> X > -1 */
5427 return fold_build2 (GT_EXPR, type, inner, comp_const);
5428
5429 default:
5430 return NULL_TREE;
5431 }
5432 }
5433 \f
5434 /* T is an integer expression that is being multiplied, divided, or taken a
5435 modulus (CODE says which and what kind of divide or modulus) by a
5436 constant C. See if we can eliminate that operation by folding it with
5437 other operations already in T. WIDE_TYPE, if non-null, is a type that
5438 should be used for the computation if wider than our type.
5439
5440 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5441 (X * 2) + (Y * 4). We must, however, be assured that either the original
5442 expression would not overflow or that overflow is undefined for the type
5443 in the language in question.
5444
5445 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5446 the machine has a multiply-accumulate insn or that this is part of an
5447 addressing calculation.
5448
5449 If we return a non-null expression, it is an equivalent form of the
5450 original computation, but need not be in the original type. */
5451
5452 static tree
5453 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5454 {
5455 /* To avoid exponential search depth, refuse to allow recursion past
5456 three levels. Beyond that (1) it's highly unlikely that we'll find
5457 something interesting and (2) we've probably processed it before
5458 when we built the inner expression. */
5459
5460 static int depth;
5461 tree ret;
5462
5463 if (depth > 3)
5464 return NULL;
5465
5466 depth++;
5467 ret = extract_muldiv_1 (t, c, code, wide_type);
5468 depth--;
5469
5470 return ret;
5471 }
5472
5473 static tree
5474 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5475 {
5476 tree type = TREE_TYPE (t);
5477 enum tree_code tcode = TREE_CODE (t);
5478 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5479 > GET_MODE_SIZE (TYPE_MODE (type)))
5480 ? wide_type : type);
5481 tree t1, t2;
5482 int same_p = tcode == code;
5483 tree op0 = NULL_TREE, op1 = NULL_TREE;
5484
5485 /* Don't deal with constants of zero here; they confuse the code below. */
5486 if (integer_zerop (c))
5487 return NULL_TREE;
5488
5489 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5490 op0 = TREE_OPERAND (t, 0);
5491
5492 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5493 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5494
5495 /* Note that we need not handle conditional operations here since fold
5496 already handles those cases. So just do arithmetic here. */
5497 switch (tcode)
5498 {
5499 case INTEGER_CST:
5500 /* For a constant, we can always simplify if we are a multiply
5501 or (for divide and modulus) if it is a multiple of our constant. */
5502 if (code == MULT_EXPR
5503 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5504 return const_binop (code, fold_convert (ctype, t),
5505 fold_convert (ctype, c), 0);
5506 break;
5507
5508 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5509 /* If op0 is an expression ... */
5510 if ((COMPARISON_CLASS_P (op0)
5511 || UNARY_CLASS_P (op0)
5512 || BINARY_CLASS_P (op0)
5513 || EXPRESSION_CLASS_P (op0))
5514 /* ... and is unsigned, and its type is smaller than ctype,
5515 then we cannot pass through as widening. */
5516 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5517 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5518 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5519 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5520 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5521 /* ... or this is a truncation (t is narrower than op0),
5522 then we cannot pass through this narrowing. */
5523 || (GET_MODE_SIZE (TYPE_MODE (type))
5524 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5525 /* ... or signedness changes for division or modulus,
5526 then we cannot pass through this conversion. */
5527 || (code != MULT_EXPR
5528 && (TYPE_UNSIGNED (ctype)
5529 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5530 break;
5531
5532 /* Pass the constant down and see if we can make a simplification. If
5533 we can, replace this expression with the inner simplification for
5534 possible later conversion to our or some other type. */
5535 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5536 && TREE_CODE (t2) == INTEGER_CST
5537 && !TREE_OVERFLOW (t2)
5538 && (0 != (t1 = extract_muldiv (op0, t2, code,
5539 code == MULT_EXPR
5540 ? ctype : NULL_TREE))))
5541 return t1;
5542 break;
5543
5544 case ABS_EXPR:
5545 /* If widening the type changes it from signed to unsigned, then we
5546 must avoid building ABS_EXPR itself as unsigned. */
5547 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5548 {
5549 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5550 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5551 {
5552 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5553 return fold_convert (ctype, t1);
5554 }
5555 break;
5556 }
5557 /* FALLTHROUGH */
5558 case NEGATE_EXPR:
5559 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5560 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5561 break;
5562
5563 case MIN_EXPR: case MAX_EXPR:
5564 /* If widening the type changes the signedness, then we can't perform
5565 this optimization as that changes the result. */
5566 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5567 break;
5568
5569 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5570 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5571 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5572 {
5573 if (tree_int_cst_sgn (c) < 0)
5574 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5575
5576 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5577 fold_convert (ctype, t2));
5578 }
5579 break;
5580
5581 case LSHIFT_EXPR: case RSHIFT_EXPR:
5582 /* If the second operand is constant, this is a multiplication
5583 or floor division, by a power of two, so we can treat it that
5584 way unless the multiplier or divisor overflows. Signed
5585 left-shift overflow is implementation-defined rather than
5586 undefined in C90, so do not convert signed left shift into
5587 multiplication. */
5588 if (TREE_CODE (op1) == INTEGER_CST
5589 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5590 /* const_binop may not detect overflow correctly,
5591 so check for it explicitly here. */
5592 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5593 && TREE_INT_CST_HIGH (op1) == 0
5594 && 0 != (t1 = fold_convert (ctype,
5595 const_binop (LSHIFT_EXPR,
5596 size_one_node,
5597 op1, 0)))
5598 && !TREE_OVERFLOW (t1))
5599 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5600 ? MULT_EXPR : FLOOR_DIV_EXPR,
5601 ctype, fold_convert (ctype, op0), t1),
5602 c, code, wide_type);
5603 break;
5604
5605 case PLUS_EXPR: case MINUS_EXPR:
5606 /* See if we can eliminate the operation on both sides. If we can, we
5607 can return a new PLUS or MINUS. If we can't, the only remaining
5608 cases where we can do anything are if the second operand is a
5609 constant. */
5610 t1 = extract_muldiv (op0, c, code, wide_type);
5611 t2 = extract_muldiv (op1, c, code, wide_type);
5612 if (t1 != 0 && t2 != 0
5613 && (code == MULT_EXPR
5614 /* If not multiplication, we can only do this if both operands
5615 are divisible by c. */
5616 || (multiple_of_p (ctype, op0, c)
5617 && multiple_of_p (ctype, op1, c))))
5618 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5619 fold_convert (ctype, t2));
5620
5621 /* If this was a subtraction, negate OP1 and set it to be an addition.
5622 This simplifies the logic below. */
5623 if (tcode == MINUS_EXPR)
5624 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5625
5626 if (TREE_CODE (op1) != INTEGER_CST)
5627 break;
5628
5629 /* If either OP1 or C are negative, this optimization is not safe for
5630 some of the division and remainder types while for others we need
5631 to change the code. */
5632 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5633 {
5634 if (code == CEIL_DIV_EXPR)
5635 code = FLOOR_DIV_EXPR;
5636 else if (code == FLOOR_DIV_EXPR)
5637 code = CEIL_DIV_EXPR;
5638 else if (code != MULT_EXPR
5639 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5640 break;
5641 }
5642
5643 /* If it's a multiply or a division/modulus operation of a multiple
5644 of our constant, do the operation and verify it doesn't overflow. */
5645 if (code == MULT_EXPR
5646 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5647 {
5648 op1 = const_binop (code, fold_convert (ctype, op1),
5649 fold_convert (ctype, c), 0);
5650 /* We allow the constant to overflow with wrapping semantics. */
5651 if (op1 == 0
5652 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5653 break;
5654 }
5655 else
5656 break;
5657
5658 /* If we have an unsigned type is not a sizetype, we cannot widen
5659 the operation since it will change the result if the original
5660 computation overflowed. */
5661 if (TYPE_UNSIGNED (ctype)
5662 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5663 && ctype != type)
5664 break;
5665
5666 /* If we were able to eliminate our operation from the first side,
5667 apply our operation to the second side and reform the PLUS. */
5668 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5669 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5670
5671 /* The last case is if we are a multiply. In that case, we can
5672 apply the distributive law to commute the multiply and addition
5673 if the multiplication of the constants doesn't overflow. */
5674 if (code == MULT_EXPR)
5675 return fold_build2 (tcode, ctype,
5676 fold_build2 (code, ctype,
5677 fold_convert (ctype, op0),
5678 fold_convert (ctype, c)),
5679 op1);
5680
5681 break;
5682
5683 case MULT_EXPR:
5684 /* We have a special case here if we are doing something like
5685 (C * 8) % 4 since we know that's zero. */
5686 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5687 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5688 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5689 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5690 return omit_one_operand (type, integer_zero_node, op0);
5691
5692 /* ... fall through ... */
5693
5694 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5695 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5696 /* If we can extract our operation from the LHS, do so and return a
5697 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5698 do something only if the second operand is a constant. */
5699 if (same_p
5700 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5701 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5702 fold_convert (ctype, op1));
5703 else if (tcode == MULT_EXPR && code == MULT_EXPR
5704 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5705 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5706 fold_convert (ctype, t1));
5707 else if (TREE_CODE (op1) != INTEGER_CST)
5708 return 0;
5709
5710 /* If these are the same operation types, we can associate them
5711 assuming no overflow. */
5712 if (tcode == code
5713 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5714 fold_convert (ctype, c), 0))
5715 && !TREE_OVERFLOW (t1))
5716 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5717
5718 /* If these operations "cancel" each other, we have the main
5719 optimizations of this pass, which occur when either constant is a
5720 multiple of the other, in which case we replace this with either an
5721 operation or CODE or TCODE.
5722
5723 If we have an unsigned type that is not a sizetype, we cannot do
5724 this since it will change the result if the original computation
5725 overflowed. */
5726 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5727 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5728 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5729 || (tcode == MULT_EXPR
5730 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5731 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5732 {
5733 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5734 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5735 fold_convert (ctype,
5736 const_binop (TRUNC_DIV_EXPR,
5737 op1, c, 0)));
5738 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5739 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5740 fold_convert (ctype,
5741 const_binop (TRUNC_DIV_EXPR,
5742 c, op1, 0)));
5743 }
5744 break;
5745
5746 default:
5747 break;
5748 }
5749
5750 return 0;
5751 }
5752 \f
5753 /* Return a node which has the indicated constant VALUE (either 0 or
5754 1), and is of the indicated TYPE. */
5755
5756 tree
5757 constant_boolean_node (int value, tree type)
5758 {
5759 if (type == integer_type_node)
5760 return value ? integer_one_node : integer_zero_node;
5761 else if (type == boolean_type_node)
5762 return value ? boolean_true_node : boolean_false_node;
5763 else
5764 return build_int_cst (type, value);
5765 }
5766
5767
5768 /* Return true if expr looks like an ARRAY_REF and set base and
5769 offset to the appropriate trees. If there is no offset,
5770 offset is set to NULL_TREE. Base will be canonicalized to
5771 something you can get the element type from using
5772 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5773 in bytes to the base. */
5774
5775 static bool
5776 extract_array_ref (tree expr, tree *base, tree *offset)
5777 {
5778 /* One canonical form is a PLUS_EXPR with the first
5779 argument being an ADDR_EXPR with a possible NOP_EXPR
5780 attached. */
5781 if (TREE_CODE (expr) == PLUS_EXPR)
5782 {
5783 tree op0 = TREE_OPERAND (expr, 0);
5784 tree inner_base, dummy1;
5785 /* Strip NOP_EXPRs here because the C frontends and/or
5786 folders present us (int *)&x.a + 4B possibly. */
5787 STRIP_NOPS (op0);
5788 if (extract_array_ref (op0, &inner_base, &dummy1))
5789 {
5790 *base = inner_base;
5791 if (dummy1 == NULL_TREE)
5792 *offset = TREE_OPERAND (expr, 1);
5793 else
5794 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5795 dummy1, TREE_OPERAND (expr, 1));
5796 return true;
5797 }
5798 }
5799 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5800 which we transform into an ADDR_EXPR with appropriate
5801 offset. For other arguments to the ADDR_EXPR we assume
5802 zero offset and as such do not care about the ADDR_EXPR
5803 type and strip possible nops from it. */
5804 else if (TREE_CODE (expr) == ADDR_EXPR)
5805 {
5806 tree op0 = TREE_OPERAND (expr, 0);
5807 if (TREE_CODE (op0) == ARRAY_REF)
5808 {
5809 tree idx = TREE_OPERAND (op0, 1);
5810 *base = TREE_OPERAND (op0, 0);
5811 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5812 array_ref_element_size (op0));
5813 }
5814 else
5815 {
5816 /* Handle array-to-pointer decay as &a. */
5817 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5818 *base = TREE_OPERAND (expr, 0);
5819 else
5820 *base = expr;
5821 *offset = NULL_TREE;
5822 }
5823 return true;
5824 }
5825 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5826 else if (SSA_VAR_P (expr)
5827 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5828 {
5829 *base = expr;
5830 *offset = NULL_TREE;
5831 return true;
5832 }
5833
5834 return false;
5835 }
5836
5837
5838 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5839 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5840 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5841 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5842 COND is the first argument to CODE; otherwise (as in the example
5843 given here), it is the second argument. TYPE is the type of the
5844 original expression. Return NULL_TREE if no simplification is
5845 possible. */
5846
5847 static tree
5848 fold_binary_op_with_conditional_arg (enum tree_code code,
5849 tree type, tree op0, tree op1,
5850 tree cond, tree arg, int cond_first_p)
5851 {
5852 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5853 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5854 tree test, true_value, false_value;
5855 tree lhs = NULL_TREE;
5856 tree rhs = NULL_TREE;
5857
5858 /* This transformation is only worthwhile if we don't have to wrap
5859 arg in a SAVE_EXPR, and the operation can be simplified on at least
5860 one of the branches once its pushed inside the COND_EXPR. */
5861 if (!TREE_CONSTANT (arg))
5862 return NULL_TREE;
5863
5864 if (TREE_CODE (cond) == COND_EXPR)
5865 {
5866 test = TREE_OPERAND (cond, 0);
5867 true_value = TREE_OPERAND (cond, 1);
5868 false_value = TREE_OPERAND (cond, 2);
5869 /* If this operand throws an expression, then it does not make
5870 sense to try to perform a logical or arithmetic operation
5871 involving it. */
5872 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5873 lhs = true_value;
5874 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5875 rhs = false_value;
5876 }
5877 else
5878 {
5879 tree testtype = TREE_TYPE (cond);
5880 test = cond;
5881 true_value = constant_boolean_node (true, testtype);
5882 false_value = constant_boolean_node (false, testtype);
5883 }
5884
5885 arg = fold_convert (arg_type, arg);
5886 if (lhs == 0)
5887 {
5888 true_value = fold_convert (cond_type, true_value);
5889 if (cond_first_p)
5890 lhs = fold_build2 (code, type, true_value, arg);
5891 else
5892 lhs = fold_build2 (code, type, arg, true_value);
5893 }
5894 if (rhs == 0)
5895 {
5896 false_value = fold_convert (cond_type, false_value);
5897 if (cond_first_p)
5898 rhs = fold_build2 (code, type, false_value, arg);
5899 else
5900 rhs = fold_build2 (code, type, arg, false_value);
5901 }
5902
5903 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5904 return fold_convert (type, test);
5905 }
5906
5907 \f
5908 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5909
5910 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5911 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5912 ADDEND is the same as X.
5913
5914 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5915 and finite. The problematic cases are when X is zero, and its mode
5916 has signed zeros. In the case of rounding towards -infinity,
5917 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5918 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5919
5920 static bool
5921 fold_real_zero_addition_p (tree type, tree addend, int negate)
5922 {
5923 if (!real_zerop (addend))
5924 return false;
5925
5926 /* Don't allow the fold with -fsignaling-nans. */
5927 if (HONOR_SNANS (TYPE_MODE (type)))
5928 return false;
5929
5930 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5931 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5932 return true;
5933
5934 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5935 if (TREE_CODE (addend) == REAL_CST
5936 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5937 negate = !negate;
5938
5939 /* The mode has signed zeros, and we have to honor their sign.
5940 In this situation, there is only one case we can return true for.
5941 X - 0 is the same as X unless rounding towards -infinity is
5942 supported. */
5943 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5944 }
5945
5946 /* Subroutine of fold() that checks comparisons of built-in math
5947 functions against real constants.
5948
5949 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5950 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5951 is the type of the result and ARG0 and ARG1 are the operands of the
5952 comparison. ARG1 must be a TREE_REAL_CST.
5953
5954 The function returns the constant folded tree if a simplification
5955 can be made, and NULL_TREE otherwise. */
5956
5957 static tree
5958 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5959 tree type, tree arg0, tree arg1)
5960 {
5961 REAL_VALUE_TYPE c;
5962
5963 if (BUILTIN_SQRT_P (fcode))
5964 {
5965 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5966 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5967
5968 c = TREE_REAL_CST (arg1);
5969 if (REAL_VALUE_NEGATIVE (c))
5970 {
5971 /* sqrt(x) < y is always false, if y is negative. */
5972 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5973 return omit_one_operand (type, integer_zero_node, arg);
5974
5975 /* sqrt(x) > y is always true, if y is negative and we
5976 don't care about NaNs, i.e. negative values of x. */
5977 if (code == NE_EXPR || !HONOR_NANS (mode))
5978 return omit_one_operand (type, integer_one_node, arg);
5979
5980 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5981 return fold_build2 (GE_EXPR, type, arg,
5982 build_real (TREE_TYPE (arg), dconst0));
5983 }
5984 else if (code == GT_EXPR || code == GE_EXPR)
5985 {
5986 REAL_VALUE_TYPE c2;
5987
5988 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5989 real_convert (&c2, mode, &c2);
5990
5991 if (REAL_VALUE_ISINF (c2))
5992 {
5993 /* sqrt(x) > y is x == +Inf, when y is very large. */
5994 if (HONOR_INFINITIES (mode))
5995 return fold_build2 (EQ_EXPR, type, arg,
5996 build_real (TREE_TYPE (arg), c2));
5997
5998 /* sqrt(x) > y is always false, when y is very large
5999 and we don't care about infinities. */
6000 return omit_one_operand (type, integer_zero_node, arg);
6001 }
6002
6003 /* sqrt(x) > c is the same as x > c*c. */
6004 return fold_build2 (code, type, arg,
6005 build_real (TREE_TYPE (arg), c2));
6006 }
6007 else if (code == LT_EXPR || code == LE_EXPR)
6008 {
6009 REAL_VALUE_TYPE c2;
6010
6011 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6012 real_convert (&c2, mode, &c2);
6013
6014 if (REAL_VALUE_ISINF (c2))
6015 {
6016 /* sqrt(x) < y is always true, when y is a very large
6017 value and we don't care about NaNs or Infinities. */
6018 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6019 return omit_one_operand (type, integer_one_node, arg);
6020
6021 /* sqrt(x) < y is x != +Inf when y is very large and we
6022 don't care about NaNs. */
6023 if (! HONOR_NANS (mode))
6024 return fold_build2 (NE_EXPR, type, arg,
6025 build_real (TREE_TYPE (arg), c2));
6026
6027 /* sqrt(x) < y is x >= 0 when y is very large and we
6028 don't care about Infinities. */
6029 if (! HONOR_INFINITIES (mode))
6030 return fold_build2 (GE_EXPR, type, arg,
6031 build_real (TREE_TYPE (arg), dconst0));
6032
6033 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6034 if (lang_hooks.decls.global_bindings_p () != 0
6035 || CONTAINS_PLACEHOLDER_P (arg))
6036 return NULL_TREE;
6037
6038 arg = save_expr (arg);
6039 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6040 fold_build2 (GE_EXPR, type, arg,
6041 build_real (TREE_TYPE (arg),
6042 dconst0)),
6043 fold_build2 (NE_EXPR, type, arg,
6044 build_real (TREE_TYPE (arg),
6045 c2)));
6046 }
6047
6048 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6049 if (! HONOR_NANS (mode))
6050 return fold_build2 (code, type, arg,
6051 build_real (TREE_TYPE (arg), c2));
6052
6053 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6054 if (lang_hooks.decls.global_bindings_p () == 0
6055 && ! CONTAINS_PLACEHOLDER_P (arg))
6056 {
6057 arg = save_expr (arg);
6058 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6059 fold_build2 (GE_EXPR, type, arg,
6060 build_real (TREE_TYPE (arg),
6061 dconst0)),
6062 fold_build2 (code, type, arg,
6063 build_real (TREE_TYPE (arg),
6064 c2)));
6065 }
6066 }
6067 }
6068
6069 return NULL_TREE;
6070 }
6071
6072 /* Subroutine of fold() that optimizes comparisons against Infinities,
6073 either +Inf or -Inf.
6074
6075 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6076 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6077 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6078
6079 The function returns the constant folded tree if a simplification
6080 can be made, and NULL_TREE otherwise. */
6081
6082 static tree
6083 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6084 {
6085 enum machine_mode mode;
6086 REAL_VALUE_TYPE max;
6087 tree temp;
6088 bool neg;
6089
6090 mode = TYPE_MODE (TREE_TYPE (arg0));
6091
6092 /* For negative infinity swap the sense of the comparison. */
6093 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6094 if (neg)
6095 code = swap_tree_comparison (code);
6096
6097 switch (code)
6098 {
6099 case GT_EXPR:
6100 /* x > +Inf is always false, if with ignore sNANs. */
6101 if (HONOR_SNANS (mode))
6102 return NULL_TREE;
6103 return omit_one_operand (type, integer_zero_node, arg0);
6104
6105 case LE_EXPR:
6106 /* x <= +Inf is always true, if we don't case about NaNs. */
6107 if (! HONOR_NANS (mode))
6108 return omit_one_operand (type, integer_one_node, arg0);
6109
6110 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6111 if (lang_hooks.decls.global_bindings_p () == 0
6112 && ! CONTAINS_PLACEHOLDER_P (arg0))
6113 {
6114 arg0 = save_expr (arg0);
6115 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6116 }
6117 break;
6118
6119 case EQ_EXPR:
6120 case GE_EXPR:
6121 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6122 real_maxval (&max, neg, mode);
6123 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6124 arg0, build_real (TREE_TYPE (arg0), max));
6125
6126 case LT_EXPR:
6127 /* x < +Inf is always equal to x <= DBL_MAX. */
6128 real_maxval (&max, neg, mode);
6129 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6130 arg0, build_real (TREE_TYPE (arg0), max));
6131
6132 case NE_EXPR:
6133 /* x != +Inf is always equal to !(x > DBL_MAX). */
6134 real_maxval (&max, neg, mode);
6135 if (! HONOR_NANS (mode))
6136 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6137 arg0, build_real (TREE_TYPE (arg0), max));
6138
6139 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6140 arg0, build_real (TREE_TYPE (arg0), max));
6141 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6142
6143 default:
6144 break;
6145 }
6146
6147 return NULL_TREE;
6148 }
6149
6150 /* Subroutine of fold() that optimizes comparisons of a division by
6151 a nonzero integer constant against an integer constant, i.e.
6152 X/C1 op C2.
6153
6154 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6155 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6156 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6157
6158 The function returns the constant folded tree if a simplification
6159 can be made, and NULL_TREE otherwise. */
6160
6161 static tree
6162 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6163 {
6164 tree prod, tmp, hi, lo;
6165 tree arg00 = TREE_OPERAND (arg0, 0);
6166 tree arg01 = TREE_OPERAND (arg0, 1);
6167 unsigned HOST_WIDE_INT lpart;
6168 HOST_WIDE_INT hpart;
6169 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6170 bool neg_overflow;
6171 int overflow;
6172
6173 /* We have to do this the hard way to detect unsigned overflow.
6174 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6175 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6176 TREE_INT_CST_HIGH (arg01),
6177 TREE_INT_CST_LOW (arg1),
6178 TREE_INT_CST_HIGH (arg1),
6179 &lpart, &hpart, unsigned_p);
6180 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6181 -1, overflow);
6182 neg_overflow = false;
6183
6184 if (unsigned_p)
6185 {
6186 tmp = int_const_binop (MINUS_EXPR, arg01,
6187 build_int_cst (TREE_TYPE (arg01), 1), 0);
6188 lo = prod;
6189
6190 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6191 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6192 TREE_INT_CST_HIGH (prod),
6193 TREE_INT_CST_LOW (tmp),
6194 TREE_INT_CST_HIGH (tmp),
6195 &lpart, &hpart, unsigned_p);
6196 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6197 -1, overflow | TREE_OVERFLOW (prod));
6198 }
6199 else if (tree_int_cst_sgn (arg01) >= 0)
6200 {
6201 tmp = int_const_binop (MINUS_EXPR, arg01,
6202 build_int_cst (TREE_TYPE (arg01), 1), 0);
6203 switch (tree_int_cst_sgn (arg1))
6204 {
6205 case -1:
6206 neg_overflow = true;
6207 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6208 hi = prod;
6209 break;
6210
6211 case 0:
6212 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6213 hi = tmp;
6214 break;
6215
6216 case 1:
6217 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6218 lo = prod;
6219 break;
6220
6221 default:
6222 gcc_unreachable ();
6223 }
6224 }
6225 else
6226 {
6227 /* A negative divisor reverses the relational operators. */
6228 code = swap_tree_comparison (code);
6229
6230 tmp = int_const_binop (PLUS_EXPR, arg01,
6231 build_int_cst (TREE_TYPE (arg01), 1), 0);
6232 switch (tree_int_cst_sgn (arg1))
6233 {
6234 case -1:
6235 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6236 lo = prod;
6237 break;
6238
6239 case 0:
6240 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6241 lo = tmp;
6242 break;
6243
6244 case 1:
6245 neg_overflow = true;
6246 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6247 hi = prod;
6248 break;
6249
6250 default:
6251 gcc_unreachable ();
6252 }
6253 }
6254
6255 switch (code)
6256 {
6257 case EQ_EXPR:
6258 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6259 return omit_one_operand (type, integer_zero_node, arg00);
6260 if (TREE_OVERFLOW (hi))
6261 return fold_build2 (GE_EXPR, type, arg00, lo);
6262 if (TREE_OVERFLOW (lo))
6263 return fold_build2 (LE_EXPR, type, arg00, hi);
6264 return build_range_check (type, arg00, 1, lo, hi);
6265
6266 case NE_EXPR:
6267 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6268 return omit_one_operand (type, integer_one_node, arg00);
6269 if (TREE_OVERFLOW (hi))
6270 return fold_build2 (LT_EXPR, type, arg00, lo);
6271 if (TREE_OVERFLOW (lo))
6272 return fold_build2 (GT_EXPR, type, arg00, hi);
6273 return build_range_check (type, arg00, 0, lo, hi);
6274
6275 case LT_EXPR:
6276 if (TREE_OVERFLOW (lo))
6277 {
6278 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6279 return omit_one_operand (type, tmp, arg00);
6280 }
6281 return fold_build2 (LT_EXPR, type, arg00, lo);
6282
6283 case LE_EXPR:
6284 if (TREE_OVERFLOW (hi))
6285 {
6286 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6287 return omit_one_operand (type, tmp, arg00);
6288 }
6289 return fold_build2 (LE_EXPR, type, arg00, hi);
6290
6291 case GT_EXPR:
6292 if (TREE_OVERFLOW (hi))
6293 {
6294 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6295 return omit_one_operand (type, tmp, arg00);
6296 }
6297 return fold_build2 (GT_EXPR, type, arg00, hi);
6298
6299 case GE_EXPR:
6300 if (TREE_OVERFLOW (lo))
6301 {
6302 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6303 return omit_one_operand (type, tmp, arg00);
6304 }
6305 return fold_build2 (GE_EXPR, type, arg00, lo);
6306
6307 default:
6308 break;
6309 }
6310
6311 return NULL_TREE;
6312 }
6313
6314
6315 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6316 equality/inequality test, then return a simplified form of the test
6317 using a sign testing. Otherwise return NULL. TYPE is the desired
6318 result type. */
6319
6320 static tree
6321 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6322 tree result_type)
6323 {
6324 /* If this is testing a single bit, we can optimize the test. */
6325 if ((code == NE_EXPR || code == EQ_EXPR)
6326 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6327 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6328 {
6329 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6330 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6331 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6332
6333 if (arg00 != NULL_TREE
6334 /* This is only a win if casting to a signed type is cheap,
6335 i.e. when arg00's type is not a partial mode. */
6336 && TYPE_PRECISION (TREE_TYPE (arg00))
6337 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6338 {
6339 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6340 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6341 result_type, fold_convert (stype, arg00),
6342 build_int_cst (stype, 0));
6343 }
6344 }
6345
6346 return NULL_TREE;
6347 }
6348
6349 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6350 equality/inequality test, then return a simplified form of
6351 the test using shifts and logical operations. Otherwise return
6352 NULL. TYPE is the desired result type. */
6353
6354 tree
6355 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6356 tree result_type)
6357 {
6358 /* If this is testing a single bit, we can optimize the test. */
6359 if ((code == NE_EXPR || code == EQ_EXPR)
6360 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6361 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6362 {
6363 tree inner = TREE_OPERAND (arg0, 0);
6364 tree type = TREE_TYPE (arg0);
6365 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6366 enum machine_mode operand_mode = TYPE_MODE (type);
6367 int ops_unsigned;
6368 tree signed_type, unsigned_type, intermediate_type;
6369 tree tem, one;
6370
6371 /* First, see if we can fold the single bit test into a sign-bit
6372 test. */
6373 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6374 result_type);
6375 if (tem)
6376 return tem;
6377
6378 /* Otherwise we have (A & C) != 0 where C is a single bit,
6379 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6380 Similarly for (A & C) == 0. */
6381
6382 /* If INNER is a right shift of a constant and it plus BITNUM does
6383 not overflow, adjust BITNUM and INNER. */
6384 if (TREE_CODE (inner) == RSHIFT_EXPR
6385 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6386 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6387 && bitnum < TYPE_PRECISION (type)
6388 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6389 bitnum - TYPE_PRECISION (type)))
6390 {
6391 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6392 inner = TREE_OPERAND (inner, 0);
6393 }
6394
6395 /* If we are going to be able to omit the AND below, we must do our
6396 operations as unsigned. If we must use the AND, we have a choice.
6397 Normally unsigned is faster, but for some machines signed is. */
6398 #ifdef LOAD_EXTEND_OP
6399 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6400 && !flag_syntax_only) ? 0 : 1;
6401 #else
6402 ops_unsigned = 1;
6403 #endif
6404
6405 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6406 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6407 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6408 inner = fold_convert (intermediate_type, inner);
6409
6410 if (bitnum != 0)
6411 inner = build2 (RSHIFT_EXPR, intermediate_type,
6412 inner, size_int (bitnum));
6413
6414 one = build_int_cst (intermediate_type, 1);
6415
6416 if (code == EQ_EXPR)
6417 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6418
6419 /* Put the AND last so it can combine with more things. */
6420 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6421
6422 /* Make sure to return the proper type. */
6423 inner = fold_convert (result_type, inner);
6424
6425 return inner;
6426 }
6427 return NULL_TREE;
6428 }
6429
6430 /* Check whether we are allowed to reorder operands arg0 and arg1,
6431 such that the evaluation of arg1 occurs before arg0. */
6432
6433 static bool
6434 reorder_operands_p (tree arg0, tree arg1)
6435 {
6436 if (! flag_evaluation_order)
6437 return true;
6438 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6439 return true;
6440 return ! TREE_SIDE_EFFECTS (arg0)
6441 && ! TREE_SIDE_EFFECTS (arg1);
6442 }
6443
6444 /* Test whether it is preferable two swap two operands, ARG0 and
6445 ARG1, for example because ARG0 is an integer constant and ARG1
6446 isn't. If REORDER is true, only recommend swapping if we can
6447 evaluate the operands in reverse order. */
6448
6449 bool
6450 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6451 {
6452 STRIP_SIGN_NOPS (arg0);
6453 STRIP_SIGN_NOPS (arg1);
6454
6455 if (TREE_CODE (arg1) == INTEGER_CST)
6456 return 0;
6457 if (TREE_CODE (arg0) == INTEGER_CST)
6458 return 1;
6459
6460 if (TREE_CODE (arg1) == REAL_CST)
6461 return 0;
6462 if (TREE_CODE (arg0) == REAL_CST)
6463 return 1;
6464
6465 if (TREE_CODE (arg1) == COMPLEX_CST)
6466 return 0;
6467 if (TREE_CODE (arg0) == COMPLEX_CST)
6468 return 1;
6469
6470 if (TREE_CONSTANT (arg1))
6471 return 0;
6472 if (TREE_CONSTANT (arg0))
6473 return 1;
6474
6475 if (optimize_size)
6476 return 0;
6477
6478 if (reorder && flag_evaluation_order
6479 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6480 return 0;
6481
6482 if (DECL_P (arg1))
6483 return 0;
6484 if (DECL_P (arg0))
6485 return 1;
6486
6487 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6488 for commutative and comparison operators. Ensuring a canonical
6489 form allows the optimizers to find additional redundancies without
6490 having to explicitly check for both orderings. */
6491 if (TREE_CODE (arg0) == SSA_NAME
6492 && TREE_CODE (arg1) == SSA_NAME
6493 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6494 return 1;
6495
6496 return 0;
6497 }
6498
6499 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6500 ARG0 is extended to a wider type. */
6501
6502 static tree
6503 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6504 {
6505 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6506 tree arg1_unw;
6507 tree shorter_type, outer_type;
6508 tree min, max;
6509 bool above, below;
6510
6511 if (arg0_unw == arg0)
6512 return NULL_TREE;
6513 shorter_type = TREE_TYPE (arg0_unw);
6514
6515 #ifdef HAVE_canonicalize_funcptr_for_compare
6516 /* Disable this optimization if we're casting a function pointer
6517 type on targets that require function pointer canonicalization. */
6518 if (HAVE_canonicalize_funcptr_for_compare
6519 && TREE_CODE (shorter_type) == POINTER_TYPE
6520 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6521 return NULL_TREE;
6522 #endif
6523
6524 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6525 return NULL_TREE;
6526
6527 arg1_unw = get_unwidened (arg1, shorter_type);
6528
6529 /* If possible, express the comparison in the shorter mode. */
6530 if ((code == EQ_EXPR || code == NE_EXPR
6531 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6532 && (TREE_TYPE (arg1_unw) == shorter_type
6533 || (TREE_CODE (arg1_unw) == INTEGER_CST
6534 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6535 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6536 && int_fits_type_p (arg1_unw, shorter_type))))
6537 return fold_build2 (code, type, arg0_unw,
6538 fold_convert (shorter_type, arg1_unw));
6539
6540 if (TREE_CODE (arg1_unw) != INTEGER_CST
6541 || TREE_CODE (shorter_type) != INTEGER_TYPE
6542 || !int_fits_type_p (arg1_unw, shorter_type))
6543 return NULL_TREE;
6544
6545 /* If we are comparing with the integer that does not fit into the range
6546 of the shorter type, the result is known. */
6547 outer_type = TREE_TYPE (arg1_unw);
6548 min = lower_bound_in_type (outer_type, shorter_type);
6549 max = upper_bound_in_type (outer_type, shorter_type);
6550
6551 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6552 max, arg1_unw));
6553 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6554 arg1_unw, min));
6555
6556 switch (code)
6557 {
6558 case EQ_EXPR:
6559 if (above || below)
6560 return omit_one_operand (type, integer_zero_node, arg0);
6561 break;
6562
6563 case NE_EXPR:
6564 if (above || below)
6565 return omit_one_operand (type, integer_one_node, arg0);
6566 break;
6567
6568 case LT_EXPR:
6569 case LE_EXPR:
6570 if (above)
6571 return omit_one_operand (type, integer_one_node, arg0);
6572 else if (below)
6573 return omit_one_operand (type, integer_zero_node, arg0);
6574
6575 case GT_EXPR:
6576 case GE_EXPR:
6577 if (above)
6578 return omit_one_operand (type, integer_zero_node, arg0);
6579 else if (below)
6580 return omit_one_operand (type, integer_one_node, arg0);
6581
6582 default:
6583 break;
6584 }
6585
6586 return NULL_TREE;
6587 }
6588
6589 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6590 ARG0 just the signedness is changed. */
6591
6592 static tree
6593 fold_sign_changed_comparison (enum tree_code code, tree type,
6594 tree arg0, tree arg1)
6595 {
6596 tree arg0_inner;
6597 tree inner_type, outer_type;
6598
6599 if (TREE_CODE (arg0) != NOP_EXPR
6600 && TREE_CODE (arg0) != CONVERT_EXPR)
6601 return NULL_TREE;
6602
6603 outer_type = TREE_TYPE (arg0);
6604 arg0_inner = TREE_OPERAND (arg0, 0);
6605 inner_type = TREE_TYPE (arg0_inner);
6606
6607 #ifdef HAVE_canonicalize_funcptr_for_compare
6608 /* Disable this optimization if we're casting a function pointer
6609 type on targets that require function pointer canonicalization. */
6610 if (HAVE_canonicalize_funcptr_for_compare
6611 && TREE_CODE (inner_type) == POINTER_TYPE
6612 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6613 return NULL_TREE;
6614 #endif
6615
6616 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6617 return NULL_TREE;
6618
6619 if (TREE_CODE (arg1) != INTEGER_CST
6620 && !((TREE_CODE (arg1) == NOP_EXPR
6621 || TREE_CODE (arg1) == CONVERT_EXPR)
6622 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6623 return NULL_TREE;
6624
6625 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6626 && code != NE_EXPR
6627 && code != EQ_EXPR)
6628 return NULL_TREE;
6629
6630 if (TREE_CODE (arg1) == INTEGER_CST)
6631 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6632 TREE_INT_CST_HIGH (arg1), 0,
6633 TREE_OVERFLOW (arg1));
6634 else
6635 arg1 = fold_convert (inner_type, arg1);
6636
6637 return fold_build2 (code, type, arg0_inner, arg1);
6638 }
6639
6640 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6641 step of the array. Reconstructs s and delta in the case of s * delta
6642 being an integer constant (and thus already folded).
6643 ADDR is the address. MULT is the multiplicative expression.
6644 If the function succeeds, the new address expression is returned. Otherwise
6645 NULL_TREE is returned. */
6646
6647 static tree
6648 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6649 {
6650 tree s, delta, step;
6651 tree ref = TREE_OPERAND (addr, 0), pref;
6652 tree ret, pos;
6653 tree itype;
6654 bool mdim = false;
6655
6656 /* Canonicalize op1 into a possibly non-constant delta
6657 and an INTEGER_CST s. */
6658 if (TREE_CODE (op1) == MULT_EXPR)
6659 {
6660 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6661
6662 STRIP_NOPS (arg0);
6663 STRIP_NOPS (arg1);
6664
6665 if (TREE_CODE (arg0) == INTEGER_CST)
6666 {
6667 s = arg0;
6668 delta = arg1;
6669 }
6670 else if (TREE_CODE (arg1) == INTEGER_CST)
6671 {
6672 s = arg1;
6673 delta = arg0;
6674 }
6675 else
6676 return NULL_TREE;
6677 }
6678 else if (TREE_CODE (op1) == INTEGER_CST)
6679 {
6680 delta = op1;
6681 s = NULL_TREE;
6682 }
6683 else
6684 {
6685 /* Simulate we are delta * 1. */
6686 delta = op1;
6687 s = integer_one_node;
6688 }
6689
6690 for (;; ref = TREE_OPERAND (ref, 0))
6691 {
6692 if (TREE_CODE (ref) == ARRAY_REF)
6693 {
6694 /* Remember if this was a multi-dimensional array. */
6695 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6696 mdim = true;
6697
6698 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6699 if (! itype)
6700 continue;
6701
6702 step = array_ref_element_size (ref);
6703 if (TREE_CODE (step) != INTEGER_CST)
6704 continue;
6705
6706 if (s)
6707 {
6708 if (! tree_int_cst_equal (step, s))
6709 continue;
6710 }
6711 else
6712 {
6713 /* Try if delta is a multiple of step. */
6714 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6715 if (! tmp)
6716 continue;
6717 delta = tmp;
6718 }
6719
6720 /* Only fold here if we can verify we do not overflow one
6721 dimension of a multi-dimensional array. */
6722 if (mdim)
6723 {
6724 tree tmp;
6725
6726 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6727 || !INTEGRAL_TYPE_P (itype)
6728 || !TYPE_MAX_VALUE (itype)
6729 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6730 continue;
6731
6732 tmp = fold_binary (code, itype,
6733 fold_convert (itype,
6734 TREE_OPERAND (ref, 1)),
6735 fold_convert (itype, delta));
6736 if (!tmp
6737 || TREE_CODE (tmp) != INTEGER_CST
6738 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6739 continue;
6740 }
6741
6742 break;
6743 }
6744 else
6745 mdim = false;
6746
6747 if (!handled_component_p (ref))
6748 return NULL_TREE;
6749 }
6750
6751 /* We found the suitable array reference. So copy everything up to it,
6752 and replace the index. */
6753
6754 pref = TREE_OPERAND (addr, 0);
6755 ret = copy_node (pref);
6756 pos = ret;
6757
6758 while (pref != ref)
6759 {
6760 pref = TREE_OPERAND (pref, 0);
6761 TREE_OPERAND (pos, 0) = copy_node (pref);
6762 pos = TREE_OPERAND (pos, 0);
6763 }
6764
6765 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6766 fold_convert (itype,
6767 TREE_OPERAND (pos, 1)),
6768 fold_convert (itype, delta));
6769
6770 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6771 }
6772
6773
6774 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6775 means A >= Y && A != MAX, but in this case we know that
6776 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6777
6778 static tree
6779 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6780 {
6781 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6782
6783 if (TREE_CODE (bound) == LT_EXPR)
6784 a = TREE_OPERAND (bound, 0);
6785 else if (TREE_CODE (bound) == GT_EXPR)
6786 a = TREE_OPERAND (bound, 1);
6787 else
6788 return NULL_TREE;
6789
6790 typea = TREE_TYPE (a);
6791 if (!INTEGRAL_TYPE_P (typea)
6792 && !POINTER_TYPE_P (typea))
6793 return NULL_TREE;
6794
6795 if (TREE_CODE (ineq) == LT_EXPR)
6796 {
6797 a1 = TREE_OPERAND (ineq, 1);
6798 y = TREE_OPERAND (ineq, 0);
6799 }
6800 else if (TREE_CODE (ineq) == GT_EXPR)
6801 {
6802 a1 = TREE_OPERAND (ineq, 0);
6803 y = TREE_OPERAND (ineq, 1);
6804 }
6805 else
6806 return NULL_TREE;
6807
6808 if (TREE_TYPE (a1) != typea)
6809 return NULL_TREE;
6810
6811 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6812 if (!integer_onep (diff))
6813 return NULL_TREE;
6814
6815 return fold_build2 (GE_EXPR, type, a, y);
6816 }
6817
6818 /* Fold a sum or difference of at least one multiplication.
6819 Returns the folded tree or NULL if no simplification could be made. */
6820
6821 static tree
6822 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6823 {
6824 tree arg00, arg01, arg10, arg11;
6825 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6826
6827 /* (A * C) +- (B * C) -> (A+-B) * C.
6828 (A * C) +- A -> A * (C+-1).
6829 We are most concerned about the case where C is a constant,
6830 but other combinations show up during loop reduction. Since
6831 it is not difficult, try all four possibilities. */
6832
6833 if (TREE_CODE (arg0) == MULT_EXPR)
6834 {
6835 arg00 = TREE_OPERAND (arg0, 0);
6836 arg01 = TREE_OPERAND (arg0, 1);
6837 }
6838 else
6839 {
6840 arg00 = arg0;
6841 arg01 = build_one_cst (type);
6842 }
6843 if (TREE_CODE (arg1) == MULT_EXPR)
6844 {
6845 arg10 = TREE_OPERAND (arg1, 0);
6846 arg11 = TREE_OPERAND (arg1, 1);
6847 }
6848 else
6849 {
6850 arg10 = arg1;
6851 arg11 = build_one_cst (type);
6852 }
6853 same = NULL_TREE;
6854
6855 if (operand_equal_p (arg01, arg11, 0))
6856 same = arg01, alt0 = arg00, alt1 = arg10;
6857 else if (operand_equal_p (arg00, arg10, 0))
6858 same = arg00, alt0 = arg01, alt1 = arg11;
6859 else if (operand_equal_p (arg00, arg11, 0))
6860 same = arg00, alt0 = arg01, alt1 = arg10;
6861 else if (operand_equal_p (arg01, arg10, 0))
6862 same = arg01, alt0 = arg00, alt1 = arg11;
6863
6864 /* No identical multiplicands; see if we can find a common
6865 power-of-two factor in non-power-of-two multiplies. This
6866 can help in multi-dimensional array access. */
6867 else if (host_integerp (arg01, 0)
6868 && host_integerp (arg11, 0))
6869 {
6870 HOST_WIDE_INT int01, int11, tmp;
6871 bool swap = false;
6872 tree maybe_same;
6873 int01 = TREE_INT_CST_LOW (arg01);
6874 int11 = TREE_INT_CST_LOW (arg11);
6875
6876 /* Move min of absolute values to int11. */
6877 if ((int01 >= 0 ? int01 : -int01)
6878 < (int11 >= 0 ? int11 : -int11))
6879 {
6880 tmp = int01, int01 = int11, int11 = tmp;
6881 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6882 maybe_same = arg01;
6883 swap = true;
6884 }
6885 else
6886 maybe_same = arg11;
6887
6888 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6889 {
6890 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6891 build_int_cst (TREE_TYPE (arg00),
6892 int01 / int11));
6893 alt1 = arg10;
6894 same = maybe_same;
6895 if (swap)
6896 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6897 }
6898 }
6899
6900 if (same)
6901 return fold_build2 (MULT_EXPR, type,
6902 fold_build2 (code, type,
6903 fold_convert (type, alt0),
6904 fold_convert (type, alt1)),
6905 fold_convert (type, same));
6906
6907 return NULL_TREE;
6908 }
6909
6910 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6911 specified by EXPR into the buffer PTR of length LEN bytes.
6912 Return the number of bytes placed in the buffer, or zero
6913 upon failure. */
6914
6915 static int
6916 native_encode_int (tree expr, unsigned char *ptr, int len)
6917 {
6918 tree type = TREE_TYPE (expr);
6919 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6920 int byte, offset, word, words;
6921 unsigned char value;
6922
6923 if (total_bytes > len)
6924 return 0;
6925 words = total_bytes / UNITS_PER_WORD;
6926
6927 for (byte = 0; byte < total_bytes; byte++)
6928 {
6929 int bitpos = byte * BITS_PER_UNIT;
6930 if (bitpos < HOST_BITS_PER_WIDE_INT)
6931 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6932 else
6933 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6934 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6935
6936 if (total_bytes > UNITS_PER_WORD)
6937 {
6938 word = byte / UNITS_PER_WORD;
6939 if (WORDS_BIG_ENDIAN)
6940 word = (words - 1) - word;
6941 offset = word * UNITS_PER_WORD;
6942 if (BYTES_BIG_ENDIAN)
6943 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6944 else
6945 offset += byte % UNITS_PER_WORD;
6946 }
6947 else
6948 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6949 ptr[offset] = value;
6950 }
6951 return total_bytes;
6952 }
6953
6954
6955 /* Subroutine of native_encode_expr. Encode the REAL_CST
6956 specified by EXPR into the buffer PTR of length LEN bytes.
6957 Return the number of bytes placed in the buffer, or zero
6958 upon failure. */
6959
6960 static int
6961 native_encode_real (tree expr, unsigned char *ptr, int len)
6962 {
6963 tree type = TREE_TYPE (expr);
6964 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6965 int byte, offset, word, words;
6966 unsigned char value;
6967
6968 /* There are always 32 bits in each long, no matter the size of
6969 the hosts long. We handle floating point representations with
6970 up to 192 bits. */
6971 long tmp[6];
6972
6973 if (total_bytes > len)
6974 return 0;
6975 words = total_bytes / UNITS_PER_WORD;
6976
6977 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6978
6979 for (byte = 0; byte < total_bytes; byte++)
6980 {
6981 int bitpos = byte * BITS_PER_UNIT;
6982 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6983
6984 if (total_bytes > UNITS_PER_WORD)
6985 {
6986 word = byte / UNITS_PER_WORD;
6987 if (FLOAT_WORDS_BIG_ENDIAN)
6988 word = (words - 1) - word;
6989 offset = word * UNITS_PER_WORD;
6990 if (BYTES_BIG_ENDIAN)
6991 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6992 else
6993 offset += byte % UNITS_PER_WORD;
6994 }
6995 else
6996 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6997 ptr[offset] = value;
6998 }
6999 return total_bytes;
7000 }
7001
7002 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7003 specified by EXPR into the buffer PTR of length LEN bytes.
7004 Return the number of bytes placed in the buffer, or zero
7005 upon failure. */
7006
7007 static int
7008 native_encode_complex (tree expr, unsigned char *ptr, int len)
7009 {
7010 int rsize, isize;
7011 tree part;
7012
7013 part = TREE_REALPART (expr);
7014 rsize = native_encode_expr (part, ptr, len);
7015 if (rsize == 0)
7016 return 0;
7017 part = TREE_IMAGPART (expr);
7018 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7019 if (isize != rsize)
7020 return 0;
7021 return rsize + isize;
7022 }
7023
7024
7025 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7026 specified by EXPR into the buffer PTR of length LEN bytes.
7027 Return the number of bytes placed in the buffer, or zero
7028 upon failure. */
7029
7030 static int
7031 native_encode_vector (tree expr, unsigned char *ptr, int len)
7032 {
7033 int i, size, offset, count;
7034 tree itype, elem, elements;
7035
7036 offset = 0;
7037 elements = TREE_VECTOR_CST_ELTS (expr);
7038 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7039 itype = TREE_TYPE (TREE_TYPE (expr));
7040 size = GET_MODE_SIZE (TYPE_MODE (itype));
7041 for (i = 0; i < count; i++)
7042 {
7043 if (elements)
7044 {
7045 elem = TREE_VALUE (elements);
7046 elements = TREE_CHAIN (elements);
7047 }
7048 else
7049 elem = NULL_TREE;
7050
7051 if (elem)
7052 {
7053 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7054 return 0;
7055 }
7056 else
7057 {
7058 if (offset + size > len)
7059 return 0;
7060 memset (ptr+offset, 0, size);
7061 }
7062 offset += size;
7063 }
7064 return offset;
7065 }
7066
7067
7068 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7069 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7070 buffer PTR of length LEN bytes. Return the number of bytes
7071 placed in the buffer, or zero upon failure. */
7072
7073 static int
7074 native_encode_expr (tree expr, unsigned char *ptr, int len)
7075 {
7076 switch (TREE_CODE (expr))
7077 {
7078 case INTEGER_CST:
7079 return native_encode_int (expr, ptr, len);
7080
7081 case REAL_CST:
7082 return native_encode_real (expr, ptr, len);
7083
7084 case COMPLEX_CST:
7085 return native_encode_complex (expr, ptr, len);
7086
7087 case VECTOR_CST:
7088 return native_encode_vector (expr, ptr, len);
7089
7090 default:
7091 return 0;
7092 }
7093 }
7094
7095
7096 /* Subroutine of native_interpret_expr. Interpret the contents of
7097 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7098 If the buffer cannot be interpreted, return NULL_TREE. */
7099
7100 static tree
7101 native_interpret_int (tree type, unsigned char *ptr, int len)
7102 {
7103 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7104 int byte, offset, word, words;
7105 unsigned char value;
7106 unsigned int HOST_WIDE_INT lo = 0;
7107 HOST_WIDE_INT hi = 0;
7108
7109 if (total_bytes > len)
7110 return NULL_TREE;
7111 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7112 return NULL_TREE;
7113 words = total_bytes / UNITS_PER_WORD;
7114
7115 for (byte = 0; byte < total_bytes; byte++)
7116 {
7117 int bitpos = byte * BITS_PER_UNIT;
7118 if (total_bytes > UNITS_PER_WORD)
7119 {
7120 word = byte / UNITS_PER_WORD;
7121 if (WORDS_BIG_ENDIAN)
7122 word = (words - 1) - word;
7123 offset = word * UNITS_PER_WORD;
7124 if (BYTES_BIG_ENDIAN)
7125 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7126 else
7127 offset += byte % UNITS_PER_WORD;
7128 }
7129 else
7130 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7131 value = ptr[offset];
7132
7133 if (bitpos < HOST_BITS_PER_WIDE_INT)
7134 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7135 else
7136 hi |= (unsigned HOST_WIDE_INT) value
7137 << (bitpos - HOST_BITS_PER_WIDE_INT);
7138 }
7139
7140 return build_int_cst_wide_type (type, lo, hi);
7141 }
7142
7143
7144 /* Subroutine of native_interpret_expr. Interpret the contents of
7145 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7146 If the buffer cannot be interpreted, return NULL_TREE. */
7147
7148 static tree
7149 native_interpret_real (tree type, unsigned char *ptr, int len)
7150 {
7151 enum machine_mode mode = TYPE_MODE (type);
7152 int total_bytes = GET_MODE_SIZE (mode);
7153 int byte, offset, word, words;
7154 unsigned char value;
7155 /* There are always 32 bits in each long, no matter the size of
7156 the hosts long. We handle floating point representations with
7157 up to 192 bits. */
7158 REAL_VALUE_TYPE r;
7159 long tmp[6];
7160
7161 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7162 if (total_bytes > len || total_bytes > 24)
7163 return NULL_TREE;
7164 words = total_bytes / UNITS_PER_WORD;
7165
7166 memset (tmp, 0, sizeof (tmp));
7167 for (byte = 0; byte < total_bytes; byte++)
7168 {
7169 int bitpos = byte * BITS_PER_UNIT;
7170 if (total_bytes > UNITS_PER_WORD)
7171 {
7172 word = byte / UNITS_PER_WORD;
7173 if (FLOAT_WORDS_BIG_ENDIAN)
7174 word = (words - 1) - word;
7175 offset = word * UNITS_PER_WORD;
7176 if (BYTES_BIG_ENDIAN)
7177 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7178 else
7179 offset += byte % UNITS_PER_WORD;
7180 }
7181 else
7182 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7183 value = ptr[offset];
7184
7185 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7186 }
7187
7188 real_from_target (&r, tmp, mode);
7189 return build_real (type, r);
7190 }
7191
7192
7193 /* Subroutine of native_interpret_expr. Interpret the contents of
7194 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7195 If the buffer cannot be interpreted, return NULL_TREE. */
7196
7197 static tree
7198 native_interpret_complex (tree type, unsigned char *ptr, int len)
7199 {
7200 tree etype, rpart, ipart;
7201 int size;
7202
7203 etype = TREE_TYPE (type);
7204 size = GET_MODE_SIZE (TYPE_MODE (etype));
7205 if (size * 2 > len)
7206 return NULL_TREE;
7207 rpart = native_interpret_expr (etype, ptr, size);
7208 if (!rpart)
7209 return NULL_TREE;
7210 ipart = native_interpret_expr (etype, ptr+size, size);
7211 if (!ipart)
7212 return NULL_TREE;
7213 return build_complex (type, rpart, ipart);
7214 }
7215
7216
7217 /* Subroutine of native_interpret_expr. Interpret the contents of
7218 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7219 If the buffer cannot be interpreted, return NULL_TREE. */
7220
7221 static tree
7222 native_interpret_vector (tree type, unsigned char *ptr, int len)
7223 {
7224 tree etype, elem, elements;
7225 int i, size, count;
7226
7227 etype = TREE_TYPE (type);
7228 size = GET_MODE_SIZE (TYPE_MODE (etype));
7229 count = TYPE_VECTOR_SUBPARTS (type);
7230 if (size * count > len)
7231 return NULL_TREE;
7232
7233 elements = NULL_TREE;
7234 for (i = count - 1; i >= 0; i--)
7235 {
7236 elem = native_interpret_expr (etype, ptr+(i*size), size);
7237 if (!elem)
7238 return NULL_TREE;
7239 elements = tree_cons (NULL_TREE, elem, elements);
7240 }
7241 return build_vector (type, elements);
7242 }
7243
7244
7245 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7246 the buffer PTR of length LEN as a constant of type TYPE. For
7247 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7248 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7249 return NULL_TREE. */
7250
7251 static tree
7252 native_interpret_expr (tree type, unsigned char *ptr, int len)
7253 {
7254 switch (TREE_CODE (type))
7255 {
7256 case INTEGER_TYPE:
7257 case ENUMERAL_TYPE:
7258 case BOOLEAN_TYPE:
7259 return native_interpret_int (type, ptr, len);
7260
7261 case REAL_TYPE:
7262 return native_interpret_real (type, ptr, len);
7263
7264 case COMPLEX_TYPE:
7265 return native_interpret_complex (type, ptr, len);
7266
7267 case VECTOR_TYPE:
7268 return native_interpret_vector (type, ptr, len);
7269
7270 default:
7271 return NULL_TREE;
7272 }
7273 }
7274
7275
7276 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7277 TYPE at compile-time. If we're unable to perform the conversion
7278 return NULL_TREE. */
7279
7280 static tree
7281 fold_view_convert_expr (tree type, tree expr)
7282 {
7283 /* We support up to 512-bit values (for V8DFmode). */
7284 unsigned char buffer[64];
7285 int len;
7286
7287 /* Check that the host and target are sane. */
7288 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7289 return NULL_TREE;
7290
7291 len = native_encode_expr (expr, buffer, sizeof (buffer));
7292 if (len == 0)
7293 return NULL_TREE;
7294
7295 return native_interpret_expr (type, buffer, len);
7296 }
7297
7298
7299 /* Fold a unary expression of code CODE and type TYPE with operand
7300 OP0. Return the folded expression if folding is successful.
7301 Otherwise, return NULL_TREE. */
7302
7303 tree
7304 fold_unary (enum tree_code code, tree type, tree op0)
7305 {
7306 tree tem;
7307 tree arg0;
7308 enum tree_code_class kind = TREE_CODE_CLASS (code);
7309
7310 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7311 && TREE_CODE_LENGTH (code) == 1);
7312
7313 arg0 = op0;
7314 if (arg0)
7315 {
7316 if (code == NOP_EXPR || code == CONVERT_EXPR
7317 || code == FLOAT_EXPR || code == ABS_EXPR)
7318 {
7319 /* Don't use STRIP_NOPS, because signedness of argument type
7320 matters. */
7321 STRIP_SIGN_NOPS (arg0);
7322 }
7323 else
7324 {
7325 /* Strip any conversions that don't change the mode. This
7326 is safe for every expression, except for a comparison
7327 expression because its signedness is derived from its
7328 operands.
7329
7330 Note that this is done as an internal manipulation within
7331 the constant folder, in order to find the simplest
7332 representation of the arguments so that their form can be
7333 studied. In any cases, the appropriate type conversions
7334 should be put back in the tree that will get out of the
7335 constant folder. */
7336 STRIP_NOPS (arg0);
7337 }
7338 }
7339
7340 if (TREE_CODE_CLASS (code) == tcc_unary)
7341 {
7342 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7343 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7344 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7345 else if (TREE_CODE (arg0) == COND_EXPR)
7346 {
7347 tree arg01 = TREE_OPERAND (arg0, 1);
7348 tree arg02 = TREE_OPERAND (arg0, 2);
7349 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7350 arg01 = fold_build1 (code, type, arg01);
7351 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7352 arg02 = fold_build1 (code, type, arg02);
7353 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7354 arg01, arg02);
7355
7356 /* If this was a conversion, and all we did was to move into
7357 inside the COND_EXPR, bring it back out. But leave it if
7358 it is a conversion from integer to integer and the
7359 result precision is no wider than a word since such a
7360 conversion is cheap and may be optimized away by combine,
7361 while it couldn't if it were outside the COND_EXPR. Then return
7362 so we don't get into an infinite recursion loop taking the
7363 conversion out and then back in. */
7364
7365 if ((code == NOP_EXPR || code == CONVERT_EXPR
7366 || code == NON_LVALUE_EXPR)
7367 && TREE_CODE (tem) == COND_EXPR
7368 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7369 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7370 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7371 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7372 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7373 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7374 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7375 && (INTEGRAL_TYPE_P
7376 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7377 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7378 || flag_syntax_only))
7379 tem = build1 (code, type,
7380 build3 (COND_EXPR,
7381 TREE_TYPE (TREE_OPERAND
7382 (TREE_OPERAND (tem, 1), 0)),
7383 TREE_OPERAND (tem, 0),
7384 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7385 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7386 return tem;
7387 }
7388 else if (COMPARISON_CLASS_P (arg0))
7389 {
7390 if (TREE_CODE (type) == BOOLEAN_TYPE)
7391 {
7392 arg0 = copy_node (arg0);
7393 TREE_TYPE (arg0) = type;
7394 return arg0;
7395 }
7396 else if (TREE_CODE (type) != INTEGER_TYPE)
7397 return fold_build3 (COND_EXPR, type, arg0,
7398 fold_build1 (code, type,
7399 integer_one_node),
7400 fold_build1 (code, type,
7401 integer_zero_node));
7402 }
7403 }
7404
7405 switch (code)
7406 {
7407 case NOP_EXPR:
7408 case FLOAT_EXPR:
7409 case CONVERT_EXPR:
7410 case FIX_TRUNC_EXPR:
7411 if (TREE_TYPE (op0) == type)
7412 return op0;
7413
7414 /* If we have (type) (a CMP b) and type is an integral type, return
7415 new expression involving the new type. */
7416 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7417 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7418 TREE_OPERAND (op0, 1));
7419
7420 /* Handle cases of two conversions in a row. */
7421 if (TREE_CODE (op0) == NOP_EXPR
7422 || TREE_CODE (op0) == CONVERT_EXPR)
7423 {
7424 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7425 tree inter_type = TREE_TYPE (op0);
7426 int inside_int = INTEGRAL_TYPE_P (inside_type);
7427 int inside_ptr = POINTER_TYPE_P (inside_type);
7428 int inside_float = FLOAT_TYPE_P (inside_type);
7429 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7430 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7431 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7432 int inter_int = INTEGRAL_TYPE_P (inter_type);
7433 int inter_ptr = POINTER_TYPE_P (inter_type);
7434 int inter_float = FLOAT_TYPE_P (inter_type);
7435 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7436 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7437 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7438 int final_int = INTEGRAL_TYPE_P (type);
7439 int final_ptr = POINTER_TYPE_P (type);
7440 int final_float = FLOAT_TYPE_P (type);
7441 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7442 unsigned int final_prec = TYPE_PRECISION (type);
7443 int final_unsignedp = TYPE_UNSIGNED (type);
7444
7445 /* In addition to the cases of two conversions in a row
7446 handled below, if we are converting something to its own
7447 type via an object of identical or wider precision, neither
7448 conversion is needed. */
7449 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7450 && (((inter_int || inter_ptr) && final_int)
7451 || (inter_float && final_float))
7452 && inter_prec >= final_prec)
7453 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7454
7455 /* Likewise, if the intermediate and final types are either both
7456 float or both integer, we don't need the middle conversion if
7457 it is wider than the final type and doesn't change the signedness
7458 (for integers). Avoid this if the final type is a pointer
7459 since then we sometimes need the inner conversion. Likewise if
7460 the outer has a precision not equal to the size of its mode. */
7461 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7462 || (inter_float && inside_float)
7463 || (inter_vec && inside_vec))
7464 && inter_prec >= inside_prec
7465 && (inter_float || inter_vec
7466 || inter_unsignedp == inside_unsignedp)
7467 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7468 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7469 && ! final_ptr
7470 && (! final_vec || inter_prec == inside_prec))
7471 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7472
7473 /* If we have a sign-extension of a zero-extended value, we can
7474 replace that by a single zero-extension. */
7475 if (inside_int && inter_int && final_int
7476 && inside_prec < inter_prec && inter_prec < final_prec
7477 && inside_unsignedp && !inter_unsignedp)
7478 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7479
7480 /* Two conversions in a row are not needed unless:
7481 - some conversion is floating-point (overstrict for now), or
7482 - some conversion is a vector (overstrict for now), or
7483 - the intermediate type is narrower than both initial and
7484 final, or
7485 - the intermediate type and innermost type differ in signedness,
7486 and the outermost type is wider than the intermediate, or
7487 - the initial type is a pointer type and the precisions of the
7488 intermediate and final types differ, or
7489 - the final type is a pointer type and the precisions of the
7490 initial and intermediate types differ.
7491 - the final type is a pointer type and the initial type not
7492 - the initial type is a pointer to an array and the final type
7493 not. */
7494 if (! inside_float && ! inter_float && ! final_float
7495 && ! inside_vec && ! inter_vec && ! final_vec
7496 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7497 && ! (inside_int && inter_int
7498 && inter_unsignedp != inside_unsignedp
7499 && inter_prec < final_prec)
7500 && ((inter_unsignedp && inter_prec > inside_prec)
7501 == (final_unsignedp && final_prec > inter_prec))
7502 && ! (inside_ptr && inter_prec != final_prec)
7503 && ! (final_ptr && inside_prec != inter_prec)
7504 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7505 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7506 && final_ptr == inside_ptr
7507 && ! (inside_ptr
7508 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7509 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7510 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7511 }
7512
7513 /* Handle (T *)&A.B.C for A being of type T and B and C
7514 living at offset zero. This occurs frequently in
7515 C++ upcasting and then accessing the base. */
7516 if (TREE_CODE (op0) == ADDR_EXPR
7517 && POINTER_TYPE_P (type)
7518 && handled_component_p (TREE_OPERAND (op0, 0)))
7519 {
7520 HOST_WIDE_INT bitsize, bitpos;
7521 tree offset;
7522 enum machine_mode mode;
7523 int unsignedp, volatilep;
7524 tree base = TREE_OPERAND (op0, 0);
7525 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7526 &mode, &unsignedp, &volatilep, false);
7527 /* If the reference was to a (constant) zero offset, we can use
7528 the address of the base if it has the same base type
7529 as the result type. */
7530 if (! offset && bitpos == 0
7531 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7532 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7533 return fold_convert (type, build_fold_addr_expr (base));
7534 }
7535
7536 if ((TREE_CODE (op0) == MODIFY_EXPR
7537 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7538 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7539 /* Detect assigning a bitfield. */
7540 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7541 && DECL_BIT_FIELD
7542 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7543 {
7544 /* Don't leave an assignment inside a conversion
7545 unless assigning a bitfield. */
7546 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7547 /* First do the assignment, then return converted constant. */
7548 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7549 TREE_NO_WARNING (tem) = 1;
7550 TREE_USED (tem) = 1;
7551 return tem;
7552 }
7553
7554 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7555 constants (if x has signed type, the sign bit cannot be set
7556 in c). This folds extension into the BIT_AND_EXPR. */
7557 if (INTEGRAL_TYPE_P (type)
7558 && TREE_CODE (type) != BOOLEAN_TYPE
7559 && TREE_CODE (op0) == BIT_AND_EXPR
7560 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7561 {
7562 tree and = op0;
7563 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7564 int change = 0;
7565
7566 if (TYPE_UNSIGNED (TREE_TYPE (and))
7567 || (TYPE_PRECISION (type)
7568 <= TYPE_PRECISION (TREE_TYPE (and))))
7569 change = 1;
7570 else if (TYPE_PRECISION (TREE_TYPE (and1))
7571 <= HOST_BITS_PER_WIDE_INT
7572 && host_integerp (and1, 1))
7573 {
7574 unsigned HOST_WIDE_INT cst;
7575
7576 cst = tree_low_cst (and1, 1);
7577 cst &= (HOST_WIDE_INT) -1
7578 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7579 change = (cst == 0);
7580 #ifdef LOAD_EXTEND_OP
7581 if (change
7582 && !flag_syntax_only
7583 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7584 == ZERO_EXTEND))
7585 {
7586 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7587 and0 = fold_convert (uns, and0);
7588 and1 = fold_convert (uns, and1);
7589 }
7590 #endif
7591 }
7592 if (change)
7593 {
7594 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7595 TREE_INT_CST_HIGH (and1), 0,
7596 TREE_OVERFLOW (and1));
7597 return fold_build2 (BIT_AND_EXPR, type,
7598 fold_convert (type, and0), tem);
7599 }
7600 }
7601
7602 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7603 T2 being pointers to types of the same size. */
7604 if (POINTER_TYPE_P (type)
7605 && BINARY_CLASS_P (arg0)
7606 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7607 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7608 {
7609 tree arg00 = TREE_OPERAND (arg0, 0);
7610 tree t0 = type;
7611 tree t1 = TREE_TYPE (arg00);
7612 tree tt0 = TREE_TYPE (t0);
7613 tree tt1 = TREE_TYPE (t1);
7614 tree s0 = TYPE_SIZE (tt0);
7615 tree s1 = TYPE_SIZE (tt1);
7616
7617 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7618 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7619 TREE_OPERAND (arg0, 1));
7620 }
7621
7622 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7623 of the same precision, and X is a integer type not narrower than
7624 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7625 if (INTEGRAL_TYPE_P (type)
7626 && TREE_CODE (op0) == BIT_NOT_EXPR
7627 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7628 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7629 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7630 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7631 {
7632 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7633 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7634 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7635 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7636 }
7637
7638 tem = fold_convert_const (code, type, arg0);
7639 return tem ? tem : NULL_TREE;
7640
7641 case VIEW_CONVERT_EXPR:
7642 if (TREE_TYPE (op0) == type)
7643 return op0;
7644 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7645 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7646 return fold_view_convert_expr (type, op0);
7647
7648 case NEGATE_EXPR:
7649 tem = fold_negate_expr (arg0);
7650 if (tem)
7651 return fold_convert (type, tem);
7652 return NULL_TREE;
7653
7654 case ABS_EXPR:
7655 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7656 return fold_abs_const (arg0, type);
7657 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7658 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7659 /* Convert fabs((double)float) into (double)fabsf(float). */
7660 else if (TREE_CODE (arg0) == NOP_EXPR
7661 && TREE_CODE (type) == REAL_TYPE)
7662 {
7663 tree targ0 = strip_float_extensions (arg0);
7664 if (targ0 != arg0)
7665 return fold_convert (type, fold_build1 (ABS_EXPR,
7666 TREE_TYPE (targ0),
7667 targ0));
7668 }
7669 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7670 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7671 return arg0;
7672
7673 /* Strip sign ops from argument. */
7674 if (TREE_CODE (type) == REAL_TYPE)
7675 {
7676 tem = fold_strip_sign_ops (arg0);
7677 if (tem)
7678 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7679 }
7680 return NULL_TREE;
7681
7682 case CONJ_EXPR:
7683 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7684 return fold_convert (type, arg0);
7685 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7686 {
7687 tree itype = TREE_TYPE (type);
7688 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7689 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7690 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7691 }
7692 if (TREE_CODE (arg0) == COMPLEX_CST)
7693 {
7694 tree itype = TREE_TYPE (type);
7695 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7696 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7697 return build_complex (type, rpart, negate_expr (ipart));
7698 }
7699 if (TREE_CODE (arg0) == CONJ_EXPR)
7700 return fold_convert (type, TREE_OPERAND (arg0, 0));
7701 return NULL_TREE;
7702
7703 case BIT_NOT_EXPR:
7704 if (TREE_CODE (arg0) == INTEGER_CST)
7705 return fold_not_const (arg0, type);
7706 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7707 return TREE_OPERAND (arg0, 0);
7708 /* Convert ~ (-A) to A - 1. */
7709 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7710 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7711 build_int_cst (type, 1));
7712 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7713 else if (INTEGRAL_TYPE_P (type)
7714 && ((TREE_CODE (arg0) == MINUS_EXPR
7715 && integer_onep (TREE_OPERAND (arg0, 1)))
7716 || (TREE_CODE (arg0) == PLUS_EXPR
7717 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7718 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7719 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7720 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7721 && (tem = fold_unary (BIT_NOT_EXPR, type,
7722 fold_convert (type,
7723 TREE_OPERAND (arg0, 0)))))
7724 return fold_build2 (BIT_XOR_EXPR, type, tem,
7725 fold_convert (type, TREE_OPERAND (arg0, 1)));
7726 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7727 && (tem = fold_unary (BIT_NOT_EXPR, type,
7728 fold_convert (type,
7729 TREE_OPERAND (arg0, 1)))))
7730 return fold_build2 (BIT_XOR_EXPR, type,
7731 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7732
7733 return NULL_TREE;
7734
7735 case TRUTH_NOT_EXPR:
7736 /* The argument to invert_truthvalue must have Boolean type. */
7737 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7738 arg0 = fold_convert (boolean_type_node, arg0);
7739
7740 /* Note that the operand of this must be an int
7741 and its values must be 0 or 1.
7742 ("true" is a fixed value perhaps depending on the language,
7743 but we don't handle values other than 1 correctly yet.) */
7744 tem = fold_truth_not_expr (arg0);
7745 if (!tem)
7746 return NULL_TREE;
7747 return fold_convert (type, tem);
7748
7749 case REALPART_EXPR:
7750 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7751 return fold_convert (type, arg0);
7752 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7753 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7754 TREE_OPERAND (arg0, 1));
7755 if (TREE_CODE (arg0) == COMPLEX_CST)
7756 return fold_convert (type, TREE_REALPART (arg0));
7757 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7758 {
7759 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7760 tem = fold_build2 (TREE_CODE (arg0), itype,
7761 fold_build1 (REALPART_EXPR, itype,
7762 TREE_OPERAND (arg0, 0)),
7763 fold_build1 (REALPART_EXPR, itype,
7764 TREE_OPERAND (arg0, 1)));
7765 return fold_convert (type, tem);
7766 }
7767 if (TREE_CODE (arg0) == CONJ_EXPR)
7768 {
7769 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7770 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7771 return fold_convert (type, tem);
7772 }
7773 if (TREE_CODE (arg0) == CALL_EXPR)
7774 {
7775 tree fn = get_callee_fndecl (arg0);
7776 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7777 switch (DECL_FUNCTION_CODE (fn))
7778 {
7779 CASE_FLT_FN (BUILT_IN_CEXPI):
7780 fn = mathfn_built_in (type, BUILT_IN_COS);
7781 if (fn)
7782 return build_function_call_expr (fn,
7783 TREE_OPERAND (arg0, 1));
7784 break;
7785
7786 default:
7787 break;
7788 }
7789 }
7790 return NULL_TREE;
7791
7792 case IMAGPART_EXPR:
7793 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7794 return fold_convert (type, integer_zero_node);
7795 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7796 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7797 TREE_OPERAND (arg0, 0));
7798 if (TREE_CODE (arg0) == COMPLEX_CST)
7799 return fold_convert (type, TREE_IMAGPART (arg0));
7800 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7801 {
7802 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7803 tem = fold_build2 (TREE_CODE (arg0), itype,
7804 fold_build1 (IMAGPART_EXPR, itype,
7805 TREE_OPERAND (arg0, 0)),
7806 fold_build1 (IMAGPART_EXPR, itype,
7807 TREE_OPERAND (arg0, 1)));
7808 return fold_convert (type, tem);
7809 }
7810 if (TREE_CODE (arg0) == CONJ_EXPR)
7811 {
7812 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7813 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7814 return fold_convert (type, negate_expr (tem));
7815 }
7816 if (TREE_CODE (arg0) == CALL_EXPR)
7817 {
7818 tree fn = get_callee_fndecl (arg0);
7819 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7820 switch (DECL_FUNCTION_CODE (fn))
7821 {
7822 CASE_FLT_FN (BUILT_IN_CEXPI):
7823 fn = mathfn_built_in (type, BUILT_IN_SIN);
7824 if (fn)
7825 return build_function_call_expr (fn,
7826 TREE_OPERAND (arg0, 1));
7827 break;
7828
7829 default:
7830 break;
7831 }
7832 }
7833 return NULL_TREE;
7834
7835 default:
7836 return NULL_TREE;
7837 } /* switch (code) */
7838 }
7839
7840 /* Fold a binary expression of code CODE and type TYPE with operands
7841 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7842 Return the folded expression if folding is successful. Otherwise,
7843 return NULL_TREE. */
7844
7845 static tree
7846 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7847 {
7848 enum tree_code compl_code;
7849
7850 if (code == MIN_EXPR)
7851 compl_code = MAX_EXPR;
7852 else if (code == MAX_EXPR)
7853 compl_code = MIN_EXPR;
7854 else
7855 gcc_unreachable ();
7856
7857 /* MIN (MAX (a, b), b) == b. */
7858 if (TREE_CODE (op0) == compl_code
7859 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7860 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7861
7862 /* MIN (MAX (b, a), b) == b. */
7863 if (TREE_CODE (op0) == compl_code
7864 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7865 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7866 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7867
7868 /* MIN (a, MAX (a, b)) == a. */
7869 if (TREE_CODE (op1) == compl_code
7870 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7871 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7872 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7873
7874 /* MIN (a, MAX (b, a)) == a. */
7875 if (TREE_CODE (op1) == compl_code
7876 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7877 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7878 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7879
7880 return NULL_TREE;
7881 }
7882
7883 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7884 by changing CODE to reduce the magnitude of constants involved in
7885 ARG0 of the comparison.
7886 Returns a canonicalized comparison tree if a simplification was
7887 possible, otherwise returns NULL_TREE. */
7888
7889 static tree
7890 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7891 tree arg0, tree arg1)
7892 {
7893 enum tree_code code0 = TREE_CODE (arg0);
7894 tree t, cst0 = NULL_TREE;
7895 int sgn0;
7896 bool swap = false;
7897
7898 /* Match A +- CST code arg1 and CST code arg1. */
7899 if (!(((code0 == MINUS_EXPR
7900 || code0 == PLUS_EXPR)
7901 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7902 || code0 == INTEGER_CST))
7903 return NULL_TREE;
7904
7905 /* Identify the constant in arg0 and its sign. */
7906 if (code0 == INTEGER_CST)
7907 cst0 = arg0;
7908 else
7909 cst0 = TREE_OPERAND (arg0, 1);
7910 sgn0 = tree_int_cst_sgn (cst0);
7911
7912 /* Overflowed constants and zero will cause problems. */
7913 if (integer_zerop (cst0)
7914 || TREE_OVERFLOW (cst0))
7915 return NULL_TREE;
7916
7917 /* See if we can reduce the magnitude of the constant in
7918 arg0 by changing the comparison code. */
7919 if (code0 == INTEGER_CST)
7920 {
7921 /* CST <= arg1 -> CST-1 < arg1. */
7922 if (code == LE_EXPR && sgn0 == 1)
7923 code = LT_EXPR;
7924 /* -CST < arg1 -> -CST-1 <= arg1. */
7925 else if (code == LT_EXPR && sgn0 == -1)
7926 code = LE_EXPR;
7927 /* CST > arg1 -> CST-1 >= arg1. */
7928 else if (code == GT_EXPR && sgn0 == 1)
7929 code = GE_EXPR;
7930 /* -CST >= arg1 -> -CST-1 > arg1. */
7931 else if (code == GE_EXPR && sgn0 == -1)
7932 code = GT_EXPR;
7933 else
7934 return NULL_TREE;
7935 /* arg1 code' CST' might be more canonical. */
7936 swap = true;
7937 }
7938 else
7939 {
7940 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7941 if (code == LT_EXPR
7942 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7943 code = LE_EXPR;
7944 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7945 else if (code == GT_EXPR
7946 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7947 code = GE_EXPR;
7948 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7949 else if (code == LE_EXPR
7950 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7951 code = LT_EXPR;
7952 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7953 else if (code == GE_EXPR
7954 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7955 code = GT_EXPR;
7956 else
7957 return NULL_TREE;
7958 }
7959
7960 /* Now build the constant reduced in magnitude. */
7961 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7962 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7963 if (code0 != INTEGER_CST)
7964 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7965
7966 /* If swapping might yield to a more canonical form, do so. */
7967 if (swap)
7968 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7969 else
7970 return fold_build2 (code, type, t, arg1);
7971 }
7972
7973 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7974 overflow further. Try to decrease the magnitude of constants involved
7975 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7976 and put sole constants at the second argument position.
7977 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7978
7979 static tree
7980 maybe_canonicalize_comparison (enum tree_code code, tree type,
7981 tree arg0, tree arg1)
7982 {
7983 tree t;
7984
7985 /* In principle pointers also have undefined overflow behavior,
7986 but that causes problems elsewhere. */
7987 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
7988 || POINTER_TYPE_P (TREE_TYPE (arg0)))
7989 return NULL_TREE;
7990
7991 /* Try canonicalization by simplifying arg0. */
7992 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7993 if (t)
7994 return t;
7995
7996 /* Try canonicalization by simplifying arg1 using the swapped
7997 comparison. */
7998 code = swap_tree_comparison (code);
7999 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
8000 }
8001
8002 /* Subroutine of fold_binary. This routine performs all of the
8003 transformations that are common to the equality/inequality
8004 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8005 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8006 fold_binary should call fold_binary. Fold a comparison with
8007 tree code CODE and type TYPE with operands OP0 and OP1. Return
8008 the folded comparison or NULL_TREE. */
8009
8010 static tree
8011 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8012 {
8013 tree arg0, arg1, tem;
8014
8015 arg0 = op0;
8016 arg1 = op1;
8017
8018 STRIP_SIGN_NOPS (arg0);
8019 STRIP_SIGN_NOPS (arg1);
8020
8021 tem = fold_relational_const (code, type, arg0, arg1);
8022 if (tem != NULL_TREE)
8023 return tem;
8024
8025 /* If one arg is a real or integer constant, put it last. */
8026 if (tree_swap_operands_p (arg0, arg1, true))
8027 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8028
8029 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8030 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8031 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8032 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8033 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8034 && (TREE_CODE (arg1) == INTEGER_CST
8035 && !TREE_OVERFLOW (arg1)))
8036 {
8037 tree const1 = TREE_OPERAND (arg0, 1);
8038 tree const2 = arg1;
8039 tree variable = TREE_OPERAND (arg0, 0);
8040 tree lhs;
8041 int lhs_add;
8042 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8043
8044 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8045 TREE_TYPE (arg1), const2, const1);
8046
8047 /* If the constant operation overflowed this can be
8048 simplified as a comparison against INT_MAX/INT_MIN. */
8049 if (TREE_CODE (lhs) == INTEGER_CST
8050 && TREE_OVERFLOW (lhs))
8051 {
8052 int const1_sgn = tree_int_cst_sgn (const1);
8053 enum tree_code code2 = code;
8054
8055 /* Get the sign of the constant on the lhs if the
8056 operation were VARIABLE + CONST1. */
8057 if (TREE_CODE (arg0) == MINUS_EXPR)
8058 const1_sgn = -const1_sgn;
8059
8060 /* The sign of the constant determines if we overflowed
8061 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8062 Canonicalize to the INT_MIN overflow by swapping the comparison
8063 if necessary. */
8064 if (const1_sgn == -1)
8065 code2 = swap_tree_comparison (code);
8066
8067 /* We now can look at the canonicalized case
8068 VARIABLE + 1 CODE2 INT_MIN
8069 and decide on the result. */
8070 if (code2 == LT_EXPR
8071 || code2 == LE_EXPR
8072 || code2 == EQ_EXPR)
8073 return omit_one_operand (type, boolean_false_node, variable);
8074 else if (code2 == NE_EXPR
8075 || code2 == GE_EXPR
8076 || code2 == GT_EXPR)
8077 return omit_one_operand (type, boolean_true_node, variable);
8078 }
8079
8080 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8081 && (TREE_CODE (lhs) != INTEGER_CST
8082 || !TREE_OVERFLOW (lhs)))
8083 return fold_build2 (code, type, variable, lhs);
8084 }
8085
8086 /* For comparisons of pointers we can decompose it to a compile time
8087 comparison of the base objects and the offsets into the object.
8088 This requires at least one operand being an ADDR_EXPR to do more
8089 than the operand_equal_p test below. */
8090 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8091 && (TREE_CODE (arg0) == ADDR_EXPR
8092 || TREE_CODE (arg1) == ADDR_EXPR))
8093 {
8094 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8095 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8096 enum machine_mode mode;
8097 int volatilep, unsignedp;
8098 bool indirect_base0 = false;
8099
8100 /* Get base and offset for the access. Strip ADDR_EXPR for
8101 get_inner_reference, but put it back by stripping INDIRECT_REF
8102 off the base object if possible. */
8103 base0 = arg0;
8104 if (TREE_CODE (arg0) == ADDR_EXPR)
8105 {
8106 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8107 &bitsize, &bitpos0, &offset0, &mode,
8108 &unsignedp, &volatilep, false);
8109 if (TREE_CODE (base0) == INDIRECT_REF)
8110 base0 = TREE_OPERAND (base0, 0);
8111 else
8112 indirect_base0 = true;
8113 }
8114
8115 base1 = arg1;
8116 if (TREE_CODE (arg1) == ADDR_EXPR)
8117 {
8118 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8119 &bitsize, &bitpos1, &offset1, &mode,
8120 &unsignedp, &volatilep, false);
8121 /* We have to make sure to have an indirect/non-indirect base1
8122 just the same as we did for base0. */
8123 if (TREE_CODE (base1) == INDIRECT_REF
8124 && !indirect_base0)
8125 base1 = TREE_OPERAND (base1, 0);
8126 else if (!indirect_base0)
8127 base1 = NULL_TREE;
8128 }
8129 else if (indirect_base0)
8130 base1 = NULL_TREE;
8131
8132 /* If we have equivalent bases we might be able to simplify. */
8133 if (base0 && base1
8134 && operand_equal_p (base0, base1, 0))
8135 {
8136 /* We can fold this expression to a constant if the non-constant
8137 offset parts are equal. */
8138 if (offset0 == offset1
8139 || (offset0 && offset1
8140 && operand_equal_p (offset0, offset1, 0)))
8141 {
8142 switch (code)
8143 {
8144 case EQ_EXPR:
8145 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8146 case NE_EXPR:
8147 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8148 case LT_EXPR:
8149 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8150 case LE_EXPR:
8151 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8152 case GE_EXPR:
8153 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8154 case GT_EXPR:
8155 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8156 default:;
8157 }
8158 }
8159 /* We can simplify the comparison to a comparison of the variable
8160 offset parts if the constant offset parts are equal.
8161 Be careful to use signed size type here because otherwise we
8162 mess with array offsets in the wrong way. This is possible
8163 because pointer arithmetic is restricted to retain within an
8164 object and overflow on pointer differences is undefined as of
8165 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8166 else if (bitpos0 == bitpos1)
8167 {
8168 tree signed_size_type_node;
8169 signed_size_type_node = signed_type_for (size_type_node);
8170
8171 /* By converting to signed size type we cover middle-end pointer
8172 arithmetic which operates on unsigned pointer types of size
8173 type size and ARRAY_REF offsets which are properly sign or
8174 zero extended from their type in case it is narrower than
8175 size type. */
8176 if (offset0 == NULL_TREE)
8177 offset0 = build_int_cst (signed_size_type_node, 0);
8178 else
8179 offset0 = fold_convert (signed_size_type_node, offset0);
8180 if (offset1 == NULL_TREE)
8181 offset1 = build_int_cst (signed_size_type_node, 0);
8182 else
8183 offset1 = fold_convert (signed_size_type_node, offset1);
8184
8185 return fold_build2 (code, type, offset0, offset1);
8186 }
8187 }
8188 }
8189
8190 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8191 same object, then we can fold this to a comparison of the two offsets in
8192 signed size type. This is possible because pointer arithmetic is
8193 restricted to retain within an object and overflow on pointer differences
8194 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8195
8196 We check flag_wrapv directly because pointers types are unsigned,
8197 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8198 normally what we want to avoid certain odd overflow cases, but
8199 not here. */
8200 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8201 && !flag_wrapv
8202 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8203 {
8204 tree base0, offset0, base1, offset1;
8205
8206 if (extract_array_ref (arg0, &base0, &offset0)
8207 && extract_array_ref (arg1, &base1, &offset1)
8208 && operand_equal_p (base0, base1, 0))
8209 {
8210 tree signed_size_type_node;
8211 signed_size_type_node = signed_type_for (size_type_node);
8212
8213 /* By converting to signed size type we cover middle-end pointer
8214 arithmetic which operates on unsigned pointer types of size
8215 type size and ARRAY_REF offsets which are properly sign or
8216 zero extended from their type in case it is narrower than
8217 size type. */
8218 if (offset0 == NULL_TREE)
8219 offset0 = build_int_cst (signed_size_type_node, 0);
8220 else
8221 offset0 = fold_convert (signed_size_type_node, offset0);
8222 if (offset1 == NULL_TREE)
8223 offset1 = build_int_cst (signed_size_type_node, 0);
8224 else
8225 offset1 = fold_convert (signed_size_type_node, offset1);
8226
8227 return fold_build2 (code, type, offset0, offset1);
8228 }
8229 }
8230
8231 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8232 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8233 the resulting offset is smaller in absolute value than the
8234 original one. */
8235 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8236 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8237 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8238 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8239 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8240 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8241 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8242 {
8243 tree const1 = TREE_OPERAND (arg0, 1);
8244 tree const2 = TREE_OPERAND (arg1, 1);
8245 tree variable1 = TREE_OPERAND (arg0, 0);
8246 tree variable2 = TREE_OPERAND (arg1, 0);
8247 tree cst;
8248
8249 /* Put the constant on the side where it doesn't overflow and is
8250 of lower absolute value than before. */
8251 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8252 ? MINUS_EXPR : PLUS_EXPR,
8253 const2, const1, 0);
8254 if (!TREE_OVERFLOW (cst)
8255 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8256 return fold_build2 (code, type,
8257 variable1,
8258 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8259 variable2, cst));
8260
8261 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8262 ? MINUS_EXPR : PLUS_EXPR,
8263 const1, const2, 0);
8264 if (!TREE_OVERFLOW (cst)
8265 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8266 return fold_build2 (code, type,
8267 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8268 variable1, cst),
8269 variable2);
8270 }
8271
8272 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8273 signed arithmetic case. That form is created by the compiler
8274 often enough for folding it to be of value. One example is in
8275 computing loop trip counts after Operator Strength Reduction. */
8276 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8277 && TREE_CODE (arg0) == MULT_EXPR
8278 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8279 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8280 && integer_zerop (arg1))
8281 {
8282 tree const1 = TREE_OPERAND (arg0, 1);
8283 tree const2 = arg1; /* zero */
8284 tree variable1 = TREE_OPERAND (arg0, 0);
8285 enum tree_code cmp_code = code;
8286
8287 gcc_assert (!integer_zerop (const1));
8288
8289 /* If const1 is negative we swap the sense of the comparison. */
8290 if (tree_int_cst_sgn (const1) < 0)
8291 cmp_code = swap_tree_comparison (cmp_code);
8292
8293 return fold_build2 (cmp_code, type, variable1, const2);
8294 }
8295
8296 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8297 if (tem)
8298 return tem;
8299
8300 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8301 {
8302 tree targ0 = strip_float_extensions (arg0);
8303 tree targ1 = strip_float_extensions (arg1);
8304 tree newtype = TREE_TYPE (targ0);
8305
8306 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8307 newtype = TREE_TYPE (targ1);
8308
8309 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8310 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8311 return fold_build2 (code, type, fold_convert (newtype, targ0),
8312 fold_convert (newtype, targ1));
8313
8314 /* (-a) CMP (-b) -> b CMP a */
8315 if (TREE_CODE (arg0) == NEGATE_EXPR
8316 && TREE_CODE (arg1) == NEGATE_EXPR)
8317 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8318 TREE_OPERAND (arg0, 0));
8319
8320 if (TREE_CODE (arg1) == REAL_CST)
8321 {
8322 REAL_VALUE_TYPE cst;
8323 cst = TREE_REAL_CST (arg1);
8324
8325 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8326 if (TREE_CODE (arg0) == NEGATE_EXPR)
8327 return fold_build2 (swap_tree_comparison (code), type,
8328 TREE_OPERAND (arg0, 0),
8329 build_real (TREE_TYPE (arg1),
8330 REAL_VALUE_NEGATE (cst)));
8331
8332 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8333 /* a CMP (-0) -> a CMP 0 */
8334 if (REAL_VALUE_MINUS_ZERO (cst))
8335 return fold_build2 (code, type, arg0,
8336 build_real (TREE_TYPE (arg1), dconst0));
8337
8338 /* x != NaN is always true, other ops are always false. */
8339 if (REAL_VALUE_ISNAN (cst)
8340 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8341 {
8342 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8343 return omit_one_operand (type, tem, arg0);
8344 }
8345
8346 /* Fold comparisons against infinity. */
8347 if (REAL_VALUE_ISINF (cst))
8348 {
8349 tem = fold_inf_compare (code, type, arg0, arg1);
8350 if (tem != NULL_TREE)
8351 return tem;
8352 }
8353 }
8354
8355 /* If this is a comparison of a real constant with a PLUS_EXPR
8356 or a MINUS_EXPR of a real constant, we can convert it into a
8357 comparison with a revised real constant as long as no overflow
8358 occurs when unsafe_math_optimizations are enabled. */
8359 if (flag_unsafe_math_optimizations
8360 && TREE_CODE (arg1) == REAL_CST
8361 && (TREE_CODE (arg0) == PLUS_EXPR
8362 || TREE_CODE (arg0) == MINUS_EXPR)
8363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8364 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8365 ? MINUS_EXPR : PLUS_EXPR,
8366 arg1, TREE_OPERAND (arg0, 1), 0))
8367 && !TREE_OVERFLOW (tem))
8368 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8369
8370 /* Likewise, we can simplify a comparison of a real constant with
8371 a MINUS_EXPR whose first operand is also a real constant, i.e.
8372 (c1 - x) < c2 becomes x > c1-c2. */
8373 if (flag_unsafe_math_optimizations
8374 && TREE_CODE (arg1) == REAL_CST
8375 && TREE_CODE (arg0) == MINUS_EXPR
8376 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8377 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8378 arg1, 0))
8379 && !TREE_OVERFLOW (tem))
8380 return fold_build2 (swap_tree_comparison (code), type,
8381 TREE_OPERAND (arg0, 1), tem);
8382
8383 /* Fold comparisons against built-in math functions. */
8384 if (TREE_CODE (arg1) == REAL_CST
8385 && flag_unsafe_math_optimizations
8386 && ! flag_errno_math)
8387 {
8388 enum built_in_function fcode = builtin_mathfn_code (arg0);
8389
8390 if (fcode != END_BUILTINS)
8391 {
8392 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8393 if (tem != NULL_TREE)
8394 return tem;
8395 }
8396 }
8397 }
8398
8399 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8400 if (TREE_CONSTANT (arg1)
8401 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8402 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8403 /* This optimization is invalid for ordered comparisons
8404 if CONST+INCR overflows or if foo+incr might overflow.
8405 This optimization is invalid for floating point due to rounding.
8406 For pointer types we assume overflow doesn't happen. */
8407 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8408 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8409 && (code == EQ_EXPR || code == NE_EXPR))))
8410 {
8411 tree varop, newconst;
8412
8413 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8414 {
8415 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8416 arg1, TREE_OPERAND (arg0, 1));
8417 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8418 TREE_OPERAND (arg0, 0),
8419 TREE_OPERAND (arg0, 1));
8420 }
8421 else
8422 {
8423 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8424 arg1, TREE_OPERAND (arg0, 1));
8425 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8426 TREE_OPERAND (arg0, 0),
8427 TREE_OPERAND (arg0, 1));
8428 }
8429
8430
8431 /* If VAROP is a reference to a bitfield, we must mask
8432 the constant by the width of the field. */
8433 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8434 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8435 && host_integerp (DECL_SIZE (TREE_OPERAND
8436 (TREE_OPERAND (varop, 0), 1)), 1))
8437 {
8438 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8439 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8440 tree folded_compare, shift;
8441
8442 /* First check whether the comparison would come out
8443 always the same. If we don't do that we would
8444 change the meaning with the masking. */
8445 folded_compare = fold_build2 (code, type,
8446 TREE_OPERAND (varop, 0), arg1);
8447 if (TREE_CODE (folded_compare) == INTEGER_CST)
8448 return omit_one_operand (type, folded_compare, varop);
8449
8450 shift = build_int_cst (NULL_TREE,
8451 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8452 shift = fold_convert (TREE_TYPE (varop), shift);
8453 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8454 newconst, shift);
8455 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8456 newconst, shift);
8457 }
8458
8459 return fold_build2 (code, type, varop, newconst);
8460 }
8461
8462 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8463 && (TREE_CODE (arg0) == NOP_EXPR
8464 || TREE_CODE (arg0) == CONVERT_EXPR))
8465 {
8466 /* If we are widening one operand of an integer comparison,
8467 see if the other operand is similarly being widened. Perhaps we
8468 can do the comparison in the narrower type. */
8469 tem = fold_widened_comparison (code, type, arg0, arg1);
8470 if (tem)
8471 return tem;
8472
8473 /* Or if we are changing signedness. */
8474 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8475 if (tem)
8476 return tem;
8477 }
8478
8479 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8480 constant, we can simplify it. */
8481 if (TREE_CODE (arg1) == INTEGER_CST
8482 && (TREE_CODE (arg0) == MIN_EXPR
8483 || TREE_CODE (arg0) == MAX_EXPR)
8484 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8485 {
8486 tem = optimize_minmax_comparison (code, type, op0, op1);
8487 if (tem)
8488 return tem;
8489 }
8490
8491 /* Simplify comparison of something with itself. (For IEEE
8492 floating-point, we can only do some of these simplifications.) */
8493 if (operand_equal_p (arg0, arg1, 0))
8494 {
8495 switch (code)
8496 {
8497 case EQ_EXPR:
8498 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8499 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8500 return constant_boolean_node (1, type);
8501 break;
8502
8503 case GE_EXPR:
8504 case LE_EXPR:
8505 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8506 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8507 return constant_boolean_node (1, type);
8508 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8509
8510 case NE_EXPR:
8511 /* For NE, we can only do this simplification if integer
8512 or we don't honor IEEE floating point NaNs. */
8513 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8514 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8515 break;
8516 /* ... fall through ... */
8517 case GT_EXPR:
8518 case LT_EXPR:
8519 return constant_boolean_node (0, type);
8520 default:
8521 gcc_unreachable ();
8522 }
8523 }
8524
8525 /* If we are comparing an expression that just has comparisons
8526 of two integer values, arithmetic expressions of those comparisons,
8527 and constants, we can simplify it. There are only three cases
8528 to check: the two values can either be equal, the first can be
8529 greater, or the second can be greater. Fold the expression for
8530 those three values. Since each value must be 0 or 1, we have
8531 eight possibilities, each of which corresponds to the constant 0
8532 or 1 or one of the six possible comparisons.
8533
8534 This handles common cases like (a > b) == 0 but also handles
8535 expressions like ((x > y) - (y > x)) > 0, which supposedly
8536 occur in macroized code. */
8537
8538 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8539 {
8540 tree cval1 = 0, cval2 = 0;
8541 int save_p = 0;
8542
8543 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8544 /* Don't handle degenerate cases here; they should already
8545 have been handled anyway. */
8546 && cval1 != 0 && cval2 != 0
8547 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8548 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8549 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8550 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8551 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8552 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8553 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8554 {
8555 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8556 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8557
8558 /* We can't just pass T to eval_subst in case cval1 or cval2
8559 was the same as ARG1. */
8560
8561 tree high_result
8562 = fold_build2 (code, type,
8563 eval_subst (arg0, cval1, maxval,
8564 cval2, minval),
8565 arg1);
8566 tree equal_result
8567 = fold_build2 (code, type,
8568 eval_subst (arg0, cval1, maxval,
8569 cval2, maxval),
8570 arg1);
8571 tree low_result
8572 = fold_build2 (code, type,
8573 eval_subst (arg0, cval1, minval,
8574 cval2, maxval),
8575 arg1);
8576
8577 /* All three of these results should be 0 or 1. Confirm they are.
8578 Then use those values to select the proper code to use. */
8579
8580 if (TREE_CODE (high_result) == INTEGER_CST
8581 && TREE_CODE (equal_result) == INTEGER_CST
8582 && TREE_CODE (low_result) == INTEGER_CST)
8583 {
8584 /* Make a 3-bit mask with the high-order bit being the
8585 value for `>', the next for '=', and the low for '<'. */
8586 switch ((integer_onep (high_result) * 4)
8587 + (integer_onep (equal_result) * 2)
8588 + integer_onep (low_result))
8589 {
8590 case 0:
8591 /* Always false. */
8592 return omit_one_operand (type, integer_zero_node, arg0);
8593 case 1:
8594 code = LT_EXPR;
8595 break;
8596 case 2:
8597 code = EQ_EXPR;
8598 break;
8599 case 3:
8600 code = LE_EXPR;
8601 break;
8602 case 4:
8603 code = GT_EXPR;
8604 break;
8605 case 5:
8606 code = NE_EXPR;
8607 break;
8608 case 6:
8609 code = GE_EXPR;
8610 break;
8611 case 7:
8612 /* Always true. */
8613 return omit_one_operand (type, integer_one_node, arg0);
8614 }
8615
8616 if (save_p)
8617 return save_expr (build2 (code, type, cval1, cval2));
8618 return fold_build2 (code, type, cval1, cval2);
8619 }
8620 }
8621 }
8622
8623 /* Fold a comparison of the address of COMPONENT_REFs with the same
8624 type and component to a comparison of the address of the base
8625 object. In short, &x->a OP &y->a to x OP y and
8626 &x->a OP &y.a to x OP &y */
8627 if (TREE_CODE (arg0) == ADDR_EXPR
8628 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8629 && TREE_CODE (arg1) == ADDR_EXPR
8630 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8631 {
8632 tree cref0 = TREE_OPERAND (arg0, 0);
8633 tree cref1 = TREE_OPERAND (arg1, 0);
8634 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8635 {
8636 tree op0 = TREE_OPERAND (cref0, 0);
8637 tree op1 = TREE_OPERAND (cref1, 0);
8638 return fold_build2 (code, type,
8639 build_fold_addr_expr (op0),
8640 build_fold_addr_expr (op1));
8641 }
8642 }
8643
8644 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8645 into a single range test. */
8646 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8647 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8648 && TREE_CODE (arg1) == INTEGER_CST
8649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8650 && !integer_zerop (TREE_OPERAND (arg0, 1))
8651 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8652 && !TREE_OVERFLOW (arg1))
8653 {
8654 tem = fold_div_compare (code, type, arg0, arg1);
8655 if (tem != NULL_TREE)
8656 return tem;
8657 }
8658
8659 /* Fold ~X op ~Y as Y op X. */
8660 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8661 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8662 return fold_build2 (code, type,
8663 TREE_OPERAND (arg1, 0),
8664 TREE_OPERAND (arg0, 0));
8665
8666 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8667 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8668 && TREE_CODE (arg1) == INTEGER_CST)
8669 return fold_build2 (swap_tree_comparison (code), type,
8670 TREE_OPERAND (arg0, 0),
8671 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8672
8673 return NULL_TREE;
8674 }
8675
8676
8677 /* Subroutine of fold_binary. Optimize complex multiplications of the
8678 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8679 argument EXPR represents the expression "z" of type TYPE. */
8680
8681 static tree
8682 fold_mult_zconjz (tree type, tree expr)
8683 {
8684 tree itype = TREE_TYPE (type);
8685 tree rpart, ipart, tem;
8686
8687 if (TREE_CODE (expr) == COMPLEX_EXPR)
8688 {
8689 rpart = TREE_OPERAND (expr, 0);
8690 ipart = TREE_OPERAND (expr, 1);
8691 }
8692 else if (TREE_CODE (expr) == COMPLEX_CST)
8693 {
8694 rpart = TREE_REALPART (expr);
8695 ipart = TREE_IMAGPART (expr);
8696 }
8697 else
8698 {
8699 expr = save_expr (expr);
8700 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8701 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8702 }
8703
8704 rpart = save_expr (rpart);
8705 ipart = save_expr (ipart);
8706 tem = fold_build2 (PLUS_EXPR, itype,
8707 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8708 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8709 return fold_build2 (COMPLEX_EXPR, type, tem,
8710 fold_convert (itype, integer_zero_node));
8711 }
8712
8713
8714 /* Fold a binary expression of code CODE and type TYPE with operands
8715 OP0 and OP1. Return the folded expression if folding is
8716 successful. Otherwise, return NULL_TREE. */
8717
8718 tree
8719 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8720 {
8721 enum tree_code_class kind = TREE_CODE_CLASS (code);
8722 tree arg0, arg1, tem;
8723 tree t1 = NULL_TREE;
8724
8725 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8726 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8727 && TREE_CODE_LENGTH (code) == 2
8728 && op0 != NULL_TREE
8729 && op1 != NULL_TREE);
8730
8731 arg0 = op0;
8732 arg1 = op1;
8733
8734 /* Strip any conversions that don't change the mode. This is
8735 safe for every expression, except for a comparison expression
8736 because its signedness is derived from its operands. So, in
8737 the latter case, only strip conversions that don't change the
8738 signedness.
8739
8740 Note that this is done as an internal manipulation within the
8741 constant folder, in order to find the simplest representation
8742 of the arguments so that their form can be studied. In any
8743 cases, the appropriate type conversions should be put back in
8744 the tree that will get out of the constant folder. */
8745
8746 if (kind == tcc_comparison)
8747 {
8748 STRIP_SIGN_NOPS (arg0);
8749 STRIP_SIGN_NOPS (arg1);
8750 }
8751 else
8752 {
8753 STRIP_NOPS (arg0);
8754 STRIP_NOPS (arg1);
8755 }
8756
8757 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8758 constant but we can't do arithmetic on them. */
8759 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8760 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8761 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8762 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8763 {
8764 if (kind == tcc_binary)
8765 tem = const_binop (code, arg0, arg1, 0);
8766 else if (kind == tcc_comparison)
8767 tem = fold_relational_const (code, type, arg0, arg1);
8768 else
8769 tem = NULL_TREE;
8770
8771 if (tem != NULL_TREE)
8772 {
8773 if (TREE_TYPE (tem) != type)
8774 tem = fold_convert (type, tem);
8775 return tem;
8776 }
8777 }
8778
8779 /* If this is a commutative operation, and ARG0 is a constant, move it
8780 to ARG1 to reduce the number of tests below. */
8781 if (commutative_tree_code (code)
8782 && tree_swap_operands_p (arg0, arg1, true))
8783 return fold_build2 (code, type, op1, op0);
8784
8785 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8786
8787 First check for cases where an arithmetic operation is applied to a
8788 compound, conditional, or comparison operation. Push the arithmetic
8789 operation inside the compound or conditional to see if any folding
8790 can then be done. Convert comparison to conditional for this purpose.
8791 The also optimizes non-constant cases that used to be done in
8792 expand_expr.
8793
8794 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8795 one of the operands is a comparison and the other is a comparison, a
8796 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8797 code below would make the expression more complex. Change it to a
8798 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8799 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8800
8801 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8802 || code == EQ_EXPR || code == NE_EXPR)
8803 && ((truth_value_p (TREE_CODE (arg0))
8804 && (truth_value_p (TREE_CODE (arg1))
8805 || (TREE_CODE (arg1) == BIT_AND_EXPR
8806 && integer_onep (TREE_OPERAND (arg1, 1)))))
8807 || (truth_value_p (TREE_CODE (arg1))
8808 && (truth_value_p (TREE_CODE (arg0))
8809 || (TREE_CODE (arg0) == BIT_AND_EXPR
8810 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8811 {
8812 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8813 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8814 : TRUTH_XOR_EXPR,
8815 boolean_type_node,
8816 fold_convert (boolean_type_node, arg0),
8817 fold_convert (boolean_type_node, arg1));
8818
8819 if (code == EQ_EXPR)
8820 tem = invert_truthvalue (tem);
8821
8822 return fold_convert (type, tem);
8823 }
8824
8825 if (TREE_CODE_CLASS (code) == tcc_binary
8826 || TREE_CODE_CLASS (code) == tcc_comparison)
8827 {
8828 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8829 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8830 fold_build2 (code, type,
8831 TREE_OPERAND (arg0, 1), op1));
8832 if (TREE_CODE (arg1) == COMPOUND_EXPR
8833 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8834 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8835 fold_build2 (code, type,
8836 op0, TREE_OPERAND (arg1, 1)));
8837
8838 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8839 {
8840 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8841 arg0, arg1,
8842 /*cond_first_p=*/1);
8843 if (tem != NULL_TREE)
8844 return tem;
8845 }
8846
8847 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8848 {
8849 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8850 arg1, arg0,
8851 /*cond_first_p=*/0);
8852 if (tem != NULL_TREE)
8853 return tem;
8854 }
8855 }
8856
8857 switch (code)
8858 {
8859 case PLUS_EXPR:
8860 /* A + (-B) -> A - B */
8861 if (TREE_CODE (arg1) == NEGATE_EXPR)
8862 return fold_build2 (MINUS_EXPR, type,
8863 fold_convert (type, arg0),
8864 fold_convert (type, TREE_OPERAND (arg1, 0)));
8865 /* (-A) + B -> B - A */
8866 if (TREE_CODE (arg0) == NEGATE_EXPR
8867 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8868 return fold_build2 (MINUS_EXPR, type,
8869 fold_convert (type, arg1),
8870 fold_convert (type, TREE_OPERAND (arg0, 0)));
8871 /* Convert ~A + 1 to -A. */
8872 if (INTEGRAL_TYPE_P (type)
8873 && TREE_CODE (arg0) == BIT_NOT_EXPR
8874 && integer_onep (arg1))
8875 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8876
8877 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8878 same or one. */
8879 if ((TREE_CODE (arg0) == MULT_EXPR
8880 || TREE_CODE (arg1) == MULT_EXPR)
8881 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8882 {
8883 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8884 if (tem)
8885 return tem;
8886 }
8887
8888 if (! FLOAT_TYPE_P (type))
8889 {
8890 if (integer_zerop (arg1))
8891 return non_lvalue (fold_convert (type, arg0));
8892
8893 /* ~X + X is -1. */
8894 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8895 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8896 && !TYPE_OVERFLOW_TRAPS (type))
8897 {
8898 t1 = build_int_cst_type (type, -1);
8899 return omit_one_operand (type, t1, arg1);
8900 }
8901
8902 /* X + ~X is -1. */
8903 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8904 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8905 && !TYPE_OVERFLOW_TRAPS (type))
8906 {
8907 t1 = build_int_cst_type (type, -1);
8908 return omit_one_operand (type, t1, arg0);
8909 }
8910
8911 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8912 with a constant, and the two constants have no bits in common,
8913 we should treat this as a BIT_IOR_EXPR since this may produce more
8914 simplifications. */
8915 if (TREE_CODE (arg0) == BIT_AND_EXPR
8916 && TREE_CODE (arg1) == BIT_AND_EXPR
8917 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8918 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8919 && integer_zerop (const_binop (BIT_AND_EXPR,
8920 TREE_OPERAND (arg0, 1),
8921 TREE_OPERAND (arg1, 1), 0)))
8922 {
8923 code = BIT_IOR_EXPR;
8924 goto bit_ior;
8925 }
8926
8927 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8928 (plus (plus (mult) (mult)) (foo)) so that we can
8929 take advantage of the factoring cases below. */
8930 if (((TREE_CODE (arg0) == PLUS_EXPR
8931 || TREE_CODE (arg0) == MINUS_EXPR)
8932 && TREE_CODE (arg1) == MULT_EXPR)
8933 || ((TREE_CODE (arg1) == PLUS_EXPR
8934 || TREE_CODE (arg1) == MINUS_EXPR)
8935 && TREE_CODE (arg0) == MULT_EXPR))
8936 {
8937 tree parg0, parg1, parg, marg;
8938 enum tree_code pcode;
8939
8940 if (TREE_CODE (arg1) == MULT_EXPR)
8941 parg = arg0, marg = arg1;
8942 else
8943 parg = arg1, marg = arg0;
8944 pcode = TREE_CODE (parg);
8945 parg0 = TREE_OPERAND (parg, 0);
8946 parg1 = TREE_OPERAND (parg, 1);
8947 STRIP_NOPS (parg0);
8948 STRIP_NOPS (parg1);
8949
8950 if (TREE_CODE (parg0) == MULT_EXPR
8951 && TREE_CODE (parg1) != MULT_EXPR)
8952 return fold_build2 (pcode, type,
8953 fold_build2 (PLUS_EXPR, type,
8954 fold_convert (type, parg0),
8955 fold_convert (type, marg)),
8956 fold_convert (type, parg1));
8957 if (TREE_CODE (parg0) != MULT_EXPR
8958 && TREE_CODE (parg1) == MULT_EXPR)
8959 return fold_build2 (PLUS_EXPR, type,
8960 fold_convert (type, parg0),
8961 fold_build2 (pcode, type,
8962 fold_convert (type, marg),
8963 fold_convert (type,
8964 parg1)));
8965 }
8966
8967 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8968 of the array. Loop optimizer sometimes produce this type of
8969 expressions. */
8970 if (TREE_CODE (arg0) == ADDR_EXPR)
8971 {
8972 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8973 if (tem)
8974 return fold_convert (type, tem);
8975 }
8976 else if (TREE_CODE (arg1) == ADDR_EXPR)
8977 {
8978 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8979 if (tem)
8980 return fold_convert (type, tem);
8981 }
8982 }
8983 else
8984 {
8985 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8986 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8987 return non_lvalue (fold_convert (type, arg0));
8988
8989 /* Likewise if the operands are reversed. */
8990 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8991 return non_lvalue (fold_convert (type, arg1));
8992
8993 /* Convert X + -C into X - C. */
8994 if (TREE_CODE (arg1) == REAL_CST
8995 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8996 {
8997 tem = fold_negate_const (arg1, type);
8998 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8999 return fold_build2 (MINUS_EXPR, type,
9000 fold_convert (type, arg0),
9001 fold_convert (type, tem));
9002 }
9003
9004 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9005 to __complex__ ( x, y ). This is not the same for SNaNs or
9006 if signed zeros are involved. */
9007 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9008 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9009 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9010 {
9011 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9012 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9013 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9014 bool arg0rz = false, arg0iz = false;
9015 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9016 || (arg0i && (arg0iz = real_zerop (arg0i))))
9017 {
9018 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9019 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9020 if (arg0rz && arg1i && real_zerop (arg1i))
9021 {
9022 tree rp = arg1r ? arg1r
9023 : build1 (REALPART_EXPR, rtype, arg1);
9024 tree ip = arg0i ? arg0i
9025 : build1 (IMAGPART_EXPR, rtype, arg0);
9026 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9027 }
9028 else if (arg0iz && arg1r && real_zerop (arg1r))
9029 {
9030 tree rp = arg0r ? arg0r
9031 : build1 (REALPART_EXPR, rtype, arg0);
9032 tree ip = arg1i ? arg1i
9033 : build1 (IMAGPART_EXPR, rtype, arg1);
9034 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9035 }
9036 }
9037 }
9038
9039 if (flag_unsafe_math_optimizations
9040 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9041 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9042 && (tem = distribute_real_division (code, type, arg0, arg1)))
9043 return tem;
9044
9045 /* Convert x+x into x*2.0. */
9046 if (operand_equal_p (arg0, arg1, 0)
9047 && SCALAR_FLOAT_TYPE_P (type))
9048 return fold_build2 (MULT_EXPR, type, arg0,
9049 build_real (type, dconst2));
9050
9051 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9052 if (flag_unsafe_math_optimizations
9053 && TREE_CODE (arg1) == PLUS_EXPR
9054 && TREE_CODE (arg0) != MULT_EXPR)
9055 {
9056 tree tree10 = TREE_OPERAND (arg1, 0);
9057 tree tree11 = TREE_OPERAND (arg1, 1);
9058 if (TREE_CODE (tree11) == MULT_EXPR
9059 && TREE_CODE (tree10) == MULT_EXPR)
9060 {
9061 tree tree0;
9062 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9063 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9064 }
9065 }
9066 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9067 if (flag_unsafe_math_optimizations
9068 && TREE_CODE (arg0) == PLUS_EXPR
9069 && TREE_CODE (arg1) != MULT_EXPR)
9070 {
9071 tree tree00 = TREE_OPERAND (arg0, 0);
9072 tree tree01 = TREE_OPERAND (arg0, 1);
9073 if (TREE_CODE (tree01) == MULT_EXPR
9074 && TREE_CODE (tree00) == MULT_EXPR)
9075 {
9076 tree tree0;
9077 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9078 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9079 }
9080 }
9081 }
9082
9083 bit_rotate:
9084 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9085 is a rotate of A by C1 bits. */
9086 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9087 is a rotate of A by B bits. */
9088 {
9089 enum tree_code code0, code1;
9090 code0 = TREE_CODE (arg0);
9091 code1 = TREE_CODE (arg1);
9092 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9093 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9094 && operand_equal_p (TREE_OPERAND (arg0, 0),
9095 TREE_OPERAND (arg1, 0), 0)
9096 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9097 {
9098 tree tree01, tree11;
9099 enum tree_code code01, code11;
9100
9101 tree01 = TREE_OPERAND (arg0, 1);
9102 tree11 = TREE_OPERAND (arg1, 1);
9103 STRIP_NOPS (tree01);
9104 STRIP_NOPS (tree11);
9105 code01 = TREE_CODE (tree01);
9106 code11 = TREE_CODE (tree11);
9107 if (code01 == INTEGER_CST
9108 && code11 == INTEGER_CST
9109 && TREE_INT_CST_HIGH (tree01) == 0
9110 && TREE_INT_CST_HIGH (tree11) == 0
9111 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9112 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9113 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9114 code0 == LSHIFT_EXPR ? tree01 : tree11);
9115 else if (code11 == MINUS_EXPR)
9116 {
9117 tree tree110, tree111;
9118 tree110 = TREE_OPERAND (tree11, 0);
9119 tree111 = TREE_OPERAND (tree11, 1);
9120 STRIP_NOPS (tree110);
9121 STRIP_NOPS (tree111);
9122 if (TREE_CODE (tree110) == INTEGER_CST
9123 && 0 == compare_tree_int (tree110,
9124 TYPE_PRECISION
9125 (TREE_TYPE (TREE_OPERAND
9126 (arg0, 0))))
9127 && operand_equal_p (tree01, tree111, 0))
9128 return build2 ((code0 == LSHIFT_EXPR
9129 ? LROTATE_EXPR
9130 : RROTATE_EXPR),
9131 type, TREE_OPERAND (arg0, 0), tree01);
9132 }
9133 else if (code01 == MINUS_EXPR)
9134 {
9135 tree tree010, tree011;
9136 tree010 = TREE_OPERAND (tree01, 0);
9137 tree011 = TREE_OPERAND (tree01, 1);
9138 STRIP_NOPS (tree010);
9139 STRIP_NOPS (tree011);
9140 if (TREE_CODE (tree010) == INTEGER_CST
9141 && 0 == compare_tree_int (tree010,
9142 TYPE_PRECISION
9143 (TREE_TYPE (TREE_OPERAND
9144 (arg0, 0))))
9145 && operand_equal_p (tree11, tree011, 0))
9146 return build2 ((code0 != LSHIFT_EXPR
9147 ? LROTATE_EXPR
9148 : RROTATE_EXPR),
9149 type, TREE_OPERAND (arg0, 0), tree11);
9150 }
9151 }
9152 }
9153
9154 associate:
9155 /* In most languages, can't associate operations on floats through
9156 parentheses. Rather than remember where the parentheses were, we
9157 don't associate floats at all, unless the user has specified
9158 -funsafe-math-optimizations. */
9159
9160 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9161 {
9162 tree var0, con0, lit0, minus_lit0;
9163 tree var1, con1, lit1, minus_lit1;
9164
9165 /* Split both trees into variables, constants, and literals. Then
9166 associate each group together, the constants with literals,
9167 then the result with variables. This increases the chances of
9168 literals being recombined later and of generating relocatable
9169 expressions for the sum of a constant and literal. */
9170 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9171 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9172 code == MINUS_EXPR);
9173
9174 /* Only do something if we found more than two objects. Otherwise,
9175 nothing has changed and we risk infinite recursion. */
9176 if (2 < ((var0 != 0) + (var1 != 0)
9177 + (con0 != 0) + (con1 != 0)
9178 + (lit0 != 0) + (lit1 != 0)
9179 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9180 {
9181 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9182 if (code == MINUS_EXPR)
9183 code = PLUS_EXPR;
9184
9185 var0 = associate_trees (var0, var1, code, type);
9186 con0 = associate_trees (con0, con1, code, type);
9187 lit0 = associate_trees (lit0, lit1, code, type);
9188 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9189
9190 /* Preserve the MINUS_EXPR if the negative part of the literal is
9191 greater than the positive part. Otherwise, the multiplicative
9192 folding code (i.e extract_muldiv) may be fooled in case
9193 unsigned constants are subtracted, like in the following
9194 example: ((X*2 + 4) - 8U)/2. */
9195 if (minus_lit0 && lit0)
9196 {
9197 if (TREE_CODE (lit0) == INTEGER_CST
9198 && TREE_CODE (minus_lit0) == INTEGER_CST
9199 && tree_int_cst_lt (lit0, minus_lit0))
9200 {
9201 minus_lit0 = associate_trees (minus_lit0, lit0,
9202 MINUS_EXPR, type);
9203 lit0 = 0;
9204 }
9205 else
9206 {
9207 lit0 = associate_trees (lit0, minus_lit0,
9208 MINUS_EXPR, type);
9209 minus_lit0 = 0;
9210 }
9211 }
9212 if (minus_lit0)
9213 {
9214 if (con0 == 0)
9215 return fold_convert (type,
9216 associate_trees (var0, minus_lit0,
9217 MINUS_EXPR, type));
9218 else
9219 {
9220 con0 = associate_trees (con0, minus_lit0,
9221 MINUS_EXPR, type);
9222 return fold_convert (type,
9223 associate_trees (var0, con0,
9224 PLUS_EXPR, type));
9225 }
9226 }
9227
9228 con0 = associate_trees (con0, lit0, code, type);
9229 return fold_convert (type, associate_trees (var0, con0,
9230 code, type));
9231 }
9232 }
9233
9234 return NULL_TREE;
9235
9236 case MINUS_EXPR:
9237 /* A - (-B) -> A + B */
9238 if (TREE_CODE (arg1) == NEGATE_EXPR)
9239 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9240 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9241 if (TREE_CODE (arg0) == NEGATE_EXPR
9242 && (FLOAT_TYPE_P (type)
9243 || INTEGRAL_TYPE_P (type))
9244 && negate_expr_p (arg1)
9245 && reorder_operands_p (arg0, arg1))
9246 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9247 TREE_OPERAND (arg0, 0));
9248 /* Convert -A - 1 to ~A. */
9249 if (INTEGRAL_TYPE_P (type)
9250 && TREE_CODE (arg0) == NEGATE_EXPR
9251 && integer_onep (arg1)
9252 && !TYPE_OVERFLOW_TRAPS (type))
9253 return fold_build1 (BIT_NOT_EXPR, type,
9254 fold_convert (type, TREE_OPERAND (arg0, 0)));
9255
9256 /* Convert -1 - A to ~A. */
9257 if (INTEGRAL_TYPE_P (type)
9258 && integer_all_onesp (arg0))
9259 return fold_build1 (BIT_NOT_EXPR, type, op1);
9260
9261 if (! FLOAT_TYPE_P (type))
9262 {
9263 if (integer_zerop (arg0))
9264 return negate_expr (fold_convert (type, arg1));
9265 if (integer_zerop (arg1))
9266 return non_lvalue (fold_convert (type, arg0));
9267
9268 /* Fold A - (A & B) into ~B & A. */
9269 if (!TREE_SIDE_EFFECTS (arg0)
9270 && TREE_CODE (arg1) == BIT_AND_EXPR)
9271 {
9272 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9273 return fold_build2 (BIT_AND_EXPR, type,
9274 fold_build1 (BIT_NOT_EXPR, type,
9275 TREE_OPERAND (arg1, 0)),
9276 arg0);
9277 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9278 return fold_build2 (BIT_AND_EXPR, type,
9279 fold_build1 (BIT_NOT_EXPR, type,
9280 TREE_OPERAND (arg1, 1)),
9281 arg0);
9282 }
9283
9284 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9285 any power of 2 minus 1. */
9286 if (TREE_CODE (arg0) == BIT_AND_EXPR
9287 && TREE_CODE (arg1) == BIT_AND_EXPR
9288 && operand_equal_p (TREE_OPERAND (arg0, 0),
9289 TREE_OPERAND (arg1, 0), 0))
9290 {
9291 tree mask0 = TREE_OPERAND (arg0, 1);
9292 tree mask1 = TREE_OPERAND (arg1, 1);
9293 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9294
9295 if (operand_equal_p (tem, mask1, 0))
9296 {
9297 tem = fold_build2 (BIT_XOR_EXPR, type,
9298 TREE_OPERAND (arg0, 0), mask1);
9299 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9300 }
9301 }
9302 }
9303
9304 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9305 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9306 return non_lvalue (fold_convert (type, arg0));
9307
9308 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9309 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9310 (-ARG1 + ARG0) reduces to -ARG1. */
9311 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9312 return negate_expr (fold_convert (type, arg1));
9313
9314 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9315 __complex__ ( x, -y ). This is not the same for SNaNs or if
9316 signed zeros are involved. */
9317 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9318 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9319 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9320 {
9321 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9322 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9323 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9324 bool arg0rz = false, arg0iz = false;
9325 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9326 || (arg0i && (arg0iz = real_zerop (arg0i))))
9327 {
9328 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9329 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9330 if (arg0rz && arg1i && real_zerop (arg1i))
9331 {
9332 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9333 arg1r ? arg1r
9334 : build1 (REALPART_EXPR, rtype, arg1));
9335 tree ip = arg0i ? arg0i
9336 : build1 (IMAGPART_EXPR, rtype, arg0);
9337 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9338 }
9339 else if (arg0iz && arg1r && real_zerop (arg1r))
9340 {
9341 tree rp = arg0r ? arg0r
9342 : build1 (REALPART_EXPR, rtype, arg0);
9343 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9344 arg1i ? arg1i
9345 : build1 (IMAGPART_EXPR, rtype, arg1));
9346 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9347 }
9348 }
9349 }
9350
9351 /* Fold &x - &x. This can happen from &x.foo - &x.
9352 This is unsafe for certain floats even in non-IEEE formats.
9353 In IEEE, it is unsafe because it does wrong for NaNs.
9354 Also note that operand_equal_p is always false if an operand
9355 is volatile. */
9356
9357 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9358 && operand_equal_p (arg0, arg1, 0))
9359 return fold_convert (type, integer_zero_node);
9360
9361 /* A - B -> A + (-B) if B is easily negatable. */
9362 if (negate_expr_p (arg1)
9363 && ((FLOAT_TYPE_P (type)
9364 /* Avoid this transformation if B is a positive REAL_CST. */
9365 && (TREE_CODE (arg1) != REAL_CST
9366 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9367 || INTEGRAL_TYPE_P (type)))
9368 return fold_build2 (PLUS_EXPR, type,
9369 fold_convert (type, arg0),
9370 fold_convert (type, negate_expr (arg1)));
9371
9372 /* Try folding difference of addresses. */
9373 {
9374 HOST_WIDE_INT diff;
9375
9376 if ((TREE_CODE (arg0) == ADDR_EXPR
9377 || TREE_CODE (arg1) == ADDR_EXPR)
9378 && ptr_difference_const (arg0, arg1, &diff))
9379 return build_int_cst_type (type, diff);
9380 }
9381
9382 /* Fold &a[i] - &a[j] to i-j. */
9383 if (TREE_CODE (arg0) == ADDR_EXPR
9384 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9385 && TREE_CODE (arg1) == ADDR_EXPR
9386 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9387 {
9388 tree aref0 = TREE_OPERAND (arg0, 0);
9389 tree aref1 = TREE_OPERAND (arg1, 0);
9390 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9391 TREE_OPERAND (aref1, 0), 0))
9392 {
9393 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9394 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9395 tree esz = array_ref_element_size (aref0);
9396 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9397 return fold_build2 (MULT_EXPR, type, diff,
9398 fold_convert (type, esz));
9399
9400 }
9401 }
9402
9403 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9404 of the array. Loop optimizer sometimes produce this type of
9405 expressions. */
9406 if (TREE_CODE (arg0) == ADDR_EXPR)
9407 {
9408 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9409 if (tem)
9410 return fold_convert (type, tem);
9411 }
9412
9413 if (flag_unsafe_math_optimizations
9414 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9415 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9416 && (tem = distribute_real_division (code, type, arg0, arg1)))
9417 return tem;
9418
9419 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9420 same or one. */
9421 if ((TREE_CODE (arg0) == MULT_EXPR
9422 || TREE_CODE (arg1) == MULT_EXPR)
9423 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9424 {
9425 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9426 if (tem)
9427 return tem;
9428 }
9429
9430 goto associate;
9431
9432 case MULT_EXPR:
9433 /* (-A) * (-B) -> A * B */
9434 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9435 return fold_build2 (MULT_EXPR, type,
9436 fold_convert (type, TREE_OPERAND (arg0, 0)),
9437 fold_convert (type, negate_expr (arg1)));
9438 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9439 return fold_build2 (MULT_EXPR, type,
9440 fold_convert (type, negate_expr (arg0)),
9441 fold_convert (type, TREE_OPERAND (arg1, 0)));
9442
9443 if (! FLOAT_TYPE_P (type))
9444 {
9445 if (integer_zerop (arg1))
9446 return omit_one_operand (type, arg1, arg0);
9447 if (integer_onep (arg1))
9448 return non_lvalue (fold_convert (type, arg0));
9449 /* Transform x * -1 into -x. */
9450 if (integer_all_onesp (arg1))
9451 return fold_convert (type, negate_expr (arg0));
9452 /* Transform x * -C into -x * C if x is easily negatable. */
9453 if (TREE_CODE (arg1) == INTEGER_CST
9454 && tree_int_cst_sgn (arg1) == -1
9455 && negate_expr_p (arg0)
9456 && (tem = negate_expr (arg1)) != arg1
9457 && !TREE_OVERFLOW (tem))
9458 return fold_build2 (MULT_EXPR, type,
9459 negate_expr (arg0), tem);
9460
9461 /* (a * (1 << b)) is (a << b) */
9462 if (TREE_CODE (arg1) == LSHIFT_EXPR
9463 && integer_onep (TREE_OPERAND (arg1, 0)))
9464 return fold_build2 (LSHIFT_EXPR, type, arg0,
9465 TREE_OPERAND (arg1, 1));
9466 if (TREE_CODE (arg0) == LSHIFT_EXPR
9467 && integer_onep (TREE_OPERAND (arg0, 0)))
9468 return fold_build2 (LSHIFT_EXPR, type, arg1,
9469 TREE_OPERAND (arg0, 1));
9470
9471 if (TREE_CODE (arg1) == INTEGER_CST
9472 && 0 != (tem = extract_muldiv (op0,
9473 fold_convert (type, arg1),
9474 code, NULL_TREE)))
9475 return fold_convert (type, tem);
9476
9477 /* Optimize z * conj(z) for integer complex numbers. */
9478 if (TREE_CODE (arg0) == CONJ_EXPR
9479 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9480 return fold_mult_zconjz (type, arg1);
9481 if (TREE_CODE (arg1) == CONJ_EXPR
9482 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9483 return fold_mult_zconjz (type, arg0);
9484 }
9485 else
9486 {
9487 /* Maybe fold x * 0 to 0. The expressions aren't the same
9488 when x is NaN, since x * 0 is also NaN. Nor are they the
9489 same in modes with signed zeros, since multiplying a
9490 negative value by 0 gives -0, not +0. */
9491 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9492 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9493 && real_zerop (arg1))
9494 return omit_one_operand (type, arg1, arg0);
9495 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9496 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9497 && real_onep (arg1))
9498 return non_lvalue (fold_convert (type, arg0));
9499
9500 /* Transform x * -1.0 into -x. */
9501 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9502 && real_minus_onep (arg1))
9503 return fold_convert (type, negate_expr (arg0));
9504
9505 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9506 if (flag_unsafe_math_optimizations
9507 && TREE_CODE (arg0) == RDIV_EXPR
9508 && TREE_CODE (arg1) == REAL_CST
9509 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9510 {
9511 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9512 arg1, 0);
9513 if (tem)
9514 return fold_build2 (RDIV_EXPR, type, tem,
9515 TREE_OPERAND (arg0, 1));
9516 }
9517
9518 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9519 if (operand_equal_p (arg0, arg1, 0))
9520 {
9521 tree tem = fold_strip_sign_ops (arg0);
9522 if (tem != NULL_TREE)
9523 {
9524 tem = fold_convert (type, tem);
9525 return fold_build2 (MULT_EXPR, type, tem, tem);
9526 }
9527 }
9528
9529 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9530 This is not the same for NaNs or if signed zeros are
9531 involved. */
9532 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9533 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9534 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9535 && TREE_CODE (arg1) == COMPLEX_CST
9536 && real_zerop (TREE_REALPART (arg1)))
9537 {
9538 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9539 if (real_onep (TREE_IMAGPART (arg1)))
9540 return fold_build2 (COMPLEX_EXPR, type,
9541 negate_expr (fold_build1 (IMAGPART_EXPR,
9542 rtype, arg0)),
9543 fold_build1 (REALPART_EXPR, rtype, arg0));
9544 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9545 return fold_build2 (COMPLEX_EXPR, type,
9546 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9547 negate_expr (fold_build1 (REALPART_EXPR,
9548 rtype, arg0)));
9549 }
9550
9551 /* Optimize z * conj(z) for floating point complex numbers.
9552 Guarded by flag_unsafe_math_optimizations as non-finite
9553 imaginary components don't produce scalar results. */
9554 if (flag_unsafe_math_optimizations
9555 && TREE_CODE (arg0) == CONJ_EXPR
9556 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9557 return fold_mult_zconjz (type, arg1);
9558 if (flag_unsafe_math_optimizations
9559 && TREE_CODE (arg1) == CONJ_EXPR
9560 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9561 return fold_mult_zconjz (type, arg0);
9562
9563 if (flag_unsafe_math_optimizations)
9564 {
9565 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9566 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9567
9568 /* Optimizations of root(...)*root(...). */
9569 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9570 {
9571 tree rootfn, arg, arglist;
9572 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9573 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9574
9575 /* Optimize sqrt(x)*sqrt(x) as x. */
9576 if (BUILTIN_SQRT_P (fcode0)
9577 && operand_equal_p (arg00, arg10, 0)
9578 && ! HONOR_SNANS (TYPE_MODE (type)))
9579 return arg00;
9580
9581 /* Optimize root(x)*root(y) as root(x*y). */
9582 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9583 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9584 arglist = build_tree_list (NULL_TREE, arg);
9585 return build_function_call_expr (rootfn, arglist);
9586 }
9587
9588 /* Optimize expN(x)*expN(y) as expN(x+y). */
9589 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9590 {
9591 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9592 tree arg = fold_build2 (PLUS_EXPR, type,
9593 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9594 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9595 tree arglist = build_tree_list (NULL_TREE, arg);
9596 return build_function_call_expr (expfn, arglist);
9597 }
9598
9599 /* Optimizations of pow(...)*pow(...). */
9600 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9601 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9602 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9603 {
9604 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9605 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9606 1)));
9607 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9608 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9609 1)));
9610
9611 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9612 if (operand_equal_p (arg01, arg11, 0))
9613 {
9614 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9615 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9616 tree arglist = tree_cons (NULL_TREE, arg,
9617 build_tree_list (NULL_TREE,
9618 arg01));
9619 return build_function_call_expr (powfn, arglist);
9620 }
9621
9622 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9623 if (operand_equal_p (arg00, arg10, 0))
9624 {
9625 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9626 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9627 tree arglist = tree_cons (NULL_TREE, arg00,
9628 build_tree_list (NULL_TREE,
9629 arg));
9630 return build_function_call_expr (powfn, arglist);
9631 }
9632 }
9633
9634 /* Optimize tan(x)*cos(x) as sin(x). */
9635 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9636 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9637 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9638 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9639 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9640 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9641 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9642 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9643 {
9644 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9645
9646 if (sinfn != NULL_TREE)
9647 return build_function_call_expr (sinfn,
9648 TREE_OPERAND (arg0, 1));
9649 }
9650
9651 /* Optimize x*pow(x,c) as pow(x,c+1). */
9652 if (fcode1 == BUILT_IN_POW
9653 || fcode1 == BUILT_IN_POWF
9654 || fcode1 == BUILT_IN_POWL)
9655 {
9656 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9657 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9658 1)));
9659 if (TREE_CODE (arg11) == REAL_CST
9660 && !TREE_OVERFLOW (arg11)
9661 && operand_equal_p (arg0, arg10, 0))
9662 {
9663 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9664 REAL_VALUE_TYPE c;
9665 tree arg, arglist;
9666
9667 c = TREE_REAL_CST (arg11);
9668 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9669 arg = build_real (type, c);
9670 arglist = build_tree_list (NULL_TREE, arg);
9671 arglist = tree_cons (NULL_TREE, arg0, arglist);
9672 return build_function_call_expr (powfn, arglist);
9673 }
9674 }
9675
9676 /* Optimize pow(x,c)*x as pow(x,c+1). */
9677 if (fcode0 == BUILT_IN_POW
9678 || fcode0 == BUILT_IN_POWF
9679 || fcode0 == BUILT_IN_POWL)
9680 {
9681 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9682 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9683 1)));
9684 if (TREE_CODE (arg01) == REAL_CST
9685 && !TREE_OVERFLOW (arg01)
9686 && operand_equal_p (arg1, arg00, 0))
9687 {
9688 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9689 REAL_VALUE_TYPE c;
9690 tree arg, arglist;
9691
9692 c = TREE_REAL_CST (arg01);
9693 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9694 arg = build_real (type, c);
9695 arglist = build_tree_list (NULL_TREE, arg);
9696 arglist = tree_cons (NULL_TREE, arg1, arglist);
9697 return build_function_call_expr (powfn, arglist);
9698 }
9699 }
9700
9701 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9702 if (! optimize_size
9703 && operand_equal_p (arg0, arg1, 0))
9704 {
9705 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9706
9707 if (powfn)
9708 {
9709 tree arg = build_real (type, dconst2);
9710 tree arglist = build_tree_list (NULL_TREE, arg);
9711 arglist = tree_cons (NULL_TREE, arg0, arglist);
9712 return build_function_call_expr (powfn, arglist);
9713 }
9714 }
9715 }
9716 }
9717 goto associate;
9718
9719 case BIT_IOR_EXPR:
9720 bit_ior:
9721 if (integer_all_onesp (arg1))
9722 return omit_one_operand (type, arg1, arg0);
9723 if (integer_zerop (arg1))
9724 return non_lvalue (fold_convert (type, arg0));
9725 if (operand_equal_p (arg0, arg1, 0))
9726 return non_lvalue (fold_convert (type, arg0));
9727
9728 /* ~X | X is -1. */
9729 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9730 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9731 {
9732 t1 = build_int_cst_type (type, -1);
9733 return omit_one_operand (type, t1, arg1);
9734 }
9735
9736 /* X | ~X is -1. */
9737 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9738 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9739 {
9740 t1 = build_int_cst_type (type, -1);
9741 return omit_one_operand (type, t1, arg0);
9742 }
9743
9744 /* Canonicalize (X & C1) | C2. */
9745 if (TREE_CODE (arg0) == BIT_AND_EXPR
9746 && TREE_CODE (arg1) == INTEGER_CST
9747 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9748 {
9749 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9750 int width = TYPE_PRECISION (type);
9751 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9752 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9753 hi2 = TREE_INT_CST_HIGH (arg1);
9754 lo2 = TREE_INT_CST_LOW (arg1);
9755
9756 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9757 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9758 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9759
9760 if (width > HOST_BITS_PER_WIDE_INT)
9761 {
9762 mhi = (unsigned HOST_WIDE_INT) -1
9763 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9764 mlo = -1;
9765 }
9766 else
9767 {
9768 mhi = 0;
9769 mlo = (unsigned HOST_WIDE_INT) -1
9770 >> (HOST_BITS_PER_WIDE_INT - width);
9771 }
9772
9773 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9774 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9775 return fold_build2 (BIT_IOR_EXPR, type,
9776 TREE_OPERAND (arg0, 0), arg1);
9777
9778 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9779 hi1 &= mhi;
9780 lo1 &= mlo;
9781 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9782 return fold_build2 (BIT_IOR_EXPR, type,
9783 fold_build2 (BIT_AND_EXPR, type,
9784 TREE_OPERAND (arg0, 0),
9785 build_int_cst_wide (type,
9786 lo1 & ~lo2,
9787 hi1 & ~hi2)),
9788 arg1);
9789 }
9790
9791 /* (X & Y) | Y is (X, Y). */
9792 if (TREE_CODE (arg0) == BIT_AND_EXPR
9793 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9794 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9795 /* (X & Y) | X is (Y, X). */
9796 if (TREE_CODE (arg0) == BIT_AND_EXPR
9797 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9798 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9799 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9800 /* X | (X & Y) is (Y, X). */
9801 if (TREE_CODE (arg1) == BIT_AND_EXPR
9802 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9803 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9804 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9805 /* X | (Y & X) is (Y, X). */
9806 if (TREE_CODE (arg1) == BIT_AND_EXPR
9807 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9808 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9809 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9810
9811 t1 = distribute_bit_expr (code, type, arg0, arg1);
9812 if (t1 != NULL_TREE)
9813 return t1;
9814
9815 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9816
9817 This results in more efficient code for machines without a NAND
9818 instruction. Combine will canonicalize to the first form
9819 which will allow use of NAND instructions provided by the
9820 backend if they exist. */
9821 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9822 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9823 {
9824 return fold_build1 (BIT_NOT_EXPR, type,
9825 build2 (BIT_AND_EXPR, type,
9826 TREE_OPERAND (arg0, 0),
9827 TREE_OPERAND (arg1, 0)));
9828 }
9829
9830 /* See if this can be simplified into a rotate first. If that
9831 is unsuccessful continue in the association code. */
9832 goto bit_rotate;
9833
9834 case BIT_XOR_EXPR:
9835 if (integer_zerop (arg1))
9836 return non_lvalue (fold_convert (type, arg0));
9837 if (integer_all_onesp (arg1))
9838 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9839 if (operand_equal_p (arg0, arg1, 0))
9840 return omit_one_operand (type, integer_zero_node, arg0);
9841
9842 /* ~X ^ X is -1. */
9843 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9844 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9845 {
9846 t1 = build_int_cst_type (type, -1);
9847 return omit_one_operand (type, t1, arg1);
9848 }
9849
9850 /* X ^ ~X is -1. */
9851 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9852 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9853 {
9854 t1 = build_int_cst_type (type, -1);
9855 return omit_one_operand (type, t1, arg0);
9856 }
9857
9858 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9859 with a constant, and the two constants have no bits in common,
9860 we should treat this as a BIT_IOR_EXPR since this may produce more
9861 simplifications. */
9862 if (TREE_CODE (arg0) == BIT_AND_EXPR
9863 && TREE_CODE (arg1) == BIT_AND_EXPR
9864 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9865 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9866 && integer_zerop (const_binop (BIT_AND_EXPR,
9867 TREE_OPERAND (arg0, 1),
9868 TREE_OPERAND (arg1, 1), 0)))
9869 {
9870 code = BIT_IOR_EXPR;
9871 goto bit_ior;
9872 }
9873
9874 /* (X | Y) ^ X -> Y & ~ X*/
9875 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9876 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9877 {
9878 tree t2 = TREE_OPERAND (arg0, 1);
9879 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9880 arg1);
9881 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9882 fold_convert (type, t1));
9883 return t1;
9884 }
9885
9886 /* (Y | X) ^ X -> Y & ~ X*/
9887 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9888 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9889 {
9890 tree t2 = TREE_OPERAND (arg0, 0);
9891 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9892 arg1);
9893 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9894 fold_convert (type, t1));
9895 return t1;
9896 }
9897
9898 /* X ^ (X | Y) -> Y & ~ X*/
9899 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9900 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9901 {
9902 tree t2 = TREE_OPERAND (arg1, 1);
9903 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9904 arg0);
9905 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9906 fold_convert (type, t1));
9907 return t1;
9908 }
9909
9910 /* X ^ (Y | X) -> Y & ~ X*/
9911 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9912 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9913 {
9914 tree t2 = TREE_OPERAND (arg1, 0);
9915 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9916 arg0);
9917 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9918 fold_convert (type, t1));
9919 return t1;
9920 }
9921
9922 /* Convert ~X ^ ~Y to X ^ Y. */
9923 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9924 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9925 return fold_build2 (code, type,
9926 fold_convert (type, TREE_OPERAND (arg0, 0)),
9927 fold_convert (type, TREE_OPERAND (arg1, 0)));
9928
9929 /* Convert ~X ^ C to X ^ ~C. */
9930 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9931 && TREE_CODE (arg1) == INTEGER_CST)
9932 return fold_build2 (code, type,
9933 fold_convert (type, TREE_OPERAND (arg0, 0)),
9934 fold_build1 (BIT_NOT_EXPR, type, arg1));
9935
9936 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9937 if (TREE_CODE (arg0) == BIT_AND_EXPR
9938 && integer_onep (TREE_OPERAND (arg0, 1))
9939 && integer_onep (arg1))
9940 return fold_build2 (EQ_EXPR, type, arg0,
9941 build_int_cst (TREE_TYPE (arg0), 0));
9942
9943 /* Fold (X & Y) ^ Y as ~X & Y. */
9944 if (TREE_CODE (arg0) == BIT_AND_EXPR
9945 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9946 {
9947 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9948 return fold_build2 (BIT_AND_EXPR, type,
9949 fold_build1 (BIT_NOT_EXPR, type, tem),
9950 fold_convert (type, arg1));
9951 }
9952 /* Fold (X & Y) ^ X as ~Y & X. */
9953 if (TREE_CODE (arg0) == BIT_AND_EXPR
9954 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9955 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9956 {
9957 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9958 return fold_build2 (BIT_AND_EXPR, type,
9959 fold_build1 (BIT_NOT_EXPR, type, tem),
9960 fold_convert (type, arg1));
9961 }
9962 /* Fold X ^ (X & Y) as X & ~Y. */
9963 if (TREE_CODE (arg1) == BIT_AND_EXPR
9964 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9965 {
9966 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9967 return fold_build2 (BIT_AND_EXPR, type,
9968 fold_convert (type, arg0),
9969 fold_build1 (BIT_NOT_EXPR, type, tem));
9970 }
9971 /* Fold X ^ (Y & X) as ~Y & X. */
9972 if (TREE_CODE (arg1) == BIT_AND_EXPR
9973 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9974 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9975 {
9976 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9977 return fold_build2 (BIT_AND_EXPR, type,
9978 fold_build1 (BIT_NOT_EXPR, type, tem),
9979 fold_convert (type, arg0));
9980 }
9981
9982 /* See if this can be simplified into a rotate first. If that
9983 is unsuccessful continue in the association code. */
9984 goto bit_rotate;
9985
9986 case BIT_AND_EXPR:
9987 if (integer_all_onesp (arg1))
9988 return non_lvalue (fold_convert (type, arg0));
9989 if (integer_zerop (arg1))
9990 return omit_one_operand (type, arg1, arg0);
9991 if (operand_equal_p (arg0, arg1, 0))
9992 return non_lvalue (fold_convert (type, arg0));
9993
9994 /* ~X & X is always zero. */
9995 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9997 return omit_one_operand (type, integer_zero_node, arg1);
9998
9999 /* X & ~X is always zero. */
10000 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10001 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10002 return omit_one_operand (type, integer_zero_node, arg0);
10003
10004 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10005 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10006 && TREE_CODE (arg1) == INTEGER_CST
10007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10008 return fold_build2 (BIT_IOR_EXPR, type,
10009 fold_build2 (BIT_AND_EXPR, type,
10010 TREE_OPERAND (arg0, 0), arg1),
10011 fold_build2 (BIT_AND_EXPR, type,
10012 TREE_OPERAND (arg0, 1), arg1));
10013
10014 /* (X | Y) & Y is (X, Y). */
10015 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10016 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10017 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10018 /* (X | Y) & X is (Y, X). */
10019 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10020 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10021 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10022 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10023 /* X & (X | Y) is (Y, X). */
10024 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10025 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10026 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10027 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10028 /* X & (Y | X) is (Y, X). */
10029 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10030 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10031 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10032 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10033
10034 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10035 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10036 && integer_onep (TREE_OPERAND (arg0, 1))
10037 && integer_onep (arg1))
10038 {
10039 tem = TREE_OPERAND (arg0, 0);
10040 return fold_build2 (EQ_EXPR, type,
10041 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10042 build_int_cst (TREE_TYPE (tem), 1)),
10043 build_int_cst (TREE_TYPE (tem), 0));
10044 }
10045 /* Fold ~X & 1 as (X & 1) == 0. */
10046 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10047 && integer_onep (arg1))
10048 {
10049 tem = TREE_OPERAND (arg0, 0);
10050 return fold_build2 (EQ_EXPR, type,
10051 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10052 build_int_cst (TREE_TYPE (tem), 1)),
10053 build_int_cst (TREE_TYPE (tem), 0));
10054 }
10055
10056 /* Fold (X ^ Y) & Y as ~X & Y. */
10057 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10058 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10059 {
10060 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10061 return fold_build2 (BIT_AND_EXPR, type,
10062 fold_build1 (BIT_NOT_EXPR, type, tem),
10063 fold_convert (type, arg1));
10064 }
10065 /* Fold (X ^ Y) & X as ~Y & X. */
10066 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10067 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10068 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10069 {
10070 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10071 return fold_build2 (BIT_AND_EXPR, type,
10072 fold_build1 (BIT_NOT_EXPR, type, tem),
10073 fold_convert (type, arg1));
10074 }
10075 /* Fold X & (X ^ Y) as X & ~Y. */
10076 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10077 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10078 {
10079 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10080 return fold_build2 (BIT_AND_EXPR, type,
10081 fold_convert (type, arg0),
10082 fold_build1 (BIT_NOT_EXPR, type, tem));
10083 }
10084 /* Fold X & (Y ^ X) as ~Y & X. */
10085 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10086 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10087 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10088 {
10089 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10090 return fold_build2 (BIT_AND_EXPR, type,
10091 fold_build1 (BIT_NOT_EXPR, type, tem),
10092 fold_convert (type, arg0));
10093 }
10094
10095 t1 = distribute_bit_expr (code, type, arg0, arg1);
10096 if (t1 != NULL_TREE)
10097 return t1;
10098 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10099 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10100 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10101 {
10102 unsigned int prec
10103 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10104
10105 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10106 && (~TREE_INT_CST_LOW (arg1)
10107 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10108 return fold_convert (type, TREE_OPERAND (arg0, 0));
10109 }
10110
10111 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10112
10113 This results in more efficient code for machines without a NOR
10114 instruction. Combine will canonicalize to the first form
10115 which will allow use of NOR instructions provided by the
10116 backend if they exist. */
10117 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10118 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10119 {
10120 return fold_build1 (BIT_NOT_EXPR, type,
10121 build2 (BIT_IOR_EXPR, type,
10122 TREE_OPERAND (arg0, 0),
10123 TREE_OPERAND (arg1, 0)));
10124 }
10125
10126 goto associate;
10127
10128 case RDIV_EXPR:
10129 /* Don't touch a floating-point divide by zero unless the mode
10130 of the constant can represent infinity. */
10131 if (TREE_CODE (arg1) == REAL_CST
10132 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10133 && real_zerop (arg1))
10134 return NULL_TREE;
10135
10136 /* Optimize A / A to 1.0 if we don't care about
10137 NaNs or Infinities. Skip the transformation
10138 for non-real operands. */
10139 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10140 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10141 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10142 && operand_equal_p (arg0, arg1, 0))
10143 {
10144 tree r = build_real (TREE_TYPE (arg0), dconst1);
10145
10146 return omit_two_operands (type, r, arg0, arg1);
10147 }
10148
10149 /* The complex version of the above A / A optimization. */
10150 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10151 && operand_equal_p (arg0, arg1, 0))
10152 {
10153 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10154 if (! HONOR_NANS (TYPE_MODE (elem_type))
10155 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10156 {
10157 tree r = build_real (elem_type, dconst1);
10158 /* omit_two_operands will call fold_convert for us. */
10159 return omit_two_operands (type, r, arg0, arg1);
10160 }
10161 }
10162
10163 /* (-A) / (-B) -> A / B */
10164 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10165 return fold_build2 (RDIV_EXPR, type,
10166 TREE_OPERAND (arg0, 0),
10167 negate_expr (arg1));
10168 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10169 return fold_build2 (RDIV_EXPR, type,
10170 negate_expr (arg0),
10171 TREE_OPERAND (arg1, 0));
10172
10173 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10174 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10175 && real_onep (arg1))
10176 return non_lvalue (fold_convert (type, arg0));
10177
10178 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10179 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10180 && real_minus_onep (arg1))
10181 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10182
10183 /* If ARG1 is a constant, we can convert this to a multiply by the
10184 reciprocal. This does not have the same rounding properties,
10185 so only do this if -funsafe-math-optimizations. We can actually
10186 always safely do it if ARG1 is a power of two, but it's hard to
10187 tell if it is or not in a portable manner. */
10188 if (TREE_CODE (arg1) == REAL_CST)
10189 {
10190 if (flag_unsafe_math_optimizations
10191 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10192 arg1, 0)))
10193 return fold_build2 (MULT_EXPR, type, arg0, tem);
10194 /* Find the reciprocal if optimizing and the result is exact. */
10195 if (optimize)
10196 {
10197 REAL_VALUE_TYPE r;
10198 r = TREE_REAL_CST (arg1);
10199 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10200 {
10201 tem = build_real (type, r);
10202 return fold_build2 (MULT_EXPR, type,
10203 fold_convert (type, arg0), tem);
10204 }
10205 }
10206 }
10207 /* Convert A/B/C to A/(B*C). */
10208 if (flag_unsafe_math_optimizations
10209 && TREE_CODE (arg0) == RDIV_EXPR)
10210 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10211 fold_build2 (MULT_EXPR, type,
10212 TREE_OPERAND (arg0, 1), arg1));
10213
10214 /* Convert A/(B/C) to (A/B)*C. */
10215 if (flag_unsafe_math_optimizations
10216 && TREE_CODE (arg1) == RDIV_EXPR)
10217 return fold_build2 (MULT_EXPR, type,
10218 fold_build2 (RDIV_EXPR, type, arg0,
10219 TREE_OPERAND (arg1, 0)),
10220 TREE_OPERAND (arg1, 1));
10221
10222 /* Convert C1/(X*C2) into (C1/C2)/X. */
10223 if (flag_unsafe_math_optimizations
10224 && TREE_CODE (arg1) == MULT_EXPR
10225 && TREE_CODE (arg0) == REAL_CST
10226 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10227 {
10228 tree tem = const_binop (RDIV_EXPR, arg0,
10229 TREE_OPERAND (arg1, 1), 0);
10230 if (tem)
10231 return fold_build2 (RDIV_EXPR, type, tem,
10232 TREE_OPERAND (arg1, 0));
10233 }
10234
10235 if (flag_unsafe_math_optimizations)
10236 {
10237 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10238 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10239
10240 /* Optimize sin(x)/cos(x) as tan(x). */
10241 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10242 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10243 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10244 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10245 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10246 {
10247 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10248
10249 if (tanfn != NULL_TREE)
10250 return build_function_call_expr (tanfn,
10251 TREE_OPERAND (arg0, 1));
10252 }
10253
10254 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10255 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10256 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10257 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10258 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10259 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10260 {
10261 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10262
10263 if (tanfn != NULL_TREE)
10264 {
10265 tree tmp = TREE_OPERAND (arg0, 1);
10266 tmp = build_function_call_expr (tanfn, tmp);
10267 return fold_build2 (RDIV_EXPR, type,
10268 build_real (type, dconst1), tmp);
10269 }
10270 }
10271
10272 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10273 NaNs or Infinities. */
10274 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10275 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10276 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10277 {
10278 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10279 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10280
10281 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10282 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10283 && operand_equal_p (arg00, arg01, 0))
10284 {
10285 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10286
10287 if (cosfn != NULL_TREE)
10288 return build_function_call_expr (cosfn,
10289 TREE_OPERAND (arg0, 1));
10290 }
10291 }
10292
10293 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10294 NaNs or Infinities. */
10295 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10296 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10297 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10298 {
10299 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10300 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10301
10302 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10303 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10304 && operand_equal_p (arg00, arg01, 0))
10305 {
10306 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10307
10308 if (cosfn != NULL_TREE)
10309 {
10310 tree tmp = TREE_OPERAND (arg0, 1);
10311 tmp = build_function_call_expr (cosfn, tmp);
10312 return fold_build2 (RDIV_EXPR, type,
10313 build_real (type, dconst1),
10314 tmp);
10315 }
10316 }
10317 }
10318
10319 /* Optimize pow(x,c)/x as pow(x,c-1). */
10320 if (fcode0 == BUILT_IN_POW
10321 || fcode0 == BUILT_IN_POWF
10322 || fcode0 == BUILT_IN_POWL)
10323 {
10324 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10325 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10326 if (TREE_CODE (arg01) == REAL_CST
10327 && !TREE_OVERFLOW (arg01)
10328 && operand_equal_p (arg1, arg00, 0))
10329 {
10330 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10331 REAL_VALUE_TYPE c;
10332 tree arg, arglist;
10333
10334 c = TREE_REAL_CST (arg01);
10335 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10336 arg = build_real (type, c);
10337 arglist = build_tree_list (NULL_TREE, arg);
10338 arglist = tree_cons (NULL_TREE, arg1, arglist);
10339 return build_function_call_expr (powfn, arglist);
10340 }
10341 }
10342
10343 /* Optimize x/expN(y) into x*expN(-y). */
10344 if (BUILTIN_EXPONENT_P (fcode1))
10345 {
10346 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10347 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10348 tree arglist = build_tree_list (NULL_TREE,
10349 fold_convert (type, arg));
10350 arg1 = build_function_call_expr (expfn, arglist);
10351 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10352 }
10353
10354 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10355 if (fcode1 == BUILT_IN_POW
10356 || fcode1 == BUILT_IN_POWF
10357 || fcode1 == BUILT_IN_POWL)
10358 {
10359 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10360 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10361 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10362 tree neg11 = fold_convert (type, negate_expr (arg11));
10363 tree arglist = tree_cons (NULL_TREE, arg10,
10364 build_tree_list (NULL_TREE, neg11));
10365 arg1 = build_function_call_expr (powfn, arglist);
10366 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10367 }
10368 }
10369 return NULL_TREE;
10370
10371 case TRUNC_DIV_EXPR:
10372 case FLOOR_DIV_EXPR:
10373 /* Simplify A / (B << N) where A and B are positive and B is
10374 a power of 2, to A >> (N + log2(B)). */
10375 if (TREE_CODE (arg1) == LSHIFT_EXPR
10376 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10377 {
10378 tree sval = TREE_OPERAND (arg1, 0);
10379 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10380 {
10381 tree sh_cnt = TREE_OPERAND (arg1, 1);
10382 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10383
10384 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10385 sh_cnt, build_int_cst (NULL_TREE, pow2));
10386 return fold_build2 (RSHIFT_EXPR, type,
10387 fold_convert (type, arg0), sh_cnt);
10388 }
10389 }
10390 /* Fall thru */
10391
10392 case ROUND_DIV_EXPR:
10393 case CEIL_DIV_EXPR:
10394 case EXACT_DIV_EXPR:
10395 if (integer_onep (arg1))
10396 return non_lvalue (fold_convert (type, arg0));
10397 if (integer_zerop (arg1))
10398 return NULL_TREE;
10399 /* X / -1 is -X. */
10400 if (!TYPE_UNSIGNED (type)
10401 && TREE_CODE (arg1) == INTEGER_CST
10402 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10403 && TREE_INT_CST_HIGH (arg1) == -1)
10404 return fold_convert (type, negate_expr (arg0));
10405
10406 /* Convert -A / -B to A / B when the type is signed and overflow is
10407 undefined. */
10408 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10409 && TREE_CODE (arg0) == NEGATE_EXPR
10410 && negate_expr_p (arg1))
10411 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10412 negate_expr (arg1));
10413 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10414 && TREE_CODE (arg1) == NEGATE_EXPR
10415 && negate_expr_p (arg0))
10416 return fold_build2 (code, type, negate_expr (arg0),
10417 TREE_OPERAND (arg1, 0));
10418
10419 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10420 operation, EXACT_DIV_EXPR.
10421
10422 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10423 At one time others generated faster code, it's not clear if they do
10424 after the last round to changes to the DIV code in expmed.c. */
10425 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10426 && multiple_of_p (type, arg0, arg1))
10427 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10428
10429 if (TREE_CODE (arg1) == INTEGER_CST
10430 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10431 return fold_convert (type, tem);
10432
10433 return NULL_TREE;
10434
10435 case CEIL_MOD_EXPR:
10436 case FLOOR_MOD_EXPR:
10437 case ROUND_MOD_EXPR:
10438 case TRUNC_MOD_EXPR:
10439 /* X % 1 is always zero, but be sure to preserve any side
10440 effects in X. */
10441 if (integer_onep (arg1))
10442 return omit_one_operand (type, integer_zero_node, arg0);
10443
10444 /* X % 0, return X % 0 unchanged so that we can get the
10445 proper warnings and errors. */
10446 if (integer_zerop (arg1))
10447 return NULL_TREE;
10448
10449 /* 0 % X is always zero, but be sure to preserve any side
10450 effects in X. Place this after checking for X == 0. */
10451 if (integer_zerop (arg0))
10452 return omit_one_operand (type, integer_zero_node, arg1);
10453
10454 /* X % -1 is zero. */
10455 if (!TYPE_UNSIGNED (type)
10456 && TREE_CODE (arg1) == INTEGER_CST
10457 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10458 && TREE_INT_CST_HIGH (arg1) == -1)
10459 return omit_one_operand (type, integer_zero_node, arg0);
10460
10461 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10462 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10463 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10464 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10465 {
10466 tree c = arg1;
10467 /* Also optimize A % (C << N) where C is a power of 2,
10468 to A & ((C << N) - 1). */
10469 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10470 c = TREE_OPERAND (arg1, 0);
10471
10472 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10473 {
10474 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10475 build_int_cst (TREE_TYPE (arg1), 1));
10476 return fold_build2 (BIT_AND_EXPR, type,
10477 fold_convert (type, arg0),
10478 fold_convert (type, mask));
10479 }
10480 }
10481
10482 /* X % -C is the same as X % C. */
10483 if (code == TRUNC_MOD_EXPR
10484 && !TYPE_UNSIGNED (type)
10485 && TREE_CODE (arg1) == INTEGER_CST
10486 && !TREE_OVERFLOW (arg1)
10487 && TREE_INT_CST_HIGH (arg1) < 0
10488 && !TYPE_OVERFLOW_TRAPS (type)
10489 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10490 && !sign_bit_p (arg1, arg1))
10491 return fold_build2 (code, type, fold_convert (type, arg0),
10492 fold_convert (type, negate_expr (arg1)));
10493
10494 /* X % -Y is the same as X % Y. */
10495 if (code == TRUNC_MOD_EXPR
10496 && !TYPE_UNSIGNED (type)
10497 && TREE_CODE (arg1) == NEGATE_EXPR
10498 && !TYPE_OVERFLOW_TRAPS (type))
10499 return fold_build2 (code, type, fold_convert (type, arg0),
10500 fold_convert (type, TREE_OPERAND (arg1, 0)));
10501
10502 if (TREE_CODE (arg1) == INTEGER_CST
10503 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10504 return fold_convert (type, tem);
10505
10506 return NULL_TREE;
10507
10508 case LROTATE_EXPR:
10509 case RROTATE_EXPR:
10510 if (integer_all_onesp (arg0))
10511 return omit_one_operand (type, arg0, arg1);
10512 goto shift;
10513
10514 case RSHIFT_EXPR:
10515 /* Optimize -1 >> x for arithmetic right shifts. */
10516 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10517 return omit_one_operand (type, arg0, arg1);
10518 /* ... fall through ... */
10519
10520 case LSHIFT_EXPR:
10521 shift:
10522 if (integer_zerop (arg1))
10523 return non_lvalue (fold_convert (type, arg0));
10524 if (integer_zerop (arg0))
10525 return omit_one_operand (type, arg0, arg1);
10526
10527 /* Since negative shift count is not well-defined,
10528 don't try to compute it in the compiler. */
10529 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10530 return NULL_TREE;
10531
10532 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10533 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10534 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10535 && host_integerp (TREE_OPERAND (arg0, 1), false)
10536 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10537 {
10538 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10539 + TREE_INT_CST_LOW (arg1));
10540
10541 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10542 being well defined. */
10543 if (low >= TYPE_PRECISION (type))
10544 {
10545 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10546 low = low % TYPE_PRECISION (type);
10547 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10548 return build_int_cst (type, 0);
10549 else
10550 low = TYPE_PRECISION (type) - 1;
10551 }
10552
10553 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10554 build_int_cst (type, low));
10555 }
10556
10557 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10558 into x & ((unsigned)-1 >> c) for unsigned types. */
10559 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10560 || (TYPE_UNSIGNED (type)
10561 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10562 && host_integerp (arg1, false)
10563 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10564 && host_integerp (TREE_OPERAND (arg0, 1), false)
10565 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10566 {
10567 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10568 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10569 tree lshift;
10570 tree arg00;
10571
10572 if (low0 == low1)
10573 {
10574 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10575
10576 lshift = build_int_cst (type, -1);
10577 lshift = int_const_binop (code, lshift, arg1, 0);
10578
10579 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10580 }
10581 }
10582
10583 /* Rewrite an LROTATE_EXPR by a constant into an
10584 RROTATE_EXPR by a new constant. */
10585 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10586 {
10587 tree tem = build_int_cst (TREE_TYPE (arg1),
10588 GET_MODE_BITSIZE (TYPE_MODE (type)));
10589 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10590 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10591 }
10592
10593 /* If we have a rotate of a bit operation with the rotate count and
10594 the second operand of the bit operation both constant,
10595 permute the two operations. */
10596 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10597 && (TREE_CODE (arg0) == BIT_AND_EXPR
10598 || TREE_CODE (arg0) == BIT_IOR_EXPR
10599 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10600 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10601 return fold_build2 (TREE_CODE (arg0), type,
10602 fold_build2 (code, type,
10603 TREE_OPERAND (arg0, 0), arg1),
10604 fold_build2 (code, type,
10605 TREE_OPERAND (arg0, 1), arg1));
10606
10607 /* Two consecutive rotates adding up to the width of the mode can
10608 be ignored. */
10609 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10610 && TREE_CODE (arg0) == RROTATE_EXPR
10611 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10612 && TREE_INT_CST_HIGH (arg1) == 0
10613 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10614 && ((TREE_INT_CST_LOW (arg1)
10615 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10616 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10617 return TREE_OPERAND (arg0, 0);
10618
10619 return NULL_TREE;
10620
10621 case MIN_EXPR:
10622 if (operand_equal_p (arg0, arg1, 0))
10623 return omit_one_operand (type, arg0, arg1);
10624 if (INTEGRAL_TYPE_P (type)
10625 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10626 return omit_one_operand (type, arg1, arg0);
10627 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10628 if (tem)
10629 return tem;
10630 goto associate;
10631
10632 case MAX_EXPR:
10633 if (operand_equal_p (arg0, arg1, 0))
10634 return omit_one_operand (type, arg0, arg1);
10635 if (INTEGRAL_TYPE_P (type)
10636 && TYPE_MAX_VALUE (type)
10637 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10638 return omit_one_operand (type, arg1, arg0);
10639 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10640 if (tem)
10641 return tem;
10642 goto associate;
10643
10644 case TRUTH_ANDIF_EXPR:
10645 /* Note that the operands of this must be ints
10646 and their values must be 0 or 1.
10647 ("true" is a fixed value perhaps depending on the language.) */
10648 /* If first arg is constant zero, return it. */
10649 if (integer_zerop (arg0))
10650 return fold_convert (type, arg0);
10651 case TRUTH_AND_EXPR:
10652 /* If either arg is constant true, drop it. */
10653 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10654 return non_lvalue (fold_convert (type, arg1));
10655 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10656 /* Preserve sequence points. */
10657 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10658 return non_lvalue (fold_convert (type, arg0));
10659 /* If second arg is constant zero, result is zero, but first arg
10660 must be evaluated. */
10661 if (integer_zerop (arg1))
10662 return omit_one_operand (type, arg1, arg0);
10663 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10664 case will be handled here. */
10665 if (integer_zerop (arg0))
10666 return omit_one_operand (type, arg0, arg1);
10667
10668 /* !X && X is always false. */
10669 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10670 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10671 return omit_one_operand (type, integer_zero_node, arg1);
10672 /* X && !X is always false. */
10673 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10674 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10675 return omit_one_operand (type, integer_zero_node, arg0);
10676
10677 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10678 means A >= Y && A != MAX, but in this case we know that
10679 A < X <= MAX. */
10680
10681 if (!TREE_SIDE_EFFECTS (arg0)
10682 && !TREE_SIDE_EFFECTS (arg1))
10683 {
10684 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10685 if (tem && !operand_equal_p (tem, arg0, 0))
10686 return fold_build2 (code, type, tem, arg1);
10687
10688 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10689 if (tem && !operand_equal_p (tem, arg1, 0))
10690 return fold_build2 (code, type, arg0, tem);
10691 }
10692
10693 truth_andor:
10694 /* We only do these simplifications if we are optimizing. */
10695 if (!optimize)
10696 return NULL_TREE;
10697
10698 /* Check for things like (A || B) && (A || C). We can convert this
10699 to A || (B && C). Note that either operator can be any of the four
10700 truth and/or operations and the transformation will still be
10701 valid. Also note that we only care about order for the
10702 ANDIF and ORIF operators. If B contains side effects, this
10703 might change the truth-value of A. */
10704 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10705 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10706 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10707 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10708 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10709 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10710 {
10711 tree a00 = TREE_OPERAND (arg0, 0);
10712 tree a01 = TREE_OPERAND (arg0, 1);
10713 tree a10 = TREE_OPERAND (arg1, 0);
10714 tree a11 = TREE_OPERAND (arg1, 1);
10715 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10716 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10717 && (code == TRUTH_AND_EXPR
10718 || code == TRUTH_OR_EXPR));
10719
10720 if (operand_equal_p (a00, a10, 0))
10721 return fold_build2 (TREE_CODE (arg0), type, a00,
10722 fold_build2 (code, type, a01, a11));
10723 else if (commutative && operand_equal_p (a00, a11, 0))
10724 return fold_build2 (TREE_CODE (arg0), type, a00,
10725 fold_build2 (code, type, a01, a10));
10726 else if (commutative && operand_equal_p (a01, a10, 0))
10727 return fold_build2 (TREE_CODE (arg0), type, a01,
10728 fold_build2 (code, type, a00, a11));
10729
10730 /* This case if tricky because we must either have commutative
10731 operators or else A10 must not have side-effects. */
10732
10733 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10734 && operand_equal_p (a01, a11, 0))
10735 return fold_build2 (TREE_CODE (arg0), type,
10736 fold_build2 (code, type, a00, a10),
10737 a01);
10738 }
10739
10740 /* See if we can build a range comparison. */
10741 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10742 return tem;
10743
10744 /* Check for the possibility of merging component references. If our
10745 lhs is another similar operation, try to merge its rhs with our
10746 rhs. Then try to merge our lhs and rhs. */
10747 if (TREE_CODE (arg0) == code
10748 && 0 != (tem = fold_truthop (code, type,
10749 TREE_OPERAND (arg0, 1), arg1)))
10750 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10751
10752 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10753 return tem;
10754
10755 return NULL_TREE;
10756
10757 case TRUTH_ORIF_EXPR:
10758 /* Note that the operands of this must be ints
10759 and their values must be 0 or true.
10760 ("true" is a fixed value perhaps depending on the language.) */
10761 /* If first arg is constant true, return it. */
10762 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10763 return fold_convert (type, arg0);
10764 case TRUTH_OR_EXPR:
10765 /* If either arg is constant zero, drop it. */
10766 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10767 return non_lvalue (fold_convert (type, arg1));
10768 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10769 /* Preserve sequence points. */
10770 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10771 return non_lvalue (fold_convert (type, arg0));
10772 /* If second arg is constant true, result is true, but we must
10773 evaluate first arg. */
10774 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10775 return omit_one_operand (type, arg1, arg0);
10776 /* Likewise for first arg, but note this only occurs here for
10777 TRUTH_OR_EXPR. */
10778 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10779 return omit_one_operand (type, arg0, arg1);
10780
10781 /* !X || X is always true. */
10782 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10783 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10784 return omit_one_operand (type, integer_one_node, arg1);
10785 /* X || !X is always true. */
10786 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10787 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10788 return omit_one_operand (type, integer_one_node, arg0);
10789
10790 goto truth_andor;
10791
10792 case TRUTH_XOR_EXPR:
10793 /* If the second arg is constant zero, drop it. */
10794 if (integer_zerop (arg1))
10795 return non_lvalue (fold_convert (type, arg0));
10796 /* If the second arg is constant true, this is a logical inversion. */
10797 if (integer_onep (arg1))
10798 {
10799 /* Only call invert_truthvalue if operand is a truth value. */
10800 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10801 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10802 else
10803 tem = invert_truthvalue (arg0);
10804 return non_lvalue (fold_convert (type, tem));
10805 }
10806 /* Identical arguments cancel to zero. */
10807 if (operand_equal_p (arg0, arg1, 0))
10808 return omit_one_operand (type, integer_zero_node, arg0);
10809
10810 /* !X ^ X is always true. */
10811 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10812 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10813 return omit_one_operand (type, integer_one_node, arg1);
10814
10815 /* X ^ !X is always true. */
10816 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10817 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10818 return omit_one_operand (type, integer_one_node, arg0);
10819
10820 return NULL_TREE;
10821
10822 case EQ_EXPR:
10823 case NE_EXPR:
10824 tem = fold_comparison (code, type, op0, op1);
10825 if (tem != NULL_TREE)
10826 return tem;
10827
10828 /* bool_var != 0 becomes bool_var. */
10829 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10830 && code == NE_EXPR)
10831 return non_lvalue (fold_convert (type, arg0));
10832
10833 /* bool_var == 1 becomes bool_var. */
10834 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10835 && code == EQ_EXPR)
10836 return non_lvalue (fold_convert (type, arg0));
10837
10838 /* bool_var != 1 becomes !bool_var. */
10839 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10840 && code == NE_EXPR)
10841 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10842
10843 /* bool_var == 0 becomes !bool_var. */
10844 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10845 && code == EQ_EXPR)
10846 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10847
10848 /* If this is an equality comparison of the address of a non-weak
10849 object against zero, then we know the result. */
10850 if (TREE_CODE (arg0) == ADDR_EXPR
10851 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10852 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10853 && integer_zerop (arg1))
10854 return constant_boolean_node (code != EQ_EXPR, type);
10855
10856 /* If this is an equality comparison of the address of two non-weak,
10857 unaliased symbols neither of which are extern (since we do not
10858 have access to attributes for externs), then we know the result. */
10859 if (TREE_CODE (arg0) == ADDR_EXPR
10860 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10861 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10862 && ! lookup_attribute ("alias",
10863 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10864 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10865 && TREE_CODE (arg1) == ADDR_EXPR
10866 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10867 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10868 && ! lookup_attribute ("alias",
10869 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10870 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10871 {
10872 /* We know that we're looking at the address of two
10873 non-weak, unaliased, static _DECL nodes.
10874
10875 It is both wasteful and incorrect to call operand_equal_p
10876 to compare the two ADDR_EXPR nodes. It is wasteful in that
10877 all we need to do is test pointer equality for the arguments
10878 to the two ADDR_EXPR nodes. It is incorrect to use
10879 operand_equal_p as that function is NOT equivalent to a
10880 C equality test. It can in fact return false for two
10881 objects which would test as equal using the C equality
10882 operator. */
10883 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10884 return constant_boolean_node (equal
10885 ? code == EQ_EXPR : code != EQ_EXPR,
10886 type);
10887 }
10888
10889 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10890 a MINUS_EXPR of a constant, we can convert it into a comparison with
10891 a revised constant as long as no overflow occurs. */
10892 if (TREE_CODE (arg1) == INTEGER_CST
10893 && (TREE_CODE (arg0) == PLUS_EXPR
10894 || TREE_CODE (arg0) == MINUS_EXPR)
10895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10896 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10897 ? MINUS_EXPR : PLUS_EXPR,
10898 fold_convert (TREE_TYPE (arg0), arg1),
10899 TREE_OPERAND (arg0, 1), 0))
10900 && !TREE_OVERFLOW (tem))
10901 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10902
10903 /* Similarly for a NEGATE_EXPR. */
10904 if (TREE_CODE (arg0) == NEGATE_EXPR
10905 && TREE_CODE (arg1) == INTEGER_CST
10906 && 0 != (tem = negate_expr (arg1))
10907 && TREE_CODE (tem) == INTEGER_CST
10908 && !TREE_OVERFLOW (tem))
10909 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10910
10911 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10912 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10913 && TREE_CODE (arg1) == INTEGER_CST
10914 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10915 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10916 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10917 fold_convert (TREE_TYPE (arg0), arg1),
10918 TREE_OPERAND (arg0, 1)));
10919
10920 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10921 for !=. Don't do this for ordered comparisons due to overflow. */
10922 if (TREE_CODE (arg0) == MINUS_EXPR
10923 && integer_zerop (arg1))
10924 return fold_build2 (code, type,
10925 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10926
10927 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10928 if (TREE_CODE (arg0) == ABS_EXPR
10929 && (integer_zerop (arg1) || real_zerop (arg1)))
10930 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10931
10932 /* If this is an EQ or NE comparison with zero and ARG0 is
10933 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10934 two operations, but the latter can be done in one less insn
10935 on machines that have only two-operand insns or on which a
10936 constant cannot be the first operand. */
10937 if (TREE_CODE (arg0) == BIT_AND_EXPR
10938 && integer_zerop (arg1))
10939 {
10940 tree arg00 = TREE_OPERAND (arg0, 0);
10941 tree arg01 = TREE_OPERAND (arg0, 1);
10942 if (TREE_CODE (arg00) == LSHIFT_EXPR
10943 && integer_onep (TREE_OPERAND (arg00, 0)))
10944 return
10945 fold_build2 (code, type,
10946 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10947 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10948 arg01, TREE_OPERAND (arg00, 1)),
10949 fold_convert (TREE_TYPE (arg0),
10950 integer_one_node)),
10951 arg1);
10952 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10953 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10954 return
10955 fold_build2 (code, type,
10956 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10957 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10958 arg00, TREE_OPERAND (arg01, 1)),
10959 fold_convert (TREE_TYPE (arg0),
10960 integer_one_node)),
10961 arg1);
10962 }
10963
10964 /* If this is an NE or EQ comparison of zero against the result of a
10965 signed MOD operation whose second operand is a power of 2, make
10966 the MOD operation unsigned since it is simpler and equivalent. */
10967 if (integer_zerop (arg1)
10968 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10969 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10970 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10971 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10972 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10973 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10974 {
10975 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10976 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10977 fold_convert (newtype,
10978 TREE_OPERAND (arg0, 0)),
10979 fold_convert (newtype,
10980 TREE_OPERAND (arg0, 1)));
10981
10982 return fold_build2 (code, type, newmod,
10983 fold_convert (newtype, arg1));
10984 }
10985
10986 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10987 C1 is a valid shift constant, and C2 is a power of two, i.e.
10988 a single bit. */
10989 if (TREE_CODE (arg0) == BIT_AND_EXPR
10990 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10991 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10992 == INTEGER_CST
10993 && integer_pow2p (TREE_OPERAND (arg0, 1))
10994 && integer_zerop (arg1))
10995 {
10996 tree itype = TREE_TYPE (arg0);
10997 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10998 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10999
11000 /* Check for a valid shift count. */
11001 if (TREE_INT_CST_HIGH (arg001) == 0
11002 && TREE_INT_CST_LOW (arg001) < prec)
11003 {
11004 tree arg01 = TREE_OPERAND (arg0, 1);
11005 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11006 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11007 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11008 can be rewritten as (X & (C2 << C1)) != 0. */
11009 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11010 {
11011 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11012 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11013 return fold_build2 (code, type, tem, arg1);
11014 }
11015 /* Otherwise, for signed (arithmetic) shifts,
11016 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11017 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11018 else if (!TYPE_UNSIGNED (itype))
11019 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11020 arg000, build_int_cst (itype, 0));
11021 /* Otherwise, of unsigned (logical) shifts,
11022 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11023 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11024 else
11025 return omit_one_operand (type,
11026 code == EQ_EXPR ? integer_one_node
11027 : integer_zero_node,
11028 arg000);
11029 }
11030 }
11031
11032 /* If this is an NE comparison of zero with an AND of one, remove the
11033 comparison since the AND will give the correct value. */
11034 if (code == NE_EXPR
11035 && integer_zerop (arg1)
11036 && TREE_CODE (arg0) == BIT_AND_EXPR
11037 && integer_onep (TREE_OPERAND (arg0, 1)))
11038 return fold_convert (type, arg0);
11039
11040 /* If we have (A & C) == C where C is a power of 2, convert this into
11041 (A & C) != 0. Similarly for NE_EXPR. */
11042 if (TREE_CODE (arg0) == BIT_AND_EXPR
11043 && integer_pow2p (TREE_OPERAND (arg0, 1))
11044 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11045 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11046 arg0, fold_convert (TREE_TYPE (arg0),
11047 integer_zero_node));
11048
11049 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11050 bit, then fold the expression into A < 0 or A >= 0. */
11051 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11052 if (tem)
11053 return tem;
11054
11055 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11056 Similarly for NE_EXPR. */
11057 if (TREE_CODE (arg0) == BIT_AND_EXPR
11058 && TREE_CODE (arg1) == INTEGER_CST
11059 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11060 {
11061 tree notc = fold_build1 (BIT_NOT_EXPR,
11062 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11063 TREE_OPERAND (arg0, 1));
11064 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11065 arg1, notc);
11066 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11067 if (integer_nonzerop (dandnotc))
11068 return omit_one_operand (type, rslt, arg0);
11069 }
11070
11071 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11072 Similarly for NE_EXPR. */
11073 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11074 && TREE_CODE (arg1) == INTEGER_CST
11075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11076 {
11077 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11078 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11079 TREE_OPERAND (arg0, 1), notd);
11080 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11081 if (integer_nonzerop (candnotd))
11082 return omit_one_operand (type, rslt, arg0);
11083 }
11084
11085 /* If this is a comparison of a field, we may be able to simplify it. */
11086 if ((TREE_CODE (arg0) == COMPONENT_REF
11087 || TREE_CODE (arg0) == BIT_FIELD_REF)
11088 /* Handle the constant case even without -O
11089 to make sure the warnings are given. */
11090 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11091 {
11092 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11093 if (t1)
11094 return t1;
11095 }
11096
11097 /* Optimize comparisons of strlen vs zero to a compare of the
11098 first character of the string vs zero. To wit,
11099 strlen(ptr) == 0 => *ptr == 0
11100 strlen(ptr) != 0 => *ptr != 0
11101 Other cases should reduce to one of these two (or a constant)
11102 due to the return value of strlen being unsigned. */
11103 if (TREE_CODE (arg0) == CALL_EXPR
11104 && integer_zerop (arg1))
11105 {
11106 tree fndecl = get_callee_fndecl (arg0);
11107 tree arglist;
11108
11109 if (fndecl
11110 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11111 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11112 && (arglist = TREE_OPERAND (arg0, 1))
11113 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
11114 && ! TREE_CHAIN (arglist))
11115 {
11116 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
11117 return fold_build2 (code, type, iref,
11118 build_int_cst (TREE_TYPE (iref), 0));
11119 }
11120 }
11121
11122 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11123 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11124 if (TREE_CODE (arg0) == RSHIFT_EXPR
11125 && integer_zerop (arg1)
11126 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11127 {
11128 tree arg00 = TREE_OPERAND (arg0, 0);
11129 tree arg01 = TREE_OPERAND (arg0, 1);
11130 tree itype = TREE_TYPE (arg00);
11131 if (TREE_INT_CST_HIGH (arg01) == 0
11132 && TREE_INT_CST_LOW (arg01)
11133 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11134 {
11135 if (TYPE_UNSIGNED (itype))
11136 {
11137 itype = lang_hooks.types.signed_type (itype);
11138 arg00 = fold_convert (itype, arg00);
11139 }
11140 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11141 type, arg00, build_int_cst (itype, 0));
11142 }
11143 }
11144
11145 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11146 if (integer_zerop (arg1)
11147 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11148 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11149 TREE_OPERAND (arg0, 1));
11150
11151 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11152 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11153 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11154 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11155 build_int_cst (TREE_TYPE (arg1), 0));
11156 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11157 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11158 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11159 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11160 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11161 build_int_cst (TREE_TYPE (arg1), 0));
11162
11163 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11164 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11165 && TREE_CODE (arg1) == INTEGER_CST
11166 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11167 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11168 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11169 TREE_OPERAND (arg0, 1), arg1));
11170
11171 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11172 (X & C) == 0 when C is a single bit. */
11173 if (TREE_CODE (arg0) == BIT_AND_EXPR
11174 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11175 && integer_zerop (arg1)
11176 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11177 {
11178 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11179 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11180 TREE_OPERAND (arg0, 1));
11181 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11182 type, tem, arg1);
11183 }
11184
11185 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11186 constant C is a power of two, i.e. a single bit. */
11187 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11188 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11189 && integer_zerop (arg1)
11190 && integer_pow2p (TREE_OPERAND (arg0, 1))
11191 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11192 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11193 {
11194 tree arg00 = TREE_OPERAND (arg0, 0);
11195 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11196 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11197 }
11198
11199 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11200 when is C is a power of two, i.e. a single bit. */
11201 if (TREE_CODE (arg0) == BIT_AND_EXPR
11202 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11203 && integer_zerop (arg1)
11204 && integer_pow2p (TREE_OPERAND (arg0, 1))
11205 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11206 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11207 {
11208 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11209 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11210 arg000, TREE_OPERAND (arg0, 1));
11211 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11212 tem, build_int_cst (TREE_TYPE (tem), 0));
11213 }
11214
11215 if (integer_zerop (arg1)
11216 && tree_expr_nonzero_p (arg0))
11217 {
11218 tree res = constant_boolean_node (code==NE_EXPR, type);
11219 return omit_one_operand (type, res, arg0);
11220 }
11221
11222 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11223 if (TREE_CODE (arg0) == NEGATE_EXPR
11224 && TREE_CODE (arg1) == NEGATE_EXPR)
11225 return fold_build2 (code, type,
11226 TREE_OPERAND (arg0, 0),
11227 TREE_OPERAND (arg1, 0));
11228
11229 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11230 if (TREE_CODE (arg0) == BIT_AND_EXPR
11231 && TREE_CODE (arg1) == BIT_AND_EXPR)
11232 {
11233 tree arg00 = TREE_OPERAND (arg0, 0);
11234 tree arg01 = TREE_OPERAND (arg0, 1);
11235 tree arg10 = TREE_OPERAND (arg1, 0);
11236 tree arg11 = TREE_OPERAND (arg1, 1);
11237 tree itype = TREE_TYPE (arg0);
11238
11239 if (operand_equal_p (arg01, arg11, 0))
11240 return fold_build2 (code, type,
11241 fold_build2 (BIT_AND_EXPR, itype,
11242 fold_build2 (BIT_XOR_EXPR, itype,
11243 arg00, arg10),
11244 arg01),
11245 build_int_cst (itype, 0));
11246
11247 if (operand_equal_p (arg01, arg10, 0))
11248 return fold_build2 (code, type,
11249 fold_build2 (BIT_AND_EXPR, itype,
11250 fold_build2 (BIT_XOR_EXPR, itype,
11251 arg00, arg11),
11252 arg01),
11253 build_int_cst (itype, 0));
11254
11255 if (operand_equal_p (arg00, arg11, 0))
11256 return fold_build2 (code, type,
11257 fold_build2 (BIT_AND_EXPR, itype,
11258 fold_build2 (BIT_XOR_EXPR, itype,
11259 arg01, arg10),
11260 arg00),
11261 build_int_cst (itype, 0));
11262
11263 if (operand_equal_p (arg00, arg10, 0))
11264 return fold_build2 (code, type,
11265 fold_build2 (BIT_AND_EXPR, itype,
11266 fold_build2 (BIT_XOR_EXPR, itype,
11267 arg01, arg11),
11268 arg00),
11269 build_int_cst (itype, 0));
11270 }
11271
11272 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11273 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11274 {
11275 tree arg00 = TREE_OPERAND (arg0, 0);
11276 tree arg01 = TREE_OPERAND (arg0, 1);
11277 tree arg10 = TREE_OPERAND (arg1, 0);
11278 tree arg11 = TREE_OPERAND (arg1, 1);
11279 tree itype = TREE_TYPE (arg0);
11280
11281 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11282 operand_equal_p guarantees no side-effects so we don't need
11283 to use omit_one_operand on Z. */
11284 if (operand_equal_p (arg01, arg11, 0))
11285 return fold_build2 (code, type, arg00, arg10);
11286 if (operand_equal_p (arg01, arg10, 0))
11287 return fold_build2 (code, type, arg00, arg11);
11288 if (operand_equal_p (arg00, arg11, 0))
11289 return fold_build2 (code, type, arg01, arg10);
11290 if (operand_equal_p (arg00, arg10, 0))
11291 return fold_build2 (code, type, arg01, arg11);
11292
11293 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11294 if (TREE_CODE (arg01) == INTEGER_CST
11295 && TREE_CODE (arg11) == INTEGER_CST)
11296 return fold_build2 (code, type,
11297 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11298 fold_build2 (BIT_XOR_EXPR, itype,
11299 arg01, arg11)),
11300 arg10);
11301 }
11302 return NULL_TREE;
11303
11304 case LT_EXPR:
11305 case GT_EXPR:
11306 case LE_EXPR:
11307 case GE_EXPR:
11308 tem = fold_comparison (code, type, op0, op1);
11309 if (tem != NULL_TREE)
11310 return tem;
11311
11312 /* Transform comparisons of the form X +- C CMP X. */
11313 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11314 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11315 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11316 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11317 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11318 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11319 {
11320 tree arg01 = TREE_OPERAND (arg0, 1);
11321 enum tree_code code0 = TREE_CODE (arg0);
11322 int is_positive;
11323
11324 if (TREE_CODE (arg01) == REAL_CST)
11325 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11326 else
11327 is_positive = tree_int_cst_sgn (arg01);
11328
11329 /* (X - c) > X becomes false. */
11330 if (code == GT_EXPR
11331 && ((code0 == MINUS_EXPR && is_positive >= 0)
11332 || (code0 == PLUS_EXPR && is_positive <= 0)))
11333 return constant_boolean_node (0, type);
11334
11335 /* Likewise (X + c) < X becomes false. */
11336 if (code == LT_EXPR
11337 && ((code0 == PLUS_EXPR && is_positive >= 0)
11338 || (code0 == MINUS_EXPR && is_positive <= 0)))
11339 return constant_boolean_node (0, type);
11340
11341 /* Convert (X - c) <= X to true. */
11342 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11343 && code == LE_EXPR
11344 && ((code0 == MINUS_EXPR && is_positive >= 0)
11345 || (code0 == PLUS_EXPR && is_positive <= 0)))
11346 return constant_boolean_node (1, type);
11347
11348 /* Convert (X + c) >= X to true. */
11349 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11350 && code == GE_EXPR
11351 && ((code0 == PLUS_EXPR && is_positive >= 0)
11352 || (code0 == MINUS_EXPR && is_positive <= 0)))
11353 return constant_boolean_node (1, type);
11354
11355 if (TREE_CODE (arg01) == INTEGER_CST)
11356 {
11357 /* Convert X + c > X and X - c < X to true for integers. */
11358 if (code == GT_EXPR
11359 && ((code0 == PLUS_EXPR && is_positive > 0)
11360 || (code0 == MINUS_EXPR && is_positive < 0)))
11361 return constant_boolean_node (1, type);
11362
11363 if (code == LT_EXPR
11364 && ((code0 == MINUS_EXPR && is_positive > 0)
11365 || (code0 == PLUS_EXPR && is_positive < 0)))
11366 return constant_boolean_node (1, type);
11367
11368 /* Convert X + c <= X and X - c >= X to false for integers. */
11369 if (code == LE_EXPR
11370 && ((code0 == PLUS_EXPR && is_positive > 0)
11371 || (code0 == MINUS_EXPR && is_positive < 0)))
11372 return constant_boolean_node (0, type);
11373
11374 if (code == GE_EXPR
11375 && ((code0 == MINUS_EXPR && is_positive > 0)
11376 || (code0 == PLUS_EXPR && is_positive < 0)))
11377 return constant_boolean_node (0, type);
11378 }
11379 }
11380
11381 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11382 This transformation affects the cases which are handled in later
11383 optimizations involving comparisons with non-negative constants. */
11384 if (TREE_CODE (arg1) == INTEGER_CST
11385 && TREE_CODE (arg0) != INTEGER_CST
11386 && tree_int_cst_sgn (arg1) > 0)
11387 {
11388 if (code == GE_EXPR)
11389 {
11390 arg1 = const_binop (MINUS_EXPR, arg1,
11391 build_int_cst (TREE_TYPE (arg1), 1), 0);
11392 return fold_build2 (GT_EXPR, type, arg0,
11393 fold_convert (TREE_TYPE (arg0), arg1));
11394 }
11395 if (code == LT_EXPR)
11396 {
11397 arg1 = const_binop (MINUS_EXPR, arg1,
11398 build_int_cst (TREE_TYPE (arg1), 1), 0);
11399 return fold_build2 (LE_EXPR, type, arg0,
11400 fold_convert (TREE_TYPE (arg0), arg1));
11401 }
11402 }
11403
11404 /* Comparisons with the highest or lowest possible integer of
11405 the specified precision will have known values. */
11406 {
11407 tree arg1_type = TREE_TYPE (arg1);
11408 unsigned int width = TYPE_PRECISION (arg1_type);
11409
11410 if (TREE_CODE (arg1) == INTEGER_CST
11411 && !TREE_OVERFLOW (arg1)
11412 && width <= 2 * HOST_BITS_PER_WIDE_INT
11413 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11414 {
11415 HOST_WIDE_INT signed_max_hi;
11416 unsigned HOST_WIDE_INT signed_max_lo;
11417 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11418
11419 if (width <= HOST_BITS_PER_WIDE_INT)
11420 {
11421 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11422 - 1;
11423 signed_max_hi = 0;
11424 max_hi = 0;
11425
11426 if (TYPE_UNSIGNED (arg1_type))
11427 {
11428 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11429 min_lo = 0;
11430 min_hi = 0;
11431 }
11432 else
11433 {
11434 max_lo = signed_max_lo;
11435 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11436 min_hi = -1;
11437 }
11438 }
11439 else
11440 {
11441 width -= HOST_BITS_PER_WIDE_INT;
11442 signed_max_lo = -1;
11443 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11444 - 1;
11445 max_lo = -1;
11446 min_lo = 0;
11447
11448 if (TYPE_UNSIGNED (arg1_type))
11449 {
11450 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11451 min_hi = 0;
11452 }
11453 else
11454 {
11455 max_hi = signed_max_hi;
11456 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11457 }
11458 }
11459
11460 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11461 && TREE_INT_CST_LOW (arg1) == max_lo)
11462 switch (code)
11463 {
11464 case GT_EXPR:
11465 return omit_one_operand (type, integer_zero_node, arg0);
11466
11467 case GE_EXPR:
11468 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11469
11470 case LE_EXPR:
11471 return omit_one_operand (type, integer_one_node, arg0);
11472
11473 case LT_EXPR:
11474 return fold_build2 (NE_EXPR, type, arg0, arg1);
11475
11476 /* The GE_EXPR and LT_EXPR cases above are not normally
11477 reached because of previous transformations. */
11478
11479 default:
11480 break;
11481 }
11482 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11483 == max_hi
11484 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11485 switch (code)
11486 {
11487 case GT_EXPR:
11488 arg1 = const_binop (PLUS_EXPR, arg1,
11489 build_int_cst (TREE_TYPE (arg1), 1), 0);
11490 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11491 case LE_EXPR:
11492 arg1 = const_binop (PLUS_EXPR, arg1,
11493 build_int_cst (TREE_TYPE (arg1), 1), 0);
11494 return fold_build2 (NE_EXPR, type, arg0, arg1);
11495 default:
11496 break;
11497 }
11498 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11499 == min_hi
11500 && TREE_INT_CST_LOW (arg1) == min_lo)
11501 switch (code)
11502 {
11503 case LT_EXPR:
11504 return omit_one_operand (type, integer_zero_node, arg0);
11505
11506 case LE_EXPR:
11507 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11508
11509 case GE_EXPR:
11510 return omit_one_operand (type, integer_one_node, arg0);
11511
11512 case GT_EXPR:
11513 return fold_build2 (NE_EXPR, type, op0, op1);
11514
11515 default:
11516 break;
11517 }
11518 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11519 == min_hi
11520 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11521 switch (code)
11522 {
11523 case GE_EXPR:
11524 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11525 return fold_build2 (NE_EXPR, type, arg0, arg1);
11526 case LT_EXPR:
11527 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11528 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11529 default:
11530 break;
11531 }
11532
11533 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11534 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11535 && TYPE_UNSIGNED (arg1_type)
11536 /* We will flip the signedness of the comparison operator
11537 associated with the mode of arg1, so the sign bit is
11538 specified by this mode. Check that arg1 is the signed
11539 max associated with this sign bit. */
11540 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11541 /* signed_type does not work on pointer types. */
11542 && INTEGRAL_TYPE_P (arg1_type))
11543 {
11544 /* The following case also applies to X < signed_max+1
11545 and X >= signed_max+1 because previous transformations. */
11546 if (code == LE_EXPR || code == GT_EXPR)
11547 {
11548 tree st0, st1;
11549 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11550 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11551 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11552 type, fold_convert (st0, arg0),
11553 build_int_cst (st1, 0));
11554 }
11555 }
11556 }
11557 }
11558
11559 /* If we are comparing an ABS_EXPR with a constant, we can
11560 convert all the cases into explicit comparisons, but they may
11561 well not be faster than doing the ABS and one comparison.
11562 But ABS (X) <= C is a range comparison, which becomes a subtraction
11563 and a comparison, and is probably faster. */
11564 if (code == LE_EXPR
11565 && TREE_CODE (arg1) == INTEGER_CST
11566 && TREE_CODE (arg0) == ABS_EXPR
11567 && ! TREE_SIDE_EFFECTS (arg0)
11568 && (0 != (tem = negate_expr (arg1)))
11569 && TREE_CODE (tem) == INTEGER_CST
11570 && !TREE_OVERFLOW (tem))
11571 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11572 build2 (GE_EXPR, type,
11573 TREE_OPERAND (arg0, 0), tem),
11574 build2 (LE_EXPR, type,
11575 TREE_OPERAND (arg0, 0), arg1));
11576
11577 /* Convert ABS_EXPR<x> >= 0 to true. */
11578 if (code == GE_EXPR
11579 && tree_expr_nonnegative_p (arg0)
11580 && (integer_zerop (arg1)
11581 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11582 && real_zerop (arg1))))
11583 return omit_one_operand (type, integer_one_node, arg0);
11584
11585 /* Convert ABS_EXPR<x> < 0 to false. */
11586 if (code == LT_EXPR
11587 && tree_expr_nonnegative_p (arg0)
11588 && (integer_zerop (arg1) || real_zerop (arg1)))
11589 return omit_one_operand (type, integer_zero_node, arg0);
11590
11591 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11592 and similarly for >= into !=. */
11593 if ((code == LT_EXPR || code == GE_EXPR)
11594 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11595 && TREE_CODE (arg1) == LSHIFT_EXPR
11596 && integer_onep (TREE_OPERAND (arg1, 0)))
11597 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11598 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11599 TREE_OPERAND (arg1, 1)),
11600 build_int_cst (TREE_TYPE (arg0), 0));
11601
11602 if ((code == LT_EXPR || code == GE_EXPR)
11603 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11604 && (TREE_CODE (arg1) == NOP_EXPR
11605 || TREE_CODE (arg1) == CONVERT_EXPR)
11606 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11607 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11608 return
11609 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11610 fold_convert (TREE_TYPE (arg0),
11611 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11612 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11613 1))),
11614 build_int_cst (TREE_TYPE (arg0), 0));
11615
11616 return NULL_TREE;
11617
11618 case UNORDERED_EXPR:
11619 case ORDERED_EXPR:
11620 case UNLT_EXPR:
11621 case UNLE_EXPR:
11622 case UNGT_EXPR:
11623 case UNGE_EXPR:
11624 case UNEQ_EXPR:
11625 case LTGT_EXPR:
11626 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11627 {
11628 t1 = fold_relational_const (code, type, arg0, arg1);
11629 if (t1 != NULL_TREE)
11630 return t1;
11631 }
11632
11633 /* If the first operand is NaN, the result is constant. */
11634 if (TREE_CODE (arg0) == REAL_CST
11635 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11636 && (code != LTGT_EXPR || ! flag_trapping_math))
11637 {
11638 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11639 ? integer_zero_node
11640 : integer_one_node;
11641 return omit_one_operand (type, t1, arg1);
11642 }
11643
11644 /* If the second operand is NaN, the result is constant. */
11645 if (TREE_CODE (arg1) == REAL_CST
11646 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11647 && (code != LTGT_EXPR || ! flag_trapping_math))
11648 {
11649 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11650 ? integer_zero_node
11651 : integer_one_node;
11652 return omit_one_operand (type, t1, arg0);
11653 }
11654
11655 /* Simplify unordered comparison of something with itself. */
11656 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11657 && operand_equal_p (arg0, arg1, 0))
11658 return constant_boolean_node (1, type);
11659
11660 if (code == LTGT_EXPR
11661 && !flag_trapping_math
11662 && operand_equal_p (arg0, arg1, 0))
11663 return constant_boolean_node (0, type);
11664
11665 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11666 {
11667 tree targ0 = strip_float_extensions (arg0);
11668 tree targ1 = strip_float_extensions (arg1);
11669 tree newtype = TREE_TYPE (targ0);
11670
11671 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11672 newtype = TREE_TYPE (targ1);
11673
11674 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11675 return fold_build2 (code, type, fold_convert (newtype, targ0),
11676 fold_convert (newtype, targ1));
11677 }
11678
11679 return NULL_TREE;
11680
11681 case COMPOUND_EXPR:
11682 /* When pedantic, a compound expression can be neither an lvalue
11683 nor an integer constant expression. */
11684 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11685 return NULL_TREE;
11686 /* Don't let (0, 0) be null pointer constant. */
11687 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11688 : fold_convert (type, arg1);
11689 return pedantic_non_lvalue (tem);
11690
11691 case COMPLEX_EXPR:
11692 if ((TREE_CODE (arg0) == REAL_CST
11693 && TREE_CODE (arg1) == REAL_CST)
11694 || (TREE_CODE (arg0) == INTEGER_CST
11695 && TREE_CODE (arg1) == INTEGER_CST))
11696 return build_complex (type, arg0, arg1);
11697 return NULL_TREE;
11698
11699 case ASSERT_EXPR:
11700 /* An ASSERT_EXPR should never be passed to fold_binary. */
11701 gcc_unreachable ();
11702
11703 default:
11704 return NULL_TREE;
11705 } /* switch (code) */
11706 }
11707
11708 /* Callback for walk_tree, looking for LABEL_EXPR.
11709 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11710 Do not check the sub-tree of GOTO_EXPR. */
11711
11712 static tree
11713 contains_label_1 (tree *tp,
11714 int *walk_subtrees,
11715 void *data ATTRIBUTE_UNUSED)
11716 {
11717 switch (TREE_CODE (*tp))
11718 {
11719 case LABEL_EXPR:
11720 return *tp;
11721 case GOTO_EXPR:
11722 *walk_subtrees = 0;
11723 /* no break */
11724 default:
11725 return NULL_TREE;
11726 }
11727 }
11728
11729 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11730 accessible from outside the sub-tree. Returns NULL_TREE if no
11731 addressable label is found. */
11732
11733 static bool
11734 contains_label_p (tree st)
11735 {
11736 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11737 }
11738
11739 /* Fold a ternary expression of code CODE and type TYPE with operands
11740 OP0, OP1, and OP2. Return the folded expression if folding is
11741 successful. Otherwise, return NULL_TREE. */
11742
11743 tree
11744 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11745 {
11746 tree tem;
11747 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11748 enum tree_code_class kind = TREE_CODE_CLASS (code);
11749
11750 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11751 && TREE_CODE_LENGTH (code) == 3);
11752
11753 /* Strip any conversions that don't change the mode. This is safe
11754 for every expression, except for a comparison expression because
11755 its signedness is derived from its operands. So, in the latter
11756 case, only strip conversions that don't change the signedness.
11757
11758 Note that this is done as an internal manipulation within the
11759 constant folder, in order to find the simplest representation of
11760 the arguments so that their form can be studied. In any cases,
11761 the appropriate type conversions should be put back in the tree
11762 that will get out of the constant folder. */
11763 if (op0)
11764 {
11765 arg0 = op0;
11766 STRIP_NOPS (arg0);
11767 }
11768
11769 if (op1)
11770 {
11771 arg1 = op1;
11772 STRIP_NOPS (arg1);
11773 }
11774
11775 switch (code)
11776 {
11777 case COMPONENT_REF:
11778 if (TREE_CODE (arg0) == CONSTRUCTOR
11779 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11780 {
11781 unsigned HOST_WIDE_INT idx;
11782 tree field, value;
11783 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11784 if (field == arg1)
11785 return value;
11786 }
11787 return NULL_TREE;
11788
11789 case COND_EXPR:
11790 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11791 so all simple results must be passed through pedantic_non_lvalue. */
11792 if (TREE_CODE (arg0) == INTEGER_CST)
11793 {
11794 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11795 tem = integer_zerop (arg0) ? op2 : op1;
11796 /* Only optimize constant conditions when the selected branch
11797 has the same type as the COND_EXPR. This avoids optimizing
11798 away "c ? x : throw", where the throw has a void type.
11799 Avoid throwing away that operand which contains label. */
11800 if ((!TREE_SIDE_EFFECTS (unused_op)
11801 || !contains_label_p (unused_op))
11802 && (! VOID_TYPE_P (TREE_TYPE (tem))
11803 || VOID_TYPE_P (type)))
11804 return pedantic_non_lvalue (tem);
11805 return NULL_TREE;
11806 }
11807 if (operand_equal_p (arg1, op2, 0))
11808 return pedantic_omit_one_operand (type, arg1, arg0);
11809
11810 /* If we have A op B ? A : C, we may be able to convert this to a
11811 simpler expression, depending on the operation and the values
11812 of B and C. Signed zeros prevent all of these transformations,
11813 for reasons given above each one.
11814
11815 Also try swapping the arguments and inverting the conditional. */
11816 if (COMPARISON_CLASS_P (arg0)
11817 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11818 arg1, TREE_OPERAND (arg0, 1))
11819 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11820 {
11821 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11822 if (tem)
11823 return tem;
11824 }
11825
11826 if (COMPARISON_CLASS_P (arg0)
11827 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11828 op2,
11829 TREE_OPERAND (arg0, 1))
11830 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11831 {
11832 tem = fold_truth_not_expr (arg0);
11833 if (tem && COMPARISON_CLASS_P (tem))
11834 {
11835 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11836 if (tem)
11837 return tem;
11838 }
11839 }
11840
11841 /* If the second operand is simpler than the third, swap them
11842 since that produces better jump optimization results. */
11843 if (truth_value_p (TREE_CODE (arg0))
11844 && tree_swap_operands_p (op1, op2, false))
11845 {
11846 /* See if this can be inverted. If it can't, possibly because
11847 it was a floating-point inequality comparison, don't do
11848 anything. */
11849 tem = fold_truth_not_expr (arg0);
11850 if (tem)
11851 return fold_build3 (code, type, tem, op2, op1);
11852 }
11853
11854 /* Convert A ? 1 : 0 to simply A. */
11855 if (integer_onep (op1)
11856 && integer_zerop (op2)
11857 /* If we try to convert OP0 to our type, the
11858 call to fold will try to move the conversion inside
11859 a COND, which will recurse. In that case, the COND_EXPR
11860 is probably the best choice, so leave it alone. */
11861 && type == TREE_TYPE (arg0))
11862 return pedantic_non_lvalue (arg0);
11863
11864 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11865 over COND_EXPR in cases such as floating point comparisons. */
11866 if (integer_zerop (op1)
11867 && integer_onep (op2)
11868 && truth_value_p (TREE_CODE (arg0)))
11869 return pedantic_non_lvalue (fold_convert (type,
11870 invert_truthvalue (arg0)));
11871
11872 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11873 if (TREE_CODE (arg0) == LT_EXPR
11874 && integer_zerop (TREE_OPERAND (arg0, 1))
11875 && integer_zerop (op2)
11876 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11877 {
11878 /* sign_bit_p only checks ARG1 bits within A's precision.
11879 If <sign bit of A> has wider type than A, bits outside
11880 of A's precision in <sign bit of A> need to be checked.
11881 If they are all 0, this optimization needs to be done
11882 in unsigned A's type, if they are all 1 in signed A's type,
11883 otherwise this can't be done. */
11884 if (TYPE_PRECISION (TREE_TYPE (tem))
11885 < TYPE_PRECISION (TREE_TYPE (arg1))
11886 && TYPE_PRECISION (TREE_TYPE (tem))
11887 < TYPE_PRECISION (type))
11888 {
11889 unsigned HOST_WIDE_INT mask_lo;
11890 HOST_WIDE_INT mask_hi;
11891 int inner_width, outer_width;
11892 tree tem_type;
11893
11894 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11895 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11896 if (outer_width > TYPE_PRECISION (type))
11897 outer_width = TYPE_PRECISION (type);
11898
11899 if (outer_width > HOST_BITS_PER_WIDE_INT)
11900 {
11901 mask_hi = ((unsigned HOST_WIDE_INT) -1
11902 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11903 mask_lo = -1;
11904 }
11905 else
11906 {
11907 mask_hi = 0;
11908 mask_lo = ((unsigned HOST_WIDE_INT) -1
11909 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11910 }
11911 if (inner_width > HOST_BITS_PER_WIDE_INT)
11912 {
11913 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11914 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11915 mask_lo = 0;
11916 }
11917 else
11918 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11919 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11920
11921 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11922 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11923 {
11924 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11925 tem = fold_convert (tem_type, tem);
11926 }
11927 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11928 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11929 {
11930 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11931 tem = fold_convert (tem_type, tem);
11932 }
11933 else
11934 tem = NULL;
11935 }
11936
11937 if (tem)
11938 return fold_convert (type,
11939 fold_build2 (BIT_AND_EXPR,
11940 TREE_TYPE (tem), tem,
11941 fold_convert (TREE_TYPE (tem),
11942 arg1)));
11943 }
11944
11945 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11946 already handled above. */
11947 if (TREE_CODE (arg0) == BIT_AND_EXPR
11948 && integer_onep (TREE_OPERAND (arg0, 1))
11949 && integer_zerop (op2)
11950 && integer_pow2p (arg1))
11951 {
11952 tree tem = TREE_OPERAND (arg0, 0);
11953 STRIP_NOPS (tem);
11954 if (TREE_CODE (tem) == RSHIFT_EXPR
11955 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11956 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11957 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11958 return fold_build2 (BIT_AND_EXPR, type,
11959 TREE_OPERAND (tem, 0), arg1);
11960 }
11961
11962 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11963 is probably obsolete because the first operand should be a
11964 truth value (that's why we have the two cases above), but let's
11965 leave it in until we can confirm this for all front-ends. */
11966 if (integer_zerop (op2)
11967 && TREE_CODE (arg0) == NE_EXPR
11968 && integer_zerop (TREE_OPERAND (arg0, 1))
11969 && integer_pow2p (arg1)
11970 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11971 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11972 arg1, OEP_ONLY_CONST))
11973 return pedantic_non_lvalue (fold_convert (type,
11974 TREE_OPERAND (arg0, 0)));
11975
11976 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11977 if (integer_zerop (op2)
11978 && truth_value_p (TREE_CODE (arg0))
11979 && truth_value_p (TREE_CODE (arg1)))
11980 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11981 fold_convert (type, arg0),
11982 arg1);
11983
11984 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11985 if (integer_onep (op2)
11986 && truth_value_p (TREE_CODE (arg0))
11987 && truth_value_p (TREE_CODE (arg1)))
11988 {
11989 /* Only perform transformation if ARG0 is easily inverted. */
11990 tem = fold_truth_not_expr (arg0);
11991 if (tem)
11992 return fold_build2 (TRUTH_ORIF_EXPR, type,
11993 fold_convert (type, tem),
11994 arg1);
11995 }
11996
11997 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11998 if (integer_zerop (arg1)
11999 && truth_value_p (TREE_CODE (arg0))
12000 && truth_value_p (TREE_CODE (op2)))
12001 {
12002 /* Only perform transformation if ARG0 is easily inverted. */
12003 tem = fold_truth_not_expr (arg0);
12004 if (tem)
12005 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12006 fold_convert (type, tem),
12007 op2);
12008 }
12009
12010 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12011 if (integer_onep (arg1)
12012 && truth_value_p (TREE_CODE (arg0))
12013 && truth_value_p (TREE_CODE (op2)))
12014 return fold_build2 (TRUTH_ORIF_EXPR, type,
12015 fold_convert (type, arg0),
12016 op2);
12017
12018 return NULL_TREE;
12019
12020 case CALL_EXPR:
12021 /* Check for a built-in function. */
12022 if (TREE_CODE (op0) == ADDR_EXPR
12023 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
12024 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
12025 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
12026 return NULL_TREE;
12027
12028 case BIT_FIELD_REF:
12029 if (TREE_CODE (arg0) == VECTOR_CST
12030 && type == TREE_TYPE (TREE_TYPE (arg0))
12031 && host_integerp (arg1, 1)
12032 && host_integerp (op2, 1))
12033 {
12034 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12035 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12036
12037 if (width != 0
12038 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12039 && (idx % width) == 0
12040 && (idx = idx / width)
12041 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12042 {
12043 tree elements = TREE_VECTOR_CST_ELTS (arg0);
12044 while (idx-- > 0 && elements)
12045 elements = TREE_CHAIN (elements);
12046 if (elements)
12047 return TREE_VALUE (elements);
12048 else
12049 return fold_convert (type, integer_zero_node);
12050 }
12051 }
12052 return NULL_TREE;
12053
12054 default:
12055 return NULL_TREE;
12056 } /* switch (code) */
12057 }
12058
12059 /* Perform constant folding and related simplification of EXPR.
12060 The related simplifications include x*1 => x, x*0 => 0, etc.,
12061 and application of the associative law.
12062 NOP_EXPR conversions may be removed freely (as long as we
12063 are careful not to change the type of the overall expression).
12064 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12065 but we can constant-fold them if they have constant operands. */
12066
12067 #ifdef ENABLE_FOLD_CHECKING
12068 # define fold(x) fold_1 (x)
12069 static tree fold_1 (tree);
12070 static
12071 #endif
12072 tree
12073 fold (tree expr)
12074 {
12075 const tree t = expr;
12076 enum tree_code code = TREE_CODE (t);
12077 enum tree_code_class kind = TREE_CODE_CLASS (code);
12078 tree tem;
12079
12080 /* Return right away if a constant. */
12081 if (kind == tcc_constant)
12082 return t;
12083
12084 if (IS_EXPR_CODE_CLASS (kind)
12085 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12086 {
12087 tree type = TREE_TYPE (t);
12088 tree op0, op1, op2;
12089
12090 switch (TREE_CODE_LENGTH (code))
12091 {
12092 case 1:
12093 op0 = TREE_OPERAND (t, 0);
12094 tem = fold_unary (code, type, op0);
12095 return tem ? tem : expr;
12096 case 2:
12097 op0 = TREE_OPERAND (t, 0);
12098 op1 = TREE_OPERAND (t, 1);
12099 tem = fold_binary (code, type, op0, op1);
12100 return tem ? tem : expr;
12101 case 3:
12102 op0 = TREE_OPERAND (t, 0);
12103 op1 = TREE_OPERAND (t, 1);
12104 op2 = TREE_OPERAND (t, 2);
12105 tem = fold_ternary (code, type, op0, op1, op2);
12106 return tem ? tem : expr;
12107 default:
12108 break;
12109 }
12110 }
12111
12112 switch (code)
12113 {
12114 case CONST_DECL:
12115 return fold (DECL_INITIAL (t));
12116
12117 default:
12118 return t;
12119 } /* switch (code) */
12120 }
12121
12122 #ifdef ENABLE_FOLD_CHECKING
12123 #undef fold
12124
12125 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12126 static void fold_check_failed (tree, tree);
12127 void print_fold_checksum (tree);
12128
12129 /* When --enable-checking=fold, compute a digest of expr before
12130 and after actual fold call to see if fold did not accidentally
12131 change original expr. */
12132
12133 tree
12134 fold (tree expr)
12135 {
12136 tree ret;
12137 struct md5_ctx ctx;
12138 unsigned char checksum_before[16], checksum_after[16];
12139 htab_t ht;
12140
12141 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12142 md5_init_ctx (&ctx);
12143 fold_checksum_tree (expr, &ctx, ht);
12144 md5_finish_ctx (&ctx, checksum_before);
12145 htab_empty (ht);
12146
12147 ret = fold_1 (expr);
12148
12149 md5_init_ctx (&ctx);
12150 fold_checksum_tree (expr, &ctx, ht);
12151 md5_finish_ctx (&ctx, checksum_after);
12152 htab_delete (ht);
12153
12154 if (memcmp (checksum_before, checksum_after, 16))
12155 fold_check_failed (expr, ret);
12156
12157 return ret;
12158 }
12159
12160 void
12161 print_fold_checksum (tree expr)
12162 {
12163 struct md5_ctx ctx;
12164 unsigned char checksum[16], cnt;
12165 htab_t ht;
12166
12167 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12168 md5_init_ctx (&ctx);
12169 fold_checksum_tree (expr, &ctx, ht);
12170 md5_finish_ctx (&ctx, checksum);
12171 htab_delete (ht);
12172 for (cnt = 0; cnt < 16; ++cnt)
12173 fprintf (stderr, "%02x", checksum[cnt]);
12174 putc ('\n', stderr);
12175 }
12176
12177 static void
12178 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12179 {
12180 internal_error ("fold check: original tree changed by fold");
12181 }
12182
12183 static void
12184 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12185 {
12186 void **slot;
12187 enum tree_code code;
12188 struct tree_function_decl buf;
12189 int i, len;
12190
12191 recursive_label:
12192
12193 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12194 <= sizeof (struct tree_function_decl))
12195 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12196 if (expr == NULL)
12197 return;
12198 slot = htab_find_slot (ht, expr, INSERT);
12199 if (*slot != NULL)
12200 return;
12201 *slot = expr;
12202 code = TREE_CODE (expr);
12203 if (TREE_CODE_CLASS (code) == tcc_declaration
12204 && DECL_ASSEMBLER_NAME_SET_P (expr))
12205 {
12206 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12207 memcpy ((char *) &buf, expr, tree_size (expr));
12208 expr = (tree) &buf;
12209 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12210 }
12211 else if (TREE_CODE_CLASS (code) == tcc_type
12212 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12213 || TYPE_CACHED_VALUES_P (expr)
12214 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12215 {
12216 /* Allow these fields to be modified. */
12217 memcpy ((char *) &buf, expr, tree_size (expr));
12218 expr = (tree) &buf;
12219 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12220 TYPE_POINTER_TO (expr) = NULL;
12221 TYPE_REFERENCE_TO (expr) = NULL;
12222 if (TYPE_CACHED_VALUES_P (expr))
12223 {
12224 TYPE_CACHED_VALUES_P (expr) = 0;
12225 TYPE_CACHED_VALUES (expr) = NULL;
12226 }
12227 }
12228 md5_process_bytes (expr, tree_size (expr), ctx);
12229 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12230 if (TREE_CODE_CLASS (code) != tcc_type
12231 && TREE_CODE_CLASS (code) != tcc_declaration
12232 && code != TREE_LIST)
12233 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12234 switch (TREE_CODE_CLASS (code))
12235 {
12236 case tcc_constant:
12237 switch (code)
12238 {
12239 case STRING_CST:
12240 md5_process_bytes (TREE_STRING_POINTER (expr),
12241 TREE_STRING_LENGTH (expr), ctx);
12242 break;
12243 case COMPLEX_CST:
12244 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12245 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12246 break;
12247 case VECTOR_CST:
12248 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12249 break;
12250 default:
12251 break;
12252 }
12253 break;
12254 case tcc_exceptional:
12255 switch (code)
12256 {
12257 case TREE_LIST:
12258 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12259 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12260 expr = TREE_CHAIN (expr);
12261 goto recursive_label;
12262 break;
12263 case TREE_VEC:
12264 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12265 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12266 break;
12267 default:
12268 break;
12269 }
12270 break;
12271 case tcc_expression:
12272 case tcc_reference:
12273 case tcc_comparison:
12274 case tcc_unary:
12275 case tcc_binary:
12276 case tcc_statement:
12277 len = TREE_CODE_LENGTH (code);
12278 for (i = 0; i < len; ++i)
12279 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12280 break;
12281 case tcc_declaration:
12282 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12283 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12284 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12285 {
12286 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12287 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12288 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12289 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12290 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12291 }
12292 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12293 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12294
12295 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12296 {
12297 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12298 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12299 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12300 }
12301 break;
12302 case tcc_type:
12303 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12304 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12305 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12306 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12307 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12308 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12309 if (INTEGRAL_TYPE_P (expr)
12310 || SCALAR_FLOAT_TYPE_P (expr))
12311 {
12312 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12313 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12314 }
12315 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12316 if (TREE_CODE (expr) == RECORD_TYPE
12317 || TREE_CODE (expr) == UNION_TYPE
12318 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12319 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12320 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12321 break;
12322 default:
12323 break;
12324 }
12325 }
12326
12327 #endif
12328
12329 /* Fold a unary tree expression with code CODE of type TYPE with an
12330 operand OP0. Return a folded expression if successful. Otherwise,
12331 return a tree expression with code CODE of type TYPE with an
12332 operand OP0. */
12333
12334 tree
12335 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12336 {
12337 tree tem;
12338 #ifdef ENABLE_FOLD_CHECKING
12339 unsigned char checksum_before[16], checksum_after[16];
12340 struct md5_ctx ctx;
12341 htab_t ht;
12342
12343 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12344 md5_init_ctx (&ctx);
12345 fold_checksum_tree (op0, &ctx, ht);
12346 md5_finish_ctx (&ctx, checksum_before);
12347 htab_empty (ht);
12348 #endif
12349
12350 tem = fold_unary (code, type, op0);
12351 if (!tem)
12352 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12353
12354 #ifdef ENABLE_FOLD_CHECKING
12355 md5_init_ctx (&ctx);
12356 fold_checksum_tree (op0, &ctx, ht);
12357 md5_finish_ctx (&ctx, checksum_after);
12358 htab_delete (ht);
12359
12360 if (memcmp (checksum_before, checksum_after, 16))
12361 fold_check_failed (op0, tem);
12362 #endif
12363 return tem;
12364 }
12365
12366 /* Fold a binary tree expression with code CODE of type TYPE with
12367 operands OP0 and OP1. Return a folded expression if successful.
12368 Otherwise, return a tree expression with code CODE of type TYPE
12369 with operands OP0 and OP1. */
12370
12371 tree
12372 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12373 MEM_STAT_DECL)
12374 {
12375 tree tem;
12376 #ifdef ENABLE_FOLD_CHECKING
12377 unsigned char checksum_before_op0[16],
12378 checksum_before_op1[16],
12379 checksum_after_op0[16],
12380 checksum_after_op1[16];
12381 struct md5_ctx ctx;
12382 htab_t ht;
12383
12384 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12385 md5_init_ctx (&ctx);
12386 fold_checksum_tree (op0, &ctx, ht);
12387 md5_finish_ctx (&ctx, checksum_before_op0);
12388 htab_empty (ht);
12389
12390 md5_init_ctx (&ctx);
12391 fold_checksum_tree (op1, &ctx, ht);
12392 md5_finish_ctx (&ctx, checksum_before_op1);
12393 htab_empty (ht);
12394 #endif
12395
12396 tem = fold_binary (code, type, op0, op1);
12397 if (!tem)
12398 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12399
12400 #ifdef ENABLE_FOLD_CHECKING
12401 md5_init_ctx (&ctx);
12402 fold_checksum_tree (op0, &ctx, ht);
12403 md5_finish_ctx (&ctx, checksum_after_op0);
12404 htab_empty (ht);
12405
12406 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12407 fold_check_failed (op0, tem);
12408
12409 md5_init_ctx (&ctx);
12410 fold_checksum_tree (op1, &ctx, ht);
12411 md5_finish_ctx (&ctx, checksum_after_op1);
12412 htab_delete (ht);
12413
12414 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12415 fold_check_failed (op1, tem);
12416 #endif
12417 return tem;
12418 }
12419
12420 /* Fold a ternary tree expression with code CODE of type TYPE with
12421 operands OP0, OP1, and OP2. Return a folded expression if
12422 successful. Otherwise, return a tree expression with code CODE of
12423 type TYPE with operands OP0, OP1, and OP2. */
12424
12425 tree
12426 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12427 MEM_STAT_DECL)
12428 {
12429 tree tem;
12430 #ifdef ENABLE_FOLD_CHECKING
12431 unsigned char checksum_before_op0[16],
12432 checksum_before_op1[16],
12433 checksum_before_op2[16],
12434 checksum_after_op0[16],
12435 checksum_after_op1[16],
12436 checksum_after_op2[16];
12437 struct md5_ctx ctx;
12438 htab_t ht;
12439
12440 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12441 md5_init_ctx (&ctx);
12442 fold_checksum_tree (op0, &ctx, ht);
12443 md5_finish_ctx (&ctx, checksum_before_op0);
12444 htab_empty (ht);
12445
12446 md5_init_ctx (&ctx);
12447 fold_checksum_tree (op1, &ctx, ht);
12448 md5_finish_ctx (&ctx, checksum_before_op1);
12449 htab_empty (ht);
12450
12451 md5_init_ctx (&ctx);
12452 fold_checksum_tree (op2, &ctx, ht);
12453 md5_finish_ctx (&ctx, checksum_before_op2);
12454 htab_empty (ht);
12455 #endif
12456
12457 tem = fold_ternary (code, type, op0, op1, op2);
12458 if (!tem)
12459 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12460
12461 #ifdef ENABLE_FOLD_CHECKING
12462 md5_init_ctx (&ctx);
12463 fold_checksum_tree (op0, &ctx, ht);
12464 md5_finish_ctx (&ctx, checksum_after_op0);
12465 htab_empty (ht);
12466
12467 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12468 fold_check_failed (op0, tem);
12469
12470 md5_init_ctx (&ctx);
12471 fold_checksum_tree (op1, &ctx, ht);
12472 md5_finish_ctx (&ctx, checksum_after_op1);
12473 htab_empty (ht);
12474
12475 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12476 fold_check_failed (op1, tem);
12477
12478 md5_init_ctx (&ctx);
12479 fold_checksum_tree (op2, &ctx, ht);
12480 md5_finish_ctx (&ctx, checksum_after_op2);
12481 htab_delete (ht);
12482
12483 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12484 fold_check_failed (op2, tem);
12485 #endif
12486 return tem;
12487 }
12488
12489 /* Perform constant folding and related simplification of initializer
12490 expression EXPR. These behave identically to "fold_buildN" but ignore
12491 potential run-time traps and exceptions that fold must preserve. */
12492
12493 #define START_FOLD_INIT \
12494 int saved_signaling_nans = flag_signaling_nans;\
12495 int saved_trapping_math = flag_trapping_math;\
12496 int saved_rounding_math = flag_rounding_math;\
12497 int saved_trapv = flag_trapv;\
12498 int saved_folding_initializer = folding_initializer;\
12499 flag_signaling_nans = 0;\
12500 flag_trapping_math = 0;\
12501 flag_rounding_math = 0;\
12502 flag_trapv = 0;\
12503 folding_initializer = 1;
12504
12505 #define END_FOLD_INIT \
12506 flag_signaling_nans = saved_signaling_nans;\
12507 flag_trapping_math = saved_trapping_math;\
12508 flag_rounding_math = saved_rounding_math;\
12509 flag_trapv = saved_trapv;\
12510 folding_initializer = saved_folding_initializer;
12511
12512 tree
12513 fold_build1_initializer (enum tree_code code, tree type, tree op)
12514 {
12515 tree result;
12516 START_FOLD_INIT;
12517
12518 result = fold_build1 (code, type, op);
12519
12520 END_FOLD_INIT;
12521 return result;
12522 }
12523
12524 tree
12525 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12526 {
12527 tree result;
12528 START_FOLD_INIT;
12529
12530 result = fold_build2 (code, type, op0, op1);
12531
12532 END_FOLD_INIT;
12533 return result;
12534 }
12535
12536 tree
12537 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12538 tree op2)
12539 {
12540 tree result;
12541 START_FOLD_INIT;
12542
12543 result = fold_build3 (code, type, op0, op1, op2);
12544
12545 END_FOLD_INIT;
12546 return result;
12547 }
12548
12549 #undef START_FOLD_INIT
12550 #undef END_FOLD_INIT
12551
12552 /* Determine if first argument is a multiple of second argument. Return 0 if
12553 it is not, or we cannot easily determined it to be.
12554
12555 An example of the sort of thing we care about (at this point; this routine
12556 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12557 fold cases do now) is discovering that
12558
12559 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12560
12561 is a multiple of
12562
12563 SAVE_EXPR (J * 8)
12564
12565 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12566
12567 This code also handles discovering that
12568
12569 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12570
12571 is a multiple of 8 so we don't have to worry about dealing with a
12572 possible remainder.
12573
12574 Note that we *look* inside a SAVE_EXPR only to determine how it was
12575 calculated; it is not safe for fold to do much of anything else with the
12576 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12577 at run time. For example, the latter example above *cannot* be implemented
12578 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12579 evaluation time of the original SAVE_EXPR is not necessarily the same at
12580 the time the new expression is evaluated. The only optimization of this
12581 sort that would be valid is changing
12582
12583 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12584
12585 divided by 8 to
12586
12587 SAVE_EXPR (I) * SAVE_EXPR (J)
12588
12589 (where the same SAVE_EXPR (J) is used in the original and the
12590 transformed version). */
12591
12592 static int
12593 multiple_of_p (tree type, tree top, tree bottom)
12594 {
12595 if (operand_equal_p (top, bottom, 0))
12596 return 1;
12597
12598 if (TREE_CODE (type) != INTEGER_TYPE)
12599 return 0;
12600
12601 switch (TREE_CODE (top))
12602 {
12603 case BIT_AND_EXPR:
12604 /* Bitwise and provides a power of two multiple. If the mask is
12605 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12606 if (!integer_pow2p (bottom))
12607 return 0;
12608 /* FALLTHRU */
12609
12610 case MULT_EXPR:
12611 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12612 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12613
12614 case PLUS_EXPR:
12615 case MINUS_EXPR:
12616 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12617 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12618
12619 case LSHIFT_EXPR:
12620 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12621 {
12622 tree op1, t1;
12623
12624 op1 = TREE_OPERAND (top, 1);
12625 /* const_binop may not detect overflow correctly,
12626 so check for it explicitly here. */
12627 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12628 > TREE_INT_CST_LOW (op1)
12629 && TREE_INT_CST_HIGH (op1) == 0
12630 && 0 != (t1 = fold_convert (type,
12631 const_binop (LSHIFT_EXPR,
12632 size_one_node,
12633 op1, 0)))
12634 && !TREE_OVERFLOW (t1))
12635 return multiple_of_p (type, t1, bottom);
12636 }
12637 return 0;
12638
12639 case NOP_EXPR:
12640 /* Can't handle conversions from non-integral or wider integral type. */
12641 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12642 || (TYPE_PRECISION (type)
12643 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12644 return 0;
12645
12646 /* .. fall through ... */
12647
12648 case SAVE_EXPR:
12649 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12650
12651 case INTEGER_CST:
12652 if (TREE_CODE (bottom) != INTEGER_CST
12653 || (TYPE_UNSIGNED (type)
12654 && (tree_int_cst_sgn (top) < 0
12655 || tree_int_cst_sgn (bottom) < 0)))
12656 return 0;
12657 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
12658 top, bottom, 0));
12659
12660 default:
12661 return 0;
12662 }
12663 }
12664
12665 /* Return true if `t' is known to be non-negative. */
12666
12667 bool
12668 tree_expr_nonnegative_p (tree t)
12669 {
12670 if (t == error_mark_node)
12671 return false;
12672
12673 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12674 return true;
12675
12676 switch (TREE_CODE (t))
12677 {
12678 case SSA_NAME:
12679 /* Query VRP to see if it has recorded any information about
12680 the range of this object. */
12681 return ssa_name_nonnegative_p (t);
12682
12683 case ABS_EXPR:
12684 /* We can't return 1 if flag_wrapv is set because
12685 ABS_EXPR<INT_MIN> = INT_MIN. */
12686 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12687 return true;
12688 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12689 return true;
12690 break;
12691
12692 case INTEGER_CST:
12693 return tree_int_cst_sgn (t) >= 0;
12694
12695 case REAL_CST:
12696 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12697
12698 case PLUS_EXPR:
12699 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12700 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12701 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12702
12703 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12704 both unsigned and at least 2 bits shorter than the result. */
12705 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12706 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12707 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12708 {
12709 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12710 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12711 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12712 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12713 {
12714 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12715 TYPE_PRECISION (inner2)) + 1;
12716 return prec < TYPE_PRECISION (TREE_TYPE (t));
12717 }
12718 }
12719 break;
12720
12721 case MULT_EXPR:
12722 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12723 {
12724 /* x * x for floating point x is always non-negative. */
12725 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12726 return true;
12727 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12728 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12729 }
12730
12731 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12732 both unsigned and their total bits is shorter than the result. */
12733 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12734 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12735 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12736 {
12737 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12738 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12739 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12740 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12741 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12742 < TYPE_PRECISION (TREE_TYPE (t));
12743 }
12744 return false;
12745
12746 case BIT_AND_EXPR:
12747 case MAX_EXPR:
12748 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12749 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12750
12751 case BIT_IOR_EXPR:
12752 case BIT_XOR_EXPR:
12753 case MIN_EXPR:
12754 case RDIV_EXPR:
12755 case TRUNC_DIV_EXPR:
12756 case CEIL_DIV_EXPR:
12757 case FLOOR_DIV_EXPR:
12758 case ROUND_DIV_EXPR:
12759 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12760 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12761
12762 case TRUNC_MOD_EXPR:
12763 case CEIL_MOD_EXPR:
12764 case FLOOR_MOD_EXPR:
12765 case ROUND_MOD_EXPR:
12766 case SAVE_EXPR:
12767 case NON_LVALUE_EXPR:
12768 case FLOAT_EXPR:
12769 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12770
12771 case COMPOUND_EXPR:
12772 case MODIFY_EXPR:
12773 case GIMPLE_MODIFY_STMT:
12774 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12775
12776 case BIND_EXPR:
12777 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12778
12779 case COND_EXPR:
12780 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12781 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12782
12783 case NOP_EXPR:
12784 {
12785 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12786 tree outer_type = TREE_TYPE (t);
12787
12788 if (TREE_CODE (outer_type) == REAL_TYPE)
12789 {
12790 if (TREE_CODE (inner_type) == REAL_TYPE)
12791 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12792 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12793 {
12794 if (TYPE_UNSIGNED (inner_type))
12795 return true;
12796 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12797 }
12798 }
12799 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12800 {
12801 if (TREE_CODE (inner_type) == REAL_TYPE)
12802 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12803 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12804 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12805 && TYPE_UNSIGNED (inner_type);
12806 }
12807 }
12808 break;
12809
12810 case TARGET_EXPR:
12811 {
12812 tree temp = TARGET_EXPR_SLOT (t);
12813 t = TARGET_EXPR_INITIAL (t);
12814
12815 /* If the initializer is non-void, then it's a normal expression
12816 that will be assigned to the slot. */
12817 if (!VOID_TYPE_P (t))
12818 return tree_expr_nonnegative_p (t);
12819
12820 /* Otherwise, the initializer sets the slot in some way. One common
12821 way is an assignment statement at the end of the initializer. */
12822 while (1)
12823 {
12824 if (TREE_CODE (t) == BIND_EXPR)
12825 t = expr_last (BIND_EXPR_BODY (t));
12826 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12827 || TREE_CODE (t) == TRY_CATCH_EXPR)
12828 t = expr_last (TREE_OPERAND (t, 0));
12829 else if (TREE_CODE (t) == STATEMENT_LIST)
12830 t = expr_last (t);
12831 else
12832 break;
12833 }
12834 if ((TREE_CODE (t) == MODIFY_EXPR
12835 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12836 && GENERIC_TREE_OPERAND (t, 0) == temp)
12837 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12838
12839 return false;
12840 }
12841
12842 case CALL_EXPR:
12843 {
12844 tree fndecl = get_callee_fndecl (t);
12845 tree arglist = TREE_OPERAND (t, 1);
12846 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12847 switch (DECL_FUNCTION_CODE (fndecl))
12848 {
12849 CASE_FLT_FN (BUILT_IN_ACOS):
12850 CASE_FLT_FN (BUILT_IN_ACOSH):
12851 CASE_FLT_FN (BUILT_IN_CABS):
12852 CASE_FLT_FN (BUILT_IN_COSH):
12853 CASE_FLT_FN (BUILT_IN_ERFC):
12854 CASE_FLT_FN (BUILT_IN_EXP):
12855 CASE_FLT_FN (BUILT_IN_EXP10):
12856 CASE_FLT_FN (BUILT_IN_EXP2):
12857 CASE_FLT_FN (BUILT_IN_FABS):
12858 CASE_FLT_FN (BUILT_IN_FDIM):
12859 CASE_FLT_FN (BUILT_IN_HYPOT):
12860 CASE_FLT_FN (BUILT_IN_POW10):
12861 CASE_INT_FN (BUILT_IN_FFS):
12862 CASE_INT_FN (BUILT_IN_PARITY):
12863 CASE_INT_FN (BUILT_IN_POPCOUNT):
12864 case BUILT_IN_BSWAP32:
12865 case BUILT_IN_BSWAP64:
12866 /* Always true. */
12867 return true;
12868
12869 CASE_FLT_FN (BUILT_IN_SQRT):
12870 /* sqrt(-0.0) is -0.0. */
12871 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12872 return true;
12873 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12874
12875 CASE_FLT_FN (BUILT_IN_ASINH):
12876 CASE_FLT_FN (BUILT_IN_ATAN):
12877 CASE_FLT_FN (BUILT_IN_ATANH):
12878 CASE_FLT_FN (BUILT_IN_CBRT):
12879 CASE_FLT_FN (BUILT_IN_CEIL):
12880 CASE_FLT_FN (BUILT_IN_ERF):
12881 CASE_FLT_FN (BUILT_IN_EXPM1):
12882 CASE_FLT_FN (BUILT_IN_FLOOR):
12883 CASE_FLT_FN (BUILT_IN_FMOD):
12884 CASE_FLT_FN (BUILT_IN_FREXP):
12885 CASE_FLT_FN (BUILT_IN_LCEIL):
12886 CASE_FLT_FN (BUILT_IN_LDEXP):
12887 CASE_FLT_FN (BUILT_IN_LFLOOR):
12888 CASE_FLT_FN (BUILT_IN_LLCEIL):
12889 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12890 CASE_FLT_FN (BUILT_IN_LLRINT):
12891 CASE_FLT_FN (BUILT_IN_LLROUND):
12892 CASE_FLT_FN (BUILT_IN_LRINT):
12893 CASE_FLT_FN (BUILT_IN_LROUND):
12894 CASE_FLT_FN (BUILT_IN_MODF):
12895 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12896 CASE_FLT_FN (BUILT_IN_RINT):
12897 CASE_FLT_FN (BUILT_IN_ROUND):
12898 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12899 CASE_FLT_FN (BUILT_IN_SINH):
12900 CASE_FLT_FN (BUILT_IN_TANH):
12901 CASE_FLT_FN (BUILT_IN_TRUNC):
12902 /* True if the 1st argument is nonnegative. */
12903 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12904
12905 CASE_FLT_FN (BUILT_IN_FMAX):
12906 /* True if the 1st OR 2nd arguments are nonnegative. */
12907 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12908 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12909
12910 CASE_FLT_FN (BUILT_IN_FMIN):
12911 /* True if the 1st AND 2nd arguments are nonnegative. */
12912 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12913 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12914
12915 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12916 /* True if the 2nd argument is nonnegative. */
12917 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12918
12919 CASE_FLT_FN (BUILT_IN_POWI):
12920 /* True if the 1st argument is nonnegative or the second
12921 argument is an even integer. */
12922 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12923 {
12924 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12925 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12926 return true;
12927 }
12928 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12929
12930 CASE_FLT_FN (BUILT_IN_POW):
12931 /* True if the 1st argument is nonnegative or the second
12932 argument is an even integer valued real. */
12933 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12934 {
12935 REAL_VALUE_TYPE c;
12936 HOST_WIDE_INT n;
12937
12938 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12939 n = real_to_integer (&c);
12940 if ((n & 1) == 0)
12941 {
12942 REAL_VALUE_TYPE cint;
12943 real_from_integer (&cint, VOIDmode, n,
12944 n < 0 ? -1 : 0, 0);
12945 if (real_identical (&c, &cint))
12946 return true;
12947 }
12948 }
12949 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12950
12951 default:
12952 break;
12953 }
12954 }
12955
12956 /* ... fall through ... */
12957
12958 default:
12959 if (truth_value_p (TREE_CODE (t)))
12960 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12961 return true;
12962 }
12963
12964 /* We don't know sign of `t', so be conservative and return false. */
12965 return false;
12966 }
12967
12968 /* Return true when T is an address and is known to be nonzero.
12969 For floating point we further ensure that T is not denormal.
12970 Similar logic is present in nonzero_address in rtlanal.h. */
12971
12972 bool
12973 tree_expr_nonzero_p (tree t)
12974 {
12975 tree type = TREE_TYPE (t);
12976
12977 /* Doing something useful for floating point would need more work. */
12978 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12979 return false;
12980
12981 switch (TREE_CODE (t))
12982 {
12983 case SSA_NAME:
12984 /* Query VRP to see if it has recorded any information about
12985 the range of this object. */
12986 return ssa_name_nonzero_p (t);
12987
12988 case ABS_EXPR:
12989 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12990
12991 case INTEGER_CST:
12992 return !integer_zerop (t);
12993
12994 case PLUS_EXPR:
12995 if (TYPE_OVERFLOW_UNDEFINED (type))
12996 {
12997 /* With the presence of negative values it is hard
12998 to say something. */
12999 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
13000 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
13001 return false;
13002 /* One of operands must be positive and the other non-negative. */
13003 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
13004 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
13005 }
13006 break;
13007
13008 case MULT_EXPR:
13009 if (TYPE_OVERFLOW_UNDEFINED (type))
13010 {
13011 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
13012 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
13013 }
13014 break;
13015
13016 case NOP_EXPR:
13017 {
13018 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13019 tree outer_type = TREE_TYPE (t);
13020
13021 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13022 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
13023 }
13024 break;
13025
13026 case ADDR_EXPR:
13027 {
13028 tree base = get_base_address (TREE_OPERAND (t, 0));
13029
13030 if (!base)
13031 return false;
13032
13033 /* Weak declarations may link to NULL. */
13034 if (VAR_OR_FUNCTION_DECL_P (base))
13035 return !DECL_WEAK (base);
13036
13037 /* Constants are never weak. */
13038 if (CONSTANT_CLASS_P (base))
13039 return true;
13040
13041 return false;
13042 }
13043
13044 case COND_EXPR:
13045 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
13046 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
13047
13048 case MIN_EXPR:
13049 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
13050 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
13051
13052 case MAX_EXPR:
13053 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
13054 {
13055 /* When both operands are nonzero, then MAX must be too. */
13056 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
13057 return true;
13058
13059 /* MAX where operand 0 is positive is positive. */
13060 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
13061 }
13062 /* MAX where operand 1 is positive is positive. */
13063 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
13064 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
13065 return true;
13066 break;
13067
13068 case COMPOUND_EXPR:
13069 case MODIFY_EXPR:
13070 case GIMPLE_MODIFY_STMT:
13071 case BIND_EXPR:
13072 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
13073
13074 case SAVE_EXPR:
13075 case NON_LVALUE_EXPR:
13076 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
13077
13078 case BIT_IOR_EXPR:
13079 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
13080 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
13081
13082 case CALL_EXPR:
13083 return alloca_call_p (t);
13084
13085 default:
13086 break;
13087 }
13088 return false;
13089 }
13090
13091 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13092 attempt to fold the expression to a constant without modifying TYPE,
13093 OP0 or OP1.
13094
13095 If the expression could be simplified to a constant, then return
13096 the constant. If the expression would not be simplified to a
13097 constant, then return NULL_TREE. */
13098
13099 tree
13100 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13101 {
13102 tree tem = fold_binary (code, type, op0, op1);
13103 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13104 }
13105
13106 /* Given the components of a unary expression CODE, TYPE and OP0,
13107 attempt to fold the expression to a constant without modifying
13108 TYPE or OP0.
13109
13110 If the expression could be simplified to a constant, then return
13111 the constant. If the expression would not be simplified to a
13112 constant, then return NULL_TREE. */
13113
13114 tree
13115 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13116 {
13117 tree tem = fold_unary (code, type, op0);
13118 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13119 }
13120
13121 /* If EXP represents referencing an element in a constant string
13122 (either via pointer arithmetic or array indexing), return the
13123 tree representing the value accessed, otherwise return NULL. */
13124
13125 tree
13126 fold_read_from_constant_string (tree exp)
13127 {
13128 if ((TREE_CODE (exp) == INDIRECT_REF
13129 || TREE_CODE (exp) == ARRAY_REF)
13130 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13131 {
13132 tree exp1 = TREE_OPERAND (exp, 0);
13133 tree index;
13134 tree string;
13135
13136 if (TREE_CODE (exp) == INDIRECT_REF)
13137 string = string_constant (exp1, &index);
13138 else
13139 {
13140 tree low_bound = array_ref_low_bound (exp);
13141 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13142
13143 /* Optimize the special-case of a zero lower bound.
13144
13145 We convert the low_bound to sizetype to avoid some problems
13146 with constant folding. (E.g. suppose the lower bound is 1,
13147 and its mode is QI. Without the conversion,l (ARRAY
13148 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13149 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
13150 if (! integer_zerop (low_bound))
13151 index = size_diffop (index, fold_convert (sizetype, low_bound));
13152
13153 string = exp1;
13154 }
13155
13156 if (string
13157 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13158 && TREE_CODE (string) == STRING_CST
13159 && TREE_CODE (index) == INTEGER_CST
13160 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13161 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13162 == MODE_INT)
13163 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13164 return fold_convert (TREE_TYPE (exp),
13165 build_int_cst (NULL_TREE,
13166 (TREE_STRING_POINTER (string)
13167 [TREE_INT_CST_LOW (index)])));
13168 }
13169 return NULL;
13170 }
13171
13172 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13173 an integer constant or real constant.
13174
13175 TYPE is the type of the result. */
13176
13177 static tree
13178 fold_negate_const (tree arg0, tree type)
13179 {
13180 tree t = NULL_TREE;
13181
13182 switch (TREE_CODE (arg0))
13183 {
13184 case INTEGER_CST:
13185 {
13186 unsigned HOST_WIDE_INT low;
13187 HOST_WIDE_INT high;
13188 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13189 TREE_INT_CST_HIGH (arg0),
13190 &low, &high);
13191 t = force_fit_type_double (type, low, high, 1,
13192 (overflow | TREE_OVERFLOW (arg0))
13193 && !TYPE_UNSIGNED (type));
13194 break;
13195 }
13196
13197 case REAL_CST:
13198 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13199 break;
13200
13201 default:
13202 gcc_unreachable ();
13203 }
13204
13205 return t;
13206 }
13207
13208 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13209 an integer constant or real constant.
13210
13211 TYPE is the type of the result. */
13212
13213 tree
13214 fold_abs_const (tree arg0, tree type)
13215 {
13216 tree t = NULL_TREE;
13217
13218 switch (TREE_CODE (arg0))
13219 {
13220 case INTEGER_CST:
13221 /* If the value is unsigned, then the absolute value is
13222 the same as the ordinary value. */
13223 if (TYPE_UNSIGNED (type))
13224 t = arg0;
13225 /* Similarly, if the value is non-negative. */
13226 else if (INT_CST_LT (integer_minus_one_node, arg0))
13227 t = arg0;
13228 /* If the value is negative, then the absolute value is
13229 its negation. */
13230 else
13231 {
13232 unsigned HOST_WIDE_INT low;
13233 HOST_WIDE_INT high;
13234 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13235 TREE_INT_CST_HIGH (arg0),
13236 &low, &high);
13237 t = force_fit_type_double (type, low, high, -1,
13238 overflow | TREE_OVERFLOW (arg0));
13239 }
13240 break;
13241
13242 case REAL_CST:
13243 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13244 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13245 else
13246 t = arg0;
13247 break;
13248
13249 default:
13250 gcc_unreachable ();
13251 }
13252
13253 return t;
13254 }
13255
13256 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13257 constant. TYPE is the type of the result. */
13258
13259 static tree
13260 fold_not_const (tree arg0, tree type)
13261 {
13262 tree t = NULL_TREE;
13263
13264 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13265
13266 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13267 ~TREE_INT_CST_HIGH (arg0), 0,
13268 TREE_OVERFLOW (arg0));
13269
13270 return t;
13271 }
13272
13273 /* Given CODE, a relational operator, the target type, TYPE and two
13274 constant operands OP0 and OP1, return the result of the
13275 relational operation. If the result is not a compile time
13276 constant, then return NULL_TREE. */
13277
13278 static tree
13279 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13280 {
13281 int result, invert;
13282
13283 /* From here on, the only cases we handle are when the result is
13284 known to be a constant. */
13285
13286 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13287 {
13288 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13289 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13290
13291 /* Handle the cases where either operand is a NaN. */
13292 if (real_isnan (c0) || real_isnan (c1))
13293 {
13294 switch (code)
13295 {
13296 case EQ_EXPR:
13297 case ORDERED_EXPR:
13298 result = 0;
13299 break;
13300
13301 case NE_EXPR:
13302 case UNORDERED_EXPR:
13303 case UNLT_EXPR:
13304 case UNLE_EXPR:
13305 case UNGT_EXPR:
13306 case UNGE_EXPR:
13307 case UNEQ_EXPR:
13308 result = 1;
13309 break;
13310
13311 case LT_EXPR:
13312 case LE_EXPR:
13313 case GT_EXPR:
13314 case GE_EXPR:
13315 case LTGT_EXPR:
13316 if (flag_trapping_math)
13317 return NULL_TREE;
13318 result = 0;
13319 break;
13320
13321 default:
13322 gcc_unreachable ();
13323 }
13324
13325 return constant_boolean_node (result, type);
13326 }
13327
13328 return constant_boolean_node (real_compare (code, c0, c1), type);
13329 }
13330
13331 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13332
13333 To compute GT, swap the arguments and do LT.
13334 To compute GE, do LT and invert the result.
13335 To compute LE, swap the arguments, do LT and invert the result.
13336 To compute NE, do EQ and invert the result.
13337
13338 Therefore, the code below must handle only EQ and LT. */
13339
13340 if (code == LE_EXPR || code == GT_EXPR)
13341 {
13342 tree tem = op0;
13343 op0 = op1;
13344 op1 = tem;
13345 code = swap_tree_comparison (code);
13346 }
13347
13348 /* Note that it is safe to invert for real values here because we
13349 have already handled the one case that it matters. */
13350
13351 invert = 0;
13352 if (code == NE_EXPR || code == GE_EXPR)
13353 {
13354 invert = 1;
13355 code = invert_tree_comparison (code, false);
13356 }
13357
13358 /* Compute a result for LT or EQ if args permit;
13359 Otherwise return T. */
13360 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13361 {
13362 if (code == EQ_EXPR)
13363 result = tree_int_cst_equal (op0, op1);
13364 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13365 result = INT_CST_LT_UNSIGNED (op0, op1);
13366 else
13367 result = INT_CST_LT (op0, op1);
13368 }
13369 else
13370 return NULL_TREE;
13371
13372 if (invert)
13373 result ^= 1;
13374 return constant_boolean_node (result, type);
13375 }
13376
13377 /* Build an expression for the a clean point containing EXPR with type TYPE.
13378 Don't build a cleanup point expression for EXPR which don't have side
13379 effects. */
13380
13381 tree
13382 fold_build_cleanup_point_expr (tree type, tree expr)
13383 {
13384 /* If the expression does not have side effects then we don't have to wrap
13385 it with a cleanup point expression. */
13386 if (!TREE_SIDE_EFFECTS (expr))
13387 return expr;
13388
13389 /* If the expression is a return, check to see if the expression inside the
13390 return has no side effects or the right hand side of the modify expression
13391 inside the return. If either don't have side effects set we don't need to
13392 wrap the expression in a cleanup point expression. Note we don't check the
13393 left hand side of the modify because it should always be a return decl. */
13394 if (TREE_CODE (expr) == RETURN_EXPR)
13395 {
13396 tree op = TREE_OPERAND (expr, 0);
13397 if (!op || !TREE_SIDE_EFFECTS (op))
13398 return expr;
13399 op = TREE_OPERAND (op, 1);
13400 if (!TREE_SIDE_EFFECTS (op))
13401 return expr;
13402 }
13403
13404 return build1 (CLEANUP_POINT_EXPR, type, expr);
13405 }
13406
13407 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13408 avoid confusing the gimplify process. */
13409
13410 tree
13411 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13412 {
13413 /* The size of the object is not relevant when talking about its address. */
13414 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13415 t = TREE_OPERAND (t, 0);
13416
13417 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13418 if (TREE_CODE (t) == INDIRECT_REF
13419 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13420 {
13421 t = TREE_OPERAND (t, 0);
13422 if (TREE_TYPE (t) != ptrtype)
13423 t = build1 (NOP_EXPR, ptrtype, t);
13424 }
13425 else
13426 {
13427 tree base = t;
13428
13429 while (handled_component_p (base))
13430 base = TREE_OPERAND (base, 0);
13431 if (DECL_P (base))
13432 TREE_ADDRESSABLE (base) = 1;
13433
13434 t = build1 (ADDR_EXPR, ptrtype, t);
13435 }
13436
13437 return t;
13438 }
13439
13440 tree
13441 build_fold_addr_expr (tree t)
13442 {
13443 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13444 }
13445
13446 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13447 of an indirection through OP0, or NULL_TREE if no simplification is
13448 possible. */
13449
13450 tree
13451 fold_indirect_ref_1 (tree type, tree op0)
13452 {
13453 tree sub = op0;
13454 tree subtype;
13455
13456 STRIP_NOPS (sub);
13457 subtype = TREE_TYPE (sub);
13458 if (!POINTER_TYPE_P (subtype))
13459 return NULL_TREE;
13460
13461 if (TREE_CODE (sub) == ADDR_EXPR)
13462 {
13463 tree op = TREE_OPERAND (sub, 0);
13464 tree optype = TREE_TYPE (op);
13465 /* *&CONST_DECL -> to the value of the const decl. */
13466 if (TREE_CODE (op) == CONST_DECL)
13467 return DECL_INITIAL (op);
13468 /* *&p => p; make sure to handle *&"str"[cst] here. */
13469 if (type == optype)
13470 {
13471 tree fop = fold_read_from_constant_string (op);
13472 if (fop)
13473 return fop;
13474 else
13475 return op;
13476 }
13477 /* *(foo *)&fooarray => fooarray[0] */
13478 else if (TREE_CODE (optype) == ARRAY_TYPE
13479 && type == TREE_TYPE (optype))
13480 {
13481 tree type_domain = TYPE_DOMAIN (optype);
13482 tree min_val = size_zero_node;
13483 if (type_domain && TYPE_MIN_VALUE (type_domain))
13484 min_val = TYPE_MIN_VALUE (type_domain);
13485 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13486 }
13487 /* *(foo *)&complexfoo => __real__ complexfoo */
13488 else if (TREE_CODE (optype) == COMPLEX_TYPE
13489 && type == TREE_TYPE (optype))
13490 return fold_build1 (REALPART_EXPR, type, op);
13491 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13492 else if (TREE_CODE (optype) == VECTOR_TYPE
13493 && type == TREE_TYPE (optype))
13494 {
13495 tree part_width = TYPE_SIZE (type);
13496 tree index = bitsize_int (0);
13497 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13498 }
13499 }
13500
13501 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13502 if (TREE_CODE (sub) == PLUS_EXPR
13503 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13504 {
13505 tree op00 = TREE_OPERAND (sub, 0);
13506 tree op01 = TREE_OPERAND (sub, 1);
13507 tree op00type;
13508
13509 STRIP_NOPS (op00);
13510 op00type = TREE_TYPE (op00);
13511 if (TREE_CODE (op00) == ADDR_EXPR
13512 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13513 && type == TREE_TYPE (TREE_TYPE (op00type)))
13514 {
13515 tree size = TYPE_SIZE_UNIT (type);
13516 if (tree_int_cst_equal (size, op01))
13517 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13518 }
13519 }
13520
13521 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13522 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13523 && type == TREE_TYPE (TREE_TYPE (subtype)))
13524 {
13525 tree type_domain;
13526 tree min_val = size_zero_node;
13527 sub = build_fold_indirect_ref (sub);
13528 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13529 if (type_domain && TYPE_MIN_VALUE (type_domain))
13530 min_val = TYPE_MIN_VALUE (type_domain);
13531 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13532 }
13533
13534 return NULL_TREE;
13535 }
13536
13537 /* Builds an expression for an indirection through T, simplifying some
13538 cases. */
13539
13540 tree
13541 build_fold_indirect_ref (tree t)
13542 {
13543 tree type = TREE_TYPE (TREE_TYPE (t));
13544 tree sub = fold_indirect_ref_1 (type, t);
13545
13546 if (sub)
13547 return sub;
13548 else
13549 return build1 (INDIRECT_REF, type, t);
13550 }
13551
13552 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13553
13554 tree
13555 fold_indirect_ref (tree t)
13556 {
13557 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13558
13559 if (sub)
13560 return sub;
13561 else
13562 return t;
13563 }
13564
13565 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13566 whose result is ignored. The type of the returned tree need not be
13567 the same as the original expression. */
13568
13569 tree
13570 fold_ignored_result (tree t)
13571 {
13572 if (!TREE_SIDE_EFFECTS (t))
13573 return integer_zero_node;
13574
13575 for (;;)
13576 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13577 {
13578 case tcc_unary:
13579 t = TREE_OPERAND (t, 0);
13580 break;
13581
13582 case tcc_binary:
13583 case tcc_comparison:
13584 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13585 t = TREE_OPERAND (t, 0);
13586 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13587 t = TREE_OPERAND (t, 1);
13588 else
13589 return t;
13590 break;
13591
13592 case tcc_expression:
13593 switch (TREE_CODE (t))
13594 {
13595 case COMPOUND_EXPR:
13596 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13597 return t;
13598 t = TREE_OPERAND (t, 0);
13599 break;
13600
13601 case COND_EXPR:
13602 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13603 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13604 return t;
13605 t = TREE_OPERAND (t, 0);
13606 break;
13607
13608 default:
13609 return t;
13610 }
13611 break;
13612
13613 default:
13614 return t;
13615 }
13616 }
13617
13618 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13619 This can only be applied to objects of a sizetype. */
13620
13621 tree
13622 round_up (tree value, int divisor)
13623 {
13624 tree div = NULL_TREE;
13625
13626 gcc_assert (divisor > 0);
13627 if (divisor == 1)
13628 return value;
13629
13630 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13631 have to do anything. Only do this when we are not given a const,
13632 because in that case, this check is more expensive than just
13633 doing it. */
13634 if (TREE_CODE (value) != INTEGER_CST)
13635 {
13636 div = build_int_cst (TREE_TYPE (value), divisor);
13637
13638 if (multiple_of_p (TREE_TYPE (value), value, div))
13639 return value;
13640 }
13641
13642 /* If divisor is a power of two, simplify this to bit manipulation. */
13643 if (divisor == (divisor & -divisor))
13644 {
13645 if (TREE_CODE (value) == INTEGER_CST)
13646 {
13647 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
13648 unsigned HOST_WIDE_INT high;
13649 bool overflow_p;
13650
13651 if ((low & (divisor - 1)) == 0)
13652 return value;
13653
13654 overflow_p = TREE_OVERFLOW (value);
13655 high = TREE_INT_CST_HIGH (value);
13656 low &= ~(divisor - 1);
13657 low += divisor;
13658 if (low == 0)
13659 {
13660 high++;
13661 if (high == 0)
13662 overflow_p = true;
13663 }
13664
13665 return force_fit_type_double (TREE_TYPE (value), low, high,
13666 -1, overflow_p);
13667 }
13668 else
13669 {
13670 tree t;
13671
13672 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13673 value = size_binop (PLUS_EXPR, value, t);
13674 t = build_int_cst (TREE_TYPE (value), -divisor);
13675 value = size_binop (BIT_AND_EXPR, value, t);
13676 }
13677 }
13678 else
13679 {
13680 if (!div)
13681 div = build_int_cst (TREE_TYPE (value), divisor);
13682 value = size_binop (CEIL_DIV_EXPR, value, div);
13683 value = size_binop (MULT_EXPR, value, div);
13684 }
13685
13686 return value;
13687 }
13688
13689 /* Likewise, but round down. */
13690
13691 tree
13692 round_down (tree value, int divisor)
13693 {
13694 tree div = NULL_TREE;
13695
13696 gcc_assert (divisor > 0);
13697 if (divisor == 1)
13698 return value;
13699
13700 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13701 have to do anything. Only do this when we are not given a const,
13702 because in that case, this check is more expensive than just
13703 doing it. */
13704 if (TREE_CODE (value) != INTEGER_CST)
13705 {
13706 div = build_int_cst (TREE_TYPE (value), divisor);
13707
13708 if (multiple_of_p (TREE_TYPE (value), value, div))
13709 return value;
13710 }
13711
13712 /* If divisor is a power of two, simplify this to bit manipulation. */
13713 if (divisor == (divisor & -divisor))
13714 {
13715 tree t;
13716
13717 t = build_int_cst (TREE_TYPE (value), -divisor);
13718 value = size_binop (BIT_AND_EXPR, value, t);
13719 }
13720 else
13721 {
13722 if (!div)
13723 div = build_int_cst (TREE_TYPE (value), divisor);
13724 value = size_binop (FLOOR_DIV_EXPR, value, div);
13725 value = size_binop (MULT_EXPR, value, div);
13726 }
13727
13728 return value;
13729 }
13730
13731 /* Returns the pointer to the base of the object addressed by EXP and
13732 extracts the information about the offset of the access, storing it
13733 to PBITPOS and POFFSET. */
13734
13735 static tree
13736 split_address_to_core_and_offset (tree exp,
13737 HOST_WIDE_INT *pbitpos, tree *poffset)
13738 {
13739 tree core;
13740 enum machine_mode mode;
13741 int unsignedp, volatilep;
13742 HOST_WIDE_INT bitsize;
13743
13744 if (TREE_CODE (exp) == ADDR_EXPR)
13745 {
13746 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13747 poffset, &mode, &unsignedp, &volatilep,
13748 false);
13749 core = build_fold_addr_expr (core);
13750 }
13751 else
13752 {
13753 core = exp;
13754 *pbitpos = 0;
13755 *poffset = NULL_TREE;
13756 }
13757
13758 return core;
13759 }
13760
13761 /* Returns true if addresses of E1 and E2 differ by a constant, false
13762 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13763
13764 bool
13765 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13766 {
13767 tree core1, core2;
13768 HOST_WIDE_INT bitpos1, bitpos2;
13769 tree toffset1, toffset2, tdiff, type;
13770
13771 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13772 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13773
13774 if (bitpos1 % BITS_PER_UNIT != 0
13775 || bitpos2 % BITS_PER_UNIT != 0
13776 || !operand_equal_p (core1, core2, 0))
13777 return false;
13778
13779 if (toffset1 && toffset2)
13780 {
13781 type = TREE_TYPE (toffset1);
13782 if (type != TREE_TYPE (toffset2))
13783 toffset2 = fold_convert (type, toffset2);
13784
13785 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13786 if (!cst_and_fits_in_hwi (tdiff))
13787 return false;
13788
13789 *diff = int_cst_value (tdiff);
13790 }
13791 else if (toffset1 || toffset2)
13792 {
13793 /* If only one of the offsets is non-constant, the difference cannot
13794 be a constant. */
13795 return false;
13796 }
13797 else
13798 *diff = 0;
13799
13800 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13801 return true;
13802 }
13803
13804 /* Simplify the floating point expression EXP when the sign of the
13805 result is not significant. Return NULL_TREE if no simplification
13806 is possible. */
13807
13808 tree
13809 fold_strip_sign_ops (tree exp)
13810 {
13811 tree arg0, arg1;
13812
13813 switch (TREE_CODE (exp))
13814 {
13815 case ABS_EXPR:
13816 case NEGATE_EXPR:
13817 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13818 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13819
13820 case MULT_EXPR:
13821 case RDIV_EXPR:
13822 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13823 return NULL_TREE;
13824 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13825 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13826 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13827 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13828 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13829 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13830 break;
13831
13832 case COMPOUND_EXPR:
13833 arg0 = TREE_OPERAND (exp, 0);
13834 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13835 if (arg1)
13836 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13837 break;
13838
13839 case COND_EXPR:
13840 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13841 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13842 if (arg0 || arg1)
13843 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13844 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13845 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13846 break;
13847
13848 case CALL_EXPR:
13849 {
13850 const enum built_in_function fcode = builtin_mathfn_code (exp);
13851 switch (fcode)
13852 {
13853 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13854 /* Strip copysign function call, return the 1st argument. */
13855 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13856 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13857 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13858
13859 default:
13860 /* Strip sign ops from the argument of "odd" math functions. */
13861 if (negate_mathfn_p (fcode))
13862 {
13863 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13864 if (arg0)
13865 return build_function_call_expr (get_callee_fndecl (exp),
13866 build_tree_list (NULL_TREE,
13867 arg0));
13868 }
13869 break;
13870 }
13871 }
13872 break;
13873
13874 default:
13875 break;
13876 }
13877 return NULL_TREE;
13878 }