/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
50
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "tm.h"
55 #include "flags.h"
56 #include "tree.h"
57 #include "real.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "toplev.h"
62 #include "ggc.h"
63 #include "hashtab.h"
64 #include "langhooks.h"
65 #include "md5.h"
66
67 /* Non-zero if we are folding constants inside an initializer; zero
68 otherwise. */
69 int folding_initializer = 0;
70
71 /* The following constants represent a bit based encoding of GCC's
72 comparison operators. This encoding simplifies transformations
73 on relational comparison operators, such as AND and OR. */
74 enum comparison_code {
75 COMPCODE_FALSE = 0,
76 COMPCODE_LT = 1,
77 COMPCODE_EQ = 2,
78 COMPCODE_LE = 3,
79 COMPCODE_GT = 4,
80 COMPCODE_LTGT = 5,
81 COMPCODE_GE = 6,
82 COMPCODE_ORD = 7,
83 COMPCODE_UNORD = 8,
84 COMPCODE_UNLT = 9,
85 COMPCODE_UNEQ = 10,
86 COMPCODE_UNLE = 11,
87 COMPCODE_UNGT = 12,
88 COMPCODE_NE = 13,
89 COMPCODE_UNGE = 14,
90 COMPCODE_TRUE = 15
91 };
92
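/* Editorial note, not part of the original source: the encoding above
   assigns one bit per comparison outcome (LT = 1, EQ = 2, GT = 4,
   UNORD = 8), so a compound predicate is the bitwise OR of its parts
   and combining two comparisons with AND/OR reduces to '&'/'|' on the
   codes.  A non-compiled sketch:  */
#if 0
{
  /* COMPCODE_LE is "less than or equal", i.e. LT or EQ.  */
  gcc_assert (COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ));
  /* COMPCODE_UNGE is "unordered, greater than, or equal".  */
  gcc_assert (COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GT | COMPCODE_EQ));
  /* "a <= b || a >= b" covers every ordered outcome.  */
  gcc_assert ((COMPCODE_LE | COMPCODE_GE) == COMPCODE_ORD);
}
#endif
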
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
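
/* Editorial sketch, not part of the original source and not compiled:
   a quick illustration of the macro above.  Adding 1 to the largest
   HOST_WIDE_INT wraps to the minimum value; the operands agree in sign
   while the sum differs, so the masked AND is negative.  */
#if 0
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) -1 >> 1);
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a + 1);

  gcc_assert (OVERFLOW_SUM_SIGN (a, 1, sum));     /* wrapped: overflow */
  gcc_assert (!OVERFLOW_SUM_SIGN (a, -1, a - 1)); /* signs differ: safe */
}
#endif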
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
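
/* Editorial sketch, not part of the original source and not compiled:
   encode/decode round-trip a two-word value through the four half-word
   digits used by the multiply and divide routines below (assuming the
   usual two's complement wrap-around on the host).  */
#if 0
{
  HOST_WIDE_INT digits[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (digits, 0xdead, -1);  /* the value -1 * 2^HOST_BITS + 0xdead */
  decode (digits, &lo, &hi);
  gcc_assert (lo == 0xdead && hi == -1);
}
#endif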
\f
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

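/* Editorial sketch, not part of the original source and not compiled:
   forcing 0x1ff into an 8-bit unsigned type (unsigned_char_type_node on
   the typical target where CHAR_TYPE_SIZE is 8) keeps only the low
   eight bits and reports overflow because argument and result differ.  */
#if 0
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  gcc_assert (fit_double_type (0x1ff, 0, &lo, &hi, unsigned_char_type_node));
  gcc_assert (lo == 0xff && hi == 0);
}
#endif
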
/* We force the double-word integer HIGH:LOW to the range of the type
   TYPE by sign or zero extending it.  OVERFLOWABLE indicates whether we
   are interested in overflow of the value: when > 0 we are only
   interested in signed overflow, for < 0 we are interested in any
   overflow.  OVERFLOWED indicates whether overflow has already occurred.
   We force the value to be within range of TYPE (by clearing or setting
   all the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is > 0 and signed overflow occurs,
        or OVERFLOWABLE is < 0 and any overflow occurs.
   We return a new tree node for the extended double-word integer.  The
   node is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
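
/* Editorial sketch, not part of the original source and not compiled:
   the same bit pattern overflows or not depending on UNSIGNED_P.
   All-ones plus one wraps to zero: that is an unsigned overflow, but as
   a signed addition it is just -1 + 1 == 0.  */
#if 0
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  gcc_assert (add_double_with_sign (-1, -1, 1, 0, &lo, &hi, true));
  gcc_assert (!add_double_with_sign (-1, -1, 1, 0, &lo, &hi, false));
  gcc_assert (lo == 0 && hi == 0);
}
#endif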

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
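
/* Editorial sketch, not part of the original source and not compiled:
   the only signed double-word value whose negation overflows is the
   minimum one, i.e. low part zero and only the sign bit of the high
   part set, since it is its own negation.  */
#if 0
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  HOST_WIDE_INT min_hi
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
                       << (HOST_BITS_PER_WIDE_INT - 1));

  gcc_assert (neg_double (0, min_hi, &lo, &hi));  /* overflows */
  gcc_assert (!neg_double (1, 0, &lo, &hi));      /* -1: fine */
}
#endif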
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
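
/* Editorial sketch, not part of the original source and not compiled:
   the two rotates are inverses of each other for a given PREC, so
   rotating left and then right by the same count restores the value.  */
#if 0
{
  unsigned HOST_WIDE_INT l1, l2;
  HOST_WIDE_INT h1, h2;

  lrotate_double (0x1234, 0, 8, 2 * HOST_BITS_PER_WIDE_INT, &l1, &h1);
  rrotate_double (l1, h1, 8, 2 * HOST_BITS_PER_WIDE_INT, &l2, &h2);
  gcc_assert (l2 == 0x1234 && h2 == 0);
}
#endif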
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra (5th) element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden))  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
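
/* Editorial note, not in the original source: a worked example of the
   rounding step above, for num = -7, den = 2 (trial quotient -3,
   trial remainder -1):
     TRUNC_DIV_EXPR  -> quo = -3, rem = -1  (round toward zero)
     FLOOR_DIV_EXPR  -> quo = -4, rem =  1  (round toward -infinity)
     CEIL_DIV_EXPR   -> quo = -3, rem = -1  (round toward +infinity)
     ROUND_DIV_EXPR  -> quo = -4, rem =  1  (2*|rem| >= |den|, so round
                                             away from zero)  */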

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || !flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
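
/* Editorial note, not in the original source: for example, with
   IN = x - 4 and CODE == PLUS_EXPR the function returns x and sets
   *LITP = NULL, *MINUS_LITP = 4, *CONP = NULL, letting the caller
   reassociate the literal with other constants.  */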

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
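
/* Editorial sketch, not part of the original source and not compiled:
   folding 2 + 3 at compile time with truncation enabled (NOTRUNC == 0).  */
#if 0
{
  tree t = int_const_binop (PLUS_EXPR,
                            build_int_cst (integer_type_node, 2),
                            build_int_cst (integer_type_node, 3), 0);
  gcc_assert (tree_int_cst_equal (t, build_int_cst (integer_type_node, 5)));
}
#endif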

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
1823
1824 /* Given two values, either both of sizetype or both of bitsizetype,
1825 compute the difference between the two values. Return the value
1826 in signed type corresponding to the type of the operands. */
1827
1828 tree
1829 size_diffop (tree arg0, tree arg1)
1830 {
1831 tree type = TREE_TYPE (arg0);
1832 tree ctype;
1833
1834 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1835 TREE_TYPE (arg1)));
1836
1837 /* If the type is already signed, just do the simple thing. */
1838 if (!TYPE_UNSIGNED (type))
1839 return size_binop (MINUS_EXPR, arg0, arg1);
1840
1841 if (type == sizetype)
1842 ctype = ssizetype;
1843 else if (type == bitsizetype)
1844 ctype = sbitsizetype;
1845 else
1846 ctype = lang_hooks.types.signed_type (type);
1847
1848 /* If either operand is not a constant, do the conversions to the signed
1849 type and subtract. The hardware will do the right thing with any
1850 overflow in the subtraction. */
1851 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1852 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1853 fold_convert (ctype, arg1));
1854
1855 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1856 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1857 overflow) and negate (which can't either). Special-case a result
1858 of zero while we're here. */
1859 if (tree_int_cst_equal (arg0, arg1))
1860 return build_int_cst (ctype, 0);
1861 else if (tree_int_cst_lt (arg1, arg0))
1862 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1863 else
1864 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1865 fold_convert (ctype, size_binop (MINUS_EXPR,
1866 arg1, arg0)));
1867 }
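
/* For example, with sizetype constants 2 and 5, size_diffop (2, 5) takes
   the last branch above: it computes 5 - 2 in the unsigned sizetype,
   converts the result 3 to ssizetype, and subtracts it from zero, giving
   -3, whereas folding 2 - 5 directly in sizetype would wrap around to a
   large positive value.  */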
1868 \f
1869 /* A subroutine of fold_convert_const handling conversions of an
1870 INTEGER_CST to another integer type. */
1871
1872 static tree
1873 fold_convert_const_int_from_int (tree type, tree arg1)
1874 {
1875 tree t;
1876
1877 /* Given an integer constant, make a new constant with the new type,
1878 appropriately sign-extended or truncated. */
1879 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
1880 TREE_INT_CST_HIGH (arg1),
1881 /* Don't set the overflow when
1882 converting a pointer */
1883 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1884 (TREE_INT_CST_HIGH (arg1) < 0
1885 && (TYPE_UNSIGNED (type)
1886 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1887 | TREE_OVERFLOW (arg1));
1888
1889 return t;
1890 }
1891
1892 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1893 to an integer type. */
1894
1895 static tree
1896 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1897 {
1898 int overflow = 0;
1899 tree t;
1900
1901 /* The following code implements the floating point to integer
1902 conversion rules required by the Java Language Specification,
1903 that IEEE NaNs are mapped to zero and values that overflow
1904 the target precision saturate, i.e. values greater than
1905 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1906 are mapped to INT_MIN. These semantics are allowed by the
1907 C and C++ standards that simply state that the behavior of
1908 FP-to-integer conversion is unspecified upon overflow. */
1909
1910 HOST_WIDE_INT high, low;
1911 REAL_VALUE_TYPE r;
1912 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1913
1914 switch (code)
1915 {
1916 case FIX_TRUNC_EXPR:
1917 real_trunc (&r, VOIDmode, &x);
1918 break;
1919
1920 default:
1921 gcc_unreachable ();
1922 }
1923
1924 /* If R is NaN, return zero and show we have an overflow. */
1925 if (REAL_VALUE_ISNAN (r))
1926 {
1927 overflow = 1;
1928 high = 0;
1929 low = 0;
1930 }
1931
1932 /* See if R is less than the lower bound or greater than the
1933 upper bound. */
1934
1935 if (! overflow)
1936 {
1937 tree lt = TYPE_MIN_VALUE (type);
1938 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1939 if (REAL_VALUES_LESS (r, l))
1940 {
1941 overflow = 1;
1942 high = TREE_INT_CST_HIGH (lt);
1943 low = TREE_INT_CST_LOW (lt);
1944 }
1945 }
1946
1947 if (! overflow)
1948 {
1949 tree ut = TYPE_MAX_VALUE (type);
1950 if (ut)
1951 {
1952 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1953 if (REAL_VALUES_LESS (u, r))
1954 {
1955 overflow = 1;
1956 high = TREE_INT_CST_HIGH (ut);
1957 low = TREE_INT_CST_LOW (ut);
1958 }
1959 }
1960 }
1961
1962 if (! overflow)
1963 REAL_VALUE_TO_INT (&low, &high, r);
1964
1965 t = force_fit_type_double (type, low, high, -1,
1966 overflow | TREE_OVERFLOW (arg1));
1967 return t;
1968 }
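
/* For example, folding (int) 1.0e19 for a 32-bit int type saturates to
   INT_MAX (2147483647) and sets TREE_OVERFLOW on the result, folding
   (int) -1.0e19 saturates to INT_MIN, and folding an IEEE NaN yields 0,
   again with TREE_OVERFLOW set, per the Java rules described above.  */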
1969
1970 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1971 to another floating point type. */
1972
1973 static tree
1974 fold_convert_const_real_from_real (tree type, tree arg1)
1975 {
1976 REAL_VALUE_TYPE value;
1977 tree t;
1978
1979 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1980 t = build_real (type, value);
1981
1982 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1983 return t;
1984 }
1985
1986 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1987 type TYPE. If no simplification can be done return NULL_TREE. */
1988
1989 static tree
1990 fold_convert_const (enum tree_code code, tree type, tree arg1)
1991 {
1992 if (TREE_TYPE (arg1) == type)
1993 return arg1;
1994
1995 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1996 {
1997 if (TREE_CODE (arg1) == INTEGER_CST)
1998 return fold_convert_const_int_from_int (type, arg1);
1999 else if (TREE_CODE (arg1) == REAL_CST)
2000 return fold_convert_const_int_from_real (code, type, arg1);
2001 }
2002 else if (TREE_CODE (type) == REAL_TYPE)
2003 {
2004 if (TREE_CODE (arg1) == INTEGER_CST)
2005 return build_real_from_int_cst (type, arg1);
2006 if (TREE_CODE (arg1) == REAL_CST)
2007 return fold_convert_const_real_from_real (type, arg1);
2008 }
2009 return NULL_TREE;
2010 }
2011
2012 /* Construct a vector of vector type TYPE whose elements are all zero. */
2013
2014 static tree
2015 build_zero_vector (tree type)
2016 {
2017 tree elem, list;
2018 int i, units;
2019
2020 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2021 units = TYPE_VECTOR_SUBPARTS (type);
2022
2023 list = NULL_TREE;
2024 for (i = 0; i < units; i++)
2025 list = tree_cons (NULL_TREE, elem, list);
2026 return build_vector (type, list);
2027 }
2028
2029 /* Convert expression ARG to type TYPE. Used by the middle-end for
2030 simple conversions in preference to calling the front-end's convert. */
2031
2032 tree
2033 fold_convert (tree type, tree arg)
2034 {
2035 tree orig = TREE_TYPE (arg);
2036 tree tem;
2037
2038 if (type == orig)
2039 return arg;
2040
2041 if (TREE_CODE (arg) == ERROR_MARK
2042 || TREE_CODE (type) == ERROR_MARK
2043 || TREE_CODE (orig) == ERROR_MARK)
2044 return error_mark_node;
2045
2046 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2047 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2048 TYPE_MAIN_VARIANT (orig)))
2049 return fold_build1 (NOP_EXPR, type, arg);
2050
2051 switch (TREE_CODE (type))
2052 {
2053 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2054 case POINTER_TYPE: case REFERENCE_TYPE:
2055 case OFFSET_TYPE:
2056 if (TREE_CODE (arg) == INTEGER_CST)
2057 {
2058 tem = fold_convert_const (NOP_EXPR, type, arg);
2059 if (tem != NULL_TREE)
2060 return tem;
2061 }
2062 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2063 || TREE_CODE (orig) == OFFSET_TYPE)
2064 return fold_build1 (NOP_EXPR, type, arg);
2065 if (TREE_CODE (orig) == COMPLEX_TYPE)
2066 {
2067 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2068 return fold_convert (type, tem);
2069 }
2070 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2071 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2072 return fold_build1 (NOP_EXPR, type, arg);
2073
2074 case REAL_TYPE:
2075 if (TREE_CODE (arg) == INTEGER_CST)
2076 {
2077 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2078 if (tem != NULL_TREE)
2079 return tem;
2080 }
2081 else if (TREE_CODE (arg) == REAL_CST)
2082 {
2083 tem = fold_convert_const (NOP_EXPR, type, arg);
2084 if (tem != NULL_TREE)
2085 return tem;
2086 }
2087
2088 switch (TREE_CODE (orig))
2089 {
2090 case INTEGER_TYPE:
2091 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2092 case POINTER_TYPE: case REFERENCE_TYPE:
2093 return fold_build1 (FLOAT_EXPR, type, arg);
2094
2095 case REAL_TYPE:
2096 return fold_build1 (NOP_EXPR, type, arg);
2097
2098 case COMPLEX_TYPE:
2099 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2100 return fold_convert (type, tem);
2101
2102 default:
2103 gcc_unreachable ();
2104 }
2105
2106 case COMPLEX_TYPE:
2107 switch (TREE_CODE (orig))
2108 {
2109 case INTEGER_TYPE:
2110 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2111 case POINTER_TYPE: case REFERENCE_TYPE:
2112 case REAL_TYPE:
2113 return build2 (COMPLEX_EXPR, type,
2114 fold_convert (TREE_TYPE (type), arg),
2115 fold_convert (TREE_TYPE (type), integer_zero_node));
2116 case COMPLEX_TYPE:
2117 {
2118 tree rpart, ipart;
2119
2120 if (TREE_CODE (arg) == COMPLEX_EXPR)
2121 {
2122 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2123 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2124 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2125 }
2126
2127 arg = save_expr (arg);
2128 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2129 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2130 rpart = fold_convert (TREE_TYPE (type), rpart);
2131 ipart = fold_convert (TREE_TYPE (type), ipart);
2132 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2133 }
2134
2135 default:
2136 gcc_unreachable ();
2137 }
2138
2139 case VECTOR_TYPE:
2140 if (integer_zerop (arg))
2141 return build_zero_vector (type);
2142 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2143 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2144 || TREE_CODE (orig) == VECTOR_TYPE);
2145 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2146
2147 case VOID_TYPE:
2148 tem = fold_ignored_result (arg);
2149 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2150 return tem;
2151 return fold_build1 (NOP_EXPR, type, tem);
2152
2153 default:
2154 gcc_unreachable ();
2155 }
2156 }
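
/* Some concrete cases of the dispatch above: converting a _Complex double
   to double folds to a REALPART_EXPR of the operand, converting an integer
   I to _Complex double builds COMPLEX_EXPR <(double) I, 0.0>, and
   converting integer zero to a vector type yields a zero vector.  */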
2157 \f
2158 /* Return false if expr can be assumed not to be an lvalue, true
2159 otherwise. */
2160
2161 static bool
2162 maybe_lvalue_p (tree x)
2163 {
2164 /* We only need to wrap lvalue tree codes. */
2165 switch (TREE_CODE (x))
2166 {
2167 case VAR_DECL:
2168 case PARM_DECL:
2169 case RESULT_DECL:
2170 case LABEL_DECL:
2171 case FUNCTION_DECL:
2172 case SSA_NAME:
2173
2174 case COMPONENT_REF:
2175 case INDIRECT_REF:
2176 case ALIGN_INDIRECT_REF:
2177 case MISALIGNED_INDIRECT_REF:
2178 case ARRAY_REF:
2179 case ARRAY_RANGE_REF:
2180 case BIT_FIELD_REF:
2181 case OBJ_TYPE_REF:
2182
2183 case REALPART_EXPR:
2184 case IMAGPART_EXPR:
2185 case PREINCREMENT_EXPR:
2186 case PREDECREMENT_EXPR:
2187 case SAVE_EXPR:
2188 case TRY_CATCH_EXPR:
2189 case WITH_CLEANUP_EXPR:
2190 case COMPOUND_EXPR:
2191 case MODIFY_EXPR:
2192 case GIMPLE_MODIFY_STMT:
2193 case TARGET_EXPR:
2194 case COND_EXPR:
2195 case BIND_EXPR:
2196 case MIN_EXPR:
2197 case MAX_EXPR:
2198 break;
2199
2200 default:
2201 /* Assume the worst for front-end tree codes. */
2202 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2203 break;
2204 return false;
2205 }
2206
2207 return true;
2208 }
2209
2210 /* Return an expr equal to X but certainly not valid as an lvalue. */
2211
2212 tree
2213 non_lvalue (tree x)
2214 {
2215 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2216 us. */
2217 if (in_gimple_form)
2218 return x;
2219
2220 if (! maybe_lvalue_p (x))
2221 return x;
2222 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2223 }
2224
2225 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2226 Zero means allow extended lvalues. */
2227
2228 int pedantic_lvalues;
2229
2230 /* When pedantic, return an expr equal to X but certainly not valid as a
2231 pedantic lvalue. Otherwise, return X. */
2232
2233 static tree
2234 pedantic_non_lvalue (tree x)
2235 {
2236 if (pedantic_lvalues)
2237 return non_lvalue (x);
2238 else
2239 return x;
2240 }
2241 \f
2242 /* Given a tree comparison code, return the code that is the logical inverse
2243 of the given code. It is not safe to do this for floating-point
2244 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive the
2245 HONOR_NANS flag: if reversing the comparison is unsafe, return ERROR_MARK. */
2246
2247 enum tree_code
2248 invert_tree_comparison (enum tree_code code, bool honor_nans)
2249 {
2250 if (honor_nans && flag_trapping_math)
2251 return ERROR_MARK;
2252
2253 switch (code)
2254 {
2255 case EQ_EXPR:
2256 return NE_EXPR;
2257 case NE_EXPR:
2258 return EQ_EXPR;
2259 case GT_EXPR:
2260 return honor_nans ? UNLE_EXPR : LE_EXPR;
2261 case GE_EXPR:
2262 return honor_nans ? UNLT_EXPR : LT_EXPR;
2263 case LT_EXPR:
2264 return honor_nans ? UNGE_EXPR : GE_EXPR;
2265 case LE_EXPR:
2266 return honor_nans ? UNGT_EXPR : GT_EXPR;
2267 case LTGT_EXPR:
2268 return UNEQ_EXPR;
2269 case UNEQ_EXPR:
2270 return LTGT_EXPR;
2271 case UNGT_EXPR:
2272 return LE_EXPR;
2273 case UNGE_EXPR:
2274 return LT_EXPR;
2275 case UNLT_EXPR:
2276 return GE_EXPR;
2277 case UNLE_EXPR:
2278 return GT_EXPR;
2279 case ORDERED_EXPR:
2280 return UNORDERED_EXPR;
2281 case UNORDERED_EXPR:
2282 return ORDERED_EXPR;
2283 default:
2284 gcc_unreachable ();
2285 }
2286 }
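
/* For example, when NaNs must be honored the inverse of a < b is
   a UNGE b rather than a >= b: if either operand is NaN, a < b is false,
   so its inverse must be true, which plain GE_EXPR is not.  When
   -ftrapping-math is also in effect we give up above, because the
   unordered variants do not trap on quiet NaNs while the ordered
   comparisons do.  */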
2287
2288 /* Similar, but return the comparison that results if the operands are
2289 swapped. This is safe for floating-point. */
2290
2291 enum tree_code
2292 swap_tree_comparison (enum tree_code code)
2293 {
2294 switch (code)
2295 {
2296 case EQ_EXPR:
2297 case NE_EXPR:
2298 case ORDERED_EXPR:
2299 case UNORDERED_EXPR:
2300 case LTGT_EXPR:
2301 case UNEQ_EXPR:
2302 return code;
2303 case GT_EXPR:
2304 return LT_EXPR;
2305 case GE_EXPR:
2306 return LE_EXPR;
2307 case LT_EXPR:
2308 return GT_EXPR;
2309 case LE_EXPR:
2310 return GE_EXPR;
2311 case UNGT_EXPR:
2312 return UNLT_EXPR;
2313 case UNGE_EXPR:
2314 return UNLE_EXPR;
2315 case UNLT_EXPR:
2316 return UNGT_EXPR;
2317 case UNLE_EXPR:
2318 return UNGE_EXPR;
2319 default:
2320 gcc_unreachable ();
2321 }
2322 }
2323
2324
2325 /* Convert a comparison tree code from an enum tree_code representation
2326 into a compcode bit-based encoding. This function is the inverse of
2327 compcode_to_comparison. */
2328
2329 static enum comparison_code
2330 comparison_to_compcode (enum tree_code code)
2331 {
2332 switch (code)
2333 {
2334 case LT_EXPR:
2335 return COMPCODE_LT;
2336 case EQ_EXPR:
2337 return COMPCODE_EQ;
2338 case LE_EXPR:
2339 return COMPCODE_LE;
2340 case GT_EXPR:
2341 return COMPCODE_GT;
2342 case NE_EXPR:
2343 return COMPCODE_NE;
2344 case GE_EXPR:
2345 return COMPCODE_GE;
2346 case ORDERED_EXPR:
2347 return COMPCODE_ORD;
2348 case UNORDERED_EXPR:
2349 return COMPCODE_UNORD;
2350 case UNLT_EXPR:
2351 return COMPCODE_UNLT;
2352 case UNEQ_EXPR:
2353 return COMPCODE_UNEQ;
2354 case UNLE_EXPR:
2355 return COMPCODE_UNLE;
2356 case UNGT_EXPR:
2357 return COMPCODE_UNGT;
2358 case LTGT_EXPR:
2359 return COMPCODE_LTGT;
2360 case UNGE_EXPR:
2361 return COMPCODE_UNGE;
2362 default:
2363 gcc_unreachable ();
2364 }
2365 }
2366
2367 /* Convert a compcode bit-based encoding of a comparison operator back
2368 to GCC's enum tree_code representation. This function is the
2369 inverse of comparison_to_compcode. */
2370
2371 static enum tree_code
2372 compcode_to_comparison (enum comparison_code code)
2373 {
2374 switch (code)
2375 {
2376 case COMPCODE_LT:
2377 return LT_EXPR;
2378 case COMPCODE_EQ:
2379 return EQ_EXPR;
2380 case COMPCODE_LE:
2381 return LE_EXPR;
2382 case COMPCODE_GT:
2383 return GT_EXPR;
2384 case COMPCODE_NE:
2385 return NE_EXPR;
2386 case COMPCODE_GE:
2387 return GE_EXPR;
2388 case COMPCODE_ORD:
2389 return ORDERED_EXPR;
2390 case COMPCODE_UNORD:
2391 return UNORDERED_EXPR;
2392 case COMPCODE_UNLT:
2393 return UNLT_EXPR;
2394 case COMPCODE_UNEQ:
2395 return UNEQ_EXPR;
2396 case COMPCODE_UNLE:
2397 return UNLE_EXPR;
2398 case COMPCODE_UNGT:
2399 return UNGT_EXPR;
2400 case COMPCODE_LTGT:
2401 return LTGT_EXPR;
2402 case COMPCODE_UNGE:
2403 return UNGE_EXPR;
2404 default:
2405 gcc_unreachable ();
2406 }
2407 }
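
/* These conversions are trivial because each bit of a compcode records
   whether the predicate holds in one of the four possible outcomes of a
   comparison: LT is bit 0, EQ is bit 1, GT is bit 2 and UNORDERED is
   bit 3.  Thus COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ (1 | 2 = 3),
   COMPCODE_NE is LT | GT | UNORD (1 | 4 | 8 = 13, true whenever the
   operands are not equal, including when they are unordered), and
   COMPCODE_ORD is LT | EQ | GT (7).  */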
2408
2409 /* Return a tree for the comparison which is the combination of
2410 doing the AND or OR (depending on CODE) of the two operations LCODE
2411 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2412 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2413 if this makes the transformation invalid. */
2414
2415 tree
2416 combine_comparisons (enum tree_code code, enum tree_code lcode,
2417 enum tree_code rcode, tree truth_type,
2418 tree ll_arg, tree lr_arg)
2419 {
2420 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2421 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2422 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2423 enum comparison_code compcode;
2424
2425 switch (code)
2426 {
2427 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2428 compcode = lcompcode & rcompcode;
2429 break;
2430
2431 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2432 compcode = lcompcode | rcompcode;
2433 break;
2434
2435 default:
2436 return NULL_TREE;
2437 }
2438
2439 if (!honor_nans)
2440 {
2441 /* Eliminate unordered comparisons, as well as LTGT and ORD
2442 which are not used unless the mode has NaNs. */
2443 compcode &= ~COMPCODE_UNORD;
2444 if (compcode == COMPCODE_LTGT)
2445 compcode = COMPCODE_NE;
2446 else if (compcode == COMPCODE_ORD)
2447 compcode = COMPCODE_TRUE;
2448 }
2449 else if (flag_trapping_math)
2450 {
2451 /* Check that the original operation and the optimized ones will trap
2452 under the same condition. */
2453 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2454 && (lcompcode != COMPCODE_EQ)
2455 && (lcompcode != COMPCODE_ORD);
2456 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2457 && (rcompcode != COMPCODE_EQ)
2458 && (rcompcode != COMPCODE_ORD);
2459 bool trap = (compcode & COMPCODE_UNORD) == 0
2460 && (compcode != COMPCODE_EQ)
2461 && (compcode != COMPCODE_ORD);
2462
2463 /* In a short-circuited boolean expression the LHS might be
2464 such that the RHS, if evaluated, will never trap. For
2465 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2466 if neither x nor y is NaN. (This is a mixed blessing: for
2467 example, the expression above will never trap, hence
2468 optimizing it to x < y would be invalid). */
2469 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2470 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2471 rtrap = false;
2472
2473 /* If the comparison was short-circuited, and only the RHS
2474 trapped, we may now generate a spurious trap. */
2475 if (rtrap && !ltrap
2476 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2477 return NULL_TREE;
2478
2479 /* If we changed the conditions that cause a trap, we lose. */
2480 if ((ltrap || rtrap) != trap)
2481 return NULL_TREE;
2482 }
2483
2484 if (compcode == COMPCODE_TRUE)
2485 return constant_boolean_node (true, truth_type);
2486 else if (compcode == COMPCODE_FALSE)
2487 return constant_boolean_node (false, truth_type);
2488 else
2489 return fold_build2 (compcode_to_comparison (compcode),
2490 truth_type, ll_arg, lr_arg);
2491 }
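
/* For example, (a < b) || (a == b) combines as COMPCODE_LT | COMPCODE_EQ
   = COMPCODE_LE and folds to a <= b; and when the mode has no NaNs,
   (a < b) || (a > b) combines to COMPCODE_LTGT, which the !honor_nans
   path above rewrites to COMPCODE_NE, i.e. a != b.  */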
2492
2493 /* Return nonzero if CODE is a tree code that represents a truth value. */
2494
2495 static int
2496 truth_value_p (enum tree_code code)
2497 {
2498 return (TREE_CODE_CLASS (code) == tcc_comparison
2499 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2500 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2501 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2502 }
2503 \f
2504 /* Return nonzero if two operands (typically of the same tree node)
2505 are necessarily equal. If either argument has side-effects this
2506 function returns zero. FLAGS modifies behavior as follows:
2507
2508 If OEP_ONLY_CONST is set, only return nonzero for constants.
2509 This function tests whether the operands are indistinguishable;
2510 it does not test whether they are equal using C's == operation.
2511 The distinction is important for IEEE floating point, because
2512 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2513 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2514
2515 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2516 even though it may hold multiple values during a function.
2517 This is because a GCC tree node guarantees that nothing else is
2518 executed between the evaluation of its "operands" (which may often
2519 be evaluated in arbitrary order). Hence if the operands themselves
2520 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2521 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2522 unset means assuming isochronic (or instantaneous) tree equivalence.
2523 Unless comparing arbitrary expression trees, such as from different
2524 statements, this flag can usually be left unset.
2525
2526 If OEP_PURE_SAME is set, then pure functions with identical arguments
2527 are considered the same. It is used when the caller has other ways
2528 to ensure that global memory is unchanged in between. */
2529
2530 int
2531 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2532 {
2533 /* If either is ERROR_MARK, they aren't equal. */
2534 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2535 return 0;
2536
2537 /* If both types don't have the same signedness, then we can't consider
2538 them equal. We must check this before the STRIP_NOPS calls
2539 because they may change the signedness of the arguments. */
2540 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2541 return 0;
2542
2543 /* If both types don't have the same precision, then it is not safe
2544 to strip NOPs. */
2545 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2546 return 0;
2547
2548 STRIP_NOPS (arg0);
2549 STRIP_NOPS (arg1);
2550
2551 /* In case both args are comparisons but with different comparison
2552 code, try to swap the comparison operands of one arg to produce
2553 a match and compare that variant. */
2554 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2555 && COMPARISON_CLASS_P (arg0)
2556 && COMPARISON_CLASS_P (arg1))
2557 {
2558 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2559
2560 if (TREE_CODE (arg0) == swap_code)
2561 return operand_equal_p (TREE_OPERAND (arg0, 0),
2562 TREE_OPERAND (arg1, 1), flags)
2563 && operand_equal_p (TREE_OPERAND (arg0, 1),
2564 TREE_OPERAND (arg1, 0), flags);
2565 }
2566
2567 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2568 /* This is needed for conversions and for COMPONENT_REF.
2569 Might as well play it safe and always test this. */
2570 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2571 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2572 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2573 return 0;
2574
2575 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2576 We don't care about side effects in that case because the SAVE_EXPR
2577 takes care of that for us. In all other cases, two expressions are
2578 equal if they have no side effects. If we have two identical
2579 expressions with side effects that should be treated the same due
2580 to the only side effects being identical SAVE_EXPR's, that will
2581 be detected in the recursive calls below. */
2582 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2583 && (TREE_CODE (arg0) == SAVE_EXPR
2584 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2585 return 1;
2586
2587 /* Next handle constant cases, those for which we can return 1 even
2588 if ONLY_CONST is set. */
2589 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2590 switch (TREE_CODE (arg0))
2591 {
2592 case INTEGER_CST:
2593 return tree_int_cst_equal (arg0, arg1);
2594
2595 case REAL_CST:
2596 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2597 TREE_REAL_CST (arg1)))
2598 return 1;
2599
2600
2601 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2602 {
2603 /* If we do not distinguish between signed and unsigned zero,
2604 consider them equal. */
2605 if (real_zerop (arg0) && real_zerop (arg1))
2606 return 1;
2607 }
2608 return 0;
2609
2610 case VECTOR_CST:
2611 {
2612 tree v1, v2;
2613
2614 v1 = TREE_VECTOR_CST_ELTS (arg0);
2615 v2 = TREE_VECTOR_CST_ELTS (arg1);
2616 while (v1 && v2)
2617 {
2618 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2619 flags))
2620 return 0;
2621 v1 = TREE_CHAIN (v1);
2622 v2 = TREE_CHAIN (v2);
2623 }
2624
2625 return v1 == v2;
2626 }
2627
2628 case COMPLEX_CST:
2629 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2630 flags)
2631 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2632 flags));
2633
2634 case STRING_CST:
2635 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2636 && ! memcmp (TREE_STRING_POINTER (arg0),
2637 TREE_STRING_POINTER (arg1),
2638 TREE_STRING_LENGTH (arg0)));
2639
2640 case ADDR_EXPR:
2641 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2642 0);
2643 default:
2644 break;
2645 }
2646
2647 if (flags & OEP_ONLY_CONST)
2648 return 0;
2649
2650 /* Define macros to test an operand from arg0 and arg1 for equality and a
2651 variant that allows null and views null as being different from any
2652 non-null value. In the latter case, if either is null, then both
2653 must be; otherwise, do the normal comparison. */
2654 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2655 TREE_OPERAND (arg1, N), flags)
2656
2657 #define OP_SAME_WITH_NULL(N) \
2658 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2659 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2660
2661 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2662 {
2663 case tcc_unary:
2664 /* Two conversions are equal only if signedness and modes match. */
2665 switch (TREE_CODE (arg0))
2666 {
2667 case NOP_EXPR:
2668 case CONVERT_EXPR:
2669 case FIX_TRUNC_EXPR:
2670 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2671 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2672 return 0;
2673 break;
2674 default:
2675 break;
2676 }
2677
2678 return OP_SAME (0);
2679
2680
2681 case tcc_comparison:
2682 case tcc_binary:
2683 if (OP_SAME (0) && OP_SAME (1))
2684 return 1;
2685
2686 /* For commutative ops, allow the other order. */
2687 return (commutative_tree_code (TREE_CODE (arg0))
2688 && operand_equal_p (TREE_OPERAND (arg0, 0),
2689 TREE_OPERAND (arg1, 1), flags)
2690 && operand_equal_p (TREE_OPERAND (arg0, 1),
2691 TREE_OPERAND (arg1, 0), flags));
2692
2693 case tcc_reference:
2694 /* If either of the pointer (or reference) expressions we are
2695 dereferencing contain a side effect, these cannot be equal. */
2696 if (TREE_SIDE_EFFECTS (arg0)
2697 || TREE_SIDE_EFFECTS (arg1))
2698 return 0;
2699
2700 switch (TREE_CODE (arg0))
2701 {
2702 case INDIRECT_REF:
2703 case ALIGN_INDIRECT_REF:
2704 case MISALIGNED_INDIRECT_REF:
2705 case REALPART_EXPR:
2706 case IMAGPART_EXPR:
2707 return OP_SAME (0);
2708
2709 case ARRAY_REF:
2710 case ARRAY_RANGE_REF:
2711 /* Operands 2 and 3 may be null. */
2712 return (OP_SAME (0)
2713 && OP_SAME (1)
2714 && OP_SAME_WITH_NULL (2)
2715 && OP_SAME_WITH_NULL (3));
2716
2717 case COMPONENT_REF:
2718 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2719 may be NULL when we're called to compare MEM_EXPRs. */
2720 return OP_SAME_WITH_NULL (0)
2721 && OP_SAME (1)
2722 && OP_SAME_WITH_NULL (2);
2723
2724 case BIT_FIELD_REF:
2725 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2726
2727 default:
2728 return 0;
2729 }
2730
2731 case tcc_expression:
2732 switch (TREE_CODE (arg0))
2733 {
2734 case ADDR_EXPR:
2735 case TRUTH_NOT_EXPR:
2736 return OP_SAME (0);
2737
2738 case TRUTH_ANDIF_EXPR:
2739 case TRUTH_ORIF_EXPR:
2740 return OP_SAME (0) && OP_SAME (1);
2741
2742 case TRUTH_AND_EXPR:
2743 case TRUTH_OR_EXPR:
2744 case TRUTH_XOR_EXPR:
2745 if (OP_SAME (0) && OP_SAME (1))
2746 return 1;
2747
2748 /* Otherwise take into account this is a commutative operation. */
2749 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2750 TREE_OPERAND (arg1, 1), flags)
2751 && operand_equal_p (TREE_OPERAND (arg0, 1),
2752 TREE_OPERAND (arg1, 0), flags));
2753
2754 case CALL_EXPR:
2755 /* If the CALL_EXPRs call different functions, then they
2756 clearly cannot be equal. */
2757 if (!OP_SAME (0))
2758 return 0;
2759
2760 {
2761 unsigned int cef = call_expr_flags (arg0);
2762 if (flags & OEP_PURE_SAME)
2763 cef &= ECF_CONST | ECF_PURE;
2764 else
2765 cef &= ECF_CONST;
2766 if (!cef)
2767 return 0;
2768 }
2769
2770 /* Now see if all the arguments are the same. operand_equal_p
2771 does not handle TREE_LIST, so we walk the operands here
2772 feeding them to operand_equal_p. */
2773 arg0 = TREE_OPERAND (arg0, 1);
2774 arg1 = TREE_OPERAND (arg1, 1);
2775 while (arg0 && arg1)
2776 {
2777 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2778 flags))
2779 return 0;
2780
2781 arg0 = TREE_CHAIN (arg0);
2782 arg1 = TREE_CHAIN (arg1);
2783 }
2784
2785 /* If we get here and both argument lists are exhausted
2786 then the CALL_EXPRs are equal. */
2787 return ! (arg0 || arg1);
2788
2789 default:
2790 return 0;
2791 }
2792
2793 case tcc_declaration:
2794 /* Consider __builtin_sqrt equal to sqrt. */
2795 return (TREE_CODE (arg0) == FUNCTION_DECL
2796 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2797 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2798 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2799
2800 default:
2801 return 0;
2802 }
2803
2804 #undef OP_SAME
2805 #undef OP_SAME_WITH_NULL
2806 }
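
/* For example, a + b and b + a compare equal through the commutative
   cases above; f () + 1 never equals another f () + 1, because the calls
   have side effects (unless both operands are literally the same
   SAVE_EXPR); and the REAL_CST case treats -0.0 as equal to 0.0 only
   when the mode does not honor signed zeros.  */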
2807 \f
2808 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2809 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2810
2811 When in doubt, return 0. */
2812
2813 static int
2814 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2815 {
2816 int unsignedp1, unsignedpo;
2817 tree primarg0, primarg1, primother;
2818 unsigned int correct_width;
2819
2820 if (operand_equal_p (arg0, arg1, 0))
2821 return 1;
2822
2823 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2824 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2825 return 0;
2826
2827 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2828 and see if the inner values are the same. This removes any
2829 signedness comparison, which doesn't matter here. */
2830 primarg0 = arg0, primarg1 = arg1;
2831 STRIP_NOPS (primarg0);
2832 STRIP_NOPS (primarg1);
2833 if (operand_equal_p (primarg0, primarg1, 0))
2834 return 1;
2835
2836 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2837 actual comparison operand, ARG0.
2838
2839 First throw away any conversions to wider types
2840 already present in the operands. */
2841
2842 primarg1 = get_narrower (arg1, &unsignedp1);
2843 primother = get_narrower (other, &unsignedpo);
2844
2845 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2846 if (unsignedp1 == unsignedpo
2847 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2848 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2849 {
2850 tree type = TREE_TYPE (arg0);
2851
2852 /* Make sure shorter operand is extended the right way
2853 to match the longer operand. */
2854 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2855 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2856
2857 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2858 return 1;
2859 }
2860
2861 return 0;
2862 }
2863 \f
2864 /* See if ARG is an expression that is either a comparison or is performing
2865 arithmetic on comparisons. The comparisons must only be comparing
2866 two different values, which will be stored in *CVAL1 and *CVAL2; if
2867 they are nonzero it means that some operands have already been found.
2868 No variables may be used anywhere else in the expression except in the
2869 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2870 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2871
2872 If this is true, return 1. Otherwise, return zero. */
2873
2874 static int
2875 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2876 {
2877 enum tree_code code = TREE_CODE (arg);
2878 enum tree_code_class class = TREE_CODE_CLASS (code);
2879
2880 /* We can handle some of the tcc_expression cases here. */
2881 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2882 class = tcc_unary;
2883 else if (class == tcc_expression
2884 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2885 || code == COMPOUND_EXPR))
2886 class = tcc_binary;
2887
2888 else if (class == tcc_expression && code == SAVE_EXPR
2889 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2890 {
2891 /* If we've already found a CVAL1 or CVAL2, this expression is
2892 too complex to handle. */
2893 if (*cval1 || *cval2)
2894 return 0;
2895
2896 class = tcc_unary;
2897 *save_p = 1;
2898 }
2899
2900 switch (class)
2901 {
2902 case tcc_unary:
2903 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2904
2905 case tcc_binary:
2906 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2907 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2908 cval1, cval2, save_p));
2909
2910 case tcc_constant:
2911 return 1;
2912
2913 case tcc_expression:
2914 if (code == COND_EXPR)
2915 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2916 cval1, cval2, save_p)
2917 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2918 cval1, cval2, save_p)
2919 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2920 cval1, cval2, save_p));
2921 return 0;
2922
2923 case tcc_comparison:
2924 /* First see if we can handle the first operand, then the second. For
2925 the second operand, we know *CVAL1 can't be zero. Each of the two
2926 values must appear on its own side of the comparison; test for the
2927 case where this isn't true by failing if the two operands
2928 are the same. */
2929
2930 if (operand_equal_p (TREE_OPERAND (arg, 0),
2931 TREE_OPERAND (arg, 1), 0))
2932 return 0;
2933
2934 if (*cval1 == 0)
2935 *cval1 = TREE_OPERAND (arg, 0);
2936 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2937 ;
2938 else if (*cval2 == 0)
2939 *cval2 = TREE_OPERAND (arg, 0);
2940 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2941 ;
2942 else
2943 return 0;
2944
2945 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2946 ;
2947 else if (*cval2 == 0)
2948 *cval2 = TREE_OPERAND (arg, 1);
2949 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2950 ;
2951 else
2952 return 0;
2953
2954 return 1;
2955
2956 default:
2957 return 0;
2958 }
2959 }
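
/* For example, (a < b) | (a == b) satisfies this predicate with
   *CVAL1 == a and *CVAL2 == b, whereas (a < b) | (c < d) fails because
   four different values appear, and a < a fails because both sides of
   one comparison are the same value.  */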
2960 \f
2961 /* ARG is a tree that is known to contain just arithmetic operations and
2962 comparisons. Evaluate the operations in the tree substituting NEW0 for
2963 any occurrence of OLD0 as an operand of a comparison and likewise for
2964 NEW1 and OLD1. */
2965
2966 static tree
2967 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2968 {
2969 tree type = TREE_TYPE (arg);
2970 enum tree_code code = TREE_CODE (arg);
2971 enum tree_code_class class = TREE_CODE_CLASS (code);
2972
2973 /* We can handle some of the tcc_expression cases here. */
2974 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2975 class = tcc_unary;
2976 else if (class == tcc_expression
2977 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2978 class = tcc_binary;
2979
2980 switch (class)
2981 {
2982 case tcc_unary:
2983 return fold_build1 (code, type,
2984 eval_subst (TREE_OPERAND (arg, 0),
2985 old0, new0, old1, new1));
2986
2987 case tcc_binary:
2988 return fold_build2 (code, type,
2989 eval_subst (TREE_OPERAND (arg, 0),
2990 old0, new0, old1, new1),
2991 eval_subst (TREE_OPERAND (arg, 1),
2992 old0, new0, old1, new1));
2993
2994 case tcc_expression:
2995 switch (code)
2996 {
2997 case SAVE_EXPR:
2998 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2999
3000 case COMPOUND_EXPR:
3001 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3002
3003 case COND_EXPR:
3004 return fold_build3 (code, type,
3005 eval_subst (TREE_OPERAND (arg, 0),
3006 old0, new0, old1, new1),
3007 eval_subst (TREE_OPERAND (arg, 1),
3008 old0, new0, old1, new1),
3009 eval_subst (TREE_OPERAND (arg, 2),
3010 old0, new0, old1, new1));
3011 default:
3012 break;
3013 }
3014 /* Fall through - ??? */
3015
3016 case tcc_comparison:
3017 {
3018 tree arg0 = TREE_OPERAND (arg, 0);
3019 tree arg1 = TREE_OPERAND (arg, 1);
3020
3021 /* We need to check both for exact equality and tree equality. The
3022 former will be true if the operand has a side-effect. In that
3023 case, we know the operand occurred exactly once. */
3024
3025 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3026 arg0 = new0;
3027 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3028 arg0 = new1;
3029
3030 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3031 arg1 = new0;
3032 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3033 arg1 = new1;
3034
3035 return fold_build2 (code, type, arg0, arg1);
3036 }
3037
3038 default:
3039 return arg;
3040 }
3041 }
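
/* For example, once twoval_comparison_p has identified A and B as the only
   values compared in (a < b) | (a == b), a caller can evaluate the whole
   expression at a chosen point: eval_subst (arg, a, integer_zero_node, b,
   integer_one_node) yields (0 < 1) | (0 == 1), which then folds to a
   constant.  */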
3042 \f
3043 /* Return a tree for the case when the result of an expression is RESULT
3044 converted to TYPE and OMITTED was previously an operand of the expression
3045 but is now not needed (e.g., we folded OMITTED * 0).
3046
3047 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3048 the conversion of RESULT to TYPE. */
3049
3050 tree
3051 omit_one_operand (tree type, tree result, tree omitted)
3052 {
3053 tree t = fold_convert (type, result);
3054
3055 if (TREE_SIDE_EFFECTS (omitted))
3056 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3057
3058 return non_lvalue (t);
3059 }
3060
3061 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3062
3063 static tree
3064 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3065 {
3066 tree t = fold_convert (type, result);
3067
3068 if (TREE_SIDE_EFFECTS (omitted))
3069 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3070
3071 return pedantic_non_lvalue (t);
3072 }
3073
3074 /* Return a tree for the case when the result of an expression is RESULT
3075 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3076 of the expression but are now not needed.
3077
3078 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3079 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3080 evaluated before OMITTED2. Otherwise, if neither has side effects,
3081 just do the conversion of RESULT to TYPE. */
3082
3083 tree
3084 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3085 {
3086 tree t = fold_convert (type, result);
3087
3088 if (TREE_SIDE_EFFECTS (omitted2))
3089 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3090 if (TREE_SIDE_EFFECTS (omitted1))
3091 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3092
3093 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3094 }
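
/* For example, when fold simplifies f () * 0, the call cannot simply be
   dropped, so omit_one_operand (type, integer_zero_node, f ()) produces
   COMPOUND_EXPR <f (), 0>, keeping the side effect while the value of the
   whole expression is the constant; when the omitted operand has no side
   effects, just the converted result is returned.  */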
3095
3096 \f
3097 /* Return a simplified tree node for the truth-negation of ARG. This
3098 never alters ARG itself. We assume that ARG is an operation that
3099 returns a truth value (0 or 1).
3100
3101 FIXME: one would think we would fold the result, but it causes
3102 problems with the dominator optimizer. */
3103
3104 tree
3105 fold_truth_not_expr (tree arg)
3106 {
3107 tree type = TREE_TYPE (arg);
3108 enum tree_code code = TREE_CODE (arg);
3109
3110 /* If this is a comparison, we can simply invert it, except for
3111 floating-point non-equality comparisons, in which case we just
3112 enclose a TRUTH_NOT_EXPR around what we have. */
3113
3114 if (TREE_CODE_CLASS (code) == tcc_comparison)
3115 {
3116 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3117 if (FLOAT_TYPE_P (op_type)
3118 && flag_trapping_math
3119 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3120 && code != NE_EXPR && code != EQ_EXPR)
3121 return NULL_TREE;
3122 else
3123 {
3124 code = invert_tree_comparison (code,
3125 HONOR_NANS (TYPE_MODE (op_type)));
3126 if (code == ERROR_MARK)
3127 return NULL_TREE;
3128 else
3129 return build2 (code, type,
3130 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3131 }
3132 }
3133
3134 switch (code)
3135 {
3136 case INTEGER_CST:
3137 return constant_boolean_node (integer_zerop (arg), type);
3138
3139 case TRUTH_AND_EXPR:
3140 return build2 (TRUTH_OR_EXPR, type,
3141 invert_truthvalue (TREE_OPERAND (arg, 0)),
3142 invert_truthvalue (TREE_OPERAND (arg, 1)));
3143
3144 case TRUTH_OR_EXPR:
3145 return build2 (TRUTH_AND_EXPR, type,
3146 invert_truthvalue (TREE_OPERAND (arg, 0)),
3147 invert_truthvalue (TREE_OPERAND (arg, 1)));
3148
3149 case TRUTH_XOR_EXPR:
3150 /* Here we can invert either operand. We invert the first operand
3151 unless the second operand is a TRUTH_NOT_EXPR in which case our
3152 result is the XOR of the first operand with the inside of the
3153 negation of the second operand. */
3154
3155 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3156 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3157 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3158 else
3159 return build2 (TRUTH_XOR_EXPR, type,
3160 invert_truthvalue (TREE_OPERAND (arg, 0)),
3161 TREE_OPERAND (arg, 1));
3162
3163 case TRUTH_ANDIF_EXPR:
3164 return build2 (TRUTH_ORIF_EXPR, type,
3165 invert_truthvalue (TREE_OPERAND (arg, 0)),
3166 invert_truthvalue (TREE_OPERAND (arg, 1)));
3167
3168 case TRUTH_ORIF_EXPR:
3169 return build2 (TRUTH_ANDIF_EXPR, type,
3170 invert_truthvalue (TREE_OPERAND (arg, 0)),
3171 invert_truthvalue (TREE_OPERAND (arg, 1)));
3172
3173 case TRUTH_NOT_EXPR:
3174 return TREE_OPERAND (arg, 0);
3175
3176 case COND_EXPR:
3177 {
3178 tree arg1 = TREE_OPERAND (arg, 1);
3179 tree arg2 = TREE_OPERAND (arg, 2);
3180 /* A COND_EXPR may have a throw as one operand, which
3181 then has void type. Just leave void operands
3182 as they are. */
3183 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3184 VOID_TYPE_P (TREE_TYPE (arg1))
3185 ? arg1 : invert_truthvalue (arg1),
3186 VOID_TYPE_P (TREE_TYPE (arg2))
3187 ? arg2 : invert_truthvalue (arg2));
3188 }
3189
3190 case COMPOUND_EXPR:
3191 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3192 invert_truthvalue (TREE_OPERAND (arg, 1)));
3193
3194 case NON_LVALUE_EXPR:
3195 return invert_truthvalue (TREE_OPERAND (arg, 0));
3196
3197 case NOP_EXPR:
3198 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3199 return build1 (TRUTH_NOT_EXPR, type, arg);
3200
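      /* ... fall through ...  */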
3201 case CONVERT_EXPR:
3202 case FLOAT_EXPR:
3203 return build1 (TREE_CODE (arg), type,
3204 invert_truthvalue (TREE_OPERAND (arg, 0)));
3205
3206 case BIT_AND_EXPR:
3207 if (!integer_onep (TREE_OPERAND (arg, 1)))
3208 break;
3209 return build2 (EQ_EXPR, type, arg,
3210 build_int_cst (type, 0));
3211
3212 case SAVE_EXPR:
3213 return build1 (TRUTH_NOT_EXPR, type, arg);
3214
3215 case CLEANUP_POINT_EXPR:
3216 return build1 (CLEANUP_POINT_EXPR, type,
3217 invert_truthvalue (TREE_OPERAND (arg, 0)));
3218
3219 default:
3220 break;
3221 }
3222
3223 return NULL_TREE;
3224 }
3225
3226 /* Return a simplified tree node for the truth-negation of ARG. This
3227 never alters ARG itself. We assume that ARG is an operation that
3228 returns a truth value (0 or 1).
3229
3230 FIXME: one would think we would fold the result, but it causes
3231 problems with the dominator optimizer. */
3232
3233 tree
3234 invert_truthvalue (tree arg)
3235 {
3236 tree tem;
3237
3238 if (TREE_CODE (arg) == ERROR_MARK)
3239 return arg;
3240
3241 tem = fold_truth_not_expr (arg);
3242 if (!tem)
3243 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3244
3245 return tem;
3246 }
3247
3248 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3249 operands are another bit-wise operation with a common input. If so,
3250 distribute the bit operations to save an operation and possibly two if
3251 constants are involved. For example, convert
3252 (A | B) & (A | C) into A | (B & C)
3253 Further simplification will occur if B and C are constants.
3254
3255 If this optimization cannot be done, 0 will be returned. */
3256
3257 static tree
3258 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3259 {
3260 tree common;
3261 tree left, right;
3262
3263 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3264 || TREE_CODE (arg0) == code
3265 || (TREE_CODE (arg0) != BIT_AND_EXPR
3266 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3267 return 0;
3268
3269 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3270 {
3271 common = TREE_OPERAND (arg0, 0);
3272 left = TREE_OPERAND (arg0, 1);
3273 right = TREE_OPERAND (arg1, 1);
3274 }
3275 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3276 {
3277 common = TREE_OPERAND (arg0, 0);
3278 left = TREE_OPERAND (arg0, 1);
3279 right = TREE_OPERAND (arg1, 0);
3280 }
3281 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3282 {
3283 common = TREE_OPERAND (arg0, 1);
3284 left = TREE_OPERAND (arg0, 0);
3285 right = TREE_OPERAND (arg1, 1);
3286 }
3287 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3288 {
3289 common = TREE_OPERAND (arg0, 1);
3290 left = TREE_OPERAND (arg0, 0);
3291 right = TREE_OPERAND (arg1, 0);
3292 }
3293 else
3294 return 0;
3295
3296 return fold_build2 (TREE_CODE (arg0), type, common,
3297 fold_build2 (code, type, left, right));
3298 }
3299
3300 /* Knowing that ARG0 and ARG1 are each a MULT_EXPR or an RDIV_EXPR, simplify
3301 a binary operation with code CODE. This reassociation is unsafe for FP. */
3302 static tree
3303 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3304 {
3305 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3306 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3307
3308 /* (A / C) +- (B / C) -> (A +- B) / C. */
3309 if (mul0 == mul1
3310 && operand_equal_p (TREE_OPERAND (arg0, 1),
3311 TREE_OPERAND (arg1, 1), 0))
3312 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3313 fold_build2 (code, type,
3314 TREE_OPERAND (arg0, 0),
3315 TREE_OPERAND (arg1, 0)),
3316 TREE_OPERAND (arg0, 1));
3317
3318 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3319 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3320 TREE_OPERAND (arg1, 0), 0)
3321 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3322 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3323 {
3324 REAL_VALUE_TYPE r0, r1;
3325 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3326 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3327 if (!mul0)
3328 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3329 if (!mul1)
3330 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3331 real_arithmetic (&r0, code, &r0, &r1);
3332 return fold_build2 (MULT_EXPR, type,
3333 TREE_OPERAND (arg0, 0),
3334 build_real (type, r0));
3335 }
3336
3337 return NULL_TREE;
3338 }
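
/* For example, A / 2.0 + A / 4.0 becomes A * 0.75 here, because
   1/2.0 + 1/4.0 is evaluated exactly at compile time, and A * C + B * C
   becomes (A + B) * C through the first case.  The rewritten expression
   can round or overflow differently from the original, which is why this
   transformation is unsafe.  */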
3339 \f
3340 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3341 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3342
3343 static tree
3344 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3345 int unsignedp)
3346 {
3347 tree result;
3348
3349 if (bitpos == 0)
3350 {
3351 tree size = TYPE_SIZE (TREE_TYPE (inner));
3352 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3353 || POINTER_TYPE_P (TREE_TYPE (inner)))
3354 && host_integerp (size, 0)
3355 && tree_low_cst (size, 0) == bitsize)
3356 return fold_convert (type, inner);
3357 }
3358
3359 result = build3 (BIT_FIELD_REF, type, inner,
3360 size_int (bitsize), bitsize_int (bitpos));
3361
3362 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3363
3364 return result;
3365 }
3366
3367 /* Optimize a bit-field compare.
3368
3369 There are two cases: First is a compare against a constant and the
3370 second is a comparison of two items where the fields are at the same
3371 bit position relative to the start of a chunk (byte, halfword, word)
3372 large enough to contain it. In these cases we can avoid the shift
3373 implicit in bitfield extractions.
3374
3375 For constants, we emit a compare of the shifted constant with the
3376 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3377 compared. For two fields at the same position, we do the ANDs with the
3378 similar mask and compare the result of the ANDs.
3379
3380 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3381 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3382 are the left and right operands of the comparison, respectively.
3383
3384 If the optimization described above can be done, we return the resulting
3385 tree. Otherwise we return zero. */
3386
3387 static tree
3388 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3389 tree lhs, tree rhs)
3390 {
3391 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3392 tree type = TREE_TYPE (lhs);
3393 tree signed_type, unsigned_type;
3394 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3395 enum machine_mode lmode, rmode, nmode;
3396 int lunsignedp, runsignedp;
3397 int lvolatilep = 0, rvolatilep = 0;
3398 tree linner, rinner = NULL_TREE;
3399 tree mask;
3400 tree offset;
3401
3402 /* Get all the information about the extractions being done. If the bit size
3403 is the same as the size of the underlying object, we aren't doing an
3404 extraction at all and so can do nothing. We also don't want to
3405 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3406 then will no longer be able to replace it. */
3407 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3408 &lunsignedp, &lvolatilep, false);
3409 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3410 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3411 return 0;
3412
3413 if (!const_p)
3414 {
3415 /* If this is not a constant, we can only do something if bit positions,
3416 sizes, and signedness are the same. */
3417 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3418 &runsignedp, &rvolatilep, false);
3419
3420 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3421 || lunsignedp != runsignedp || offset != 0
3422 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3423 return 0;
3424 }
3425
3426 /* See if we can find a mode to refer to this field. We should be able to,
3427 but fail if we can't. */
3428 nmode = get_best_mode (lbitsize, lbitpos,
3429 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3430 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3431 TYPE_ALIGN (TREE_TYPE (rinner))),
3432 word_mode, lvolatilep || rvolatilep);
3433 if (nmode == VOIDmode)
3434 return 0;
3435
3436 /* Set signed and unsigned types of the precision of this mode for the
3437 shifts below. */
3438 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3439 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3440
3441 /* Compute the bit position and size for the new reference and our offset
3442 within it. If the new reference is the same size as the original, we
3443 won't optimize anything, so return zero. */
3444 nbitsize = GET_MODE_BITSIZE (nmode);
3445 nbitpos = lbitpos & ~ (nbitsize - 1);
3446 lbitpos -= nbitpos;
3447 if (nbitsize == lbitsize)
3448 return 0;
3449
3450 if (BYTES_BIG_ENDIAN)
3451 lbitpos = nbitsize - lbitsize - lbitpos;
3452
3453 /* Make the mask to be used against the extracted field. */
3454 mask = build_int_cst_type (unsigned_type, -1);
3455 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3456 mask = const_binop (RSHIFT_EXPR, mask,
3457 size_int (nbitsize - lbitsize - lbitpos), 0);
3458
3459 if (! const_p)
3460 /* If not comparing with constant, just rework the comparison
3461 and return. */
3462 return fold_build2 (code, compare_type,
3463 fold_build2 (BIT_AND_EXPR, unsigned_type,
3464 make_bit_field_ref (linner,
3465 unsigned_type,
3466 nbitsize, nbitpos,
3467 1),
3468 mask),
3469 fold_build2 (BIT_AND_EXPR, unsigned_type,
3470 make_bit_field_ref (rinner,
3471 unsigned_type,
3472 nbitsize, nbitpos,
3473 1),
3474 mask));
3475
3476 /* Otherwise, we are handling the constant case. See if the constant is too
3477 big for the field. Warn and return a tree for 0 (false) if so. We do
3478 this not only for its own sake, but to avoid having to test for this
3479 error case below. If we didn't, we might generate wrong code.
3480
3481 For unsigned fields, the constant shifted right by the field length should
3482 be all zero. For signed fields, the high-order bits should agree with
3483 the sign bit. */
3484
3485 if (lunsignedp)
3486 {
3487 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3488 fold_convert (unsigned_type, rhs),
3489 size_int (lbitsize), 0)))
3490 {
3491 warning (0, "comparison is always %d due to width of bit-field",
3492 code == NE_EXPR);
3493 return constant_boolean_node (code == NE_EXPR, compare_type);
3494 }
3495 }
3496 else
3497 {
3498 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3499 size_int (lbitsize - 1), 0);
3500 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3501 {
3502 warning (0, "comparison is always %d due to width of bit-field",
3503 code == NE_EXPR);
3504 return constant_boolean_node (code == NE_EXPR, compare_type);
3505 }
3506 }
3507
3508 /* Single-bit compares should always be against zero. */
3509 if (lbitsize == 1 && ! integer_zerop (rhs))
3510 {
3511 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3512 rhs = build_int_cst (type, 0);
3513 }
3514
3515 /* Make a new bitfield reference, shift the constant over the
3516 appropriate number of bits and mask it with the computed mask
3517 (in case this was a signed field). If we changed it, make a new one. */
3518 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3519 if (lvolatilep)
3520 {
3521 TREE_SIDE_EFFECTS (lhs) = 1;
3522 TREE_THIS_VOLATILE (lhs) = 1;
3523 }
3524
3525 rhs = const_binop (BIT_AND_EXPR,
3526 const_binop (LSHIFT_EXPR,
3527 fold_convert (unsigned_type, rhs),
3528 size_int (lbitpos), 0),
3529 mask, 0);
3530
3531 return build2 (code, compare_type,
3532 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3533 rhs);
3534 }
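
/* For example, given struct { unsigned a : 3; unsigned b : 5; } *p, a
   test like p->b == 7 can be rewritten along the lines of
   (*(unsigned char *) p & MASK) == (7 << SHIFT) for a MASK and SHIFT
   chosen as above, loading one byte and avoiding the shift that
   extracting p->b by itself would need; the exact mode, mask and shift
   depend on the target's alignment and endianness.  */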
3535 \f
3536 /* Subroutine for fold_truthop: decode a field reference.
3537
3538 If EXP is a comparison reference, we return the innermost reference.
3539
3540 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3541 set to the starting bit number.
3542
3543 If the innermost field can be completely contained in a mode-sized
3544 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3545
3546 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3547 otherwise it is not changed.
3548
3549 *PUNSIGNEDP is set to the signedness of the field.
3550
3551 *PMASK is set to the mask used. This is either contained in a
3552 BIT_AND_EXPR or derived from the width of the field.
3553
3554 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3555
3556 Return 0 if this is not a component reference or is one that we can't
3557 do anything with. */
3558
3559 static tree
3560 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3561 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3562 int *punsignedp, int *pvolatilep,
3563 tree *pmask, tree *pand_mask)
3564 {
3565 tree outer_type = 0;
3566 tree and_mask = 0;
3567 tree mask, inner, offset;
3568 tree unsigned_type;
3569 unsigned int precision;
3570
3571 /* All the optimizations using this function assume integer fields.
3572 There are problems with FP fields since the type_for_size call
3573 below can fail for, e.g., XFmode. */
3574 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3575 return 0;
3576
3577 /* We are interested in the bare arrangement of bits, so strip everything
3578 that doesn't affect the machine mode. However, record the type of the
3579 outermost expression if it may matter below. */
3580 if (TREE_CODE (exp) == NOP_EXPR
3581 || TREE_CODE (exp) == CONVERT_EXPR
3582 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3583 outer_type = TREE_TYPE (exp);
3584 STRIP_NOPS (exp);
3585
3586 if (TREE_CODE (exp) == BIT_AND_EXPR)
3587 {
3588 and_mask = TREE_OPERAND (exp, 1);
3589 exp = TREE_OPERAND (exp, 0);
3590 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3591 if (TREE_CODE (and_mask) != INTEGER_CST)
3592 return 0;
3593 }
3594
3595 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3596 punsignedp, pvolatilep, false);
3597 if ((inner == exp && and_mask == 0)
3598 || *pbitsize < 0 || offset != 0
3599 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3600 return 0;
3601
3602 /* If the number of bits in the reference is the same as the bitsize of
3603 the outer type, then the outer type gives the signedness. Otherwise
3604 (in case of a small bitfield) the signedness is unchanged. */
3605 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3606 *punsignedp = TYPE_UNSIGNED (outer_type);
3607
3608 /* Compute the mask to access the bitfield. */
3609 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3610 precision = TYPE_PRECISION (unsigned_type);
3611
3612 mask = build_int_cst_type (unsigned_type, -1);
3613
3614 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3615 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3616
3617 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3618 if (and_mask != 0)
3619 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3620 fold_convert (unsigned_type, and_mask), mask);
3621
3622 *pmask = mask;
3623 *pand_mask = and_mask;
3624 return inner;
3625 }
3626
3627 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3628 bit positions. */
3629
3630 static int
3631 all_ones_mask_p (tree mask, int size)
3632 {
3633 tree type = TREE_TYPE (mask);
3634 unsigned int precision = TYPE_PRECISION (type);
3635 tree tmask;
3636
3637 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3638
3639 return
3640 tree_int_cst_equal (mask,
3641 const_binop (RSHIFT_EXPR,
3642 const_binop (LSHIFT_EXPR, tmask,
3643 size_int (precision - size),
3644 0),
3645 size_int (precision - size), 0));
3646 }
3647
3648 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3649 represents the sign bit of EXP's type. If EXP represents a sign
3650 or zero extension, also test VAL against the unextended type.
3651 The return value is the (sub)expression whose sign bit is VAL,
3652 or NULL_TREE otherwise. */
3653
3654 static tree
3655 sign_bit_p (tree exp, tree val)
3656 {
3657 unsigned HOST_WIDE_INT mask_lo, lo;
3658 HOST_WIDE_INT mask_hi, hi;
3659 int width;
3660 tree t;
3661
3662 /* Tree EXP must have an integral type. */
3663 t = TREE_TYPE (exp);
3664 if (! INTEGRAL_TYPE_P (t))
3665 return NULL_TREE;
3666
3667 /* Tree VAL must be an integer constant. */
3668 if (TREE_CODE (val) != INTEGER_CST
3669 || TREE_OVERFLOW (val))
3670 return NULL_TREE;
3671
3672 width = TYPE_PRECISION (t);
3673 if (width > HOST_BITS_PER_WIDE_INT)
3674 {
3675 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3676 lo = 0;
3677
3678 mask_hi = ((unsigned HOST_WIDE_INT) -1
3679 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3680 mask_lo = -1;
3681 }
3682 else
3683 {
3684 hi = 0;
3685 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3686
3687 mask_hi = 0;
3688 mask_lo = ((unsigned HOST_WIDE_INT) -1
3689 >> (HOST_BITS_PER_WIDE_INT - width));
3690 }
3691
3692 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3693 treat VAL as if it were unsigned. */
3694 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3695 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3696 return exp;
3697
3698 /* Handle extension from a narrower type. */
3699 if (TREE_CODE (exp) == NOP_EXPR
3700 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3701 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3702
3703 return NULL_TREE;
3704 }
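
/* Illustrative sketch, not GCC source: the sign-bit constant for a
   WIDTH-bit type, split into two halves the way TREE_INT_CST_LOW and
   TREE_INT_CST_HIGH store it.  Assumes a 64-bit HOST_WIDE_INT and
   0 < width <= 128; the helper name is hypothetical.  */
#if 0
#include <stdint.h>
static void
sign_bit_parts (int width, uint64_t *lo, uint64_t *hi)
{
  if (width > 64)
    {
      *hi = (uint64_t) 1 << (width - 64 - 1);  /* sign bit is in the high half */
      *lo = 0;
    }
  else
    {
      *hi = 0;
      *lo = (uint64_t) 1 << (width - 1);       /* sign bit is in the low half */
    }
}
#endif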
3705
3706 /* Subroutine for fold_truthop: determine if an operand is simple enough
3707 to be evaluated unconditionally. */
3708
3709 static int
3710 simple_operand_p (tree exp)
3711 {
3712 /* Strip any conversions that don't change the machine mode. */
3713 STRIP_NOPS (exp);
3714
3715 return (CONSTANT_CLASS_P (exp)
3716 || TREE_CODE (exp) == SSA_NAME
3717 || (DECL_P (exp)
3718 && ! TREE_ADDRESSABLE (exp)
3719 && ! TREE_THIS_VOLATILE (exp)
3720 && ! DECL_NONLOCAL (exp)
3721 /* Don't regard global variables as simple. They may be
3722 allocated in ways unknown to the compiler (shared memory,
3723 #pragma weak, etc). */
3724 && ! TREE_PUBLIC (exp)
3725 && ! DECL_EXTERNAL (exp)
3726 /* Loading a static variable is unduly expensive, but global
3727 registers aren't expensive. */
3728 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3729 }
3730 \f
3731 /* The following functions are subroutines to fold_range_test and allow it to
3732 try to change a logical combination of comparisons into a range test.
3733
3734 For example, both
3735 X == 2 || X == 3 || X == 4 || X == 5
3736 and
3737 X >= 2 && X <= 5
3738 are converted to
3739 (unsigned) (X - 2) <= 3
3740
3741 We describe each set of comparisons as being either inside or outside
3742 a range, using a variable named like IN_P, and then describe the
3743 range with a lower and upper bound. If one of the bounds is omitted,
3744 it represents either the highest or lowest value of the type.
3745
3746 In the comments below, we represent a range by two numbers in brackets
3747 preceded by a "+" to designate being inside that range, or a "-" to
3748 designate being outside that range, so the condition can be inverted by
3749 flipping the prefix. An omitted bound is represented by a "-". For
3750 example, "- [-, 10]" means being outside the range starting at the lowest
3751 possible value and ending at 10, in other words, being greater than 10.
3752 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3753 always false.
3754
3755 We set up things so that the missing bounds are handled in a consistent
3756 manner so neither a missing bound nor "true" and "false" need to be
3757 handled using a special case. */
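
/* Illustrative sketch, not GCC source: the three forms named above as
   plain C.  The subtraction is done in unsigned arithmetic so it is
   well defined for every int X; all three return the same value.  */
#if 0
static int in_range_ors (int x) { return x == 2 || x == 3 || x == 4 || x == 5; }
static int in_range_cmp (int x) { return x >= 2 && x <= 5; }
static int in_range_sub (int x) { return (unsigned) x - 2u <= 3u; }
#endif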
3758
3759 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3760 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3761 and UPPER1_P are nonzero if the respective argument is an upper bound
3762 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3763 must be specified for a comparison. ARG1 will be converted to ARG0's
3764 type if both are specified. */
3765
3766 static tree
3767 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3768 tree arg1, int upper1_p)
3769 {
3770 tree tem;
3771 int result;
3772 int sgn0, sgn1;
3773
3774 /* If neither arg represents infinity, do the normal operation.
3775 Else, if not a comparison, return infinity. Else handle the special
3776 comparison rules. Note that most of the cases below won't occur, but
3777 are handled for consistency. */
3778
3779 if (arg0 != 0 && arg1 != 0)
3780 {
3781 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3782 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3783 STRIP_NOPS (tem);
3784 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3785 }
3786
3787 if (TREE_CODE_CLASS (code) != tcc_comparison)
3788 return 0;
3789
3790 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3791 for neither.  In real mathematics we could not compare open-ended
3792 ranges this way.  But this is computer arithmetic, where numbers are
3793 finite, so we can replace any missing bound by a value Z that lies
3794 beyond every representable number.  This permits us to treat
3795 unbounded ranges as equal.  */
3796 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3797 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3798 switch (code)
3799 {
3800 case EQ_EXPR:
3801 result = sgn0 == sgn1;
3802 break;
3803 case NE_EXPR:
3804 result = sgn0 != sgn1;
3805 break;
3806 case LT_EXPR:
3807 result = sgn0 < sgn1;
3808 break;
3809 case LE_EXPR:
3810 result = sgn0 <= sgn1;
3811 break;
3812 case GT_EXPR:
3813 result = sgn0 > sgn1;
3814 break;
3815 case GE_EXPR:
3816 result = sgn0 >= sgn1;
3817 break;
3818 default:
3819 gcc_unreachable ();
3820 }
3821
3822 return constant_boolean_node (result, type);
3823 }
3824 \f
3825 /* Given EXP, a logical expression, set the range it is testing into
3826 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3827 actually being tested. *PLOW and *PHIGH will be made of the same type
3828 as the returned expression. If EXP is not a comparison, we will most
3829 likely not be returning a useful value and range. */
3830
3831 static tree
3832 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3833 {
3834 enum tree_code code;
3835 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3836 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3837 int in_p, n_in_p;
3838 tree low, high, n_low, n_high;
3839
3840 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3841 and see if we can refine the range. Some of the cases below may not
3842 happen, but it doesn't seem worth worrying about this. We "continue"
3843 the outer loop when we've changed something; otherwise we "break"
3844 the switch, which will "break" the while. */
3845
3846 in_p = 0;
3847 low = high = build_int_cst (TREE_TYPE (exp), 0);
3848
3849 while (1)
3850 {
3851 code = TREE_CODE (exp);
3852 exp_type = TREE_TYPE (exp);
3853
3854 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3855 {
3856 if (TREE_CODE_LENGTH (code) > 0)
3857 arg0 = TREE_OPERAND (exp, 0);
3858 if (TREE_CODE_CLASS (code) == tcc_comparison
3859 || TREE_CODE_CLASS (code) == tcc_unary
3860 || TREE_CODE_CLASS (code) == tcc_binary)
3861 arg0_type = TREE_TYPE (arg0);
3862 if (TREE_CODE_CLASS (code) == tcc_binary
3863 || TREE_CODE_CLASS (code) == tcc_comparison
3864 || (TREE_CODE_CLASS (code) == tcc_expression
3865 && TREE_CODE_LENGTH (code) > 1))
3866 arg1 = TREE_OPERAND (exp, 1);
3867 }
3868
3869 switch (code)
3870 {
3871 case TRUTH_NOT_EXPR:
3872 in_p = ! in_p, exp = arg0;
3873 continue;
3874
3875 case EQ_EXPR: case NE_EXPR:
3876 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3877 /* We can only do something if the range is testing for zero
3878 and if the second operand is an integer constant. Note that
3879 saying something is "in" the range we make is done by
3880 complementing IN_P, since the initial case of being not equal
3881 to zero is an "out" range; "out" leaves IN_P alone.  */
3882 if (low == 0 || high == 0
3883 || ! integer_zerop (low) || ! integer_zerop (high)
3884 || TREE_CODE (arg1) != INTEGER_CST)
3885 break;
3886
3887 switch (code)
3888 {
3889 case NE_EXPR: /* - [c, c] */
3890 low = high = arg1;
3891 break;
3892 case EQ_EXPR: /* + [c, c] */
3893 in_p = ! in_p, low = high = arg1;
3894 break;
3895 case GT_EXPR: /* - [-, c] */
3896 low = 0, high = arg1;
3897 break;
3898 case GE_EXPR: /* + [c, -] */
3899 in_p = ! in_p, low = arg1, high = 0;
3900 break;
3901 case LT_EXPR: /* - [c, -] */
3902 low = arg1, high = 0;
3903 break;
3904 case LE_EXPR: /* + [-, c] */
3905 in_p = ! in_p, low = 0, high = arg1;
3906 break;
3907 default:
3908 gcc_unreachable ();
3909 }
3910
3911 /* If this is an unsigned comparison, we also know that EXP is
3912 greater than or equal to zero. We base the range tests we make
3913 on that fact, so we record it here so we can parse existing
3914 range tests. We test arg0_type since often the return type
3915 of, e.g. EQ_EXPR, is boolean. */
3916 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3917 {
3918 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3919 in_p, low, high, 1,
3920 build_int_cst (arg0_type, 0),
3921 NULL_TREE))
3922 break;
3923
3924 in_p = n_in_p, low = n_low, high = n_high;
3925
3926 /* If the high bound is missing, but we have a nonzero low
3927 bound, reverse the range so it goes from zero to the low bound
3928 minus 1. */
3929 if (high == 0 && low && ! integer_zerop (low))
3930 {
3931 in_p = ! in_p;
3932 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3933 integer_one_node, 0);
3934 low = build_int_cst (arg0_type, 0);
3935 }
3936 }
3937
3938 exp = arg0;
3939 continue;
3940
3941 case NEGATE_EXPR:
3942 /* (-x) IN [a,b] -> x in [-b, -a] */
3943 n_low = range_binop (MINUS_EXPR, exp_type,
3944 build_int_cst (exp_type, 0),
3945 0, high, 1);
3946 n_high = range_binop (MINUS_EXPR, exp_type,
3947 build_int_cst (exp_type, 0),
3948 0, low, 0);
3949 low = n_low, high = n_high;
3950 exp = arg0;
3951 continue;
3952
3953 case BIT_NOT_EXPR:
3954 /* ~ X -> -X - 1 */
3955 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3956 build_int_cst (exp_type, 1));
3957 continue;
3958
3959 case PLUS_EXPR: case MINUS_EXPR:
3960 if (TREE_CODE (arg1) != INTEGER_CST)
3961 break;
3962
3963 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3964 move a constant to the other side. */
3965 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3966 break;
3967
3968 /* If EXP is signed, any overflow in the computation is undefined,
3969 so we don't worry about it so long as our computations on
3970 the bounds don't overflow. For unsigned, overflow is defined
3971 and this is exactly the right thing. */
3972 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3973 arg0_type, low, 0, arg1, 0);
3974 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3975 arg0_type, high, 1, arg1, 0);
3976 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3977 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3978 break;
3979
3980 /* Check for an unsigned range which has wrapped around the maximum
3981 value thus making n_high < n_low, and normalize it. */
3982 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3983 {
3984 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3985 integer_one_node, 0);
3986 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3987 integer_one_node, 0);
3988
3989 /* If the range is of the form +/- [ x+1, x ], we won't
3990 be able to normalize it. But then, it represents the
3991 whole range or the empty set, so make it
3992 +/- [ -, - ]. */
3993 if (tree_int_cst_equal (n_low, low)
3994 && tree_int_cst_equal (n_high, high))
3995 low = high = 0;
3996 else
3997 in_p = ! in_p;
3998 }
3999 else
4000 low = n_low, high = n_high;
4001
4002 exp = arg0;
4003 continue;
4004
4005 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4006 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4007 break;
4008
4009 if (! INTEGRAL_TYPE_P (arg0_type)
4010 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4011 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4012 break;
4013
4014 n_low = low, n_high = high;
4015
4016 if (n_low != 0)
4017 n_low = fold_convert (arg0_type, n_low);
4018
4019 if (n_high != 0)
4020 n_high = fold_convert (arg0_type, n_high);
4021
4022
4023 	  /* If we're converting arg0 from an unsigned type to exp's
4024 	     signed type, we will be doing the comparison as unsigned.
4025 The tests above have already verified that LOW and HIGH
4026 are both positive.
4027
4028 So we have to ensure that we will handle large unsigned
4029 values the same way that the current signed bounds treat
4030 negative values. */
4031
4032 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4033 {
4034 tree high_positive;
4035 tree equiv_type = lang_hooks.types.type_for_mode
4036 (TYPE_MODE (arg0_type), 1);
4037
4038 /* A range without an upper bound is, naturally, unbounded.
4039 Since convert would have cropped a very large value, use
4040 the max value for the destination type. */
4041 high_positive
4042 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4043 : TYPE_MAX_VALUE (arg0_type);
4044
4045 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4046 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4047 fold_convert (arg0_type,
4048 high_positive),
4049 build_int_cst (arg0_type, 1));
4050
4051 /* If the low bound is specified, "and" the range with the
4052 range for which the original unsigned value will be
4053 positive. */
4054 if (low != 0)
4055 {
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 1, n_low, n_high, 1,
4058 fold_convert (arg0_type,
4059 integer_zero_node),
4060 high_positive))
4061 break;
4062
4063 in_p = (n_in_p == in_p);
4064 }
4065 else
4066 {
4067 /* Otherwise, "or" the range with the range of the input
4068 that will be interpreted as negative. */
4069 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4070 0, n_low, n_high, 1,
4071 fold_convert (arg0_type,
4072 integer_zero_node),
4073 high_positive))
4074 break;
4075
4076 in_p = (in_p != n_in_p);
4077 }
4078 }
4079
4080 exp = arg0;
4081 low = n_low, high = n_high;
4082 continue;
4083
4084 default:
4085 break;
4086 }
4087
4088 break;
4089 }
4090
4091 /* If EXP is a constant, we can evaluate whether this is true or false. */
4092 if (TREE_CODE (exp) == INTEGER_CST)
4093 {
4094 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4095 exp, 0, low, 0))
4096 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4097 exp, 1, high, 1)));
4098 low = high = 0;
4099 exp = 0;
4100 }
4101
4102 *pin_p = in_p, *plow = low, *phigh = high;
4103 return exp;
4104 }
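
/* Illustrative sketch, not GCC source: a brute-force check of the
   PLUS_EXPR bound-shifting case above, on 8-bit unsigned arithmetic.
   Shifting the bounds of [50, 150] by C = 100 wraps (n_high < n_low),
   so the normalized range is the complement of [nhi + 1, nlo - 1].  */
#if 0
#include <stdio.h>
int
main (void)
{
  unsigned char c = 100, lo = 50, hi = 150;
  unsigned char nlo = lo - c, nhi = hi - c;    /* shifted bounds; these wrap */
  unsigned int x;
  for (x = 0; x < 256; x++)
    {
      unsigned char v = (unsigned char) (x + c);
      int in_orig = v >= lo && v <= hi;
      int in_new = nlo <= nhi
                   ? x >= nlo && x <= nhi
                   : ! (x >= (unsigned char) (nhi + 1)
                        && x <= (unsigned char) (nlo - 1));
      if (in_orig != in_new)
        {
          printf ("mismatch at %u\n", x);
          return 1;
        }
    }
  printf ("ok\n");
  return 0;
}
#endif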
4105 \f
4106 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4107 type, TYPE, return an expression to test if EXP is in (or out of, depending
4108 on IN_P) the range. Return 0 if the test couldn't be created. */
4109
4110 static tree
4111 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4112 {
4113 tree etype = TREE_TYPE (exp);
4114 tree value;
4115
4116 #ifdef HAVE_canonicalize_funcptr_for_compare
4117 /* Disable this optimization for function pointer expressions
4118 on targets that require function pointer canonicalization. */
4119 if (HAVE_canonicalize_funcptr_for_compare
4120 && TREE_CODE (etype) == POINTER_TYPE
4121 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4122 return NULL_TREE;
4123 #endif
4124
4125 if (! in_p)
4126 {
4127 value = build_range_check (type, exp, 1, low, high);
4128 if (value != 0)
4129 return invert_truthvalue (value);
4130
4131 return 0;
4132 }
4133
4134 if (low == 0 && high == 0)
4135 return build_int_cst (type, 1);
4136
4137 if (low == 0)
4138 return fold_build2 (LE_EXPR, type, exp,
4139 fold_convert (etype, high));
4140
4141 if (high == 0)
4142 return fold_build2 (GE_EXPR, type, exp,
4143 fold_convert (etype, low));
4144
4145 if (operand_equal_p (low, high, 0))
4146 return fold_build2 (EQ_EXPR, type, exp,
4147 fold_convert (etype, low));
4148
4149 if (integer_zerop (low))
4150 {
4151 if (! TYPE_UNSIGNED (etype))
4152 {
4153 etype = lang_hooks.types.unsigned_type (etype);
4154 high = fold_convert (etype, high);
4155 exp = fold_convert (etype, exp);
4156 }
4157 return build_range_check (type, exp, 1, 0, high);
4158 }
4159
4160 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4161 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4162 {
4163 unsigned HOST_WIDE_INT lo;
4164 HOST_WIDE_INT hi;
4165 int prec;
4166
4167 prec = TYPE_PRECISION (etype);
4168 if (prec <= HOST_BITS_PER_WIDE_INT)
4169 {
4170 hi = 0;
4171 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4172 }
4173 else
4174 {
4175 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4176 lo = (unsigned HOST_WIDE_INT) -1;
4177 }
4178
4179 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4180 {
4181 if (TYPE_UNSIGNED (etype))
4182 {
4183 etype = lang_hooks.types.signed_type (etype);
4184 exp = fold_convert (etype, exp);
4185 }
4186 return fold_build2 (GT_EXPR, type, exp,
4187 build_int_cst (etype, 0));
4188 }
4189 }
4190
4191 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4192    This requires wrap-around arithmetic for the type of the expression.  */
4193 switch (TREE_CODE (etype))
4194 {
4195 case INTEGER_TYPE:
4196 /* There is no requirement that LOW be within the range of ETYPE
4197 if the latter is a subtype. It must, however, be within the base
4198 type of ETYPE. So be sure we do the subtraction in that type. */
4199 if (TREE_TYPE (etype))
4200 etype = TREE_TYPE (etype);
4201 break;
4202
4203 case ENUMERAL_TYPE:
4204 case BOOLEAN_TYPE:
4205 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4206 TYPE_UNSIGNED (etype));
4207 break;
4208
4209 default:
4210 break;
4211 }
4212
4213   /* If we don't have wrap-around arithmetic up front, try to force it.  */
4214 if (TREE_CODE (etype) == INTEGER_TYPE
4215 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4216 {
4217 tree utype, minv, maxv;
4218
4219 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4220 for the type in question, as we rely on this here. */
4221 utype = lang_hooks.types.unsigned_type (etype);
4222 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4223 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4224 integer_one_node, 1);
4225 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4226
4227 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4228 minv, 1, maxv, 1)))
4229 etype = utype;
4230 else
4231 return 0;
4232 }
4233
4234 high = fold_convert (etype, high);
4235 low = fold_convert (etype, low);
4236 exp = fold_convert (etype, exp);
4237
4238 value = const_binop (MINUS_EXPR, high, low, 0);
4239
4240 if (value != 0 && !TREE_OVERFLOW (value))
4241 return build_range_check (type,
4242 fold_build2 (MINUS_EXPR, etype, exp, low),
4243 1, build_int_cst (etype, 0), value);
4244
4245 return 0;
4246 }
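
/* Illustrative sketch, not GCC source: the (c>=1) && (c<=127) case
   above, on unsigned char.  Assumes two's complement, so (signed char)
   applied to 128..255 yields a negative value; the two tests agree for
   every input.  */
#if 0
static int check_pair (unsigned char c) { return c >= 1 && c <= 127; }
static int check_sign (unsigned char c) { return (signed char) c > 0; }
#endif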
4247 \f
4248 /* Return the predecessor of VAL in its type, handling the infinite case. */
4249
4250 static tree
4251 range_predecessor (tree val)
4252 {
4253 tree type = TREE_TYPE (val);
4254
4255 if (INTEGRAL_TYPE_P (type)
4256 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4257 return 0;
4258 else
4259 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4260 }
4261
4262 /* Return the successor of VAL in its type, handling the infinite case. */
4263
4264 static tree
4265 range_successor (tree val)
4266 {
4267 tree type = TREE_TYPE (val);
4268
4269 if (INTEGRAL_TYPE_P (type)
4270 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4271 return 0;
4272 else
4273 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4274 }
4275
4276 /* Given two ranges, see if we can merge them into one. Return 1 if we
4277 can, 0 if we can't. Set the output range into the specified parameters. */
4278
4279 static int
4280 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4281 tree high0, int in1_p, tree low1, tree high1)
4282 {
4283 int no_overlap;
4284 int subset;
4285 int temp;
4286 tree tem;
4287 int in_p;
4288 tree low, high;
4289 int lowequal = ((low0 == 0 && low1 == 0)
4290 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4291 low0, 0, low1, 0)));
4292 int highequal = ((high0 == 0 && high1 == 0)
4293 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4294 high0, 1, high1, 1)));
4295
4296 /* Make range 0 be the range that starts first, or ends last if they
4297 start at the same value. Swap them if it isn't. */
4298 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4299 low0, 0, low1, 0))
4300 || (lowequal
4301 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4302 high1, 1, high0, 1))))
4303 {
4304 temp = in0_p, in0_p = in1_p, in1_p = temp;
4305 tem = low0, low0 = low1, low1 = tem;
4306 tem = high0, high0 = high1, high1 = tem;
4307 }
4308
4309 /* Now flag two cases, whether the ranges are disjoint or whether the
4310 second range is totally subsumed in the first. Note that the tests
4311 below are simplified by the ones above. */
4312 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4313 high0, 1, low1, 0));
4314 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4315 high1, 1, high0, 1));
4316
4317 /* We now have four cases, depending on whether we are including or
4318 excluding the two ranges. */
4319 if (in0_p && in1_p)
4320 {
4321 /* If they don't overlap, the result is false. If the second range
4322 is a subset it is the result. Otherwise, the range is from the start
4323 of the second to the end of the first. */
4324 if (no_overlap)
4325 in_p = 0, low = high = 0;
4326 else if (subset)
4327 in_p = 1, low = low1, high = high1;
4328 else
4329 in_p = 1, low = low1, high = high0;
4330 }
4331
4332 else if (in0_p && ! in1_p)
4333 {
4334 /* If they don't overlap, the result is the first range. If they are
4335 equal, the result is false. If the second range is a subset of the
4336 first, and the ranges begin at the same place, we go from just after
4337 the end of the second range to the end of the first. If the second
4338 range is not a subset of the first, or if it is a subset and both
4339 ranges end at the same place, the range starts at the start of the
4340 first range and ends just before the second range.
4341 Otherwise, we can't describe this as a single range. */
4342 if (no_overlap)
4343 in_p = 1, low = low0, high = high0;
4344 else if (lowequal && highequal)
4345 in_p = 0, low = high = 0;
4346 else if (subset && lowequal)
4347 {
4348 low = range_successor (high1);
4349 high = high0;
4350 in_p = (low != 0);
4351 }
4352 else if (! subset || highequal)
4353 {
4354 low = low0;
4355 high = range_predecessor (low1);
4356 in_p = (high != 0);
4357 }
4358 else
4359 return 0;
4360 }
4361
4362 else if (! in0_p && in1_p)
4363 {
4364 /* If they don't overlap, the result is the second range. If the second
4365 is a subset of the first, the result is false. Otherwise,
4366 the range starts just after the first range and ends at the
4367 end of the second. */
4368 if (no_overlap)
4369 in_p = 1, low = low1, high = high1;
4370 else if (subset || highequal)
4371 in_p = 0, low = high = 0;
4372 else
4373 {
4374 low = range_successor (high0);
4375 high = high1;
4376 in_p = (low != 0);
4377 }
4378 }
4379
4380 else
4381 {
4382 /* The case where we are excluding both ranges. Here the complex case
4383 is if they don't overlap. In that case, the only time we have a
4384 range is if they are adjacent. If the second is a subset of the
4385 first, the result is the first. Otherwise, the range to exclude
4386 starts at the beginning of the first range and ends at the end of the
4387 second. */
4388 if (no_overlap)
4389 {
4390 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4391 range_successor (high0),
4392 1, low1, 0)))
4393 in_p = 0, low = low0, high = high1;
4394 else
4395 {
4396 /* Canonicalize - [min, x] into - [-, x]. */
4397 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4398 switch (TREE_CODE (TREE_TYPE (low0)))
4399 {
4400 case ENUMERAL_TYPE:
4401 if (TYPE_PRECISION (TREE_TYPE (low0))
4402 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4403 break;
4404 /* FALLTHROUGH */
4405 case INTEGER_TYPE:
4406 if (tree_int_cst_equal (low0,
4407 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4408 low0 = 0;
4409 break;
4410 case POINTER_TYPE:
4411 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4412 && integer_zerop (low0))
4413 low0 = 0;
4414 break;
4415 default:
4416 break;
4417 }
4418
4419 /* Canonicalize - [x, max] into - [x, -]. */
4420 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4421 switch (TREE_CODE (TREE_TYPE (high1)))
4422 {
4423 case ENUMERAL_TYPE:
4424 if (TYPE_PRECISION (TREE_TYPE (high1))
4425 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4426 break;
4427 /* FALLTHROUGH */
4428 case INTEGER_TYPE:
4429 if (tree_int_cst_equal (high1,
4430 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4431 high1 = 0;
4432 break;
4433 case POINTER_TYPE:
4434 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4435 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4436 high1, 1,
4437 integer_one_node, 1)))
4438 high1 = 0;
4439 break;
4440 default:
4441 break;
4442 }
4443
4444 	      /* The ranges might also be adjacent between the maximum and
4445 minimum values of the given type. For
4446 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4447 return + [x + 1, y - 1]. */
4448 if (low0 == 0 && high1 == 0)
4449 {
4450 low = range_successor (high0);
4451 high = range_predecessor (low1);
4452 if (low == 0 || high == 0)
4453 return 0;
4454
4455 in_p = 1;
4456 }
4457 else
4458 return 0;
4459 }
4460 }
4461 else if (subset)
4462 in_p = 0, low = low0, high = high0;
4463 else
4464 in_p = 0, low = low0, high = high1;
4465 }
4466
4467 *pin_p = in_p, *plow = low, *phigh = high;
4468 return 1;
4469 }
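
/* Illustrative sketch, not GCC source: the in0_p && in1_p arm above,
   reduced to plain int ranges.  Returns 0 for the always-false
   (disjoint) case; the helper name is hypothetical.  */
#if 0
static int
merge_in_ranges (int lo0, int hi0, int lo1, int hi1, int *lo, int *hi)
{
  int t;
  /* Order the ranges so range 0 starts first, as merge_ranges does.  */
  if (lo0 > lo1)
    {
      t = lo0, lo0 = lo1, lo1 = t;
      t = hi0, hi0 = hi1, hi1 = t;
    }
  if (hi0 < lo1)
    return 0;                   /* disjoint: the conjunction is false */
  *lo = lo1;                    /* start of the second range */
  *hi = hi1 < hi0 ? hi1 : hi0;  /* end of whichever range ends first */
  return 1;
}
#endif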
4470 \f
4471
4472 /* Subroutine of fold, looking inside expressions of the form
4473 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4474 of the COND_EXPR. This function is being used also to optimize
4475 A op B ? C : A, by reversing the comparison first.
4476
4477 Return a folded expression whose code is not a COND_EXPR
4478 anymore, or NULL_TREE if no folding opportunity is found. */
4479
4480 static tree
4481 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4482 {
4483 enum tree_code comp_code = TREE_CODE (arg0);
4484 tree arg00 = TREE_OPERAND (arg0, 0);
4485 tree arg01 = TREE_OPERAND (arg0, 1);
4486 tree arg1_type = TREE_TYPE (arg1);
4487 tree tem;
4488
4489 STRIP_NOPS (arg1);
4490 STRIP_NOPS (arg2);
4491
4492 /* If we have A op 0 ? A : -A, consider applying the following
4493 transformations:
4494
4495 A == 0? A : -A same as -A
4496 A != 0? A : -A same as A
4497 A >= 0? A : -A same as abs (A)
4498 A > 0? A : -A same as abs (A)
4499 A <= 0? A : -A same as -abs (A)
4500 A < 0? A : -A same as -abs (A)
4501
4502 None of these transformations work for modes with signed
4503 zeros. If A is +/-0, the first two transformations will
4504 change the sign of the result (from +0 to -0, or vice
4505 versa). The last four will fix the sign of the result,
4506 even though the original expressions could be positive or
4507 negative, depending on the sign of A.
4508
4509 Note that all these transformations are correct if A is
4510 NaN, since the two alternatives (A and -A) are also NaNs. */
4511 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4512 ? real_zerop (arg01)
4513 : integer_zerop (arg01))
4514 && ((TREE_CODE (arg2) == NEGATE_EXPR
4515 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4516 /* In the case that A is of the form X-Y, '-A' (arg2) may
4517 	     have already been folded to Y-X; check for that.  */
4518 || (TREE_CODE (arg1) == MINUS_EXPR
4519 && TREE_CODE (arg2) == MINUS_EXPR
4520 && operand_equal_p (TREE_OPERAND (arg1, 0),
4521 TREE_OPERAND (arg2, 1), 0)
4522 && operand_equal_p (TREE_OPERAND (arg1, 1),
4523 TREE_OPERAND (arg2, 0), 0))))
4524 switch (comp_code)
4525 {
4526 case EQ_EXPR:
4527 case UNEQ_EXPR:
4528 tem = fold_convert (arg1_type, arg1);
4529 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4530 case NE_EXPR:
4531 case LTGT_EXPR:
4532 return pedantic_non_lvalue (fold_convert (type, arg1));
4533 case UNGE_EXPR:
4534 case UNGT_EXPR:
4535 if (flag_trapping_math)
4536 break;
4537 /* Fall through. */
4538 case GE_EXPR:
4539 case GT_EXPR:
4540 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4541 arg1 = fold_convert (lang_hooks.types.signed_type
4542 (TREE_TYPE (arg1)), arg1);
4543 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4544 return pedantic_non_lvalue (fold_convert (type, tem));
4545 case UNLE_EXPR:
4546 case UNLT_EXPR:
4547 if (flag_trapping_math)
4548 break;
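	  /* Fall through.  */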
4549 case LE_EXPR:
4550 case LT_EXPR:
4551 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4552 arg1 = fold_convert (lang_hooks.types.signed_type
4553 (TREE_TYPE (arg1)), arg1);
4554 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4555 return negate_expr (fold_convert (type, tem));
4556 default:
4557 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4558 break;
4559 }
4560
4561 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4562 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4563 both transformations are correct when A is NaN: A != 0
4564 is then true, and A == 0 is false. */
4565
4566 if (integer_zerop (arg01) && integer_zerop (arg2))
4567 {
4568 if (comp_code == NE_EXPR)
4569 return pedantic_non_lvalue (fold_convert (type, arg1));
4570 else if (comp_code == EQ_EXPR)
4571 return build_int_cst (type, 0);
4572 }
4573
4574 /* Try some transformations of A op B ? A : B.
4575
4576 A == B? A : B same as B
4577 A != B? A : B same as A
4578 A >= B? A : B same as max (A, B)
4579 A > B? A : B same as max (B, A)
4580 A <= B? A : B same as min (A, B)
4581 A < B? A : B same as min (B, A)
4582
4583 As above, these transformations don't work in the presence
4584 of signed zeros. For example, if A and B are zeros of
4585 opposite sign, the first two transformations will change
4586 the sign of the result. In the last four, the original
4587 expressions give different results for (A=+0, B=-0) and
4588 (A=-0, B=+0), but the transformed expressions do not.
4589
4590 The first two transformations are correct if either A or B
4591 is a NaN. In the first transformation, the condition will
4592 be false, and B will indeed be chosen. In the case of the
4593 second transformation, the condition A != B will be true,
4594 and A will be chosen.
4595
4596 The conversions to max() and min() are not correct if B is
4597 a number and A is not. The conditions in the original
4598 expressions will be false, so all four give B. The min()
4599 and max() versions would give a NaN instead. */
4600 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4601 /* Avoid these transformations if the COND_EXPR may be used
4602 as an lvalue in the C++ front-end. PR c++/19199. */
4603 && (in_gimple_form
4604 || (strcmp (lang_hooks.name, "GNU C++") != 0
4605 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4606 || ! maybe_lvalue_p (arg1)
4607 || ! maybe_lvalue_p (arg2)))
4608 {
4609 tree comp_op0 = arg00;
4610 tree comp_op1 = arg01;
4611 tree comp_type = TREE_TYPE (comp_op0);
4612
4613 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4614 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4615 {
4616 comp_type = type;
4617 comp_op0 = arg1;
4618 comp_op1 = arg2;
4619 }
4620
4621 switch (comp_code)
4622 {
4623 case EQ_EXPR:
4624 return pedantic_non_lvalue (fold_convert (type, arg2));
4625 case NE_EXPR:
4626 return pedantic_non_lvalue (fold_convert (type, arg1));
4627 case LE_EXPR:
4628 case LT_EXPR:
4629 case UNLE_EXPR:
4630 case UNLT_EXPR:
4631 /* In C++ a ?: expression can be an lvalue, so put the
4632 operand which will be used if they are equal first
4633 so that we can convert this back to the
4634 corresponding COND_EXPR. */
4635 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4636 {
4637 comp_op0 = fold_convert (comp_type, comp_op0);
4638 comp_op1 = fold_convert (comp_type, comp_op1);
4639 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4640 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4641 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4642 return pedantic_non_lvalue (fold_convert (type, tem));
4643 }
4644 break;
4645 case GE_EXPR:
4646 case GT_EXPR:
4647 case UNGE_EXPR:
4648 case UNGT_EXPR:
4649 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4650 {
4651 comp_op0 = fold_convert (comp_type, comp_op0);
4652 comp_op1 = fold_convert (comp_type, comp_op1);
4653 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4654 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4655 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4656 return pedantic_non_lvalue (fold_convert (type, tem));
4657 }
4658 break;
4659 case UNEQ_EXPR:
4660 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4661 return pedantic_non_lvalue (fold_convert (type, arg2));
4662 break;
4663 case LTGT_EXPR:
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4665 return pedantic_non_lvalue (fold_convert (type, arg1));
4666 break;
4667 default:
4668 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4669 break;
4670 }
4671 }
4672
4673 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4674 we might still be able to simplify this. For example,
4675 if C1 is one less or one more than C2, this might have started
4676 out as a MIN or MAX and been transformed by this function.
4677 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4678
4679 if (INTEGRAL_TYPE_P (type)
4680 && TREE_CODE (arg01) == INTEGER_CST
4681 && TREE_CODE (arg2) == INTEGER_CST)
4682 switch (comp_code)
4683 {
4684 case EQ_EXPR:
4685 /* We can replace A with C1 in this case. */
4686 arg1 = fold_convert (type, arg01);
4687 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4688
4689 case LT_EXPR:
4690 /* If C1 is C2 + 1, this is min(A, C2). */
4691 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4692 OEP_ONLY_CONST)
4693 && operand_equal_p (arg01,
4694 const_binop (PLUS_EXPR, arg2,
4695 build_int_cst (type, 1), 0),
4696 OEP_ONLY_CONST))
4697 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4698 type, arg1, arg2));
4699 break;
4700
4701 case LE_EXPR:
4702 /* If C1 is C2 - 1, this is min(A, C2). */
4703 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4704 OEP_ONLY_CONST)
4705 && operand_equal_p (arg01,
4706 const_binop (MINUS_EXPR, arg2,
4707 build_int_cst (type, 1), 0),
4708 OEP_ONLY_CONST))
4709 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4710 type, arg1, arg2));
4711 break;
4712
4713 case GT_EXPR:
4714 /* If C1 is C2 - 1, this is max(A, C2). */
4715 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4716 OEP_ONLY_CONST)
4717 && operand_equal_p (arg01,
4718 const_binop (MINUS_EXPR, arg2,
4719 build_int_cst (type, 1), 0),
4720 OEP_ONLY_CONST))
4721 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4722 type, arg1, arg2));
4723 break;
4724
4725 case GE_EXPR:
4726 /* If C1 is C2 + 1, this is max(A, C2). */
4727 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4728 OEP_ONLY_CONST)
4729 && operand_equal_p (arg01,
4730 const_binop (PLUS_EXPR, arg2,
4731 build_int_cst (type, 1), 0),
4732 OEP_ONLY_CONST))
4733 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4734 type, arg1, arg2));
4735 break;
4736 case NE_EXPR:
4737 break;
4738 default:
4739 gcc_unreachable ();
4740 }
4741
4742 return NULL_TREE;
4743 }
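
/* Illustrative sketch, not GCC source: the A >= 0 ? A : -A case above.
   With signed zeros honored the fold is invalid: for x == -0.0 the
   condition -0.0 >= 0.0 is true, so sel returns -0.0 while fabs
   returns +0.0.  */
#if 0
#include <math.h>
static double sel (double x) { return x >= 0.0 ? x : -x; }
static double via_abs (double x) { return fabs (x); }
#endif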
4744
4745
4746 \f
4747 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4748 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4749 #endif
4750
4751 /* EXP is some logical combination of boolean tests. See if we can
4752 merge it into some range test. Return the new tree if so. */
4753
4754 static tree
4755 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4756 {
4757 int or_op = (code == TRUTH_ORIF_EXPR
4758 || code == TRUTH_OR_EXPR);
4759 int in0_p, in1_p, in_p;
4760 tree low0, low1, low, high0, high1, high;
4761 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4762 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4763 tree tem;
4764
4765 /* If this is an OR operation, invert both sides; we will invert
4766 again at the end. */
4767 if (or_op)
4768 in0_p = ! in0_p, in1_p = ! in1_p;
4769
4770 /* If both expressions are the same, if we can merge the ranges, and we
4771 can build the range test, return it or it inverted. If one of the
4772 ranges is always true or always false, consider it to be the same
4773 expression as the other. */
4774 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4775 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4776 in1_p, low1, high1)
4777 && 0 != (tem = (build_range_check (type,
4778 lhs != 0 ? lhs
4779 : rhs != 0 ? rhs : integer_zero_node,
4780 in_p, low, high))))
4781 return or_op ? invert_truthvalue (tem) : tem;
4782
4783 /* On machines where the branch cost is expensive, if this is a
4784 short-circuited branch and the underlying object on both sides
4785 is the same, make a non-short-circuit operation. */
4786 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4787 && lhs != 0 && rhs != 0
4788 && (code == TRUTH_ANDIF_EXPR
4789 || code == TRUTH_ORIF_EXPR)
4790 && operand_equal_p (lhs, rhs, 0))
4791 {
4792 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4793 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4794 which cases we can't do this. */
4795 if (simple_operand_p (lhs))
4796 return build2 (code == TRUTH_ANDIF_EXPR
4797 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4798 type, op0, op1);
4799
4800 else if (lang_hooks.decls.global_bindings_p () == 0
4801 && ! CONTAINS_PLACEHOLDER_P (lhs))
4802 {
4803 tree common = save_expr (lhs);
4804
4805 if (0 != (lhs = build_range_check (type, common,
4806 or_op ? ! in0_p : in0_p,
4807 low0, high0))
4808 && (0 != (rhs = build_range_check (type, common,
4809 or_op ? ! in1_p : in1_p,
4810 low1, high1))))
4811 return build2 (code == TRUTH_ANDIF_EXPR
4812 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4813 type, lhs, rhs);
4814 }
4815 }
4816
4817 return 0;
4818 }
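
/* Illustrative sketch, not GCC source: the non-short-circuit rewrite
   above.  When both operands are simple, TRUTH_ANDIF_EXPR (&&) becomes
   the branch-free TRUTH_AND_EXPR, evaluating both comparisons
   unconditionally.  */
#if 0
static int short_circuit (int x) { return x >= 2 && x <= 5; }
static int unconditional (int x) { return (x >= 2) & (x <= 5); }
#endif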
4819 \f
4820 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4821 bit value. Arrange things so the extra bits will be set to zero if and
4822    only if C is sign-extended to its full width.  If MASK is nonzero,
4823 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4824
4825 static tree
4826 unextend (tree c, int p, int unsignedp, tree mask)
4827 {
4828 tree type = TREE_TYPE (c);
4829 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4830 tree temp;
4831
4832 if (p == modesize || unsignedp)
4833 return c;
4834
4835 /* We work by getting just the sign bit into the low-order bit, then
4836 into the high-order bit, then sign-extend. We then XOR that value
4837 with C. */
4838 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4839 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4840
4841 /* We must use a signed type in order to get an arithmetic right shift.
4842 However, we must also avoid introducing accidental overflows, so that
4843 a subsequent call to integer_zerop will work. Hence we must
4844 do the type conversion here. At this point, the constant is either
4845 zero or one, and the conversion to a signed type can never overflow.
4846 We could get an overflow if this conversion is done anywhere else. */
4847 if (TYPE_UNSIGNED (type))
4848 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4849
4850 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4851 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4852 if (mask != 0)
4853 temp = const_binop (BIT_AND_EXPR, temp,
4854 fold_convert (TREE_TYPE (c), mask), 0);
4855 /* If necessary, convert the type back to match the type of C. */
4856 if (TYPE_UNSIGNED (type))
4857 temp = fold_convert (type, temp);
4858
4859 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4860 }
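
/* Illustrative sketch, not GCC source: unextend for a 32-bit mode and
   an 8-bit field, with MASK omitted.  Assumes two's complement with
   arithmetic right shifts of signed values; the helper name is
   hypothetical.  After the XOR, bits 8..31 of the result are zero iff
   C was the sign extension of its low 8 bits.  */
#if 0
#include <stdint.h>
static uint32_t
unextend8 (uint32_t c)
{
  uint32_t sign = (c >> 7) & 1;  /* the field's sign bit, in the low-order bit */
  int32_t temp = (int32_t) (sign << 31) >> (32 - 8 - 1);  /* replicated into bits 8..31 */
  return c ^ (uint32_t) temp;
}
#endif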
4861 \f
4862 /* Find ways of folding logical expressions of LHS and RHS:
4863 Try to merge two comparisons to the same innermost item.
4864 Look for range tests like "ch >= '0' && ch <= '9'".
4865 Look for combinations of simple terms on machines with expensive branches
4866 and evaluate the RHS unconditionally.
4867
4868 For example, if we have p->a == 2 && p->b == 4 and we can make an
4869 object large enough to span both A and B, we can do this with a comparison
4870    against the object ANDed with a mask.
4871
4872 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4873 operations to do this with one comparison.
4874
4875    We check for both normal comparisons and the BIT_AND_EXPRs made by this
4876 function and the one above.
4877
4878 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4879 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4880
4881 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4882 two operands.
4883
4884 We return the simplified tree or 0 if no optimization is possible. */
4885
4886 static tree
4887 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4888 {
4889 /* If this is the "or" of two comparisons, we can do something if
4890 the comparisons are NE_EXPR. If this is the "and", we can do something
4891 if the comparisons are EQ_EXPR. I.e.,
4892 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4893
4894      WANTED_CODE is the comparison code implied by this operation.  For
4895      single bit fields, we can convert EQ_EXPR to NE_EXPR so we need not
4896      reject the "wrong" comparison for one-bit fields.  */
4897
4898 enum tree_code wanted_code;
4899 enum tree_code lcode, rcode;
4900 tree ll_arg, lr_arg, rl_arg, rr_arg;
4901 tree ll_inner, lr_inner, rl_inner, rr_inner;
4902 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4903 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4904 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4905 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4906 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4907 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4908 enum machine_mode lnmode, rnmode;
4909 tree ll_mask, lr_mask, rl_mask, rr_mask;
4910 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4911 tree l_const, r_const;
4912 tree lntype, rntype, result;
4913 int first_bit, end_bit;
4914 int volatilep;
4915 tree orig_lhs = lhs, orig_rhs = rhs;
4916 enum tree_code orig_code = code;
4917
4918 /* Start by getting the comparison codes. Fail if anything is volatile.
4919 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4920 it were surrounded with a NE_EXPR. */
4921
4922 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4923 return 0;
4924
4925 lcode = TREE_CODE (lhs);
4926 rcode = TREE_CODE (rhs);
4927
4928 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4929 {
4930 lhs = build2 (NE_EXPR, truth_type, lhs,
4931 build_int_cst (TREE_TYPE (lhs), 0));
4932 lcode = NE_EXPR;
4933 }
4934
4935 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4936 {
4937 rhs = build2 (NE_EXPR, truth_type, rhs,
4938 build_int_cst (TREE_TYPE (rhs), 0));
4939 rcode = NE_EXPR;
4940 }
4941
4942 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4943 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4944 return 0;
4945
4946 ll_arg = TREE_OPERAND (lhs, 0);
4947 lr_arg = TREE_OPERAND (lhs, 1);
4948 rl_arg = TREE_OPERAND (rhs, 0);
4949 rr_arg = TREE_OPERAND (rhs, 1);
4950
4951 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4952 if (simple_operand_p (ll_arg)
4953 && simple_operand_p (lr_arg))
4954 {
4955 tree result;
4956 if (operand_equal_p (ll_arg, rl_arg, 0)
4957 && operand_equal_p (lr_arg, rr_arg, 0))
4958 {
4959 result = combine_comparisons (code, lcode, rcode,
4960 truth_type, ll_arg, lr_arg);
4961 if (result)
4962 return result;
4963 }
4964 else if (operand_equal_p (ll_arg, rr_arg, 0)
4965 && operand_equal_p (lr_arg, rl_arg, 0))
4966 {
4967 result = combine_comparisons (code, lcode,
4968 swap_tree_comparison (rcode),
4969 truth_type, ll_arg, lr_arg);
4970 if (result)
4971 return result;
4972 }
4973 }
4974
4975 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4976 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4977
4978 /* If the RHS can be evaluated unconditionally and its operands are
4979 simple, it wins to evaluate the RHS unconditionally on machines
4980 with expensive branches. In this case, this isn't a comparison
4981 that can be merged. Avoid doing this if the RHS is a floating-point
4982 comparison since those can trap. */
4983
4984 if (BRANCH_COST >= 2
4985 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4986 && simple_operand_p (rl_arg)
4987 && simple_operand_p (rr_arg))
4988 {
4989 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4990 if (code == TRUTH_OR_EXPR
4991 && lcode == NE_EXPR && integer_zerop (lr_arg)
4992 && rcode == NE_EXPR && integer_zerop (rr_arg)
4993 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4994 return build2 (NE_EXPR, truth_type,
4995 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4996 ll_arg, rl_arg),
4997 build_int_cst (TREE_TYPE (ll_arg), 0));
4998
4999 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5000 if (code == TRUTH_AND_EXPR
5001 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5002 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5003 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5004 return build2 (EQ_EXPR, truth_type,
5005 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5006 ll_arg, rl_arg),
5007 build_int_cst (TREE_TYPE (ll_arg), 0));
5008
5009 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5010 {
5011 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5012 return build2 (code, truth_type, lhs, rhs);
5013 return NULL_TREE;
5014 }
5015 }
5016
5017 /* See if the comparisons can be merged. Then get all the parameters for
5018 each side. */
5019
5020 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5021 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5022 return 0;
5023
5024 volatilep = 0;
5025 ll_inner = decode_field_reference (ll_arg,
5026 &ll_bitsize, &ll_bitpos, &ll_mode,
5027 &ll_unsignedp, &volatilep, &ll_mask,
5028 &ll_and_mask);
5029 lr_inner = decode_field_reference (lr_arg,
5030 &lr_bitsize, &lr_bitpos, &lr_mode,
5031 &lr_unsignedp, &volatilep, &lr_mask,
5032 &lr_and_mask);
5033 rl_inner = decode_field_reference (rl_arg,
5034 &rl_bitsize, &rl_bitpos, &rl_mode,
5035 &rl_unsignedp, &volatilep, &rl_mask,
5036 &rl_and_mask);
5037 rr_inner = decode_field_reference (rr_arg,
5038 &rr_bitsize, &rr_bitpos, &rr_mode,
5039 &rr_unsignedp, &volatilep, &rr_mask,
5040 &rr_and_mask);
5041
5042   /* The inner operation on the lhs of each comparison must be the
5043      same if we are to be able to do anything.
5044 Then see if we have constants. If not, the same must be true for
5045 the rhs's. */
5046 if (volatilep || ll_inner == 0 || rl_inner == 0
5047 || ! operand_equal_p (ll_inner, rl_inner, 0))
5048 return 0;
5049
5050 if (TREE_CODE (lr_arg) == INTEGER_CST
5051 && TREE_CODE (rr_arg) == INTEGER_CST)
5052 l_const = lr_arg, r_const = rr_arg;
5053 else if (lr_inner == 0 || rr_inner == 0
5054 || ! operand_equal_p (lr_inner, rr_inner, 0))
5055 return 0;
5056 else
5057 l_const = r_const = 0;
5058
5059 /* If either comparison code is not correct for our logical operation,
5060 fail. However, we can convert a one-bit comparison against zero into
5061 the opposite comparison against that bit being set in the field. */
5062
5063 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5064 if (lcode != wanted_code)
5065 {
5066 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5067 {
5068 /* Make the left operand unsigned, since we are only interested
5069 in the value of one bit. Otherwise we are doing the wrong
5070 thing below. */
5071 ll_unsignedp = 1;
5072 l_const = ll_mask;
5073 }
5074 else
5075 return 0;
5076 }
5077
5078 /* This is analogous to the code for l_const above. */
5079 if (rcode != wanted_code)
5080 {
5081 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5082 {
5083 rl_unsignedp = 1;
5084 r_const = rl_mask;
5085 }
5086 else
5087 return 0;
5088 }
5089
5090 /* See if we can find a mode that contains both fields being compared on
5091 the left. If we can't, fail. Otherwise, update all constants and masks
5092 to be relative to a field of that size. */
5093 first_bit = MIN (ll_bitpos, rl_bitpos);
5094 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5095 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5096 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5097 volatilep);
5098 if (lnmode == VOIDmode)
5099 return 0;
5100
5101 lnbitsize = GET_MODE_BITSIZE (lnmode);
5102 lnbitpos = first_bit & ~ (lnbitsize - 1);
5103 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5104 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5105
5106 if (BYTES_BIG_ENDIAN)
5107 {
5108 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5109 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5110 }
5111
5112 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5113 size_int (xll_bitpos), 0);
5114 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5115 size_int (xrl_bitpos), 0);
5116
5117 if (l_const)
5118 {
5119 l_const = fold_convert (lntype, l_const);
5120 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5121 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5122 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5123 fold_build1 (BIT_NOT_EXPR,
5124 lntype, ll_mask),
5125 0)))
5126 {
5127 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5128
5129 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5130 }
5131 }
5132 if (r_const)
5133 {
5134 r_const = fold_convert (lntype, r_const);
5135 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5136 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5137 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5138 fold_build1 (BIT_NOT_EXPR,
5139 lntype, rl_mask),
5140 0)))
5141 {
5142 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5143
5144 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5145 }
5146 }
5147
5148   /* If the right sides are not constant, do the same for them.  Also,
5149 disallow this optimization if a size or signedness mismatch occurs
5150 between the left and right sides. */
5151 if (l_const == 0)
5152 {
5153 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5154 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5155 /* Make sure the two fields on the right
5156 correspond to the left without being swapped. */
5157 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5158 return 0;
5159
5160 first_bit = MIN (lr_bitpos, rr_bitpos);
5161 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5162 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5163 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5164 volatilep);
5165 if (rnmode == VOIDmode)
5166 return 0;
5167
5168 rnbitsize = GET_MODE_BITSIZE (rnmode);
5169 rnbitpos = first_bit & ~ (rnbitsize - 1);
5170 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5171 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5172
5173 if (BYTES_BIG_ENDIAN)
5174 {
5175 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5176 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5177 }
5178
5179 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5180 size_int (xlr_bitpos), 0);
5181 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5182 size_int (xrr_bitpos), 0);
5183
5184 /* Make a mask that corresponds to both fields being compared.
5185 Do this for both items being compared. If the operands are the
5186 same size and the bits being compared are in the same position
5187 then we can do this by masking both and comparing the masked
5188 results. */
5189 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5190 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5191 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5192 {
5193 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5194 ll_unsignedp || rl_unsignedp);
5195 if (! all_ones_mask_p (ll_mask, lnbitsize))
5196 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5197
5198 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5199 lr_unsignedp || rr_unsignedp);
5200 if (! all_ones_mask_p (lr_mask, rnbitsize))
5201 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5202
5203 return build2 (wanted_code, truth_type, lhs, rhs);
5204 }
5205
5206 /* There is still another way we can do something: If both pairs of
5207 fields being compared are adjacent, we may be able to make a wider
5208 field containing them both.
5209
5210 Note that we still must mask the lhs/rhs expressions. Furthermore,
5211 the mask must be shifted to account for the shift done by
5212 make_bit_field_ref. */
5213 if ((ll_bitsize + ll_bitpos == rl_bitpos
5214 && lr_bitsize + lr_bitpos == rr_bitpos)
5215 || (ll_bitpos == rl_bitpos + rl_bitsize
5216 && lr_bitpos == rr_bitpos + rr_bitsize))
5217 {
5218 tree type;
5219
5220 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5221 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5222 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5223 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5224
5225 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5226 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5227 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5228 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5229
5230 /* Convert to the smaller type before masking out unwanted bits. */
5231 type = lntype;
5232 if (lntype != rntype)
5233 {
5234 if (lnbitsize > rnbitsize)
5235 {
5236 lhs = fold_convert (rntype, lhs);
5237 ll_mask = fold_convert (rntype, ll_mask);
5238 type = rntype;
5239 }
5240 else if (lnbitsize < rnbitsize)
5241 {
5242 rhs = fold_convert (lntype, rhs);
5243 lr_mask = fold_convert (lntype, lr_mask);
5244 type = lntype;
5245 }
5246 }
5247
5248 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5249 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5250
5251 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5252 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5253
5254 return build2 (wanted_code, truth_type, lhs, rhs);
5255 }
5256
5257 return 0;
5258 }
5259
5260 /* Handle the case of comparisons with constants. If there is something in
5261 common between the masks, those bits of the constants must be the same.
5262 If not, the condition is always false. Test for this to avoid generating
5263 incorrect code below. */
5264 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5265 if (! integer_zerop (result)
5266 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5267 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5268 {
5269 if (wanted_code == NE_EXPR)
5270 {
5271 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5272 return constant_boolean_node (true, truth_type);
5273 }
5274 else
5275 {
5276 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5277 return constant_boolean_node (false, truth_type);
5278 }
5279 }
5280
5281 /* Construct the expression we will return. First get the component
5282 reference we will make. Unless the mask is all ones the width of
5283 that field, perform the mask operation. Then compare with the
5284 merged constant. */
5285 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5286 ll_unsignedp || rl_unsignedp);
5287
5288 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5289 if (! all_ones_mask_p (ll_mask, lnbitsize))
5290 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5291
5292 return build2 (wanted_code, truth_type, result,
5293 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5294 }
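
/* Illustrative sketch, not GCC source: the kind of rewrite fold_truthop
   aims for, assuming little-endian layout and no padding.  Two byte
   compares become one 16-bit load compared against the merged constant
   (the mask here is all ones, so no BIT_AND_EXPR is needed).  */
#if 0
#include <string.h>
#include <stdint.h>
struct s { unsigned char a; unsigned char b; };
static int two_tests (const struct s *p) { return p->a == 2 && p->b == 4; }
static int one_test (const struct s *p)
{
  uint16_t w;
  memcpy (&w, p, sizeof w);      /* one wider load spanning both fields */
  return w == (2 | (4 << 8));    /* merged constant */
}
#endif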
5295 \f
5296 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5297 constant. */
5298
5299 static tree
5300 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5301 {
5302 tree arg0 = op0;
5303 enum tree_code op_code;
5304 tree comp_const = op1;
5305 tree minmax_const;
5306 int consts_equal, consts_lt;
5307 tree inner;
5308
5309 STRIP_SIGN_NOPS (arg0);
5310
5311 op_code = TREE_CODE (arg0);
5312 minmax_const = TREE_OPERAND (arg0, 1);
5313 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5314 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5315 inner = TREE_OPERAND (arg0, 0);
5316
5317 /* If something does not permit us to optimize, return NULL_TREE. */
5318 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5319 || TREE_CODE (comp_const) != INTEGER_CST
5320 || TREE_OVERFLOW (comp_const)
5321 || TREE_CODE (minmax_const) != INTEGER_CST
5322 || TREE_OVERFLOW (minmax_const))
5323 return NULL_TREE;
5324
5325 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5326 and GT_EXPR, doing the rest with recursive calls using logical
5327 simplifications. */
5328 switch (code)
5329 {
5330 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5331 {
5332 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5333 type, op0, op1);
5334 if (tem)
5335 return invert_truthvalue (tem);
5336 return NULL_TREE;
5337 }
5338
5339 case GE_EXPR:
5340 return
5341 fold_build2 (TRUTH_ORIF_EXPR, type,
5342 optimize_minmax_comparison
5343 (EQ_EXPR, type, arg0, comp_const),
5344 optimize_minmax_comparison
5345 (GT_EXPR, type, arg0, comp_const));
5346
5347 case EQ_EXPR:
5348 if (op_code == MAX_EXPR && consts_equal)
5349 /* MAX (X, 0) == 0 -> X <= 0 */
5350 return fold_build2 (LE_EXPR, type, inner, comp_const);
5351
5352 else if (op_code == MAX_EXPR && consts_lt)
5353 /* MAX (X, 0) == 5 -> X == 5 */
5354 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5355
5356 else if (op_code == MAX_EXPR)
5357 /* MAX (X, 0) == -1 -> false */
5358 return omit_one_operand (type, integer_zero_node, inner);
5359
5360 else if (consts_equal)
5361 /* MIN (X, 0) == 0 -> X >= 0 */
5362 return fold_build2 (GE_EXPR, type, inner, comp_const);
5363
5364 else if (consts_lt)
5365 /* MIN (X, 0) == 5 -> false */
5366 return omit_one_operand (type, integer_zero_node, inner);
5367
5368 else
5369 /* MIN (X, 0) == -1 -> X == -1 */
5370 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5371
5372 case GT_EXPR:
5373 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5374 /* MAX (X, 0) > 0 -> X > 0
5375 MAX (X, 0) > 5 -> X > 5 */
5376 return fold_build2 (GT_EXPR, type, inner, comp_const);
5377
5378 else if (op_code == MAX_EXPR)
5379 /* MAX (X, 0) > -1 -> true */
5380 return omit_one_operand (type, integer_one_node, inner);
5381
5382 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5383 /* MIN (X, 0) > 0 -> false
5384 MIN (X, 0) > 5 -> false */
5385 return omit_one_operand (type, integer_zero_node, inner);
5386
5387 else
5388 /* MIN (X, 0) > -1 -> X > -1 */
5389 return fold_build2 (GT_EXPR, type, inner, comp_const);
5390
5391 default:
5392 return NULL_TREE;
5393 }
5394 }
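
/* Illustrative sketch (not part of this file): the EQ_EXPR identities
   used by optimize_minmax_comparison, checked numerically with C1 == 0:
   MAX (X, 0) == 5 iff X == 5, MAX (X, 0) == 0 iff X <= 0, and
   MAX (X, 0) == -1 is never true. */
#include <assert.h>

#define MAX_(a, b) ((a) > (b) ? (a) : (b))

int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    {
      assert ((MAX_ (x, 0) == 5) == (x == 5));
      assert ((MAX_ (x, 0) == 0) == (x <= 0));
      assert ((MAX_ (x, 0) == -1) == 0);
    }
  return 0;
}
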
5395 \f
5396 /* T is an integer expression that is being multiplied, divided, or taken a
5397 modulus (CODE says which and what kind of divide or modulus) by a
5398 constant C. See if we can eliminate that operation by folding it with
5399 other operations already in T. WIDE_TYPE, if non-null, is a type that
5400 should be used for the computation if wider than our type.
5401
5402 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5403 (X * 2) + (Y * 4). We must, however, be assured that either the original
5404 expression would not overflow or that overflow is undefined for the type
5405 in the language in question.
5406
5407 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5408 the machine has a multiply-accumulate insn or that this is part of an
5409 addressing calculation.
5410
5411 If we return a non-null expression, it is an equivalent form of the
5412 original computation, but need not be in the original type. */
5413
5414 static tree
5415 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5416 {
5417 /* To avoid exponential search depth, refuse to allow recursion past
5418 three levels. Beyond that (1) it's highly unlikely that we'll find
5419 something interesting and (2) we've probably processed it before
5420 when we built the inner expression. */
5421
5422 static int depth;
5423 tree ret;
5424
5425 if (depth > 3)
5426 return NULL;
5427
5428 depth++;
5429 ret = extract_muldiv_1 (t, c, code, wide_type);
5430 depth--;
5431
5432 return ret;
5433 }
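
/* Illustrative sketch (not part of this file): the static-depth-counter
   idiom used by extract_muldiv above, isolated. The recursive worker
   here (bounded_search) is hypothetical; only the guard pattern is the
   point -- increment before recursing, decrement after, refuse past
   three levels. */
static int depth_;

static int
bounded_search (int n)
{
  int ret;

  if (depth_ > 3)
    return -1;                  /* Refuse to recurse any deeper.  */

  depth_++;
  ret = n <= 0 ? 0 : bounded_search (n - 1);
  depth_--;

  return ret;
}

int
main (void)
{
  return bounded_search (10) == -1 ? 0 : 1;
}
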
5434
5435 static tree
5436 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5437 {
5438 tree type = TREE_TYPE (t);
5439 enum tree_code tcode = TREE_CODE (t);
5440 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5441 > GET_MODE_SIZE (TYPE_MODE (type)))
5442 ? wide_type : type);
5443 tree t1, t2;
5444 int same_p = tcode == code;
5445 tree op0 = NULL_TREE, op1 = NULL_TREE;
5446
5447 /* Don't deal with constants of zero here; they confuse the code below. */
5448 if (integer_zerop (c))
5449 return NULL_TREE;
5450
5451 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5452 op0 = TREE_OPERAND (t, 0);
5453
5454 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5455 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5456
5457 /* Note that we need not handle conditional operations here since fold
5458 already handles those cases. So just do arithmetic here. */
5459 switch (tcode)
5460 {
5461 case INTEGER_CST:
5462 /* For a constant, we can always simplify if we are a multiply
5463 or (for divide and modulus) if it is a multiple of our constant. */
5464 if (code == MULT_EXPR
5465 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5466 return const_binop (code, fold_convert (ctype, t),
5467 fold_convert (ctype, c), 0);
5468 break;
5469
5470 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5471 /* If op0 is an expression ... */
5472 if ((COMPARISON_CLASS_P (op0)
5473 || UNARY_CLASS_P (op0)
5474 || BINARY_CLASS_P (op0)
5475 || EXPRESSION_CLASS_P (op0))
5476 /* ... and is unsigned, and its type is smaller than ctype,
5477 then we cannot pass through as widening. */
5478 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5479 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5480 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5481 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5482 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5483 /* ... or this is a truncation (t is narrower than op0),
5484 then we cannot pass through this narrowing. */
5485 || (GET_MODE_SIZE (TYPE_MODE (type))
5486 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5487 /* ... or signedness changes for division or modulus,
5488 then we cannot pass through this conversion. */
5489 || (code != MULT_EXPR
5490 && (TYPE_UNSIGNED (ctype)
5491 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5492 break;
5493
5494 /* Pass the constant down and see if we can make a simplification. If
5495 we can, replace this expression with the inner simplification for
5496 possible later conversion to our type or some other type. */
5497 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5498 && TREE_CODE (t2) == INTEGER_CST
5499 && !TREE_OVERFLOW (t2)
5500 && (0 != (t1 = extract_muldiv (op0, t2, code,
5501 code == MULT_EXPR
5502 ? ctype : NULL_TREE))))
5503 return t1;
5504 break;
5505
5506 case ABS_EXPR:
5507 /* If widening the type changes it from signed to unsigned, then we
5508 must avoid building ABS_EXPR itself as unsigned. */
5509 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5510 {
5511 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5512 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5513 {
5514 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5515 return fold_convert (ctype, t1);
5516 }
5517 break;
5518 }
5519 /* FALLTHROUGH */
5520 case NEGATE_EXPR:
5521 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5522 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5523 break;
5524
5525 case MIN_EXPR: case MAX_EXPR:
5526 /* If widening the type changes the signedness, then we can't perform
5527 this optimization as that changes the result. */
5528 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5529 break;
5530
5531 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5532 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5533 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5534 {
5535 if (tree_int_cst_sgn (c) < 0)
5536 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5537
5538 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5539 fold_convert (ctype, t2));
5540 }
5541 break;
5542
5543 case LSHIFT_EXPR: case RSHIFT_EXPR:
5544 /* If the second operand is constant, this is a multiplication
5545 or floor division by a power of two, so we can treat it that
5546 way unless the multiplier or divisor overflows. Signed
5547 left-shift overflow is implementation-defined rather than
5548 undefined in C90, so do not convert signed left shift into
5549 multiplication. */
5550 if (TREE_CODE (op1) == INTEGER_CST
5551 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5552 /* const_binop may not detect overflow correctly,
5553 so check for it explicitly here. */
5554 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5555 && TREE_INT_CST_HIGH (op1) == 0
5556 && 0 != (t1 = fold_convert (ctype,
5557 const_binop (LSHIFT_EXPR,
5558 size_one_node,
5559 op1, 0)))
5560 && !TREE_OVERFLOW (t1))
5561 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5562 ? MULT_EXPR : FLOOR_DIV_EXPR,
5563 ctype, fold_convert (ctype, op0), t1),
5564 c, code, wide_type);
5565 break;
5566
5567 case PLUS_EXPR: case MINUS_EXPR:
5568 /* See if we can eliminate the operation on both sides. If we can, we
5569 can return a new PLUS or MINUS. If we can't, the only remaining
5570 case where we can do anything is when the second operand is a
5571 constant. */
5572 t1 = extract_muldiv (op0, c, code, wide_type);
5573 t2 = extract_muldiv (op1, c, code, wide_type);
5574 if (t1 != 0 && t2 != 0
5575 && (code == MULT_EXPR
5576 /* If not multiplication, we can only do this if both operands
5577 are divisible by c. */
5578 || (multiple_of_p (ctype, op0, c)
5579 && multiple_of_p (ctype, op1, c))))
5580 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5581 fold_convert (ctype, t2));
5582
5583 /* If this was a subtraction, negate OP1 and set it to be an addition.
5584 This simplifies the logic below. */
5585 if (tcode == MINUS_EXPR)
5586 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5587
5588 if (TREE_CODE (op1) != INTEGER_CST)
5589 break;
5590
5591 /* If either OP1 or C is negative, this optimization is not safe for
5592 some of the division and remainder types, while for others we need
5593 to change the code. */
5594 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5595 {
5596 if (code == CEIL_DIV_EXPR)
5597 code = FLOOR_DIV_EXPR;
5598 else if (code == FLOOR_DIV_EXPR)
5599 code = CEIL_DIV_EXPR;
5600 else if (code != MULT_EXPR
5601 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5602 break;
5603 }
5604
5605 /* If it's a multiply or a division/modulus operation of a multiple
5606 of our constant, do the operation and verify it doesn't overflow. */
5607 if (code == MULT_EXPR
5608 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5609 {
5610 op1 = const_binop (code, fold_convert (ctype, op1),
5611 fold_convert (ctype, c), 0);
5612 /* We allow the constant to overflow with wrapping semantics. */
5613 if (op1 == 0
5614 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5615 break;
5616 }
5617 else
5618 break;
5619
5620 /* If we have an unsigned type that is not a sizetype, we cannot widen
5621 the operation since it will change the result if the original
5622 computation overflowed. */
5623 if (TYPE_UNSIGNED (ctype)
5624 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5625 && ctype != type)
5626 break;
5627
5628 /* If we were able to eliminate our operation from the first side,
5629 apply our operation to the second side and reform the PLUS. */
5630 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5631 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5632
5633 /* The last case is if we are a multiply. In that case, we can
5634 apply the distributive law to commute the multiply and addition
5635 if the multiplication of the constants doesn't overflow. */
5636 if (code == MULT_EXPR)
5637 return fold_build2 (tcode, ctype,
5638 fold_build2 (code, ctype,
5639 fold_convert (ctype, op0),
5640 fold_convert (ctype, c)),
5641 op1);
5642
5643 break;
5644
5645 case MULT_EXPR:
5646 /* We have a special case here if we are doing something like
5647 (C * 8) % 4 since we know that's zero. */
5648 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5649 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5650 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5651 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5652 return omit_one_operand (type, integer_zero_node, op0);
5653
5654 /* ... fall through ... */
5655
5656 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5657 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5658 /* If we can extract our operation from the LHS, do so and return a
5659 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5660 do something only if the second operand is a constant. */
5661 if (same_p
5662 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5663 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5664 fold_convert (ctype, op1));
5665 else if (tcode == MULT_EXPR && code == MULT_EXPR
5666 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5667 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5668 fold_convert (ctype, t1));
5669 else if (TREE_CODE (op1) != INTEGER_CST)
5670 return 0;
5671
5672 /* If these are the same operation types, we can associate them
5673 assuming no overflow. */
5674 if (tcode == code
5675 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5676 fold_convert (ctype, c), 0))
5677 && !TREE_OVERFLOW (t1))
5678 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5679
5680 /* If these operations "cancel" each other, we have the main
5681 optimizations of this pass, which occur when either constant is a
5682 multiple of the other, in which case we replace this with an
5683 operation of either CODE or TCODE.
5684
5685 If we have an unsigned type that is not a sizetype, we cannot do
5686 this since it will change the result if the original computation
5687 overflowed. */
5688 if ((! TYPE_UNSIGNED (ctype)
5689 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5690 && ! flag_wrapv
5691 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5692 || (tcode == MULT_EXPR
5693 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5694 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5695 {
5696 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5697 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5698 fold_convert (ctype,
5699 const_binop (TRUNC_DIV_EXPR,
5700 op1, c, 0)));
5701 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5702 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5703 fold_convert (ctype,
5704 const_binop (TRUNC_DIV_EXPR,
5705 c, op1, 0)));
5706 }
5707 break;
5708
5709 default:
5710 break;
5711 }
5712
5713 return 0;
5714 }
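
/* Illustrative sketch (not part of this file): the arithmetic fact the
   comment before extract_muldiv appeals to, checked numerically.
   X * 8 + Y * 16 is 4 * (X * 2 + Y * 4), so dividing by 4 is exact and
   folds to X * 2 + Y * 4; the range here is small enough that no
   overflow occurs. */
#include <assert.h>

int
main (void)
{
  int x, y;
  for (x = -50; x <= 50; x++)
    for (y = -50; y <= 50; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}
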
5715 \f
5716 /* Return a node which has the indicated constant VALUE (either 0 or
5717 1), and is of the indicated TYPE. */
5718
5719 tree
5720 constant_boolean_node (int value, tree type)
5721 {
5722 if (type == integer_type_node)
5723 return value ? integer_one_node : integer_zero_node;
5724 else if (type == boolean_type_node)
5725 return value ? boolean_true_node : boolean_false_node;
5726 else
5727 return build_int_cst (type, value);
5728 }
5729
5730
5731 /* Return true if expr looks like an ARRAY_REF and set base and
5732 offset to the appropriate trees. If there is no offset,
5733 offset is set to NULL_TREE. Base will be canonicalized to
5734 something you can get the element type from using
5735 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5736 in bytes relative to the base. */
5737
5738 static bool
5739 extract_array_ref (tree expr, tree *base, tree *offset)
5740 {
5741 /* One canonical form is a PLUS_EXPR with the first
5742 argument being an ADDR_EXPR with a possible NOP_EXPR
5743 attached. */
5744 if (TREE_CODE (expr) == PLUS_EXPR)
5745 {
5746 tree op0 = TREE_OPERAND (expr, 0);
5747 tree inner_base, dummy1;
5748 /* Strip NOP_EXPRs here because the C front ends and/or
5749 folders may present us with (int *)&x.a + 4B. */
5750 STRIP_NOPS (op0);
5751 if (extract_array_ref (op0, &inner_base, &dummy1))
5752 {
5753 *base = inner_base;
5754 if (dummy1 == NULL_TREE)
5755 *offset = TREE_OPERAND (expr, 1);
5756 else
5757 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5758 dummy1, TREE_OPERAND (expr, 1));
5759 return true;
5760 }
5761 }
5762 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5763 which we transform into an ADDR_EXPR with appropriate
5764 offset. For other arguments to the ADDR_EXPR we assume
5765 zero offset and as such do not care about the ADDR_EXPR
5766 type and strip possible nops from it. */
5767 else if (TREE_CODE (expr) == ADDR_EXPR)
5768 {
5769 tree op0 = TREE_OPERAND (expr, 0);
5770 if (TREE_CODE (op0) == ARRAY_REF)
5771 {
5772 tree idx = TREE_OPERAND (op0, 1);
5773 *base = TREE_OPERAND (op0, 0);
5774 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5775 array_ref_element_size (op0));
5776 }
5777 else
5778 {
5779 /* Handle array-to-pointer decay as &a. */
5780 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5781 *base = TREE_OPERAND (expr, 0);
5782 else
5783 *base = expr;
5784 *offset = NULL_TREE;
5785 }
5786 return true;
5787 }
5788 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5789 else if (SSA_VAR_P (expr)
5790 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5791 {
5792 *base = expr;
5793 *offset = NULL_TREE;
5794 return true;
5795 }
5796
5797 return false;
5798 }
5799
5800
5801 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5802 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5803 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5804 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5805 COND is the first argument to CODE; otherwise (as in the example
5806 given here), it is the second argument. TYPE is the type of the
5807 original expression. Return NULL_TREE if no simplification is
5808 possible. */
5809
5810 static tree
5811 fold_binary_op_with_conditional_arg (enum tree_code code,
5812 tree type, tree op0, tree op1,
5813 tree cond, tree arg, int cond_first_p)
5814 {
5815 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5816 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5817 tree test, true_value, false_value;
5818 tree lhs = NULL_TREE;
5819 tree rhs = NULL_TREE;
5820
5821 /* This transformation is only worthwhile if we don't have to wrap
5822 arg in a SAVE_EXPR, and the operation can be simplified on at least
5823 one of the branches once it is pushed inside the COND_EXPR. */
5824 if (!TREE_CONSTANT (arg))
5825 return NULL_TREE;
5826
5827 if (TREE_CODE (cond) == COND_EXPR)
5828 {
5829 test = TREE_OPERAND (cond, 0);
5830 true_value = TREE_OPERAND (cond, 1);
5831 false_value = TREE_OPERAND (cond, 2);
5832 /* If this operand throws an exception, then it does not make
5833 sense to try to perform a logical or arithmetic operation
5834 involving it. */
5835 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5836 lhs = true_value;
5837 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5838 rhs = false_value;
5839 }
5840 else
5841 {
5842 tree testtype = TREE_TYPE (cond);
5843 test = cond;
5844 true_value = constant_boolean_node (true, testtype);
5845 false_value = constant_boolean_node (false, testtype);
5846 }
5847
5848 arg = fold_convert (arg_type, arg);
5849 if (lhs == 0)
5850 {
5851 true_value = fold_convert (cond_type, true_value);
5852 if (cond_first_p)
5853 lhs = fold_build2 (code, type, true_value, arg);
5854 else
5855 lhs = fold_build2 (code, type, arg, true_value);
5856 }
5857 if (rhs == 0)
5858 {
5859 false_value = fold_convert (cond_type, false_value);
5860 if (cond_first_p)
5861 rhs = fold_build2 (code, type, false_value, arg);
5862 else
5863 rhs = fold_build2 (code, type, arg, false_value);
5864 }
5865
5866 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5867 return fold_convert (type, test);
5868 }
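
/* Illustrative sketch (not part of this file): the rewrite performed by
   fold_binary_op_with_conditional_arg -- a + (b ? x : y) becomes
   b ? (a + x) : (a + y), after which each arm can fold further when A
   is constant. */
#include <assert.h>

int
main (void)
{
  int a = 10, x = 3, y = 7, b;
  for (b = 0; b <= 1; b++)
    assert (a + (b ? x : y) == (b ? (a + x) : (a + y)));
  return 0;
}
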
5869
5870 \f
5871 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5872
5873 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5874 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5875 ADDEND is the same as X.
5876
5877 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5878 and finite. The problematic cases are when X is zero, and its mode
5879 has signed zeros. In the case of rounding towards -infinity,
5880 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5881 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5882
5883 static bool
5884 fold_real_zero_addition_p (tree type, tree addend, int negate)
5885 {
5886 if (!real_zerop (addend))
5887 return false;
5888
5889 /* Don't allow the fold with -fsignaling-nans. */
5890 if (HONOR_SNANS (TYPE_MODE (type)))
5891 return false;
5892
5893 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5894 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5895 return true;
5896
5897 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5898 if (TREE_CODE (addend) == REAL_CST
5899 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5900 negate = !negate;
5901
5902 /* The mode has signed zeros, and we have to honor their sign.
5903 In this situation, there is only one case we can return true for.
5904 X - 0 is the same as X unless rounding towards -infinity is
5905 supported. */
5906 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5907 }
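
/* Illustrative sketch (not part of this file): why
   fold_real_zero_addition_p must honor signed zeros. Under IEEE 754
   default rounding, -0.0 + 0.0 is +0.0, so X + 0.0 is not a no-op when
   X may be -0.0, while X - 0.0 does preserve the sign. Compile without
   -ffast-math so signed zeros are honored. */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  assert (signbit (x));           /* X is -0.0.  */
  assert (!signbit (x + 0.0));    /* Result is +0.0: the fold is unsafe.  */
  assert (signbit (x - 0.0));     /* Result is -0.0: the fold is safe.  */
  return 0;
}
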
5908
5909 /* Subroutine of fold() that checks comparisons of built-in math
5910 functions against real constants.
5911
5912 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5913 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5914 is the type of the result and ARG0 and ARG1 are the operands of the
5915 comparison. ARG1 must be a TREE_REAL_CST.
5916
5917 The function returns the constant folded tree if a simplification
5918 can be made, and NULL_TREE otherwise. */
5919
5920 static tree
5921 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5922 tree type, tree arg0, tree arg1)
5923 {
5924 REAL_VALUE_TYPE c;
5925
5926 if (BUILTIN_SQRT_P (fcode))
5927 {
5928 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5929 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5930
5931 c = TREE_REAL_CST (arg1);
5932 if (REAL_VALUE_NEGATIVE (c))
5933 {
5934 /* sqrt(x) < y is always false, if y is negative. */
5935 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5936 return omit_one_operand (type, integer_zero_node, arg);
5937
5938 /* sqrt(x) > y is always true, if y is negative and we
5939 don't care about NaNs, i.e. negative values of x. */
5940 if (code == NE_EXPR || !HONOR_NANS (mode))
5941 return omit_one_operand (type, integer_one_node, arg);
5942
5943 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5944 return fold_build2 (GE_EXPR, type, arg,
5945 build_real (TREE_TYPE (arg), dconst0));
5946 }
5947 else if (code == GT_EXPR || code == GE_EXPR)
5948 {
5949 REAL_VALUE_TYPE c2;
5950
5951 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5952 real_convert (&c2, mode, &c2);
5953
5954 if (REAL_VALUE_ISINF (c2))
5955 {
5956 /* sqrt(x) > y is x == +Inf, when y is very large. */
5957 if (HONOR_INFINITIES (mode))
5958 return fold_build2 (EQ_EXPR, type, arg,
5959 build_real (TREE_TYPE (arg), c2));
5960
5961 /* sqrt(x) > y is always false, when y is very large
5962 and we don't care about infinities. */
5963 return omit_one_operand (type, integer_zero_node, arg);
5964 }
5965
5966 /* sqrt(x) > c is the same as x > c*c. */
5967 return fold_build2 (code, type, arg,
5968 build_real (TREE_TYPE (arg), c2));
5969 }
5970 else if (code == LT_EXPR || code == LE_EXPR)
5971 {
5972 REAL_VALUE_TYPE c2;
5973
5974 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5975 real_convert (&c2, mode, &c2);
5976
5977 if (REAL_VALUE_ISINF (c2))
5978 {
5979 /* sqrt(x) < y is always true, when y is a very large
5980 value and we don't care about NaNs or Infinities. */
5981 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5982 return omit_one_operand (type, integer_one_node, arg);
5983
5984 /* sqrt(x) < y is x != +Inf when y is very large and we
5985 don't care about NaNs. */
5986 if (! HONOR_NANS (mode))
5987 return fold_build2 (NE_EXPR, type, arg,
5988 build_real (TREE_TYPE (arg), c2));
5989
5990 /* sqrt(x) < y is x >= 0 when y is very large and we
5991 don't care about Infinities. */
5992 if (! HONOR_INFINITIES (mode))
5993 return fold_build2 (GE_EXPR, type, arg,
5994 build_real (TREE_TYPE (arg), dconst0));
5995
5996 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5997 if (lang_hooks.decls.global_bindings_p () != 0
5998 || CONTAINS_PLACEHOLDER_P (arg))
5999 return NULL_TREE;
6000
6001 arg = save_expr (arg);
6002 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6003 fold_build2 (GE_EXPR, type, arg,
6004 build_real (TREE_TYPE (arg),
6005 dconst0)),
6006 fold_build2 (NE_EXPR, type, arg,
6007 build_real (TREE_TYPE (arg),
6008 c2)));
6009 }
6010
6011 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6012 if (! HONOR_NANS (mode))
6013 return fold_build2 (code, type, arg,
6014 build_real (TREE_TYPE (arg), c2));
6015
6016 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6017 if (lang_hooks.decls.global_bindings_p () == 0
6018 && ! CONTAINS_PLACEHOLDER_P (arg))
6019 {
6020 arg = save_expr (arg);
6021 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6022 fold_build2 (GE_EXPR, type, arg,
6023 build_real (TREE_TYPE (arg),
6024 dconst0)),
6025 fold_build2 (code, type, arg,
6026 build_real (TREE_TYPE (arg),
6027 c2)));
6028 }
6029 }
6030 }
6031
6032 return NULL_TREE;
6033 }
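
/* Illustrative sketch (not part of this file): the sqrt(x) > c <-> x > c*c
   rewrite from fold_mathfn_compare, checked for a nonnegative c whose
   square is exactly representable; NaN inputs are excluded, matching
   the !HONOR_NANS conditions above. */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double c = 3.0, x;
  for (x = 0.0; x <= 100.0; x += 0.25)
    assert ((sqrt (x) > c) == (x > c * c));
  return 0;
}
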
6034
6035 /* Subroutine of fold() that optimizes comparisons against Infinities,
6036 either +Inf or -Inf.
6037
6038 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6039 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6040 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6041
6042 The function returns the constant folded tree if a simplification
6043 can be made, and NULL_TREE otherwise. */
6044
6045 static tree
6046 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6047 {
6048 enum machine_mode mode;
6049 REAL_VALUE_TYPE max;
6050 tree temp;
6051 bool neg;
6052
6053 mode = TYPE_MODE (TREE_TYPE (arg0));
6054
6055 /* For negative infinity swap the sense of the comparison. */
6056 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6057 if (neg)
6058 code = swap_tree_comparison (code);
6059
6060 switch (code)
6061 {
6062 case GT_EXPR:
6063 /* x > +Inf is always false, if we ignore sNaNs. */
6064 if (HONOR_SNANS (mode))
6065 return NULL_TREE;
6066 return omit_one_operand (type, integer_zero_node, arg0);
6067
6068 case LE_EXPR:
6069 /* x <= +Inf is always true, if we don't care about NaNs. */
6070 if (! HONOR_NANS (mode))
6071 return omit_one_operand (type, integer_one_node, arg0);
6072
6073 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6074 if (lang_hooks.decls.global_bindings_p () == 0
6075 && ! CONTAINS_PLACEHOLDER_P (arg0))
6076 {
6077 arg0 = save_expr (arg0);
6078 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6079 }
6080 break;
6081
6082 case EQ_EXPR:
6083 case GE_EXPR:
6084 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6085 real_maxval (&max, neg, mode);
6086 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6087 arg0, build_real (TREE_TYPE (arg0), max));
6088
6089 case LT_EXPR:
6090 /* x < +Inf is always equal to x <= DBL_MAX. */
6091 real_maxval (&max, neg, mode);
6092 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6093 arg0, build_real (TREE_TYPE (arg0), max));
6094
6095 case NE_EXPR:
6096 /* x != +Inf is always equal to !(x > DBL_MAX). */
6097 real_maxval (&max, neg, mode);
6098 if (! HONOR_NANS (mode))
6099 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6100 arg0, build_real (TREE_TYPE (arg0), max));
6101
6102 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6103 arg0, build_real (TREE_TYPE (arg0), max));
6104 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6105
6106 default:
6107 break;
6108 }
6109
6110 return NULL_TREE;
6111 }
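
/* Illustrative sketch (not part of this file): the fold_inf_compare
   rewrite of x < +Inf as x <= DBL_MAX. The two agree for every finite
   value, for +/-Inf, and even for NaN, where both comparisons are
   false. */
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double vals[] = { -INFINITY, -1.0, 0.0, 1.0, DBL_MAX, INFINITY, NAN };
  unsigned i;
  for (i = 0; i < sizeof vals / sizeof vals[0]; i++)
    assert ((vals[i] < INFINITY) == (vals[i] <= DBL_MAX));
  return 0;
}
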
6112
6113 /* Subroutine of fold() that optimizes comparisons of a division by
6114 a nonzero integer constant against an integer constant, i.e.
6115 X/C1 op C2.
6116
6117 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6118 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6119 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6120
6121 The function returns the constant folded tree if a simplification
6122 can be made, and NULL_TREE otherwise. */
6123
6124 static tree
6125 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6126 {
6127 tree prod, tmp, hi, lo;
6128 tree arg00 = TREE_OPERAND (arg0, 0);
6129 tree arg01 = TREE_OPERAND (arg0, 1);
6130 unsigned HOST_WIDE_INT lpart;
6131 HOST_WIDE_INT hpart;
6132 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6133 bool neg_overflow;
6134 int overflow;
6135
6136 /* We have to do this the hard way to detect unsigned overflow.
6137 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6138 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6139 TREE_INT_CST_HIGH (arg01),
6140 TREE_INT_CST_LOW (arg1),
6141 TREE_INT_CST_HIGH (arg1),
6142 &lpart, &hpart, unsigned_p);
6143 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6144 -1, overflow);
6145 neg_overflow = false;
6146
6147 if (unsigned_p)
6148 {
6149 tmp = int_const_binop (MINUS_EXPR, arg01,
6150 build_int_cst (TREE_TYPE (arg01), 1), 0);
6151 lo = prod;
6152
6153 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6154 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6155 TREE_INT_CST_HIGH (prod),
6156 TREE_INT_CST_LOW (tmp),
6157 TREE_INT_CST_HIGH (tmp),
6158 &lpart, &hpart, unsigned_p);
6159 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6160 -1, overflow | TREE_OVERFLOW (prod));
6161 }
6162 else if (tree_int_cst_sgn (arg01) >= 0)
6163 {
6164 tmp = int_const_binop (MINUS_EXPR, arg01,
6165 build_int_cst (TREE_TYPE (arg01), 1), 0);
6166 switch (tree_int_cst_sgn (arg1))
6167 {
6168 case -1:
6169 neg_overflow = true;
6170 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6171 hi = prod;
6172 break;
6173
6174 case 0:
6175 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6176 hi = tmp;
6177 break;
6178
6179 case 1:
6180 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6181 lo = prod;
6182 break;
6183
6184 default:
6185 gcc_unreachable ();
6186 }
6187 }
6188 else
6189 {
6190 /* A negative divisor reverses the relational operators. */
6191 code = swap_tree_comparison (code);
6192
6193 tmp = int_const_binop (PLUS_EXPR, arg01,
6194 build_int_cst (TREE_TYPE (arg01), 1), 0);
6195 switch (tree_int_cst_sgn (arg1))
6196 {
6197 case -1:
6198 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6199 lo = prod;
6200 break;
6201
6202 case 0:
6203 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6204 lo = tmp;
6205 break;
6206
6207 case 1:
6208 neg_overflow = true;
6209 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6210 hi = prod;
6211 break;
6212
6213 default:
6214 gcc_unreachable ();
6215 }
6216 }
6217
6218 switch (code)
6219 {
6220 case EQ_EXPR:
6221 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6222 return omit_one_operand (type, integer_zero_node, arg00);
6223 if (TREE_OVERFLOW (hi))
6224 return fold_build2 (GE_EXPR, type, arg00, lo);
6225 if (TREE_OVERFLOW (lo))
6226 return fold_build2 (LE_EXPR, type, arg00, hi);
6227 return build_range_check (type, arg00, 1, lo, hi);
6228
6229 case NE_EXPR:
6230 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6231 return omit_one_operand (type, integer_one_node, arg00);
6232 if (TREE_OVERFLOW (hi))
6233 return fold_build2 (LT_EXPR, type, arg00, lo);
6234 if (TREE_OVERFLOW (lo))
6235 return fold_build2 (GT_EXPR, type, arg00, hi);
6236 return build_range_check (type, arg00, 0, lo, hi);
6237
6238 case LT_EXPR:
6239 if (TREE_OVERFLOW (lo))
6240 {
6241 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6242 return omit_one_operand (type, tmp, arg00);
6243 }
6244 return fold_build2 (LT_EXPR, type, arg00, lo);
6245
6246 case LE_EXPR:
6247 if (TREE_OVERFLOW (hi))
6248 {
6249 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6250 return omit_one_operand (type, tmp, arg00);
6251 }
6252 return fold_build2 (LE_EXPR, type, arg00, hi);
6253
6254 case GT_EXPR:
6255 if (TREE_OVERFLOW (hi))
6256 {
6257 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6258 return omit_one_operand (type, tmp, arg00);
6259 }
6260 return fold_build2 (GT_EXPR, type, arg00, hi);
6261
6262 case GE_EXPR:
6263 if (TREE_OVERFLOW (lo))
6264 {
6265 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6266 return omit_one_operand (type, tmp, arg00);
6267 }
6268 return fold_build2 (GE_EXPR, type, arg00, lo);
6269
6270 default:
6271 break;
6272 }
6273
6274 return NULL_TREE;
6275 }
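
/* Illustrative sketch (not part of this file): the range computed by
   fold_div_compare for X / C1 == C2 with positive C1 and C2. With C's
   truncating division, X / 3 == 2 exactly when X is in
   [C1*C2, C1*C2 + (C1-1)] = [6, 8]. */
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -30; x <= 30; x++)
    assert ((x / 3 == 2) == (x >= 6 && x <= 8));
  return 0;
}
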
6276
6277
6278 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6279 equality/inequality test, then return a simplified form of the test
6280 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6281 result type. */
6282
6283 static tree
6284 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6285 tree result_type)
6286 {
6287 /* If this is testing a single bit, we can optimize the test. */
6288 if ((code == NE_EXPR || code == EQ_EXPR)
6289 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6290 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6291 {
6292 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6293 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6294 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6295
6296 if (arg00 != NULL_TREE
6297 /* This is only a win if casting to a signed type is cheap,
6298 i.e. when arg00's type is not a partial mode. */
6299 && TYPE_PRECISION (TREE_TYPE (arg00))
6300 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6301 {
6302 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6303 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6304 result_type, fold_convert (stype, arg00),
6305 build_int_cst (stype, 0));
6306 }
6307 }
6308
6309 return NULL_TREE;
6310 }
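
/* Illustrative sketch (not part of this file): the sign-test rewrite --
   (A & C) != 0 becomes A < 0 when C is the sign bit of A's type.
   Assumes the usual two's complement representation. */
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int a;
  for (a = -1000; a <= 1000; a++)
    assert (((a & INT_MIN) != 0) == (a < 0));
  return 0;
}
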
6311
6312 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6313 equality/inequality test, then return a simplified form of
6314 the test using shifts and logical operations. Otherwise return
6315 NULL. RESULT_TYPE is the desired result type. */
6316
6317 tree
6318 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6319 tree result_type)
6320 {
6321 /* If this is testing a single bit, we can optimize the test. */
6322 if ((code == NE_EXPR || code == EQ_EXPR)
6323 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6324 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6325 {
6326 tree inner = TREE_OPERAND (arg0, 0);
6327 tree type = TREE_TYPE (arg0);
6328 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6329 enum machine_mode operand_mode = TYPE_MODE (type);
6330 int ops_unsigned;
6331 tree signed_type, unsigned_type, intermediate_type;
6332 tree tem, one;
6333
6334 /* First, see if we can fold the single bit test into a sign-bit
6335 test. */
6336 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6337 result_type);
6338 if (tem)
6339 return tem;
6340
6341 /* Otherwise we have (A & C) != 0 where C is a single bit,
6342 convert that into ((A >> C2) & 1), where C2 = log2(C).
6343 Similarly for (A & C) == 0. */
6344
6345 /* If INNER is a right shift by a constant and that constant plus
6346 BITNUM does not overflow, adjust BITNUM and INNER. */
6347 if (TREE_CODE (inner) == RSHIFT_EXPR
6348 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6349 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6350 && bitnum < TYPE_PRECISION (type)
6351 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6352 bitnum - TYPE_PRECISION (type)))
6353 {
6354 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6355 inner = TREE_OPERAND (inner, 0);
6356 }
6357
6358 /* If we are going to be able to omit the AND below, we must do our
6359 operations as unsigned. If we must use the AND, we have a choice.
6360 Normally unsigned is faster, but for some machines signed is. */
6361 #ifdef LOAD_EXTEND_OP
6362 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6363 && !flag_syntax_only) ? 0 : 1;
6364 #else
6365 ops_unsigned = 1;
6366 #endif
6367
6368 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6369 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6370 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6371 inner = fold_convert (intermediate_type, inner);
6372
6373 if (bitnum != 0)
6374 inner = build2 (RSHIFT_EXPR, intermediate_type,
6375 inner, size_int (bitnum));
6376
6377 one = build_int_cst (intermediate_type, 1);
6378
6379 if (code == EQ_EXPR)
6380 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6381
6382 /* Put the AND last so it can combine with more things. */
6383 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6384
6385 /* Make sure to return the proper type. */
6386 inner = fold_convert (result_type, inner);
6387
6388 return inner;
6389 }
6390 return NULL_TREE;
6391 }
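
/* Illustrative sketch (not part of this file): the shift form produced
   by fold_single_bit_test -- (A & C) != 0 becomes (A >> C2) & 1 with
   C2 = log2(C), and the EQ_EXPR form XORs in a 1 before the final
   AND. */
#include <assert.h>

int
main (void)
{
  unsigned a, c = 8, c2 = 3;    /* c == 1u << c2.  */
  for (a = 0; a < 256; a++)
    {
      assert (((a & c) != 0) == ((a >> c2) & 1));
      assert (((a & c) == 0) == (((a >> c2) ^ 1) & 1));
    }
  return 0;
}
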
6392
6393 /* Check whether we are allowed to reorder operands arg0 and arg1,
6394 such that the evaluation of arg1 occurs before arg0. */
6395
6396 static bool
6397 reorder_operands_p (tree arg0, tree arg1)
6398 {
6399 if (! flag_evaluation_order)
6400 return true;
6401 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6402 return true;
6403 return ! TREE_SIDE_EFFECTS (arg0)
6404 && ! TREE_SIDE_EFFECTS (arg1);
6405 }
6406
6407 /* Test whether it is preferable to swap two operands, ARG0 and
6408 ARG1, for example because ARG0 is an integer constant and ARG1
6409 isn't. If REORDER is true, only recommend swapping if we can
6410 evaluate the operands in reverse order. */
6411
6412 bool
6413 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6414 {
6415 STRIP_SIGN_NOPS (arg0);
6416 STRIP_SIGN_NOPS (arg1);
6417
6418 if (TREE_CODE (arg1) == INTEGER_CST)
6419 return 0;
6420 if (TREE_CODE (arg0) == INTEGER_CST)
6421 return 1;
6422
6423 if (TREE_CODE (arg1) == REAL_CST)
6424 return 0;
6425 if (TREE_CODE (arg0) == REAL_CST)
6426 return 1;
6427
6428 if (TREE_CODE (arg1) == COMPLEX_CST)
6429 return 0;
6430 if (TREE_CODE (arg0) == COMPLEX_CST)
6431 return 1;
6432
6433 if (TREE_CONSTANT (arg1))
6434 return 0;
6435 if (TREE_CONSTANT (arg0))
6436 return 1;
6437
6438 if (optimize_size)
6439 return 0;
6440
6441 if (reorder && flag_evaluation_order
6442 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6443 return 0;
6444
6445 if (DECL_P (arg1))
6446 return 0;
6447 if (DECL_P (arg0))
6448 return 1;
6449
6450 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6451 for commutative and comparison operators. Ensuring a canonical
6452 form allows the optimizers to find additional redundancies without
6453 having to explicitly check for both orderings. */
6454 if (TREE_CODE (arg0) == SSA_NAME
6455 && TREE_CODE (arg1) == SSA_NAME
6456 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6457 return 1;
6458
6459 return 0;
6460 }
6461
6462 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6463 ARG0 is extended to a wider type. */
6464
6465 static tree
6466 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6467 {
6468 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6469 tree arg1_unw;
6470 tree shorter_type, outer_type;
6471 tree min, max;
6472 bool above, below;
6473
6474 if (arg0_unw == arg0)
6475 return NULL_TREE;
6476 shorter_type = TREE_TYPE (arg0_unw);
6477
6478 #ifdef HAVE_canonicalize_funcptr_for_compare
6479 /* Disable this optimization if we're casting a function pointer
6480 type on targets that require function pointer canonicalization. */
6481 if (HAVE_canonicalize_funcptr_for_compare
6482 && TREE_CODE (shorter_type) == POINTER_TYPE
6483 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6484 return NULL_TREE;
6485 #endif
6486
6487 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6488 return NULL_TREE;
6489
6490 arg1_unw = get_unwidened (arg1, shorter_type);
6491
6492 /* If possible, express the comparison in the shorter mode. */
6493 if ((code == EQ_EXPR || code == NE_EXPR
6494 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6495 && (TREE_TYPE (arg1_unw) == shorter_type
6496 || (TREE_CODE (arg1_unw) == INTEGER_CST
6497 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6498 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6499 && int_fits_type_p (arg1_unw, shorter_type))))
6500 return fold_build2 (code, type, arg0_unw,
6501 fold_convert (shorter_type, arg1_unw));
6502
6503 if (TREE_CODE (arg1_unw) != INTEGER_CST
6504 || TREE_CODE (shorter_type) != INTEGER_TYPE
6505 || !int_fits_type_p (arg1_unw, shorter_type))
6506 return NULL_TREE;
6507
6508 /* If we are comparing with an integer that does not fit into the range
6509 of the shorter type, the result is known. */
6510 outer_type = TREE_TYPE (arg1_unw);
6511 min = lower_bound_in_type (outer_type, shorter_type);
6512 max = upper_bound_in_type (outer_type, shorter_type);
6513
6514 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6515 max, arg1_unw));
6516 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6517 arg1_unw, min));
6518
6519 switch (code)
6520 {
6521 case EQ_EXPR:
6522 if (above || below)
6523 return omit_one_operand (type, integer_zero_node, arg0);
6524 break;
6525
6526 case NE_EXPR:
6527 if (above || below)
6528 return omit_one_operand (type, integer_one_node, arg0);
6529 break;
6530
6531 case LT_EXPR:
6532 case LE_EXPR:
6533 if (above)
6534 return omit_one_operand (type, integer_one_node, arg0);
6535 else if (below)
6536 return omit_one_operand (type, integer_zero_node, arg0);
6537
6538 case GT_EXPR:
6539 case GE_EXPR:
6540 if (above)
6541 return omit_one_operand (type, integer_zero_node, arg0);
6542 else if (below)
6543 return omit_one_operand (type, integer_one_node, arg0);
6544
6545 default:
6546 break;
6547 }
6548
6549 return NULL_TREE;
6550 }
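
/* Illustrative sketch (not part of this file): the "known result" case
   of fold_widened_comparison. A short widened to int stays within
   [SHRT_MIN, SHRT_MAX], so comparing against a constant outside that
   range folds to a constant. Assumes the common 16-bit short /
   32-bit int configuration. */
#include <assert.h>

int
main (void)
{
  short s;
  for (s = -100; s <= 100; s++)
    {
      assert (((int) s < 100000) == 1);     /* Above the short range.  */
      assert (((int) s == -100000) == 0);   /* Below the short range.  */
    }
  return 0;
}
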
6551
6552 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6553 ARG0 just the signedness is changed. */
6554
6555 static tree
6556 fold_sign_changed_comparison (enum tree_code code, tree type,
6557 tree arg0, tree arg1)
6558 {
6559 tree arg0_inner;
6560 tree inner_type, outer_type;
6561
6562 if (TREE_CODE (arg0) != NOP_EXPR
6563 && TREE_CODE (arg0) != CONVERT_EXPR)
6564 return NULL_TREE;
6565
6566 outer_type = TREE_TYPE (arg0);
6567 arg0_inner = TREE_OPERAND (arg0, 0);
6568 inner_type = TREE_TYPE (arg0_inner);
6569
6570 #ifdef HAVE_canonicalize_funcptr_for_compare
6571 /* Disable this optimization if we're casting a function pointer
6572 type on targets that require function pointer canonicalization. */
6573 if (HAVE_canonicalize_funcptr_for_compare
6574 && TREE_CODE (inner_type) == POINTER_TYPE
6575 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6576 return NULL_TREE;
6577 #endif
6578
6579 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6580 return NULL_TREE;
6581
6582 if (TREE_CODE (arg1) != INTEGER_CST
6583 && !((TREE_CODE (arg1) == NOP_EXPR
6584 || TREE_CODE (arg1) == CONVERT_EXPR)
6585 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6586 return NULL_TREE;
6587
6588 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6589 && code != NE_EXPR
6590 && code != EQ_EXPR)
6591 return NULL_TREE;
6592
6593 if (TREE_CODE (arg1) == INTEGER_CST)
6594 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6595 TREE_INT_CST_HIGH (arg1), 0,
6596 TREE_OVERFLOW (arg1));
6597 else
6598 arg1 = fold_convert (inner_type, arg1);
6599
6600 return fold_build2 (code, type, arg0_inner, arg1);
6601 }
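
/* Illustrative sketch (not part of this file): for EQ_EXPR/NE_EXPR,
   fold_sign_changed_comparison may drop a cast that changes only the
   signedness, since a same-precision conversion is a bijection on bit
   patterns: (int) u == 5 iff u == 5u. */
#include <assert.h>

int
main (void)
{
  unsigned u;
  for (u = 0; u < 1000; u++)
    assert (((int) u == 5) == (u == 5u));
  return 0;
}
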
6602
6603 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6604 the step of the array. Reconstructs s and delta in the case of s * delta
6605 being an integer constant (and thus already folded).
6606 ADDR is the address. MULT is the multiplicative expression.
6607 If the function succeeds, the new address expression is returned. Otherwise
6608 NULL_TREE is returned. */
6609
6610 static tree
6611 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6612 {
6613 tree s, delta, step;
6614 tree ref = TREE_OPERAND (addr, 0), pref;
6615 tree ret, pos;
6616 tree itype;
6617
6618 /* Canonicalize op1 into a possibly non-constant delta
6619 and an INTEGER_CST s. */
6620 if (TREE_CODE (op1) == MULT_EXPR)
6621 {
6622 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6623
6624 STRIP_NOPS (arg0);
6625 STRIP_NOPS (arg1);
6626
6627 if (TREE_CODE (arg0) == INTEGER_CST)
6628 {
6629 s = arg0;
6630 delta = arg1;
6631 }
6632 else if (TREE_CODE (arg1) == INTEGER_CST)
6633 {
6634 s = arg1;
6635 delta = arg0;
6636 }
6637 else
6638 return NULL_TREE;
6639 }
6640 else if (TREE_CODE (op1) == INTEGER_CST)
6641 {
6642 delta = op1;
6643 s = NULL_TREE;
6644 }
6645 else
6646 {
6647 /* Treat op1 as delta * 1. */
6648 delta = op1;
6649 s = integer_one_node;
6650 }
6651
6652 for (;; ref = TREE_OPERAND (ref, 0))
6653 {
6654 if (TREE_CODE (ref) == ARRAY_REF)
6655 {
6656 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6657 if (! itype)
6658 continue;
6659
6660 step = array_ref_element_size (ref);
6661 if (TREE_CODE (step) != INTEGER_CST)
6662 continue;
6663
6664 if (s)
6665 {
6666 if (! tree_int_cst_equal (step, s))
6667 continue;
6668 }
6669 else
6670 {
6671 /* Check whether delta is a multiple of step. */
6672 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6673 if (! tmp)
6674 continue;
6675 delta = tmp;
6676 }
6677
6678 break;
6679 }
6680
6681 if (!handled_component_p (ref))
6682 return NULL_TREE;
6683 }
6684
6685 /* We found a suitable array reference. So copy everything up to it,
6686 and replace the index. */
6687
6688 pref = TREE_OPERAND (addr, 0);
6689 ret = copy_node (pref);
6690 pos = ret;
6691
6692 while (pref != ref)
6693 {
6694 pref = TREE_OPERAND (pref, 0);
6695 TREE_OPERAND (pos, 0) = copy_node (pref);
6696 pos = TREE_OPERAND (pos, 0);
6697 }
6698
6699 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6700 fold_convert (itype,
6701 TREE_OPERAND (pos, 1)),
6702 fold_convert (itype, delta));
6703
6704 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6705 }
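
/* Illustrative sketch (not part of this file): the address identity
   behind try_move_mult_to_index. In C source the element size s is
   implicit in pointer arithmetic, so &a[i] + j and &a[i + j] name the
   same element; the folder does the analogous rewrite on byte
   offsets. */
#include <assert.h>

int
main (void)
{
  int a[16];
  int i = 2, j = 5;
  assert (&a[i] + j == &a[i + j]);
  return 0;
}
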
6706
6707
6708 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6709 means A >= Y && A != MAX, but in this case we know that
6710 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6711
6712 static tree
6713 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6714 {
6715 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6716
6717 if (TREE_CODE (bound) == LT_EXPR)
6718 a = TREE_OPERAND (bound, 0);
6719 else if (TREE_CODE (bound) == GT_EXPR)
6720 a = TREE_OPERAND (bound, 1);
6721 else
6722 return NULL_TREE;
6723
6724 typea = TREE_TYPE (a);
6725 if (!INTEGRAL_TYPE_P (typea)
6726 && !POINTER_TYPE_P (typea))
6727 return NULL_TREE;
6728
6729 if (TREE_CODE (ineq) == LT_EXPR)
6730 {
6731 a1 = TREE_OPERAND (ineq, 1);
6732 y = TREE_OPERAND (ineq, 0);
6733 }
6734 else if (TREE_CODE (ineq) == GT_EXPR)
6735 {
6736 a1 = TREE_OPERAND (ineq, 0);
6737 y = TREE_OPERAND (ineq, 1);
6738 }
6739 else
6740 return NULL_TREE;
6741
6742 if (TREE_TYPE (a1) != typea)
6743 return NULL_TREE;
6744
6745 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6746 if (!integer_onep (diff))
6747 return NULL_TREE;
6748
6749 return fold_build2 (GE_EXPR, type, a, y);
6750 }
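
/* Illustrative sketch (not part of this file): a numeric check of the
   fold above. For integers, A + 1 > Y is the same as A >= Y provided
   A + 1 does not wrap, which the bound A < X guarantees here. */
#include <assert.h>

int
main (void)
{
  int a, y, x = 50;
  for (a = -100; a < 100; a++)
    for (y = -100; y < 100; y++)
      assert ((a < x && a + 1 > y) == (a < x && a >= y));
  return 0;
}
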
6751
6752 /* Fold a sum or difference of at least one multiplication.
6753 Returns the folded tree or NULL if no simplification could be made. */
6754
6755 static tree
6756 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6757 {
6758 tree arg00, arg01, arg10, arg11;
6759 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6760
6761 /* (A * C) +- (B * C) -> (A+-B) * C.
6762 (A * C) +- A -> A * (C+-1).
6763 We are most concerned about the case where C is a constant,
6764 but other combinations show up during loop reduction. Since
6765 it is not difficult, try all four possibilities. */
6766
6767 if (TREE_CODE (arg0) == MULT_EXPR)
6768 {
6769 arg00 = TREE_OPERAND (arg0, 0);
6770 arg01 = TREE_OPERAND (arg0, 1);
6771 }
6772 else
6773 {
6774 arg00 = arg0;
6775 arg01 = build_one_cst (type);
6776 }
6777 if (TREE_CODE (arg1) == MULT_EXPR)
6778 {
6779 arg10 = TREE_OPERAND (arg1, 0);
6780 arg11 = TREE_OPERAND (arg1, 1);
6781 }
6782 else
6783 {
6784 arg10 = arg1;
6785 arg11 = build_one_cst (type);
6786 }
6787 same = NULL_TREE;
6788
6789 if (operand_equal_p (arg01, arg11, 0))
6790 same = arg01, alt0 = arg00, alt1 = arg10;
6791 else if (operand_equal_p (arg00, arg10, 0))
6792 same = arg00, alt0 = arg01, alt1 = arg11;
6793 else if (operand_equal_p (arg00, arg11, 0))
6794 same = arg00, alt0 = arg01, alt1 = arg10;
6795 else if (operand_equal_p (arg01, arg10, 0))
6796 same = arg01, alt0 = arg00, alt1 = arg11;
6797
6798 /* No identical multiplicands; see if we can find a common
6799 power-of-two factor in non-power-of-two multiplies. This
6800 can help in multi-dimensional array access. */
6801 else if (host_integerp (arg01, 0)
6802 && host_integerp (arg11, 0))
6803 {
6804 HOST_WIDE_INT int01, int11, tmp;
6805 bool swap = false;
6806 tree maybe_same;
6807 int01 = TREE_INT_CST_LOW (arg01);
6808 int11 = TREE_INT_CST_LOW (arg11);
6809
6810 /* Move min of absolute values to int11. */
6811 if ((int01 >= 0 ? int01 : -int01)
6812 < (int11 >= 0 ? int11 : -int11))
6813 {
6814 tmp = int01, int01 = int11, int11 = tmp;
6815 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6816 maybe_same = arg01;
6817 swap = true;
6818 }
6819 else
6820 maybe_same = arg11;
6821
6822 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6823 {
6824 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6825 build_int_cst (TREE_TYPE (arg00),
6826 int01 / int11));
6827 alt1 = arg10;
6828 same = maybe_same;
6829 if (swap)
6830 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6831 }
6832 }
6833
6834 if (same)
6835 return fold_build2 (MULT_EXPR, type,
6836 fold_build2 (code, type,
6837 fold_convert (type, alt0),
6838 fold_convert (type, alt1)),
6839 fold_convert (type, same));
6840
6841 return NULL_TREE;
6842 }
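
/* Illustrative sketch (not part of this file): the two factorings done
   by fold_plusminus_mult_expr -- a common multiplicand, and a common
   power-of-two factor of two distinct constants (12 = 3*4, 8 = 2*4).
   The ranges are kept small so no signed overflow occurs. */
#include <assert.h>

int
main (void)
{
  int a, b, c = 7;
  for (a = -20; a <= 20; a++)
    for (b = -20; b <= 20; b++)
      {
        assert (a * c + b * c == (a + b) * c);
        assert (a * 12 + b * 8 == (a * 3 + b * 2) * 4);
      }
  return 0;
}
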
6843
6844 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6845 specified by EXPR into the buffer PTR of length LEN bytes.
6846 Return the number of bytes placed in the buffer, or zero
6847 upon failure. */
6848
6849 static int
6850 native_encode_int (tree expr, unsigned char *ptr, int len)
6851 {
6852 tree type = TREE_TYPE (expr);
6853 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6854 int byte, offset, word, words;
6855 unsigned char value;
6856
6857 if (total_bytes > len)
6858 return 0;
6859 words = total_bytes / UNITS_PER_WORD;
6860
6861 for (byte = 0; byte < total_bytes; byte++)
6862 {
6863 int bitpos = byte * BITS_PER_UNIT;
6864 if (bitpos < HOST_BITS_PER_WIDE_INT)
6865 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6866 else
6867 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6868 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6869
6870 if (total_bytes > UNITS_PER_WORD)
6871 {
6872 word = byte / UNITS_PER_WORD;
6873 if (WORDS_BIG_ENDIAN)
6874 word = (words - 1) - word;
6875 offset = word * UNITS_PER_WORD;
6876 if (BYTES_BIG_ENDIAN)
6877 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6878 else
6879 offset += byte % UNITS_PER_WORD;
6880 }
6881 else
6882 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6883 ptr[offset] = value;
6884 }
6885 return total_bytes;
6886 }
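
/* Illustrative sketch (not part of this file): the byte-extraction step
   of the loop above, host-only and assuming a 32-bit unsigned int.
   Bytes are produced least-significant first; the real routine then
   permutes them into *target* byte and word order, which need not
   match the host's. */
#include <assert.h>

int
main (void)
{
  unsigned int v = 0x11223344;
  unsigned char buf[sizeof v];
  unsigned i;
  for (i = 0; i < sizeof v; i++)
    buf[i] = (unsigned char) (v >> (i * 8));
  assert (buf[0] == 0x44 && buf[3] == 0x11);
  return 0;
}
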
6887
6888
6889 /* Subroutine of native_encode_expr. Encode the REAL_CST
6890 specified by EXPR into the buffer PTR of length LEN bytes.
6891 Return the number of bytes placed in the buffer, or zero
6892 upon failure. */
6893
6894 static int
6895 native_encode_real (tree expr, unsigned char *ptr, int len)
6896 {
6897 tree type = TREE_TYPE (expr);
6898 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6899 int byte, offset, word, words;
6900 unsigned char value;
6901
6902 /* There are always 32 bits in each long, no matter the size of
6903 the host's long. We handle floating point representations with
6904 up to 192 bits. */
6905 long tmp[6];
6906
6907 if (total_bytes > len)
6908 return 0;
6909 words = total_bytes / UNITS_PER_WORD;
6910
6911 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6912
6913 for (byte = 0; byte < total_bytes; byte++)
6914 {
6915 int bitpos = byte * BITS_PER_UNIT;
6916 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6917
6918 if (total_bytes > UNITS_PER_WORD)
6919 {
6920 word = byte / UNITS_PER_WORD;
6921 if (FLOAT_WORDS_BIG_ENDIAN)
6922 word = (words - 1) - word;
6923 offset = word * UNITS_PER_WORD;
6924 if (BYTES_BIG_ENDIAN)
6925 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6926 else
6927 offset += byte % UNITS_PER_WORD;
6928 }
6929 else
6930 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6931 ptr[offset] = value;
6932 }
6933 return total_bytes;
6934 }
6935
6936 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6937 specified by EXPR into the buffer PTR of length LEN bytes.
6938 Return the number of bytes placed in the buffer, or zero
6939 upon failure. */
6940
6941 static int
6942 native_encode_complex (tree expr, unsigned char *ptr, int len)
6943 {
6944 int rsize, isize;
6945 tree part;
6946
6947 part = TREE_REALPART (expr);
6948 rsize = native_encode_expr (part, ptr, len);
6949 if (rsize == 0)
6950 return 0;
6951 part = TREE_IMAGPART (expr);
6952 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6953 if (isize != rsize)
6954 return 0;
6955 return rsize + isize;
6956 }
6957
6958
6959 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6960 specified by EXPR into the buffer PTR of length LEN bytes.
6961 Return the number of bytes placed in the buffer, or zero
6962 upon failure. */
6963
6964 static int
6965 native_encode_vector (tree expr, unsigned char *ptr, int len)
6966 {
6967 int i, size, offset, count;
6968 tree itype, elem, elements;
6969
6970 offset = 0;
6971 elements = TREE_VECTOR_CST_ELTS (expr);
6972 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6973 itype = TREE_TYPE (TREE_TYPE (expr));
6974 size = GET_MODE_SIZE (TYPE_MODE (itype));
6975 for (i = 0; i < count; i++)
6976 {
6977 if (elements)
6978 {
6979 elem = TREE_VALUE (elements);
6980 elements = TREE_CHAIN (elements);
6981 }
6982 else
6983 elem = NULL_TREE;
6984
6985 if (elem)
6986 {
6987 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6988 return 0;
6989 }
6990 else
6991 {
6992 if (offset + size > len)
6993 return 0;
6994 memset (ptr+offset, 0, size);
6995 }
6996 offset += size;
6997 }
6998 return offset;
6999 }
7000
7001
7002 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7003 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7004 buffer PTR of length LEN bytes. Return the number of bytes
7005 placed in the buffer, or zero upon failure. */
7006
7007 static int
7008 native_encode_expr (tree expr, unsigned char *ptr, int len)
7009 {
7010 switch (TREE_CODE (expr))
7011 {
7012 case INTEGER_CST:
7013 return native_encode_int (expr, ptr, len);
7014
7015 case REAL_CST:
7016 return native_encode_real (expr, ptr, len);
7017
7018 case COMPLEX_CST:
7019 return native_encode_complex (expr, ptr, len);
7020
7021 case VECTOR_CST:
7022 return native_encode_vector (expr, ptr, len);
7023
7024 default:
7025 return 0;
7026 }
7027 }
7028
7029
7030 /* Subroutine of native_interpret_expr. Interpret the contents of
7031 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7032 If the buffer cannot be interpreted, return NULL_TREE. */
7033
7034 static tree
7035 native_interpret_int (tree type, unsigned char *ptr, int len)
7036 {
7037 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7038 int byte, offset, word, words;
7039 unsigned char value;
7040 unsigned HOST_WIDE_INT lo = 0;
7041 HOST_WIDE_INT hi = 0;
7042
7043 if (total_bytes > len)
7044 return NULL_TREE;
7045 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7046 return NULL_TREE;
7047 words = total_bytes / UNITS_PER_WORD;
7048
7049 for (byte = 0; byte < total_bytes; byte++)
7050 {
7051 int bitpos = byte * BITS_PER_UNIT;
7052 if (total_bytes > UNITS_PER_WORD)
7053 {
7054 word = byte / UNITS_PER_WORD;
7055 if (WORDS_BIG_ENDIAN)
7056 word = (words - 1) - word;
7057 offset = word * UNITS_PER_WORD;
7058 if (BYTES_BIG_ENDIAN)
7059 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7060 else
7061 offset += byte % UNITS_PER_WORD;
7062 }
7063 else
7064 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7065 value = ptr[offset];
7066
7067 if (bitpos < HOST_BITS_PER_WIDE_INT)
7068 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7069 else
7070 hi |= (unsigned HOST_WIDE_INT) value
7071 << (bitpos - HOST_BITS_PER_WIDE_INT);
7072 }
7073
7074 return build_int_cst_wide_type (type, lo, hi);
7075 }
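/* Worked example (illustrative, assuming a 16-bit integer type on a
   little-endian host and target): interpreting the bytes
   { 0x01, 0x02 } accumulates lo = (0x01 << 0) | (0x02 << 8) = 0x0201
   and hi = 0, so the function returns the INTEGER_CST 513. */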
7076
7077
7078 /* Subroutine of native_interpret_expr. Interpret the contents of
7079 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7080 If the buffer cannot be interpreted, return NULL_TREE. */
7081
7082 static tree
7083 native_interpret_real (tree type, unsigned char *ptr, int len)
7084 {
7085 enum machine_mode mode = TYPE_MODE (type);
7086 int total_bytes = GET_MODE_SIZE (mode);
7087 int byte, offset, word, words;
7088 unsigned char value;
7089 /* There are always 32 bits in each long, no matter the size of
7090 the host's long. We handle floating-point representations with
7091 up to 192 bits. */
7092 REAL_VALUE_TYPE r;
7093 long tmp[6];
7094
7095 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7096 if (total_bytes > len || total_bytes > 24)
7097 return NULL_TREE;
7098 words = total_bytes / UNITS_PER_WORD;
7099
7100 memset (tmp, 0, sizeof (tmp));
7101 for (byte = 0; byte < total_bytes; byte++)
7102 {
7103 int bitpos = byte * BITS_PER_UNIT;
7104 if (total_bytes > UNITS_PER_WORD)
7105 {
7106 word = byte / UNITS_PER_WORD;
7107 if (FLOAT_WORDS_BIG_ENDIAN)
7108 word = (words - 1) - word;
7109 offset = word * UNITS_PER_WORD;
7110 if (BYTES_BIG_ENDIAN)
7111 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7112 else
7113 offset += byte % UNITS_PER_WORD;
7114 }
7115 else
7116 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7117 value = ptr[offset];
7118
7119 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7120 }
7121
7122 real_from_target (&r, tmp, mode);
7123 return build_real (type, r);
7124 }
7125
7126
7127 /* Subroutine of native_interpret_expr. Interpret the contents of
7128 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7129 If the buffer cannot be interpreted, return NULL_TREE. */
7130
7131 static tree
7132 native_interpret_complex (tree type, unsigned char *ptr, int len)
7133 {
7134 tree etype, rpart, ipart;
7135 int size;
7136
7137 etype = TREE_TYPE (type);
7138 size = GET_MODE_SIZE (TYPE_MODE (etype));
7139 if (size * 2 > len)
7140 return NULL_TREE;
7141 rpart = native_interpret_expr (etype, ptr, size);
7142 if (!rpart)
7143 return NULL_TREE;
7144 ipart = native_interpret_expr (etype, ptr+size, size);
7145 if (!ipart)
7146 return NULL_TREE;
7147 return build_complex (type, rpart, ipart);
7148 }
7149
7150
7151 /* Subroutine of native_interpret_expr. Interpret the contents of
7152 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7153 If the buffer cannot be interpreted, return NULL_TREE. */
7154
7155 static tree
7156 native_interpret_vector (tree type, unsigned char *ptr, int len)
7157 {
7158 tree etype, elem, elements;
7159 int i, size, count;
7160
7161 etype = TREE_TYPE (type);
7162 size = GET_MODE_SIZE (TYPE_MODE (etype));
7163 count = TYPE_VECTOR_SUBPARTS (type);
7164 if (size * count > len)
7165 return NULL_TREE;
7166
7167 elements = NULL_TREE;
7168 for (i = count - 1; i >= 0; i--)
7169 {
7170 elem = native_interpret_expr (etype, ptr+(i*size), size);
7171 if (!elem)
7172 return NULL_TREE;
7173 elements = tree_cons (NULL_TREE, elem, elements);
7174 }
7175 return build_vector (type, elements);
7176 }
7177
7178
7179 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7180 the buffer PTR of length LEN as a constant of type TYPE. For
7181 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7182 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7183 return NULL_TREE. */
7184
7185 static tree
7186 native_interpret_expr (tree type, unsigned char *ptr, int len)
7187 {
7188 switch (TREE_CODE (type))
7189 {
7190 case INTEGER_TYPE:
7191 case ENUMERAL_TYPE:
7192 case BOOLEAN_TYPE:
7193 return native_interpret_int (type, ptr, len);
7194
7195 case REAL_TYPE:
7196 return native_interpret_real (type, ptr, len);
7197
7198 case COMPLEX_TYPE:
7199 return native_interpret_complex (type, ptr, len);
7200
7201 case VECTOR_TYPE:
7202 return native_interpret_vector (type, ptr, len);
7203
7204 default:
7205 return NULL_TREE;
7206 }
7207 }
7208
7209
7210 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7211 TYPE at compile-time. If we're unable to perform the conversion
7212 return NULL_TREE. */
7213
7214 static tree
7215 fold_view_convert_expr (tree type, tree expr)
7216 {
7217 /* We support up to 512-bit values (for V8DFmode). */
7218 unsigned char buffer[64];
7219 int len;
7220
7221 /* Check that the host and target are sane. */
7222 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7223 return NULL_TREE;
7224
7225 len = native_encode_expr (expr, buffer, sizeof (buffer));
7226 if (len == 0)
7227 return NULL_TREE;
7228
7229 return native_interpret_expr (type, buffer, len);
7230 }
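/* Usage sketch (illustrative only; the operand names are
   hypothetical): folding VIEW_CONVERT_EXPR <int> (1.0f) on an IEEE
   single-precision target round-trips the constant through the
   byte buffer:

     tree f = build_real (float_type_node, dconst1);
     tree i = fold_view_convert_expr (integer_type_node, f);
     // i is the INTEGER_CST 0x3f800000, the bit pattern of 1.0f

   NULL_TREE is returned for hosts or targets with non-8-bit bytes
   and for constants wider than the 64-byte buffer. */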
7231
7232
7233 /* Fold a unary expression of code CODE and type TYPE with operand
7234 OP0. Return the folded expression if folding is successful.
7235 Otherwise, return NULL_TREE. */
7236
7237 tree
7238 fold_unary (enum tree_code code, tree type, tree op0)
7239 {
7240 tree tem;
7241 tree arg0;
7242 enum tree_code_class kind = TREE_CODE_CLASS (code);
7243
7244 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7245 && TREE_CODE_LENGTH (code) == 1);
7246
7247 arg0 = op0;
7248 if (arg0)
7249 {
7250 if (code == NOP_EXPR || code == CONVERT_EXPR
7251 || code == FLOAT_EXPR || code == ABS_EXPR)
7252 {
7253 /* Don't use STRIP_NOPS, because signedness of argument type
7254 matters. */
7255 STRIP_SIGN_NOPS (arg0);
7256 }
7257 else
7258 {
7259 /* Strip any conversions that don't change the mode. This
7260 is safe for every expression, except for a comparison
7261 expression because its signedness is derived from its
7262 operands.
7263
7264 Note that this is done as an internal manipulation within
7265 the constant folder, in order to find the simplest
7266 representation of the arguments so that their form can be
7267 studied. In any case, the appropriate type conversions
7268 should be put back in the tree that will get out of the
7269 constant folder. */
7270 STRIP_NOPS (arg0);
7271 }
7272 }
7273
7274 if (TREE_CODE_CLASS (code) == tcc_unary)
7275 {
7276 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7277 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7278 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7279 else if (TREE_CODE (arg0) == COND_EXPR)
7280 {
7281 tree arg01 = TREE_OPERAND (arg0, 1);
7282 tree arg02 = TREE_OPERAND (arg0, 2);
7283 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7284 arg01 = fold_build1 (code, type, arg01);
7285 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7286 arg02 = fold_build1 (code, type, arg02);
7287 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7288 arg01, arg02);
7289
7290 /* If this was a conversion, and all we did was to move it
7291 inside the COND_EXPR, bring it back out. But leave it if
7292 it is a conversion from integer to integer and the
7293 result precision is no wider than a word since such a
7294 conversion is cheap and may be optimized away by combine,
7295 while it couldn't if it were outside the COND_EXPR. Then return
7296 so we don't get into an infinite recursion loop taking the
7297 conversion out and then back in. */
7298
7299 if ((code == NOP_EXPR || code == CONVERT_EXPR
7300 || code == NON_LVALUE_EXPR)
7301 && TREE_CODE (tem) == COND_EXPR
7302 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7303 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7304 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7305 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7306 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7307 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7308 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7309 && (INTEGRAL_TYPE_P
7310 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7311 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7312 || flag_syntax_only))
7313 tem = build1 (code, type,
7314 build3 (COND_EXPR,
7315 TREE_TYPE (TREE_OPERAND
7316 (TREE_OPERAND (tem, 1), 0)),
7317 TREE_OPERAND (tem, 0),
7318 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7319 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7320 return tem;
7321 }
7322 else if (COMPARISON_CLASS_P (arg0))
7323 {
7324 if (TREE_CODE (type) == BOOLEAN_TYPE)
7325 {
7326 arg0 = copy_node (arg0);
7327 TREE_TYPE (arg0) = type;
7328 return arg0;
7329 }
7330 else if (TREE_CODE (type) != INTEGER_TYPE)
7331 return fold_build3 (COND_EXPR, type, arg0,
7332 fold_build1 (code, type,
7333 integer_one_node),
7334 fold_build1 (code, type,
7335 integer_zero_node));
7336 }
7337 }
7338
7339 switch (code)
7340 {
7341 case NOP_EXPR:
7342 case FLOAT_EXPR:
7343 case CONVERT_EXPR:
7344 case FIX_TRUNC_EXPR:
7345 if (TREE_TYPE (op0) == type)
7346 return op0;
7347
7348 /* If we have (type) (a CMP b) and type is an integral type, return a
7349 new expression involving the new type. */
7350 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7351 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7352 TREE_OPERAND (op0, 1));
7353
7354 /* Handle cases of two conversions in a row. */
7355 if (TREE_CODE (op0) == NOP_EXPR
7356 || TREE_CODE (op0) == CONVERT_EXPR)
7357 {
7358 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7359 tree inter_type = TREE_TYPE (op0);
7360 int inside_int = INTEGRAL_TYPE_P (inside_type);
7361 int inside_ptr = POINTER_TYPE_P (inside_type);
7362 int inside_float = FLOAT_TYPE_P (inside_type);
7363 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7364 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7365 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7366 int inter_int = INTEGRAL_TYPE_P (inter_type);
7367 int inter_ptr = POINTER_TYPE_P (inter_type);
7368 int inter_float = FLOAT_TYPE_P (inter_type);
7369 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7370 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7371 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7372 int final_int = INTEGRAL_TYPE_P (type);
7373 int final_ptr = POINTER_TYPE_P (type);
7374 int final_float = FLOAT_TYPE_P (type);
7375 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7376 unsigned int final_prec = TYPE_PRECISION (type);
7377 int final_unsignedp = TYPE_UNSIGNED (type);
7378
7379 /* In addition to the cases of two conversions in a row
7380 handled below, if we are converting something to its own
7381 type via an object of identical or wider precision, neither
7382 conversion is needed. */
7383 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7384 && (((inter_int || inter_ptr) && final_int)
7385 || (inter_float && final_float))
7386 && inter_prec >= final_prec)
7387 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7388
7389 /* Likewise, if the intermediate and final types are either both
7390 float or both integer, we don't need the middle conversion if
7391 it is wider than the final type and doesn't change the signedness
7392 (for integers). Avoid this if the final type is a pointer
7393 since then we sometimes need the inner conversion. Likewise if
7394 the outer has a precision not equal to the size of its mode. */
7395 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7396 || (inter_float && inside_float)
7397 || (inter_vec && inside_vec))
7398 && inter_prec >= inside_prec
7399 && (inter_float || inter_vec
7400 || inter_unsignedp == inside_unsignedp)
7401 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7402 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7403 && ! final_ptr
7404 && (! final_vec || inter_prec == inside_prec))
7405 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7406
7407 /* If we have a sign-extension of a zero-extended value, we can
7408 replace that by a single zero-extension. */
7409 if (inside_int && inter_int && final_int
7410 && inside_prec < inter_prec && inter_prec < final_prec
7411 && inside_unsignedp && !inter_unsignedp)
7412 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7413
7414 /* Two conversions in a row are not needed unless:
7415 - some conversion is floating-point (overstrict for now), or
7416 - some conversion is a vector (overstrict for now), or
7417 - the intermediate type is narrower than both initial and
7418 final, or
7419 - the intermediate type and innermost type differ in signedness,
7420 and the outermost type is wider than the intermediate, or
7421 - the initial type is a pointer type and the precisions of the
7422 intermediate and final types differ, or
7423 - the final type is a pointer type and the precisions of the
7424 initial and intermediate types differ, or
7425 - the final type is a pointer type and the initial type is not, or
7426 - the initial type is a pointer to an array and the final type
7427 is not. */
7428 if (! inside_float && ! inter_float && ! final_float
7429 && ! inside_vec && ! inter_vec && ! final_vec
7430 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7431 && ! (inside_int && inter_int
7432 && inter_unsignedp != inside_unsignedp
7433 && inter_prec < final_prec)
7434 && ((inter_unsignedp && inter_prec > inside_prec)
7435 == (final_unsignedp && final_prec > inter_prec))
7436 && ! (inside_ptr && inter_prec != final_prec)
7437 && ! (final_ptr && inside_prec != inter_prec)
7438 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7439 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7440 && final_ptr == inside_ptr
7441 && ! (inside_ptr
7442 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7443 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7444 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7445 }
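/* Two illustrative instances of the rules above (the types are
   examples only): (long) (int) (signed char) c drops the
   intermediate conversion, because sign-extending the char
   directly to long gives the same value; and the sign-extension
   of a zero-extension (int) (short) (unsigned char) c becomes
   the single zero-extension (int) (unsigned char) c. */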
7446
7447 /* Handle (T *)&A.B.C for A being of type T and B and C
7448 living at offset zero. This occurs frequently in
7449 C++ upcasting and then accessing the base. */
7450 if (TREE_CODE (op0) == ADDR_EXPR
7451 && POINTER_TYPE_P (type)
7452 && handled_component_p (TREE_OPERAND (op0, 0)))
7453 {
7454 HOST_WIDE_INT bitsize, bitpos;
7455 tree offset;
7456 enum machine_mode mode;
7457 int unsignedp, volatilep;
7458 tree base = TREE_OPERAND (op0, 0);
7459 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7460 &mode, &unsignedp, &volatilep, false);
7461 /* If the reference was to a (constant) zero offset, we can use
7462 the address of the base if it has the same base type
7463 as the result type. */
7464 if (! offset && bitpos == 0
7465 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7466 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7467 return fold_convert (type, build_fold_addr_expr (base));
7468 }
7469
7470 if ((TREE_CODE (op0) == MODIFY_EXPR
7471 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7472 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7473 /* Detect assigning a bitfield. */
7474 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7475 && DECL_BIT_FIELD
7476 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7477 {
7478 /* Don't leave an assignment inside a conversion
7479 unless assigning a bitfield. */
7480 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7481 /* First do the assignment, then return converted constant. */
7482 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7483 TREE_NO_WARNING (tem) = 1;
7484 TREE_USED (tem) = 1;
7485 return tem;
7486 }
7487
7488 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7489 constant (if x has signed type, the sign bit cannot be set
7490 in c). This folds the extension into the BIT_AND_EXPR. */
7491 if (INTEGRAL_TYPE_P (type)
7492 && TREE_CODE (type) != BOOLEAN_TYPE
7493 && TREE_CODE (op0) == BIT_AND_EXPR
7494 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7495 {
7496 tree and = op0;
7497 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7498 int change = 0;
7499
7500 if (TYPE_UNSIGNED (TREE_TYPE (and))
7501 || (TYPE_PRECISION (type)
7502 <= TYPE_PRECISION (TREE_TYPE (and))))
7503 change = 1;
7504 else if (TYPE_PRECISION (TREE_TYPE (and1))
7505 <= HOST_BITS_PER_WIDE_INT
7506 && host_integerp (and1, 1))
7507 {
7508 unsigned HOST_WIDE_INT cst;
7509
7510 cst = tree_low_cst (and1, 1);
7511 cst &= (HOST_WIDE_INT) -1
7512 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7513 change = (cst == 0);
7514 #ifdef LOAD_EXTEND_OP
7515 if (change
7516 && !flag_syntax_only
7517 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7518 == ZERO_EXTEND))
7519 {
7520 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7521 and0 = fold_convert (uns, and0);
7522 and1 = fold_convert (uns, and1);
7523 }
7524 #endif
7525 }
7526 if (change)
7527 {
7528 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7529 TREE_INT_CST_HIGH (and1), 0,
7530 TREE_OVERFLOW (and1));
7531 return fold_build2 (BIT_AND_EXPR, type,
7532 fold_convert (type, and0), tem);
7533 }
7534 }
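/* Example of the fold above (illustrative): with int x, the
   narrowing conversion (unsigned char) (x & 0xff) becomes
   (unsigned char) x & 0xff, since the target precision (8 bits)
   does not exceed that of the BIT_AND_EXPR operand. */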
7535
7536 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7537 T2 being pointers to types of the same size. */
7538 if (POINTER_TYPE_P (type)
7539 && BINARY_CLASS_P (arg0)
7540 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7541 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7542 {
7543 tree arg00 = TREE_OPERAND (arg0, 0);
7544 tree t0 = type;
7545 tree t1 = TREE_TYPE (arg00);
7546 tree tt0 = TREE_TYPE (t0);
7547 tree tt1 = TREE_TYPE (t1);
7548 tree s0 = TYPE_SIZE (tt0);
7549 tree s1 = TYPE_SIZE (tt1);
7550
7551 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7552 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7553 TREE_OPERAND (arg0, 1));
7554 }
7555
7556 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7557 of the same precision, and X is an integer type not narrower than
7558 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7559 if (INTEGRAL_TYPE_P (type)
7560 && TREE_CODE (op0) == BIT_NOT_EXPR
7561 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7562 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7563 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7564 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7565 {
7566 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7567 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7568 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7569 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7570 }
7571
7572 tem = fold_convert_const (code, type, arg0);
7573 return tem ? tem : NULL_TREE;
7574
7575 case VIEW_CONVERT_EXPR:
7576 if (TREE_TYPE (op0) == type)
7577 return op0;
7578 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7579 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7580 return fold_view_convert_expr (type, op0);
7581
7582 case NEGATE_EXPR:
7583 tem = fold_negate_expr (arg0);
7584 if (tem)
7585 return fold_convert (type, tem);
7586 return NULL_TREE;
7587
7588 case ABS_EXPR:
7589 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7590 return fold_abs_const (arg0, type);
7591 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7592 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7593 /* Convert fabs((double)float) into (double)fabsf(float). */
7594 else if (TREE_CODE (arg0) == NOP_EXPR
7595 && TREE_CODE (type) == REAL_TYPE)
7596 {
7597 tree targ0 = strip_float_extensions (arg0);
7598 if (targ0 != arg0)
7599 return fold_convert (type, fold_build1 (ABS_EXPR,
7600 TREE_TYPE (targ0),
7601 targ0));
7602 }
7603 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7604 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7605 return arg0;
7606
7607 /* Strip sign ops from argument. */
7608 if (TREE_CODE (type) == REAL_TYPE)
7609 {
7610 tem = fold_strip_sign_ops (arg0);
7611 if (tem)
7612 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7613 }
7614 return NULL_TREE;
7615
7616 case CONJ_EXPR:
7617 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7618 return fold_convert (type, arg0);
7619 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7620 {
7621 tree itype = TREE_TYPE (type);
7622 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7623 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7624 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7625 }
7626 if (TREE_CODE (arg0) == COMPLEX_CST)
7627 {
7628 tree itype = TREE_TYPE (type);
7629 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7630 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7631 return build_complex (type, rpart, negate_expr (ipart));
7632 }
7633 if (TREE_CODE (arg0) == CONJ_EXPR)
7634 return fold_convert (type, TREE_OPERAND (arg0, 0));
7635 return NULL_TREE;
7636
7637 case BIT_NOT_EXPR:
7638 if (TREE_CODE (arg0) == INTEGER_CST)
7639 return fold_not_const (arg0, type);
7640 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7641 return TREE_OPERAND (arg0, 0);
7642 /* Convert ~ (-A) to A - 1. */
7643 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7644 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7645 build_int_cst (type, 1));
7646 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7647 else if (INTEGRAL_TYPE_P (type)
7648 && ((TREE_CODE (arg0) == MINUS_EXPR
7649 && integer_onep (TREE_OPERAND (arg0, 1)))
7650 || (TREE_CODE (arg0) == PLUS_EXPR
7651 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7652 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7653 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7654 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7655 && (tem = fold_unary (BIT_NOT_EXPR, type,
7656 fold_convert (type,
7657 TREE_OPERAND (arg0, 0)))))
7658 return fold_build2 (BIT_XOR_EXPR, type, tem,
7659 fold_convert (type, TREE_OPERAND (arg0, 1)));
7660 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7661 && (tem = fold_unary (BIT_NOT_EXPR, type,
7662 fold_convert (type,
7663 TREE_OPERAND (arg0, 1)))))
7664 return fold_build2 (BIT_XOR_EXPR, type,
7665 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7666
7667 return NULL_TREE;
7668
7669 case TRUTH_NOT_EXPR:
7670 /* The argument to invert_truthvalue must have Boolean type. */
7671 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7672 arg0 = fold_convert (boolean_type_node, arg0);
7673
7674 /* Note that the operand of this must be an int
7675 and its values must be 0 or 1.
7676 ("true" is a fixed value perhaps depending on the language,
7677 but we don't handle values other than 1 correctly yet.) */
7678 tem = fold_truth_not_expr (arg0);
7679 if (!tem)
7680 return NULL_TREE;
7681 return fold_convert (type, tem);
7682
7683 case REALPART_EXPR:
7684 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7685 return fold_convert (type, arg0);
7686 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7687 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7688 TREE_OPERAND (arg0, 1));
7689 if (TREE_CODE (arg0) == COMPLEX_CST)
7690 return fold_convert (type, TREE_REALPART (arg0));
7691 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7692 {
7693 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7694 tem = fold_build2 (TREE_CODE (arg0), itype,
7695 fold_build1 (REALPART_EXPR, itype,
7696 TREE_OPERAND (arg0, 0)),
7697 fold_build1 (REALPART_EXPR, itype,
7698 TREE_OPERAND (arg0, 1)));
7699 return fold_convert (type, tem);
7700 }
7701 if (TREE_CODE (arg0) == CONJ_EXPR)
7702 {
7703 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7704 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7705 return fold_convert (type, tem);
7706 }
7707 if (TREE_CODE (arg0) == CALL_EXPR)
7708 {
7709 tree fn = get_callee_fndecl (arg0);
7710 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7711 switch (DECL_FUNCTION_CODE (fn))
7712 {
7713 CASE_FLT_FN (BUILT_IN_CEXPI):
7714 fn = mathfn_built_in (type, BUILT_IN_COS);
7715 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7716
7717 default:;
7718 }
7719 }
7720 return NULL_TREE;
7721
7722 case IMAGPART_EXPR:
7723 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7724 return fold_convert (type, integer_zero_node);
7725 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7726 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7727 TREE_OPERAND (arg0, 0));
7728 if (TREE_CODE (arg0) == COMPLEX_CST)
7729 return fold_convert (type, TREE_IMAGPART (arg0));
7730 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7731 {
7732 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7733 tem = fold_build2 (TREE_CODE (arg0), itype,
7734 fold_build1 (IMAGPART_EXPR, itype,
7735 TREE_OPERAND (arg0, 0)),
7736 fold_build1 (IMAGPART_EXPR, itype,
7737 TREE_OPERAND (arg0, 1)));
7738 return fold_convert (type, tem);
7739 }
7740 if (TREE_CODE (arg0) == CONJ_EXPR)
7741 {
7742 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7743 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7744 return fold_convert (type, negate_expr (tem));
7745 }
7746 if (TREE_CODE (arg0) == CALL_EXPR)
7747 {
7748 tree fn = get_callee_fndecl (arg0);
7749 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7750 switch (DECL_FUNCTION_CODE (fn))
7751 {
7752 CASE_FLT_FN (BUILT_IN_CEXPI):
7753 fn = mathfn_built_in (type, BUILT_IN_SIN);
7754 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7755
7756 default:;
7757 }
7758 }
7759 return NULL_TREE;
7760
7761 default:
7762 return NULL_TREE;
7763 } /* switch (code) */
7764 }
7765
7766 /* Fold a binary expression of code CODE and type TYPE with operands
7767 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7768 Return the folded expression if folding is successful. Otherwise,
7769 return NULL_TREE. */
7770
7771 static tree
7772 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7773 {
7774 enum tree_code compl_code;
7775
7776 if (code == MIN_EXPR)
7777 compl_code = MAX_EXPR;
7778 else if (code == MAX_EXPR)
7779 compl_code = MIN_EXPR;
7780 else
7781 gcc_unreachable ();
7782
7783 /* MIN (MAX (a, b), b) == b. */
7784 if (TREE_CODE (op0) == compl_code
7785 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7786 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7787
7788 /* MIN (MAX (b, a), b) == b. */
7789 if (TREE_CODE (op0) == compl_code
7790 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7791 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7792 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7793
7794 /* MIN (a, MAX (a, b)) == a. */
7795 if (TREE_CODE (op1) == compl_code
7796 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7797 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7798 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7799
7800 /* MIN (a, MAX (b, a)) == a. */
7801 if (TREE_CODE (op1) == compl_code
7802 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7803 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7804 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7805
7806 return NULL_TREE;
7807 }
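/* Illustrative usage (operand names are hypothetical): given
   max_ab = MAX_EXPR <a, b>, the call
   fold_minmax (MIN_EXPR, integer_type_node, max_ab, b)
   matches the first pattern above and returns b, with
   omit_one_operand preserving any side effects of a. */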
7808
7809 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7810 by changing CODE to reduce the magnitude of constants involved in
7811 ARG0 of the comparison.
7812 Returns a canonicalized comparison tree if a simplification was
7813 possible, otherwise returns NULL_TREE. */
7814
7815 static tree
7816 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7817 tree arg0, tree arg1)
7818 {
7819 enum tree_code code0 = TREE_CODE (arg0);
7820 tree t, cst0 = NULL_TREE;
7821 int sgn0;
7822 bool swap = false;
7823
7824 /* Match A +- CST code arg1 and CST code arg1. */
7825 if (!(((code0 == MINUS_EXPR
7826 || code0 == PLUS_EXPR)
7827 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7828 || code0 == INTEGER_CST))
7829 return NULL_TREE;
7830
7831 /* Identify the constant in arg0 and its sign. */
7832 if (code0 == INTEGER_CST)
7833 cst0 = arg0;
7834 else
7835 cst0 = TREE_OPERAND (arg0, 1);
7836 sgn0 = tree_int_cst_sgn (cst0);
7837
7838 /* Overflowed constants and zero will cause problems. */
7839 if (integer_zerop (cst0)
7840 || TREE_OVERFLOW (cst0))
7841 return NULL_TREE;
7842
7843 /* See if we can reduce the magnitude of the constant in
7844 arg0 by changing the comparison code. */
7845 if (code0 == INTEGER_CST)
7846 {
7847 /* CST <= arg1 -> CST-1 < arg1. */
7848 if (code == LE_EXPR && sgn0 == 1)
7849 code = LT_EXPR;
7850 /* -CST < arg1 -> -CST-1 <= arg1. */
7851 else if (code == LT_EXPR && sgn0 == -1)
7852 code = LE_EXPR;
7853 /* CST > arg1 -> CST-1 >= arg1. */
7854 else if (code == GT_EXPR && sgn0 == 1)
7855 code = GE_EXPR;
7856 /* -CST >= arg1 -> -CST-1 > arg1. */
7857 else if (code == GE_EXPR && sgn0 == -1)
7858 code = GT_EXPR;
7859 else
7860 return NULL_TREE;
7861 /* arg1 code' CST' might be more canonical. */
7862 swap = true;
7863 }
7864 else
7865 {
7866 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7867 if (code == LT_EXPR
7868 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7869 code = LE_EXPR;
7870 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7871 else if (code == GT_EXPR
7872 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7873 code = GE_EXPR;
7874 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7875 else if (code == LE_EXPR
7876 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7877 code = LT_EXPR;
7878 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7879 else if (code == GE_EXPR
7880 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7881 code = GT_EXPR;
7882 else
7883 return NULL_TREE;
7884 }
7885
7886 /* Now build the constant reduced in magnitude. */
7887 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7888 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7889 if (code0 != INTEGER_CST)
7890 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7891
7892 /* If swapping might yield a more canonical form, do so. */
7893 if (swap)
7894 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7895 else
7896 return fold_build2 (code, type, t, arg1);
7897 }
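/* Two illustrative canonicalizations (assuming a signed int with
   undefined overflow, i.e. no -fwrapv/-ftrapv): the constant-first
   comparison 3 <= x becomes x > 2 (LE is turned into LT, the
   constant is reduced to 2, and the operands are swapped), and
   x + 2 <= y becomes x + 1 < y. */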
7898
7899 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7900 overflow further. Try to decrease the magnitude of constants involved
7901 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7902 and put sole constants at the second argument position.
7903 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7904
7905 static tree
7906 maybe_canonicalize_comparison (enum tree_code code, tree type,
7907 tree arg0, tree arg1)
7908 {
7909 tree t;
7910
7911 /* In principle pointers also have undefined overflow behavior,
7912 but that causes problems elsewhere. */
7913 if ((flag_wrapv || flag_trapv)
7914 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7915 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7916 return NULL_TREE;
7917
7918 /* Try canonicalization by simplifying arg0. */
7919 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7920 if (t)
7921 return t;
7922
7923 /* Try canonicalization by simplifying arg1 using the swapped
7924 comparison. */
7925 code = swap_tree_comparison (code);
7926 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7927 }
7928
7929 /* Subroutine of fold_binary. This routine performs all of the
7930 transformations that are common to the equality/inequality
7931 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7932 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7933 fold_binary should call fold_binary. Fold a comparison with
7934 tree code CODE and type TYPE with operands OP0 and OP1. Return
7935 the folded comparison or NULL_TREE. */
7936
7937 static tree
7938 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7939 {
7940 tree arg0, arg1, tem;
7941
7942 arg0 = op0;
7943 arg1 = op1;
7944
7945 STRIP_SIGN_NOPS (arg0);
7946 STRIP_SIGN_NOPS (arg1);
7947
7948 tem = fold_relational_const (code, type, arg0, arg1);
7949 if (tem != NULL_TREE)
7950 return tem;
7951
7952 /* If one arg is a real or integer constant, put it last. */
7953 if (tree_swap_operands_p (arg0, arg1, true))
7954 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7955
7956 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7957 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7958 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7959 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7960 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7961 && !(flag_wrapv || flag_trapv))
7962 && (TREE_CODE (arg1) == INTEGER_CST
7963 && !TREE_OVERFLOW (arg1)))
7964 {
7965 tree const1 = TREE_OPERAND (arg0, 1);
7966 tree const2 = arg1;
7967 tree variable = TREE_OPERAND (arg0, 0);
7968 tree lhs;
7969 int lhs_add;
7970 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7971
7972 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7973 TREE_TYPE (arg1), const2, const1);
7974 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7975 && (TREE_CODE (lhs) != INTEGER_CST
7976 || !TREE_OVERFLOW (lhs)))
7977 return fold_build2 (code, type, variable, lhs);
7978 }
7979
7980 /* For comparisons of pointers we can decompose it to a compile time
7981 comparison of the base objects and the offsets into the object.
7982 This requires at least one operand being an ADDR_EXPR to do more
7983 than the operand_equal_p test below. */
7984 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7985 && (TREE_CODE (arg0) == ADDR_EXPR
7986 || TREE_CODE (arg1) == ADDR_EXPR))
7987 {
7988 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
7989 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
7990 enum machine_mode mode;
7991 int volatilep, unsignedp;
7992 bool indirect_base0 = false;
7993
7994 /* Get base and offset for the access. Strip ADDR_EXPR for
7995 get_inner_reference, but put it back by stripping INDIRECT_REF
7996 off the base object if possible. */
7997 base0 = arg0;
7998 if (TREE_CODE (arg0) == ADDR_EXPR)
7999 {
8000 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8001 &bitsize, &bitpos0, &offset0, &mode,
8002 &unsignedp, &volatilep, false);
8003 if (TREE_CODE (base0) == INDIRECT_REF)
8004 base0 = TREE_OPERAND (base0, 0);
8005 else
8006 indirect_base0 = true;
8007 }
8008
8009 base1 = arg1;
8010 if (TREE_CODE (arg1) == ADDR_EXPR)
8011 {
8012 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8013 &bitsize, &bitpos1, &offset1, &mode,
8014 &unsignedp, &volatilep, false);
8015 /* We have to make sure base1 is indirect or non-indirect,
8016 just the same as base0 was. */
8017 if (TREE_CODE (base1) == INDIRECT_REF
8018 && !indirect_base0)
8019 base1 = TREE_OPERAND (base1, 0);
8020 else if (!indirect_base0)
8021 base1 = NULL_TREE;
8022 }
8023 else if (indirect_base0)
8024 base1 = NULL_TREE;
8025
8026 /* If we have equivalent bases we might be able to simplify. */
8027 if (base0 && base1
8028 && operand_equal_p (base0, base1, 0))
8029 {
8030 /* We can fold this expression to a constant if the non-constant
8031 offset parts are equal. */
8032 if (offset0 == offset1
8033 || (offset0 && offset1
8034 && operand_equal_p (offset0, offset1, 0)))
8035 {
8036 switch (code)
8037 {
8038 case EQ_EXPR:
8039 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8040 case NE_EXPR:
8041 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8042 case LT_EXPR:
8043 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8044 case LE_EXPR:
8045 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8046 case GE_EXPR:
8047 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8048 case GT_EXPR:
8049 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8050 default:;
8051 }
8052 }
8053 /* We can simplify the comparison to a comparison of the variable
8054 offset parts if the constant offset parts are equal.
8055 Be careful to use signed size type here because otherwise we
8056 mess with array offsets in the wrong way. This is possible
8057 because pointer arithmetic is restricted to remain within an
8058 object and overflow on pointer differences is undefined as of
8059 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8060 else if (bitpos0 == bitpos1)
8061 {
8062 tree signed_size_type_node;
8063 signed_size_type_node = signed_type_for (size_type_node);
8064
8065 /* By converting to signed size type we cover middle-end pointer
8066 arithmetic which operates on unsigned pointer types of size
8067 type size and ARRAY_REF offsets which are properly sign or
8068 zero extended from their type in case it is narrower than
8069 size type. */
8070 if (offset0 == NULL_TREE)
8071 offset0 = build_int_cst (signed_size_type_node, 0);
8072 else
8073 offset0 = fold_convert (signed_size_type_node, offset0);
8074 if (offset1 == NULL_TREE)
8075 offset1 = build_int_cst (signed_size_type_node, 0);
8076 else
8077 offset1 = fold_convert (signed_size_type_node, offset1);
8078
8079 return fold_build2 (code, type, offset0, offset1);
8080 }
8081 }
8082 }
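/* Illustrative fold enabled by the code above: for
   struct S { int x; int y; } s; the comparison &s.x < &s.y has
   equal bases and no variable offsets, so it folds to a
   comparison of the constant bit positions, 0 < 32 on a typical
   32-bit-int target, i.e. to 1. */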
8083
8084 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8085 same object, then we can fold this to a comparison of the two offsets in
8086 signed size type. This is possible because pointer arithmetic is
8087 restricted to remain within an object and overflow on pointer differences
8088 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8089 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8090 && !flag_wrapv && !flag_trapv)
8091 {
8092 tree base0, offset0, base1, offset1;
8093
8094 if (extract_array_ref (arg0, &base0, &offset0)
8095 && extract_array_ref (arg1, &base1, &offset1)
8096 && operand_equal_p (base0, base1, 0))
8097 {
8098 tree signed_size_type_node;
8099 signed_size_type_node = signed_type_for (size_type_node);
8100
8101 /* By converting to signed size type we cover middle-end pointer
8102 arithmetic which operates on unsigned pointer types of size
8103 type size and ARRAY_REF offsets which are properly sign or
8104 zero extended from their type in case it is narrower than
8105 size type. */
8106 if (offset0 == NULL_TREE)
8107 offset0 = build_int_cst (signed_size_type_node, 0);
8108 else
8109 offset0 = fold_convert (signed_size_type_node, offset0);
8110 if (offset1 == NULL_TREE)
8111 offset1 = build_int_cst (signed_size_type_node, 0);
8112 else
8113 offset1 = fold_convert (signed_size_type_node, offset1);
8114
8115 return fold_build2 (code, type, offset0, offset1);
8116 }
8117 }
8118
8119 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8120 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8121 the resulting offset is smaller in absolute value than the
8122 original one. */
8123 if (!(flag_wrapv || flag_trapv)
8124 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8125 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8126 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8127 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8128 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8129 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8130 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8131 {
8132 tree const1 = TREE_OPERAND (arg0, 1);
8133 tree const2 = TREE_OPERAND (arg1, 1);
8134 tree variable1 = TREE_OPERAND (arg0, 0);
8135 tree variable2 = TREE_OPERAND (arg1, 0);
8136 tree cst;
8137
8138 /* Put the constant on the side where it doesn't overflow and is
8139 of lower absolute value than before. */
8140 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8141 ? MINUS_EXPR : PLUS_EXPR,
8142 const2, const1, 0);
8143 if (!TREE_OVERFLOW (cst)
8144 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8145 return fold_build2 (code, type,
8146 variable1,
8147 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8148 variable2, cst));
8149
8150 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8151 ? MINUS_EXPR : PLUS_EXPR,
8152 const1, const2, 0);
8153 if (!TREE_OVERFLOW (cst)
8154 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8155 return fold_build2 (code, type,
8156 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8157 variable1, cst),
8158 variable2);
8159 }
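/* Illustrative instance (signed operands with undefined overflow
   assumed): x + 2 < y + 5 is rewritten as x < y + 3, because
   moving the smaller constant across the comparison yields an
   offset of smaller absolute value without overflow. */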
8160
8161 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8162 signed arithmetic case. That form is created by the compiler
8163 often enough for folding it to be of value. One example is in
8164 computing loop trip counts after Operator Strength Reduction. */
8165 if (!(flag_wrapv || flag_trapv)
8166 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8167 && TREE_CODE (arg0) == MULT_EXPR
8168 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8169 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8170 && integer_zerop (arg1))
8171 {
8172 tree const1 = TREE_OPERAND (arg0, 1);
8173 tree const2 = arg1; /* zero */
8174 tree variable1 = TREE_OPERAND (arg0, 0);
8175 enum tree_code cmp_code = code;
8176
8177 gcc_assert (!integer_zerop (const1));
8178
8179 /* If const1 is negative we swap the sense of the comparison. */
8180 if (tree_int_cst_sgn (const1) < 0)
8181 cmp_code = swap_tree_comparison (cmp_code);
8182
8183 return fold_build2 (cmp_code, type, variable1, const2);
8184 }
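/* Illustrative instances (signed x, overflow undefined): x * 4 < 0
   folds to x < 0, while x * -4 < 0 swaps the comparison sense and
   folds to x > 0. */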
8185
8186 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8187 if (tem)
8188 return tem;
8189
8190 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8191 {
8192 tree targ0 = strip_float_extensions (arg0);
8193 tree targ1 = strip_float_extensions (arg1);
8194 tree newtype = TREE_TYPE (targ0);
8195
8196 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8197 newtype = TREE_TYPE (targ1);
8198
8199 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8200 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8201 return fold_build2 (code, type, fold_convert (newtype, targ0),
8202 fold_convert (newtype, targ1));
8203
8204 /* (-a) CMP (-b) -> b CMP a */
8205 if (TREE_CODE (arg0) == NEGATE_EXPR
8206 && TREE_CODE (arg1) == NEGATE_EXPR)
8207 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8208 TREE_OPERAND (arg0, 0));
8209
8210 if (TREE_CODE (arg1) == REAL_CST)
8211 {
8212 REAL_VALUE_TYPE cst;
8213 cst = TREE_REAL_CST (arg1);
8214
8215 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8216 if (TREE_CODE (arg0) == NEGATE_EXPR)
8217 return fold_build2 (swap_tree_comparison (code), type,
8218 TREE_OPERAND (arg0, 0),
8219 build_real (TREE_TYPE (arg1),
8220 REAL_VALUE_NEGATE (cst)));
8221
8222 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8223 /* a CMP (-0) -> a CMP 0 */
8224 if (REAL_VALUE_MINUS_ZERO (cst))
8225 return fold_build2 (code, type, arg0,
8226 build_real (TREE_TYPE (arg1), dconst0));
8227
8228 /* x != NaN is always true, other ops are always false. */
8229 if (REAL_VALUE_ISNAN (cst)
8230 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8231 {
8232 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8233 return omit_one_operand (type, tem, arg0);
8234 }
8235
8236 /* Fold comparisons against infinity. */
8237 if (REAL_VALUE_ISINF (cst))
8238 {
8239 tem = fold_inf_compare (code, type, arg0, arg1);
8240 if (tem != NULL_TREE)
8241 return tem;
8242 }
8243 }
8244
8245 /* If this is a comparison of a real constant with a PLUS_EXPR
8246 or a MINUS_EXPR of a real constant, we can convert it into a
8247 comparison with a revised real constant as long as no overflow
8248 occurs when unsafe_math_optimizations are enabled. */
8249 if (flag_unsafe_math_optimizations
8250 && TREE_CODE (arg1) == REAL_CST
8251 && (TREE_CODE (arg0) == PLUS_EXPR
8252 || TREE_CODE (arg0) == MINUS_EXPR)
8253 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8254 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8255 ? MINUS_EXPR : PLUS_EXPR,
8256 arg1, TREE_OPERAND (arg0, 1), 0))
8257 && !TREE_OVERFLOW (tem))
8258 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8259
8260 /* Likewise, we can simplify a comparison of a real constant with
8261 a MINUS_EXPR whose first operand is also a real constant, i.e.
8262 (c1 - x) < c2 becomes x > c1-c2. */
8263 if (flag_unsafe_math_optimizations
8264 && TREE_CODE (arg1) == REAL_CST
8265 && TREE_CODE (arg0) == MINUS_EXPR
8266 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8267 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8268 arg1, 0))
8269 && !TREE_OVERFLOW (tem))
8270 return fold_build2 (swap_tree_comparison (code), type,
8271 TREE_OPERAND (arg0, 1), tem);
8272
8273 /* Fold comparisons against built-in math functions. */
8274 if (TREE_CODE (arg1) == REAL_CST
8275 && flag_unsafe_math_optimizations
8276 && ! flag_errno_math)
8277 {
8278 enum built_in_function fcode = builtin_mathfn_code (arg0);
8279
8280 if (fcode != END_BUILTINS)
8281 {
8282 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8283 if (tem != NULL_TREE)
8284 return tem;
8285 }
8286 }
8287 }
8288
8289 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8290 if (TREE_CONSTANT (arg1)
8291 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8292 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8293 /* This optimization is invalid for ordered comparisons
8294 if CONST+INCR overflows or if foo+incr might overflow.
8295 This optimization is invalid for floating point due to rounding.
8296 For pointer types we assume overflow doesn't happen. */
8297 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8298 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8299 && (code == EQ_EXPR || code == NE_EXPR))))
8300 {
8301 tree varop, newconst;
8302
8303 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8304 {
8305 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8306 arg1, TREE_OPERAND (arg0, 1));
8307 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8308 TREE_OPERAND (arg0, 0),
8309 TREE_OPERAND (arg0, 1));
8310 }
8311 else
8312 {
8313 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8314 arg1, TREE_OPERAND (arg0, 1));
8315 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8316 TREE_OPERAND (arg0, 0),
8317 TREE_OPERAND (arg0, 1));
8318 }
8319
8320
8321 /* If VAROP is a reference to a bitfield, we must mask
8322 the constant by the width of the field. */
8323 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8324 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8325 && host_integerp (DECL_SIZE (TREE_OPERAND
8326 (TREE_OPERAND (varop, 0), 1)), 1))
8327 {
8328 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8329 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8330 tree folded_compare, shift;
8331
8332 /* First check whether the comparison would always come out
8333 the same. If we didn't check that, the masking would
8334 change the meaning of the comparison. */
8335 folded_compare = fold_build2 (code, type,
8336 TREE_OPERAND (varop, 0), arg1);
8337 if (TREE_CODE (folded_compare) == INTEGER_CST)
8338 return omit_one_operand (type, folded_compare, varop);
8339
8340 shift = build_int_cst (NULL_TREE,
8341 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8342 shift = fold_convert (TREE_TYPE (varop), shift);
8343 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8344 newconst, shift);
8345 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8346 newconst, shift);
8347 }
8348
8349 return fold_build2 (code, type, varop, newconst);
8350 }
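/* Illustrative instance: for int i, the equality i++ == 5 is
   rewritten as ++i == 6, folding the increment into the constant
   so the comparison is done against the updated value. */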
8351
8352 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8353 && (TREE_CODE (arg0) == NOP_EXPR
8354 || TREE_CODE (arg0) == CONVERT_EXPR))
8355 {
8356 /* If we are widening one operand of an integer comparison,
8357 see if the other operand is similarly being widened. Perhaps we
8358 can do the comparison in the narrower type. */
8359 tem = fold_widened_comparison (code, type, arg0, arg1);
8360 if (tem)
8361 return tem;
8362
8363 /* Or if we are changing signedness. */
8364 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8365 if (tem)
8366 return tem;
8367 }
8368
8369 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8370 constant, we can simplify it. */
8371 if (TREE_CODE (arg1) == INTEGER_CST
8372 && (TREE_CODE (arg0) == MIN_EXPR
8373 || TREE_CODE (arg0) == MAX_EXPR)
8374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8375 {
8376 tem = optimize_minmax_comparison (code, type, op0, op1);
8377 if (tem)
8378 return tem;
8379 }
8380
8381 /* Simplify comparison of something with itself. (For IEEE
8382 floating-point, we can only do some of these simplifications.) */
8383 if (operand_equal_p (arg0, arg1, 0))
8384 {
8385 switch (code)
8386 {
8387 case EQ_EXPR:
8388 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8389 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8390 return constant_boolean_node (1, type);
8391 break;
8392
8393 case GE_EXPR:
8394 case LE_EXPR:
8395 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8396 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8397 return constant_boolean_node (1, type);
8398 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8399
8400 case NE_EXPR:
8401 /* For NE, we can only do this simplification if integer
8402 or we don't honor IEEE floating point NaNs. */
8403 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8404 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8405 break;
8406 /* ... fall through ... */
8407 case GT_EXPR:
8408 case LT_EXPR:
8409 return constant_boolean_node (0, type);
8410 default:
8411 gcc_unreachable ();
8412 }
8413 }
8414
8415 /* If we are comparing an expression that just has comparisons
8416 of two integer values, arithmetic expressions of those comparisons,
8417 and constants, we can simplify it. There are only three cases
8418 to check: the two values can either be equal, the first can be
8419 greater, or the second can be greater. Fold the expression for
8420 those three values. Since each value must be 0 or 1, we have
8421 eight possibilities, each of which corresponds to the constant 0
8422 or 1 or one of the six possible comparisons.
8423
8424 This handles common cases like (a > b) == 0 but also handles
8425 expressions like ((x > y) - (y > x)) > 0, which supposedly
8426 occur in macroized code. */
8427
8428 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8429 {
8430 tree cval1 = 0, cval2 = 0;
8431 int save_p = 0;
8432
8433 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8434 /* Don't handle degenerate cases here; they should already
8435 have been handled anyway. */
8436 && cval1 != 0 && cval2 != 0
8437 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8438 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8439 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8440 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8441 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8442 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8443 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8444 {
8445 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8446 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8447
8448 /* We can't just pass T to eval_subst in case cval1 or cval2
8449 was the same as ARG1. */
8450
8451 tree high_result
8452 = fold_build2 (code, type,
8453 eval_subst (arg0, cval1, maxval,
8454 cval2, minval),
8455 arg1);
8456 tree equal_result
8457 = fold_build2 (code, type,
8458 eval_subst (arg0, cval1, maxval,
8459 cval2, maxval),
8460 arg1);
8461 tree low_result
8462 = fold_build2 (code, type,
8463 eval_subst (arg0, cval1, minval,
8464 cval2, maxval),
8465 arg1);
8466
8467 /* All three of these results should be 0 or 1. Confirm they are.
8468 Then use those values to select the proper code to use. */
8469
8470 if (TREE_CODE (high_result) == INTEGER_CST
8471 && TREE_CODE (equal_result) == INTEGER_CST
8472 && TREE_CODE (low_result) == INTEGER_CST)
8473 {
8474 /* Make a 3-bit mask with the high-order bit being the
8475 value for `>', the next for `=', and the low for `<'. */
8476 switch ((integer_onep (high_result) * 4)
8477 + (integer_onep (equal_result) * 2)
8478 + integer_onep (low_result))
8479 {
8480 case 0:
8481 /* Always false. */
8482 return omit_one_operand (type, integer_zero_node, arg0);
8483 case 1:
8484 code = LT_EXPR;
8485 break;
8486 case 2:
8487 code = EQ_EXPR;
8488 break;
8489 case 3:
8490 code = LE_EXPR;
8491 break;
8492 case 4:
8493 code = GT_EXPR;
8494 break;
8495 case 5:
8496 code = NE_EXPR;
8497 break;
8498 case 6:
8499 code = GE_EXPR;
8500 break;
8501 case 7:
8502 /* Always true. */
8503 return omit_one_operand (type, integer_one_node, arg0);
8504 }
8505
8506 if (save_p)
8507 return save_expr (build2 (code, type, cval1, cval2));
8508 return fold_build2 (code, type, cval1, cval2);
8509 }
8510 }
8511 }
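/* Worked instance of the three-value analysis (illustrative):
   for (a > b) == 0 the three substitutions give high_result = 0,
   equal_result = 1 and low_result = 1, i.e. mask 3, so the whole
   expression folds to a <= b. */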
8512
8513 /* Fold a comparison of the address of COMPONENT_REFs with the same
8514 type and component to a comparison of the address of the base
8515 object. In short, &x->a OP &y->a to x OP y and
8516 &x->a OP &y.a to x OP &y */
8517 if (TREE_CODE (arg0) == ADDR_EXPR
8518 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8519 && TREE_CODE (arg1) == ADDR_EXPR
8520 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8521 {
8522 tree cref0 = TREE_OPERAND (arg0, 0);
8523 tree cref1 = TREE_OPERAND (arg1, 0);
8524 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8525 {
8526 tree op0 = TREE_OPERAND (cref0, 0);
8527 tree op1 = TREE_OPERAND (cref1, 0);
8528 return fold_build2 (code, type,
8529 build_fold_addr_expr (op0),
8530 build_fold_addr_expr (op1));
8531 }
8532 }
8533
8534 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8535 into a single range test. */
8536 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8537 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8538 && TREE_CODE (arg1) == INTEGER_CST
8539 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8540 && !integer_zerop (TREE_OPERAND (arg0, 1))
8541 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8542 && !TREE_OVERFLOW (arg1))
8543 {
8544 tem = fold_div_compare (code, type, arg0, arg1);
8545 if (tem != NULL_TREE)
8546 return tem;
8547 }
8548
8549 /* Fold ~X op ~Y as Y op X. */
8550 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8551 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8552 return fold_build2 (code, type,
8553 TREE_OPERAND (arg1, 0),
8554 TREE_OPERAND (arg0, 0));
8555
8556 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8557 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8558 && TREE_CODE (arg1) == INTEGER_CST)
8559 return fold_build2 (swap_tree_comparison (code), type,
8560 TREE_OPERAND (arg0, 0),
8561 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8562
8563 return NULL_TREE;
8564 }
8565
8566
8567 /* Subroutine of fold_binary. Optimize complex multiplications of the
8568 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8569 argument EXPR represents the expression "z" of type TYPE. */
8570
8571 static tree
8572 fold_mult_zconjz (tree type, tree expr)
8573 {
8574 tree itype = TREE_TYPE (type);
8575 tree rpart, ipart, tem;
8576
8577 if (TREE_CODE (expr) == COMPLEX_EXPR)
8578 {
8579 rpart = TREE_OPERAND (expr, 0);
8580 ipart = TREE_OPERAND (expr, 1);
8581 }
8582 else if (TREE_CODE (expr) == COMPLEX_CST)
8583 {
8584 rpart = TREE_REALPART (expr);
8585 ipart = TREE_IMAGPART (expr);
8586 }
8587 else
8588 {
8589 expr = save_expr (expr);
8590 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8591 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8592 }
8593
8594 rpart = save_expr (rpart);
8595 ipart = save_expr (ipart);
8596 tem = fold_build2 (PLUS_EXPR, itype,
8597 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8598 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8599 return fold_build2 (COMPLEX_EXPR, type, tem,
8600 fold_convert (itype, integer_zero_node));
8601 }
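/* Worked example (illustrative): for z = 3 + 4i the fold above
   yields COMPLEX_EXPR <3*3 + 4*4, 0>, i.e. the real value 25 with
   a zero imaginary part, matching z * conj(z) = |z|^2. */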
8602
8603
8604 /* Fold a binary expression of code CODE and type TYPE with operands
8605 OP0 and OP1. Return the folded expression if folding is
8606 successful. Otherwise, return NULL_TREE. */
8607
8608 tree
8609 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8610 {
8611 enum tree_code_class kind = TREE_CODE_CLASS (code);
8612 tree arg0, arg1, tem;
8613 tree t1 = NULL_TREE;
8614
8615 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8616 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8617 && TREE_CODE_LENGTH (code) == 2
8618 && op0 != NULL_TREE
8619 && op1 != NULL_TREE);
8620
8621 arg0 = op0;
8622 arg1 = op1;
8623
8624 /* Strip any conversions that don't change the mode. This is
8625 safe for every expression, except for a comparison expression
8626 because its signedness is derived from its operands. So, in
8627 the latter case, only strip conversions that don't change the
8628 signedness.
8629
8630 Note that this is done as an internal manipulation within the
8631 constant folder, in order to find the simplest representation
8632 of the arguments so that their form can be studied. In any
8633 cases, the appropriate type conversions should be put back in
8634 the tree that will get out of the constant folder. */
8635
8636 if (kind == tcc_comparison)
8637 {
8638 STRIP_SIGN_NOPS (arg0);
8639 STRIP_SIGN_NOPS (arg1);
8640 }
8641 else
8642 {
8643 STRIP_NOPS (arg0);
8644 STRIP_NOPS (arg1);
8645 }
8646
8647 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8648 constant but we can't do arithmetic on them. */
8649 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8650 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8651 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8652 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8653 {
8654 if (kind == tcc_binary)
8655 tem = const_binop (code, arg0, arg1, 0);
8656 else if (kind == tcc_comparison)
8657 tem = fold_relational_const (code, type, arg0, arg1);
8658 else
8659 tem = NULL_TREE;
8660
8661 if (tem != NULL_TREE)
8662 {
8663 if (TREE_TYPE (tem) != type)
8664 tem = fold_convert (type, tem);
8665 return tem;
8666 }
8667 }
8668
8669 /* If this is a commutative operation, and ARG0 is a constant, move it
8670 to ARG1 to reduce the number of tests below. */
8671 if (commutative_tree_code (code)
8672 && tree_swap_operands_p (arg0, arg1, true))
8673 return fold_build2 (code, type, op1, op0);
8674
8675 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8676
8677 First check for cases where an arithmetic operation is applied to a
8678 compound, conditional, or comparison operation. Push the arithmetic
8679 operation inside the compound or conditional to see if any folding
8680 can then be done. Convert comparison to conditional for this purpose.
8681      This also optimizes non-constant cases that used to be done in
8682 expand_expr.
8683
8684 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8685      where one of the operands is a truth value and the other is a truth
8686      value or a BIT_AND_EXPR with the constant 1.  In that case, the
8687 code below would make the expression more complex. Change it to a
8688 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8689 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
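  /* Illustrative examples of the conversion: (a < b) | (c == d), with
     both operands truth values, becomes TRUTH_OR_EXPR (a < b, c == d);
     and (a < b) == (c < d) becomes the inversion of
     TRUTH_XOR_EXPR (a < b, c < d).  */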
8690
8691 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8692 || code == EQ_EXPR || code == NE_EXPR)
8693 && ((truth_value_p (TREE_CODE (arg0))
8694 && (truth_value_p (TREE_CODE (arg1))
8695 || (TREE_CODE (arg1) == BIT_AND_EXPR
8696 && integer_onep (TREE_OPERAND (arg1, 1)))))
8697 || (truth_value_p (TREE_CODE (arg1))
8698 && (truth_value_p (TREE_CODE (arg0))
8699 || (TREE_CODE (arg0) == BIT_AND_EXPR
8700 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8701 {
8702 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8703 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8704 : TRUTH_XOR_EXPR,
8705 boolean_type_node,
8706 fold_convert (boolean_type_node, arg0),
8707 fold_convert (boolean_type_node, arg1));
8708
8709 if (code == EQ_EXPR)
8710 tem = invert_truthvalue (tem);
8711
8712 return fold_convert (type, tem);
8713 }
8714
8715 if (TREE_CODE_CLASS (code) == tcc_binary
8716 || TREE_CODE_CLASS (code) == tcc_comparison)
8717 {
8718 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8719 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8720 fold_build2 (code, type,
8721 TREE_OPERAND (arg0, 1), op1));
8722 if (TREE_CODE (arg1) == COMPOUND_EXPR
8723 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8724 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8725 fold_build2 (code, type,
8726 op0, TREE_OPERAND (arg1, 1)));
8727
8728 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8729 {
8730 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8731 arg0, arg1,
8732 /*cond_first_p=*/1);
8733 if (tem != NULL_TREE)
8734 return tem;
8735 }
8736
8737 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8738 {
8739 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8740 arg1, arg0,
8741 /*cond_first_p=*/0);
8742 if (tem != NULL_TREE)
8743 return tem;
8744 }
8745 }
8746
8747 switch (code)
8748 {
8749 case PLUS_EXPR:
8750 /* A + (-B) -> A - B */
8751 if (TREE_CODE (arg1) == NEGATE_EXPR)
8752 return fold_build2 (MINUS_EXPR, type,
8753 fold_convert (type, arg0),
8754 fold_convert (type, TREE_OPERAND (arg1, 0)));
8755 /* (-A) + B -> B - A */
8756 if (TREE_CODE (arg0) == NEGATE_EXPR
8757 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8758 return fold_build2 (MINUS_EXPR, type,
8759 fold_convert (type, arg1),
8760 fold_convert (type, TREE_OPERAND (arg0, 0)));
8761 /* Convert ~A + 1 to -A. */
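      /* This is the two's complement identity ~A == -A - 1; e.g. with
	 A = 5, ~5 == -6 and -6 + 1 == -5.  */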
8762 if (INTEGRAL_TYPE_P (type)
8763 && TREE_CODE (arg0) == BIT_NOT_EXPR
8764 && integer_onep (arg1))
8765 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8766
8767      /* Handle (A1 * C1) + (A2 * C2) where A1 and A2, or C1 and C2, are
8768	 the same, or where one of the factors is 1.  */
8769 if ((TREE_CODE (arg0) == MULT_EXPR
8770 || TREE_CODE (arg1) == MULT_EXPR)
8771 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8772 {
8773 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8774 if (tem)
8775 return tem;
8776 }
8777
8778 if (! FLOAT_TYPE_P (type))
8779 {
8780 if (integer_zerop (arg1))
8781 return non_lvalue (fold_convert (type, arg0));
8782
8783 /* ~X + X is -1. */
8784 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8785 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8786 && !TYPE_TRAP_SIGNED (type))
8787 {
8788 t1 = build_int_cst_type (type, -1);
8789 return omit_one_operand (type, t1, arg1);
8790 }
8791
8792 /* X + ~X is -1. */
8793 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8794 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8795 && !TYPE_TRAP_SIGNED (type))
8796 {
8797 t1 = build_int_cst_type (type, -1);
8798 return omit_one_operand (type, t1, arg0);
8799 }
8800
8801 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8802 with a constant, and the two constants have no bits in common,
8803 we should treat this as a BIT_IOR_EXPR since this may produce more
8804 simplifications. */
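	  /* E.g. (X & 0xF0) + (Y & 0x0F): the masked values share no
	     bits, so no bit position can carry and the sum equals
	     (X & 0xF0) | (Y & 0x0F).  */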
8805 if (TREE_CODE (arg0) == BIT_AND_EXPR
8806 && TREE_CODE (arg1) == BIT_AND_EXPR
8807 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8808 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8809 && integer_zerop (const_binop (BIT_AND_EXPR,
8810 TREE_OPERAND (arg0, 1),
8811 TREE_OPERAND (arg1, 1), 0)))
8812 {
8813 code = BIT_IOR_EXPR;
8814 goto bit_ior;
8815 }
8816
8817 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8818 (plus (plus (mult) (mult)) (foo)) so that we can
8819 take advantage of the factoring cases below. */
8820 if (((TREE_CODE (arg0) == PLUS_EXPR
8821 || TREE_CODE (arg0) == MINUS_EXPR)
8822 && TREE_CODE (arg1) == MULT_EXPR)
8823 || ((TREE_CODE (arg1) == PLUS_EXPR
8824 || TREE_CODE (arg1) == MINUS_EXPR)
8825 && TREE_CODE (arg0) == MULT_EXPR))
8826 {
8827 tree parg0, parg1, parg, marg;
8828 enum tree_code pcode;
8829
8830 if (TREE_CODE (arg1) == MULT_EXPR)
8831 parg = arg0, marg = arg1;
8832 else
8833 parg = arg1, marg = arg0;
8834 pcode = TREE_CODE (parg);
8835 parg0 = TREE_OPERAND (parg, 0);
8836 parg1 = TREE_OPERAND (parg, 1);
8837 STRIP_NOPS (parg0);
8838 STRIP_NOPS (parg1);
8839
8840 if (TREE_CODE (parg0) == MULT_EXPR
8841 && TREE_CODE (parg1) != MULT_EXPR)
8842 return fold_build2 (pcode, type,
8843 fold_build2 (PLUS_EXPR, type,
8844 fold_convert (type, parg0),
8845 fold_convert (type, marg)),
8846 fold_convert (type, parg1));
8847 if (TREE_CODE (parg0) != MULT_EXPR
8848 && TREE_CODE (parg1) == MULT_EXPR)
8849 return fold_build2 (PLUS_EXPR, type,
8850 fold_convert (type, parg0),
8851 fold_build2 (pcode, type,
8852 fold_convert (type, marg),
8853 fold_convert (type,
8854 parg1)));
8855 }
8856
8857	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8858	     of the array.  The loop optimizer sometimes produces this type of
8859	     expression.  */
8860 if (TREE_CODE (arg0) == ADDR_EXPR)
8861 {
8862 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8863 if (tem)
8864 return fold_convert (type, tem);
8865 }
8866 else if (TREE_CODE (arg1) == ADDR_EXPR)
8867 {
8868 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8869 if (tem)
8870 return fold_convert (type, tem);
8871 }
8872 }
8873 else
8874 {
8875 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8876 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8877 return non_lvalue (fold_convert (type, arg0));
8878
8879 /* Likewise if the operands are reversed. */
8880 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8881 return non_lvalue (fold_convert (type, arg1));
8882
8883 /* Convert X + -C into X - C. */
8884 if (TREE_CODE (arg1) == REAL_CST
8885 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8886 {
8887 tem = fold_negate_const (arg1, type);
8888 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8889 return fold_build2 (MINUS_EXPR, type,
8890 fold_convert (type, arg0),
8891 fold_convert (type, tem));
8892 }
8893
8894 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8895 to __complex__ ( x, y ). This is not the same for SNaNs or
8896	     if signed zeros are involved.  */
8897 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8898 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8899 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8900 {
8901 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8902 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8903 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8904 bool arg0rz = false, arg0iz = false;
8905 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8906 || (arg0i && (arg0iz = real_zerop (arg0i))))
8907 {
8908 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8909 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8910 if (arg0rz && arg1i && real_zerop (arg1i))
8911 {
8912 tree rp = arg1r ? arg1r
8913 : build1 (REALPART_EXPR, rtype, arg1);
8914 tree ip = arg0i ? arg0i
8915 : build1 (IMAGPART_EXPR, rtype, arg0);
8916 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8917 }
8918 else if (arg0iz && arg1r && real_zerop (arg1r))
8919 {
8920 tree rp = arg0r ? arg0r
8921 : build1 (REALPART_EXPR, rtype, arg0);
8922 tree ip = arg1i ? arg1i
8923 : build1 (IMAGPART_EXPR, rtype, arg1);
8924 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8925 }
8926 }
8927 }
8928
8929 if (flag_unsafe_math_optimizations
8930 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8931 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8932 && (tem = distribute_real_division (code, type, arg0, arg1)))
8933 return tem;
8934
8935 /* Convert x+x into x*2.0. */
8936 if (operand_equal_p (arg0, arg1, 0)
8937 && SCALAR_FLOAT_TYPE_P (type))
8938 return fold_build2 (MULT_EXPR, type, arg0,
8939 build_real (type, dconst2));
8940
8941 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8942 if (flag_unsafe_math_optimizations
8943 && TREE_CODE (arg1) == PLUS_EXPR
8944 && TREE_CODE (arg0) != MULT_EXPR)
8945 {
8946 tree tree10 = TREE_OPERAND (arg1, 0);
8947 tree tree11 = TREE_OPERAND (arg1, 1);
8948 if (TREE_CODE (tree11) == MULT_EXPR
8949 && TREE_CODE (tree10) == MULT_EXPR)
8950 {
8951 tree tree0;
8952 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8953 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8954 }
8955 }
8956	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
8957 if (flag_unsafe_math_optimizations
8958 && TREE_CODE (arg0) == PLUS_EXPR
8959 && TREE_CODE (arg1) != MULT_EXPR)
8960 {
8961 tree tree00 = TREE_OPERAND (arg0, 0);
8962 tree tree01 = TREE_OPERAND (arg0, 1);
8963 if (TREE_CODE (tree01) == MULT_EXPR
8964 && TREE_CODE (tree00) == MULT_EXPR)
8965 {
8966 tree tree0;
8967 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8968 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8969 }
8970 }
8971 }
8972
8973 bit_rotate:
8974   /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
8975      of A, is a rotate of A by C1 bits.  */
8976   /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
8977      of A, is a rotate of A by B bits.  */
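  /* An illustration, assuming a 32-bit unsigned A: (A << 3) + (A >> 29)
     matches the first form with C1 = 3 and C2 = 29, C1 + C2 == 32, and
     is folded to A rotated left by 3 bits.  */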
8978 {
8979 enum tree_code code0, code1;
8980 code0 = TREE_CODE (arg0);
8981 code1 = TREE_CODE (arg1);
8982 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8983 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8984 && operand_equal_p (TREE_OPERAND (arg0, 0),
8985 TREE_OPERAND (arg1, 0), 0)
8986 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8987 {
8988 tree tree01, tree11;
8989 enum tree_code code01, code11;
8990
8991 tree01 = TREE_OPERAND (arg0, 1);
8992 tree11 = TREE_OPERAND (arg1, 1);
8993 STRIP_NOPS (tree01);
8994 STRIP_NOPS (tree11);
8995 code01 = TREE_CODE (tree01);
8996 code11 = TREE_CODE (tree11);
8997 if (code01 == INTEGER_CST
8998 && code11 == INTEGER_CST
8999 && TREE_INT_CST_HIGH (tree01) == 0
9000 && TREE_INT_CST_HIGH (tree11) == 0
9001 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9002 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9003 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9004 code0 == LSHIFT_EXPR ? tree01 : tree11);
9005 else if (code11 == MINUS_EXPR)
9006 {
9007 tree tree110, tree111;
9008 tree110 = TREE_OPERAND (tree11, 0);
9009 tree111 = TREE_OPERAND (tree11, 1);
9010 STRIP_NOPS (tree110);
9011 STRIP_NOPS (tree111);
9012 if (TREE_CODE (tree110) == INTEGER_CST
9013 && 0 == compare_tree_int (tree110,
9014 TYPE_PRECISION
9015 (TREE_TYPE (TREE_OPERAND
9016 (arg0, 0))))
9017 && operand_equal_p (tree01, tree111, 0))
9018 return build2 ((code0 == LSHIFT_EXPR
9019 ? LROTATE_EXPR
9020 : RROTATE_EXPR),
9021 type, TREE_OPERAND (arg0, 0), tree01);
9022 }
9023 else if (code01 == MINUS_EXPR)
9024 {
9025 tree tree010, tree011;
9026 tree010 = TREE_OPERAND (tree01, 0);
9027 tree011 = TREE_OPERAND (tree01, 1);
9028 STRIP_NOPS (tree010);
9029 STRIP_NOPS (tree011);
9030 if (TREE_CODE (tree010) == INTEGER_CST
9031 && 0 == compare_tree_int (tree010,
9032 TYPE_PRECISION
9033 (TREE_TYPE (TREE_OPERAND
9034 (arg0, 0))))
9035 && operand_equal_p (tree11, tree011, 0))
9036 return build2 ((code0 != LSHIFT_EXPR
9037 ? LROTATE_EXPR
9038 : RROTATE_EXPR),
9039 type, TREE_OPERAND (arg0, 0), tree11);
9040 }
9041 }
9042 }
9043
9044 associate:
9045    /* In most languages, we can't associate operations on floats through
9046 parentheses. Rather than remember where the parentheses were, we
9047 don't associate floats at all, unless the user has specified
9048 -funsafe-math-optimizations. */
9049
9050 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9051 {
9052 tree var0, con0, lit0, minus_lit0;
9053 tree var1, con1, lit1, minus_lit1;
9054
9055 /* Split both trees into variables, constants, and literals. Then
9056 associate each group together, the constants with literals,
9057 then the result with variables. This increases the chances of
9058 literals being recombined later and of generating relocatable
9059 expressions for the sum of a constant and literal. */
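	/* A small worked case (illustrative): when folding
	   (x + 1) + (y + 2), the variables x and y and the literals 1
	   and 2 are separated; the literals associate to 3 first,
	   yielding (x + y) + 3.  */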
9060 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9061 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9062 code == MINUS_EXPR);
9063
9064 /* Only do something if we found more than two objects. Otherwise,
9065 nothing has changed and we risk infinite recursion. */
9066 if (2 < ((var0 != 0) + (var1 != 0)
9067 + (con0 != 0) + (con1 != 0)
9068 + (lit0 != 0) + (lit1 != 0)
9069 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9070 {
9071 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9072 if (code == MINUS_EXPR)
9073 code = PLUS_EXPR;
9074
9075 var0 = associate_trees (var0, var1, code, type);
9076 con0 = associate_trees (con0, con1, code, type);
9077 lit0 = associate_trees (lit0, lit1, code, type);
9078 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9079
9080 /* Preserve the MINUS_EXPR if the negative part of the literal is
9081 greater than the positive part. Otherwise, the multiplicative
9082	     folding code (i.e. extract_muldiv) may be fooled when
9083	     unsigned constants are subtracted, as in the following
9084 example: ((X*2 + 4) - 8U)/2. */
9085 if (minus_lit0 && lit0)
9086 {
9087 if (TREE_CODE (lit0) == INTEGER_CST
9088 && TREE_CODE (minus_lit0) == INTEGER_CST
9089 && tree_int_cst_lt (lit0, minus_lit0))
9090 {
9091 minus_lit0 = associate_trees (minus_lit0, lit0,
9092 MINUS_EXPR, type);
9093 lit0 = 0;
9094 }
9095 else
9096 {
9097 lit0 = associate_trees (lit0, minus_lit0,
9098 MINUS_EXPR, type);
9099 minus_lit0 = 0;
9100 }
9101 }
9102 if (minus_lit0)
9103 {
9104 if (con0 == 0)
9105 return fold_convert (type,
9106 associate_trees (var0, minus_lit0,
9107 MINUS_EXPR, type));
9108 else
9109 {
9110 con0 = associate_trees (con0, minus_lit0,
9111 MINUS_EXPR, type);
9112 return fold_convert (type,
9113 associate_trees (var0, con0,
9114 PLUS_EXPR, type));
9115 }
9116 }
9117
9118 con0 = associate_trees (con0, lit0, code, type);
9119 return fold_convert (type, associate_trees (var0, con0,
9120 code, type));
9121 }
9122 }
9123
9124 return NULL_TREE;
9125
9126 case MINUS_EXPR:
9127 /* A - (-B) -> A + B */
9128 if (TREE_CODE (arg1) == NEGATE_EXPR)
9129 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9130 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9131 if (TREE_CODE (arg0) == NEGATE_EXPR
9132 && (FLOAT_TYPE_P (type)
9133 || INTEGRAL_TYPE_P (type))
9134 && negate_expr_p (arg1)
9135 && reorder_operands_p (arg0, arg1))
9136 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9137 TREE_OPERAND (arg0, 0));
9138 /* Convert -A - 1 to ~A. */
9139 if (INTEGRAL_TYPE_P (type)
9140 && TREE_CODE (arg0) == NEGATE_EXPR
9141 && integer_onep (arg1)
9142 && !TYPE_TRAP_SIGNED (type))
9143 return fold_build1 (BIT_NOT_EXPR, type,
9144 fold_convert (type, TREE_OPERAND (arg0, 0)));
9145
9146 /* Convert -1 - A to ~A. */
9147 if (INTEGRAL_TYPE_P (type)
9148 && integer_all_onesp (arg0))
9149 return fold_build1 (BIT_NOT_EXPR, type, op1);
9150
9151 if (! FLOAT_TYPE_P (type))
9152 {
9153 if (integer_zerop (arg0))
9154 return negate_expr (fold_convert (type, arg1));
9155 if (integer_zerop (arg1))
9156 return non_lvalue (fold_convert (type, arg0));
9157
9158 /* Fold A - (A & B) into ~B & A. */
9159 if (!TREE_SIDE_EFFECTS (arg0)
9160 && TREE_CODE (arg1) == BIT_AND_EXPR)
9161 {
9162 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9163 return fold_build2 (BIT_AND_EXPR, type,
9164 fold_build1 (BIT_NOT_EXPR, type,
9165 TREE_OPERAND (arg1, 0)),
9166 arg0);
9167 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9168 return fold_build2 (BIT_AND_EXPR, type,
9169 fold_build1 (BIT_NOT_EXPR, type,
9170 TREE_OPERAND (arg1, 1)),
9171 arg0);
9172 }
9173
9174 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9175 any power of 2 minus 1. */
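	  /* Checking the identity with sample values: A = 13 (1101b)
	     and B = 7 (0111b) give (13 & ~7) - (13 & 7) = 8 - 5 = 3,
	     and likewise (13 ^ 7) - 7 = 10 - 7 = 3.  */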
9176 if (TREE_CODE (arg0) == BIT_AND_EXPR
9177 && TREE_CODE (arg1) == BIT_AND_EXPR
9178 && operand_equal_p (TREE_OPERAND (arg0, 0),
9179 TREE_OPERAND (arg1, 0), 0))
9180 {
9181 tree mask0 = TREE_OPERAND (arg0, 1);
9182 tree mask1 = TREE_OPERAND (arg1, 1);
9183 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9184
9185 if (operand_equal_p (tem, mask1, 0))
9186 {
9187 tem = fold_build2 (BIT_XOR_EXPR, type,
9188 TREE_OPERAND (arg0, 0), mask1);
9189 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9190 }
9191 }
9192 }
9193
9194 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9195 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9196 return non_lvalue (fold_convert (type, arg0));
9197
9198 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9199 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9200 (-ARG1 + ARG0) reduces to -ARG1. */
9201 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9202 return negate_expr (fold_convert (type, arg1));
9203
9204 /* Fold &x - &x. This can happen from &x.foo - &x.
9205 This is unsafe for certain floats even in non-IEEE formats.
9206 In IEEE, it is unsafe because it does wrong for NaNs.
9207 Also note that operand_equal_p is always false if an operand
9208 is volatile. */
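      /* For instance, if ARG0 is a NaN then ARG0 - ARG0 is NaN rather
	 than 0, so the floating-point case is only done under
	 -funsafe-math-optimizations.  */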
9209
9210 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9211 && operand_equal_p (arg0, arg1, 0))
9212 return fold_convert (type, integer_zero_node);
9213
9214 /* A - B -> A + (-B) if B is easily negatable. */
9215 if (negate_expr_p (arg1)
9216 && ((FLOAT_TYPE_P (type)
9217 /* Avoid this transformation if B is a positive REAL_CST. */
9218 && (TREE_CODE (arg1) != REAL_CST
9219 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9220 || INTEGRAL_TYPE_P (type)))
9221 return fold_build2 (PLUS_EXPR, type,
9222 fold_convert (type, arg0),
9223 fold_convert (type, negate_expr (arg1)));
9224
9225 /* Try folding difference of addresses. */
9226 {
9227 HOST_WIDE_INT diff;
9228
9229 if ((TREE_CODE (arg0) == ADDR_EXPR
9230 || TREE_CODE (arg1) == ADDR_EXPR)
9231 && ptr_difference_const (arg0, arg1, &diff))
9232 return build_int_cst_type (type, diff);
9233 }
9234
9235      /* Fold &a[i] - &a[j] to (i - j) * (size of the array element).  */
9236 if (TREE_CODE (arg0) == ADDR_EXPR
9237 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9238 && TREE_CODE (arg1) == ADDR_EXPR
9239 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9240 {
9241 tree aref0 = TREE_OPERAND (arg0, 0);
9242 tree aref1 = TREE_OPERAND (arg1, 0);
9243 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9244 TREE_OPERAND (aref1, 0), 0))
9245 {
9246 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9247 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9248 tree esz = array_ref_element_size (aref0);
9249 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9250 return fold_build2 (MULT_EXPR, type, diff,
9251 fold_convert (type, esz));
9252
9253 }
9254 }
9255
9256      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9257	 of the array.  The loop optimizer sometimes produces this type of
9258	 expression.  */
9259 if (TREE_CODE (arg0) == ADDR_EXPR)
9260 {
9261 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9262 if (tem)
9263 return fold_convert (type, tem);
9264 }
9265
9266 if (flag_unsafe_math_optimizations
9267 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9268 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9269 && (tem = distribute_real_division (code, type, arg0, arg1)))
9270 return tem;
9271
9272      /* Handle (A1 * C1) - (A2 * C2) where A1 and A2, or C1 and C2, are
9273	 the same, or where one of the factors is 1.  */
9274 if ((TREE_CODE (arg0) == MULT_EXPR
9275 || TREE_CODE (arg1) == MULT_EXPR)
9276 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9277 {
9278 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9279 if (tem)
9280 return tem;
9281 }
9282
9283 goto associate;
9284
9285 case MULT_EXPR:
9286 /* (-A) * (-B) -> A * B */
9287 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9288 return fold_build2 (MULT_EXPR, type,
9289 fold_convert (type, TREE_OPERAND (arg0, 0)),
9290 fold_convert (type, negate_expr (arg1)));
9291 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9292 return fold_build2 (MULT_EXPR, type,
9293 fold_convert (type, negate_expr (arg0)),
9294 fold_convert (type, TREE_OPERAND (arg1, 0)));
9295
9296 if (! FLOAT_TYPE_P (type))
9297 {
9298 if (integer_zerop (arg1))
9299 return omit_one_operand (type, arg1, arg0);
9300 if (integer_onep (arg1))
9301 return non_lvalue (fold_convert (type, arg0));
9302 /* Transform x * -1 into -x. */
9303 if (integer_all_onesp (arg1))
9304 return fold_convert (type, negate_expr (arg0));
9305 /* Transform x * -C into -x * C if x is easily negatable. */
9306 if (TREE_CODE (arg1) == INTEGER_CST
9307 && tree_int_cst_sgn (arg1) == -1
9308 && negate_expr_p (arg0)
9309 && (tem = negate_expr (arg1)) != arg1
9310 && !TREE_OVERFLOW (tem))
9311 return fold_build2 (MULT_EXPR, type,
9312 negate_expr (arg0), tem);
9313
9314	  /* (a * (1 << b)) is (a << b).  */
9315 if (TREE_CODE (arg1) == LSHIFT_EXPR
9316 && integer_onep (TREE_OPERAND (arg1, 0)))
9317 return fold_build2 (LSHIFT_EXPR, type, arg0,
9318 TREE_OPERAND (arg1, 1));
9319 if (TREE_CODE (arg0) == LSHIFT_EXPR
9320 && integer_onep (TREE_OPERAND (arg0, 0)))
9321 return fold_build2 (LSHIFT_EXPR, type, arg1,
9322 TREE_OPERAND (arg0, 1));
9323
9324 if (TREE_CODE (arg1) == INTEGER_CST
9325 && 0 != (tem = extract_muldiv (op0,
9326 fold_convert (type, arg1),
9327 code, NULL_TREE)))
9328 return fold_convert (type, tem);
9329
9330 /* Optimize z * conj(z) for integer complex numbers. */
9331 if (TREE_CODE (arg0) == CONJ_EXPR
9332 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9333 return fold_mult_zconjz (type, arg1);
9334 if (TREE_CODE (arg1) == CONJ_EXPR
9335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9336 return fold_mult_zconjz (type, arg0);
9337 }
9338 else
9339 {
9340 /* Maybe fold x * 0 to 0. The expressions aren't the same
9341 when x is NaN, since x * 0 is also NaN. Nor are they the
9342 same in modes with signed zeros, since multiplying a
9343 negative value by 0 gives -0, not +0. */
9344 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9345 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9346 && real_zerop (arg1))
9347 return omit_one_operand (type, arg1, arg0);
9348 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9349 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9350 && real_onep (arg1))
9351 return non_lvalue (fold_convert (type, arg0));
9352
9353 /* Transform x * -1.0 into -x. */
9354 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9355 && real_minus_onep (arg1))
9356 return fold_convert (type, negate_expr (arg0));
9357
9358 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9359 if (flag_unsafe_math_optimizations
9360 && TREE_CODE (arg0) == RDIV_EXPR
9361 && TREE_CODE (arg1) == REAL_CST
9362 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9363 {
9364 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9365 arg1, 0);
9366 if (tem)
9367 return fold_build2 (RDIV_EXPR, type, tem,
9368 TREE_OPERAND (arg0, 1));
9369 }
9370
9371 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9372 if (operand_equal_p (arg0, arg1, 0))
9373 {
9374 tree tem = fold_strip_sign_ops (arg0);
9375 if (tem != NULL_TREE)
9376 {
9377 tem = fold_convert (type, tem);
9378 return fold_build2 (MULT_EXPR, type, tem, tem);
9379 }
9380 }
9381
9382 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9383	     This is not the same for NaNs or if signed zeros are
9384 involved. */
9385 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9386 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9387 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9388 && TREE_CODE (arg1) == COMPLEX_CST
9389 && real_zerop (TREE_REALPART (arg1)))
9390 {
9391 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9392 if (real_onep (TREE_IMAGPART (arg1)))
9393 return fold_build2 (COMPLEX_EXPR, type,
9394 negate_expr (fold_build1 (IMAGPART_EXPR,
9395 rtype, arg0)),
9396 fold_build1 (REALPART_EXPR, rtype, arg0));
9397 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9398 return fold_build2 (COMPLEX_EXPR, type,
9399 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9400 negate_expr (fold_build1 (REALPART_EXPR,
9401 rtype, arg0)));
9402 }
9403
9404 /* Optimize z * conj(z) for floating point complex numbers.
9405 Guarded by flag_unsafe_math_optimizations as non-finite
9406 imaginary components don't produce scalar results. */
9407 if (flag_unsafe_math_optimizations
9408 && TREE_CODE (arg0) == CONJ_EXPR
9409 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9410 return fold_mult_zconjz (type, arg1);
9411 if (flag_unsafe_math_optimizations
9412 && TREE_CODE (arg1) == CONJ_EXPR
9413 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9414 return fold_mult_zconjz (type, arg0);
9415
9416 if (flag_unsafe_math_optimizations)
9417 {
9418 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9419 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9420
9421 /* Optimizations of root(...)*root(...). */
9422 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9423 {
9424 tree rootfn, arg, arglist;
9425 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9426 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9427
9428 /* Optimize sqrt(x)*sqrt(x) as x. */
9429 if (BUILTIN_SQRT_P (fcode0)
9430 && operand_equal_p (arg00, arg10, 0)
9431 && ! HONOR_SNANS (TYPE_MODE (type)))
9432 return arg00;
9433
9434 /* Optimize root(x)*root(y) as root(x*y). */
9435 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9436 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9437 arglist = build_tree_list (NULL_TREE, arg);
9438 return build_function_call_expr (rootfn, arglist);
9439 }
9440
9441 /* Optimize expN(x)*expN(y) as expN(x+y). */
9442 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9443 {
9444 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9445 tree arg = fold_build2 (PLUS_EXPR, type,
9446 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9447 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9448 tree arglist = build_tree_list (NULL_TREE, arg);
9449 return build_function_call_expr (expfn, arglist);
9450 }
9451
9452 /* Optimizations of pow(...)*pow(...). */
9453 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9454 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9455 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9456 {
9457 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9458 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9459 1)));
9460 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9461 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9462 1)));
9463
9464 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9465 if (operand_equal_p (arg01, arg11, 0))
9466 {
9467 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9468 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9469 tree arglist = tree_cons (NULL_TREE, arg,
9470 build_tree_list (NULL_TREE,
9471 arg01));
9472 return build_function_call_expr (powfn, arglist);
9473 }
9474
9475 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9476 if (operand_equal_p (arg00, arg10, 0))
9477 {
9478 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9479 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9480 tree arglist = tree_cons (NULL_TREE, arg00,
9481 build_tree_list (NULL_TREE,
9482 arg));
9483 return build_function_call_expr (powfn, arglist);
9484 }
9485 }
9486
9487 /* Optimize tan(x)*cos(x) as sin(x). */
9488 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9489 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9490 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9491 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9492 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9493 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9494 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9495 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9496 {
9497 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9498
9499 if (sinfn != NULL_TREE)
9500 return build_function_call_expr (sinfn,
9501 TREE_OPERAND (arg0, 1));
9502 }
9503
9504 /* Optimize x*pow(x,c) as pow(x,c+1). */
9505 if (fcode1 == BUILT_IN_POW
9506 || fcode1 == BUILT_IN_POWF
9507 || fcode1 == BUILT_IN_POWL)
9508 {
9509 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9510 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9511 1)));
9512 if (TREE_CODE (arg11) == REAL_CST
9513 && !TREE_OVERFLOW (arg11)
9514 && operand_equal_p (arg0, arg10, 0))
9515 {
9516 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9517 REAL_VALUE_TYPE c;
9518 tree arg, arglist;
9519
9520 c = TREE_REAL_CST (arg11);
9521 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9522 arg = build_real (type, c);
9523 arglist = build_tree_list (NULL_TREE, arg);
9524 arglist = tree_cons (NULL_TREE, arg0, arglist);
9525 return build_function_call_expr (powfn, arglist);
9526 }
9527 }
9528
9529 /* Optimize pow(x,c)*x as pow(x,c+1). */
9530 if (fcode0 == BUILT_IN_POW
9531 || fcode0 == BUILT_IN_POWF
9532 || fcode0 == BUILT_IN_POWL)
9533 {
9534 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9535 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9536 1)));
9537 if (TREE_CODE (arg01) == REAL_CST
9538 && !TREE_OVERFLOW (arg01)
9539 && operand_equal_p (arg1, arg00, 0))
9540 {
9541 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9542 REAL_VALUE_TYPE c;
9543 tree arg, arglist;
9544
9545 c = TREE_REAL_CST (arg01);
9546 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9547 arg = build_real (type, c);
9548 arglist = build_tree_list (NULL_TREE, arg);
9549 arglist = tree_cons (NULL_TREE, arg1, arglist);
9550 return build_function_call_expr (powfn, arglist);
9551 }
9552 }
9553
9554 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9555 if (! optimize_size
9556 && operand_equal_p (arg0, arg1, 0))
9557 {
9558 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9559
9560 if (powfn)
9561 {
9562 tree arg = build_real (type, dconst2);
9563 tree arglist = build_tree_list (NULL_TREE, arg);
9564 arglist = tree_cons (NULL_TREE, arg0, arglist);
9565 return build_function_call_expr (powfn, arglist);
9566 }
9567 }
9568 }
9569 }
9570 goto associate;
9571
9572 case BIT_IOR_EXPR:
9573 bit_ior:
9574 if (integer_all_onesp (arg1))
9575 return omit_one_operand (type, arg1, arg0);
9576 if (integer_zerop (arg1))
9577 return non_lvalue (fold_convert (type, arg0));
9578 if (operand_equal_p (arg0, arg1, 0))
9579 return non_lvalue (fold_convert (type, arg0));
9580
9581 /* ~X | X is -1. */
9582 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9583 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9584 {
9585 t1 = build_int_cst_type (type, -1);
9586 return omit_one_operand (type, t1, arg1);
9587 }
9588
9589 /* X | ~X is -1. */
9590 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9591 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9592 {
9593 t1 = build_int_cst_type (type, -1);
9594 return omit_one_operand (type, t1, arg0);
9595 }
9596
9597 /* Canonicalize (X & C1) | C2. */
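      /* E.g. with C1 = 0x0C and C2 = 0x3C, C1 & C2 == C1 and the whole
	 expression is just C2; with C1 = 0xF0 and C2 = 0x0F in an
	 8-bit type, C1 | C2 == ~0 and the result becomes X | C2.  */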
9598 if (TREE_CODE (arg0) == BIT_AND_EXPR
9599 && TREE_CODE (arg1) == INTEGER_CST
9600 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9601 {
9602 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9603 int width = TYPE_PRECISION (type);
9604 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9605 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9606 hi2 = TREE_INT_CST_HIGH (arg1);
9607 lo2 = TREE_INT_CST_LOW (arg1);
9608
9609 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9610 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9611 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9612
9613 if (width > HOST_BITS_PER_WIDE_INT)
9614 {
9615 mhi = (unsigned HOST_WIDE_INT) -1
9616 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9617 mlo = -1;
9618 }
9619 else
9620 {
9621 mhi = 0;
9622 mlo = (unsigned HOST_WIDE_INT) -1
9623 >> (HOST_BITS_PER_WIDE_INT - width);
9624 }
9625
9626 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9627 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9628 return fold_build2 (BIT_IOR_EXPR, type,
9629 TREE_OPERAND (arg0, 0), arg1);
9630
9631 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9632 hi1 &= mhi;
9633 lo1 &= mlo;
9634 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9635 return fold_build2 (BIT_IOR_EXPR, type,
9636 fold_build2 (BIT_AND_EXPR, type,
9637 TREE_OPERAND (arg0, 0),
9638 build_int_cst_wide (type,
9639 lo1 & ~lo2,
9640 hi1 & ~hi2)),
9641 arg1);
9642 }
9643
9644 /* (X & Y) | Y is (X, Y). */
9645 if (TREE_CODE (arg0) == BIT_AND_EXPR
9646 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9647 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9648 /* (X & Y) | X is (Y, X). */
9649 if (TREE_CODE (arg0) == BIT_AND_EXPR
9650 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9651 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9652 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9653 /* X | (X & Y) is (Y, X). */
9654 if (TREE_CODE (arg1) == BIT_AND_EXPR
9655 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9656 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9657 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9658 /* X | (Y & X) is (Y, X). */
9659 if (TREE_CODE (arg1) == BIT_AND_EXPR
9660 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9661 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9662 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9663
9664 t1 = distribute_bit_expr (code, type, arg0, arg1);
9665 if (t1 != NULL_TREE)
9666 return t1;
9667
9668 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9669
9670 This results in more efficient code for machines without a NAND
9671 instruction. Combine will canonicalize to the first form
9672 which will allow use of NAND instructions provided by the
9673 backend if they exist. */
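      /* This is De Morgan's law.  E.g. in 4 bits, with A = 1100b and
	 B = 1010b, ~A | ~B == 0011b | 0101b == 0111b, which equals
	 ~(A & B) == ~1000b.  */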
9674 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9675 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9676 {
9677 return fold_build1 (BIT_NOT_EXPR, type,
9678 build2 (BIT_AND_EXPR, type,
9679 TREE_OPERAND (arg0, 0),
9680 TREE_OPERAND (arg1, 0)));
9681 }
9682
9683 /* See if this can be simplified into a rotate first. If that
9684 is unsuccessful continue in the association code. */
9685 goto bit_rotate;
9686
9687 case BIT_XOR_EXPR:
9688 if (integer_zerop (arg1))
9689 return non_lvalue (fold_convert (type, arg0));
9690 if (integer_all_onesp (arg1))
9691 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9692 if (operand_equal_p (arg0, arg1, 0))
9693 return omit_one_operand (type, integer_zero_node, arg0);
9694
9695 /* ~X ^ X is -1. */
9696 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9697 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9698 {
9699 t1 = build_int_cst_type (type, -1);
9700 return omit_one_operand (type, t1, arg1);
9701 }
9702
9703 /* X ^ ~X is -1. */
9704 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9705 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9706 {
9707 t1 = build_int_cst_type (type, -1);
9708 return omit_one_operand (type, t1, arg0);
9709 }
9710
9711 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9712 with a constant, and the two constants have no bits in common,
9713 we should treat this as a BIT_IOR_EXPR since this may produce more
9714 simplifications. */
9715 if (TREE_CODE (arg0) == BIT_AND_EXPR
9716 && TREE_CODE (arg1) == BIT_AND_EXPR
9717 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9718 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9719 && integer_zerop (const_binop (BIT_AND_EXPR,
9720 TREE_OPERAND (arg0, 1),
9721 TREE_OPERAND (arg1, 1), 0)))
9722 {
9723 code = BIT_IOR_EXPR;
9724 goto bit_ior;
9725 }
9726
9727      /* (X | Y) ^ X -> Y & ~X */
9728 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9729 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9730 {
9731 tree t2 = TREE_OPERAND (arg0, 1);
9732 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9733 arg1);
9734 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9735 fold_convert (type, t1));
9736 return t1;
9737 }
9738
9739      /* (Y | X) ^ X -> Y & ~X */
9740 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9741 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9742 {
9743 tree t2 = TREE_OPERAND (arg0, 0);
9744 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9745 arg1);
9746 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9747 fold_convert (type, t1));
9748 return t1;
9749 }
9750
9751      /* X ^ (X | Y) -> Y & ~X */
9752 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9753 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9754 {
9755 tree t2 = TREE_OPERAND (arg1, 1);
9756 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9757 arg0);
9758 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9759 fold_convert (type, t1));
9760 return t1;
9761 }
9762
9763      /* X ^ (Y | X) -> Y & ~X */
9764 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9765 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9766 {
9767 tree t2 = TREE_OPERAND (arg1, 0);
9768 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9769 arg0);
9770 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9771 fold_convert (type, t1));
9772 return t1;
9773 }
9774
9775 /* Convert ~X ^ ~Y to X ^ Y. */
9776 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9777 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9778 return fold_build2 (code, type,
9779 fold_convert (type, TREE_OPERAND (arg0, 0)),
9780 fold_convert (type, TREE_OPERAND (arg1, 0)));
9781
9782 /* Convert ~X ^ C to X ^ ~C. */
9783 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9784 && TREE_CODE (arg1) == INTEGER_CST)
9785 return fold_build2 (code, type,
9786 fold_convert (type, TREE_OPERAND (arg0, 0)),
9787 fold_build1 (BIT_NOT_EXPR, type, arg1));
9788
9789 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9790 if (TREE_CODE (arg0) == BIT_AND_EXPR
9791 && integer_onep (TREE_OPERAND (arg0, 1))
9792 && integer_onep (arg1))
9793 return fold_build2 (EQ_EXPR, type, arg0,
9794 build_int_cst (TREE_TYPE (arg0), 0));
9795
9796 /* Fold (X & Y) ^ Y as ~X & Y. */
9797 if (TREE_CODE (arg0) == BIT_AND_EXPR
9798 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9799 {
9800 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9801 return fold_build2 (BIT_AND_EXPR, type,
9802 fold_build1 (BIT_NOT_EXPR, type, tem),
9803 fold_convert (type, arg1));
9804 }
9805 /* Fold (X & Y) ^ X as ~Y & X. */
9806 if (TREE_CODE (arg0) == BIT_AND_EXPR
9807 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9808 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9809 {
9810 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9811 return fold_build2 (BIT_AND_EXPR, type,
9812 fold_build1 (BIT_NOT_EXPR, type, tem),
9813 fold_convert (type, arg1));
9814 }
9815 /* Fold X ^ (X & Y) as X & ~Y. */
9816 if (TREE_CODE (arg1) == BIT_AND_EXPR
9817 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9818 {
9819 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9820 return fold_build2 (BIT_AND_EXPR, type,
9821 fold_convert (type, arg0),
9822 fold_build1 (BIT_NOT_EXPR, type, tem));
9823 }
9824 /* Fold X ^ (Y & X) as ~Y & X. */
9825 if (TREE_CODE (arg1) == BIT_AND_EXPR
9826 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9827 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9828 {
9829 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9830 return fold_build2 (BIT_AND_EXPR, type,
9831 fold_build1 (BIT_NOT_EXPR, type, tem),
9832 fold_convert (type, arg0));
9833 }
9834
9835 /* See if this can be simplified into a rotate first. If that
9836 is unsuccessful continue in the association code. */
9837 goto bit_rotate;
9838
9839 case BIT_AND_EXPR:
9840 if (integer_all_onesp (arg1))
9841 return non_lvalue (fold_convert (type, arg0));
9842 if (integer_zerop (arg1))
9843 return omit_one_operand (type, arg1, arg0);
9844 if (operand_equal_p (arg0, arg1, 0))
9845 return non_lvalue (fold_convert (type, arg0));
9846
9847 /* ~X & X is always zero. */
9848 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9849 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9850 return omit_one_operand (type, integer_zero_node, arg1);
9851
9852 /* X & ~X is always zero. */
9853 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9854 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9855 return omit_one_operand (type, integer_zero_node, arg0);
9856
9857 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9858 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9859 && TREE_CODE (arg1) == INTEGER_CST
9860 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9861 return fold_build2 (BIT_IOR_EXPR, type,
9862 fold_build2 (BIT_AND_EXPR, type,
9863 TREE_OPERAND (arg0, 0), arg1),
9864 fold_build2 (BIT_AND_EXPR, type,
9865 TREE_OPERAND (arg0, 1), arg1));
9866
9867 /* (X | Y) & Y is (X, Y). */
9868 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9869 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9870 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9871 /* (X | Y) & X is (Y, X). */
9872 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9873 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9874 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9875 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9876 /* X & (X | Y) is (Y, X). */
9877 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9878 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9879 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9880 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9881 /* X & (Y | X) is (Y, X). */
9882 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9883 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9884 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9885 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9886
9887 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9888 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9889 && integer_onep (TREE_OPERAND (arg0, 1))
9890 && integer_onep (arg1))
9891 {
9892 tem = TREE_OPERAND (arg0, 0);
9893 return fold_build2 (EQ_EXPR, type,
9894 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9895 build_int_cst (TREE_TYPE (tem), 1)),
9896 build_int_cst (TREE_TYPE (tem), 0));
9897 }
9898 /* Fold ~X & 1 as (X & 1) == 0. */
9899 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9900 && integer_onep (arg1))
9901 {
9902 tem = TREE_OPERAND (arg0, 0);
9903 return fold_build2 (EQ_EXPR, type,
9904 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9905 build_int_cst (TREE_TYPE (tem), 1)),
9906 build_int_cst (TREE_TYPE (tem), 0));
9907 }
9908
9909 /* Fold (X ^ Y) & Y as ~X & Y. */
9910 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9911 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9912 {
9913 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9914 return fold_build2 (BIT_AND_EXPR, type,
9915 fold_build1 (BIT_NOT_EXPR, type, tem),
9916 fold_convert (type, arg1));
9917 }
9918 /* Fold (X ^ Y) & X as ~Y & X. */
9919 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9920 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9921 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9922 {
9923 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9924 return fold_build2 (BIT_AND_EXPR, type,
9925 fold_build1 (BIT_NOT_EXPR, type, tem),
9926 fold_convert (type, arg1));
9927 }
9928 /* Fold X & (X ^ Y) as X & ~Y. */
9929 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9930 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9931 {
9932 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9933 return fold_build2 (BIT_AND_EXPR, type,
9934 fold_convert (type, arg0),
9935 fold_build1 (BIT_NOT_EXPR, type, tem));
9936 }
9937 /* Fold X & (Y ^ X) as ~Y & X. */
9938 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9939 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9940 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9941 {
9942 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9943 return fold_build2 (BIT_AND_EXPR, type,
9944 fold_build1 (BIT_NOT_EXPR, type, tem),
9945 fold_convert (type, arg0));
9946 }
9947
9948 t1 = distribute_bit_expr (code, type, arg0, arg1);
9949 if (t1 != NULL_TREE)
9950 return t1;
9951 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
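      /* 0377 is octal for 255 (0xFF).  Assuming the usual 8-bit
	 unsigned char, the mask covers every bit of the zero-extended
	 value, so ((int) c & 0377) == (int) c.  */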
9952 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9953 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9954 {
9955 unsigned int prec
9956 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9957
9958 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9959 && (~TREE_INT_CST_LOW (arg1)
9960 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9961 return fold_convert (type, TREE_OPERAND (arg0, 0));
9962 }
9963
9964 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9965
9966 This results in more efficient code for machines without a NOR
9967 instruction. Combine will canonicalize to the first form
9968 which will allow use of NOR instructions provided by the
9969 backend if they exist. */
9970 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9971 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9972 {
9973 return fold_build1 (BIT_NOT_EXPR, type,
9974 build2 (BIT_IOR_EXPR, type,
9975 TREE_OPERAND (arg0, 0),
9976 TREE_OPERAND (arg1, 0)));
9977 }
9978
9979 goto associate;
9980
9981 case RDIV_EXPR:
9982 /* Don't touch a floating-point divide by zero unless the mode
9983 of the constant can represent infinity. */
9984 if (TREE_CODE (arg1) == REAL_CST
9985 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9986 && real_zerop (arg1))
9987 return NULL_TREE;
9988
9989 /* Optimize A / A to 1.0 if we don't care about
9990 NaNs or Infinities. Skip the transformation
9991 for non-real operands. */
9992 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9993 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9994 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9995 && operand_equal_p (arg0, arg1, 0))
9996 {
9997 tree r = build_real (TREE_TYPE (arg0), dconst1);
9998
9999 return omit_two_operands (type, r, arg0, arg1);
10000 }
10001
10002 /* The complex version of the above A / A optimization. */
10003 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10004 && operand_equal_p (arg0, arg1, 0))
10005 {
10006 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10007 if (! HONOR_NANS (TYPE_MODE (elem_type))
10008 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10009 {
10010 tree r = build_real (elem_type, dconst1);
10011 /* omit_two_operands will call fold_convert for us. */
10012 return omit_two_operands (type, r, arg0, arg1);
10013 }
10014 }
10015
10016 /* (-A) / (-B) -> A / B */
10017 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10018 return fold_build2 (RDIV_EXPR, type,
10019 TREE_OPERAND (arg0, 0),
10020 negate_expr (arg1));
10021 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10022 return fold_build2 (RDIV_EXPR, type,
10023 negate_expr (arg0),
10024 TREE_OPERAND (arg1, 0));
10025
10026 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10027 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10028 && real_onep (arg1))
10029 return non_lvalue (fold_convert (type, arg0));
10030
10031 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10032 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10033 && real_minus_onep (arg1))
10034 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10035
10036 /* If ARG1 is a constant, we can convert this to a multiply by the
10037 reciprocal. This does not have the same rounding properties,
10038 so only do this if -funsafe-math-optimizations. We can actually
10039 always safely do it if ARG1 is a power of two, but it's hard to
10040 tell if it is or not in a portable manner. */
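      /* E.g. x / 4.0 becomes x * 0.25, which is exact because 0.25 is
	 a power of two; x / 3.0 as x * (1.0/3.0) may round differently,
	 so that case is only done under -funsafe-math-optimizations.  */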
10041 if (TREE_CODE (arg1) == REAL_CST)
10042 {
10043 if (flag_unsafe_math_optimizations
10044 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10045 arg1, 0)))
10046 return fold_build2 (MULT_EXPR, type, arg0, tem);
10047 /* Find the reciprocal if optimizing and the result is exact. */
10048 if (optimize)
10049 {
10050 REAL_VALUE_TYPE r;
10051 r = TREE_REAL_CST (arg1);
10052 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10053 {
10054 tem = build_real (type, r);
10055 return fold_build2 (MULT_EXPR, type,
10056 fold_convert (type, arg0), tem);
10057 }
10058 }
10059 }
10060 /* Convert A/B/C to A/(B*C). */
10061 if (flag_unsafe_math_optimizations
10062 && TREE_CODE (arg0) == RDIV_EXPR)
10063 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10064 fold_build2 (MULT_EXPR, type,
10065 TREE_OPERAND (arg0, 1), arg1));
10066
10067 /* Convert A/(B/C) to (A/B)*C. */
10068 if (flag_unsafe_math_optimizations
10069 && TREE_CODE (arg1) == RDIV_EXPR)
10070 return fold_build2 (MULT_EXPR, type,
10071 fold_build2 (RDIV_EXPR, type, arg0,
10072 TREE_OPERAND (arg1, 0)),
10073 TREE_OPERAND (arg1, 1));
10074
10075 /* Convert C1/(X*C2) into (C1/C2)/X. */
10076 if (flag_unsafe_math_optimizations
10077 && TREE_CODE (arg1) == MULT_EXPR
10078 && TREE_CODE (arg0) == REAL_CST
10079 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10080 {
10081 tree tem = const_binop (RDIV_EXPR, arg0,
10082 TREE_OPERAND (arg1, 1), 0);
10083 if (tem)
10084 return fold_build2 (RDIV_EXPR, type, tem,
10085 TREE_OPERAND (arg1, 0));
10086 }
10087
10088 if (flag_unsafe_math_optimizations)
10089 {
10090 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10091 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10092
10093 /* Optimize sin(x)/cos(x) as tan(x). */
10094 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10095 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10096 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10097 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10098 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10099 {
10100 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10101
10102 if (tanfn != NULL_TREE)
10103 return build_function_call_expr (tanfn,
10104 TREE_OPERAND (arg0, 1));
10105 }
10106
10107 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10108 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10109 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10110 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10111 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10112 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10113 {
10114 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10115
10116 if (tanfn != NULL_TREE)
10117 {
10118 tree tmp = TREE_OPERAND (arg0, 1);
10119 tmp = build_function_call_expr (tanfn, tmp);
10120 return fold_build2 (RDIV_EXPR, type,
10121 build_real (type, dconst1), tmp);
10122 }
10123 }
10124
10125 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10126 NaNs or Infinities. */
10127 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10128 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10129 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10130 {
10131 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10132 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10133
10134 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10135 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10136 && operand_equal_p (arg00, arg01, 0))
10137 {
10138 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10139
10140 if (cosfn != NULL_TREE)
10141 return build_function_call_expr (cosfn,
10142 TREE_OPERAND (arg0, 1));
10143 }
10144 }
10145
10146 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10147 NaNs or Infinities. */
10148 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10149 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10150 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10151 {
10152 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10153 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10154
10155 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10156 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10157 && operand_equal_p (arg00, arg01, 0))
10158 {
10159 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10160
10161 if (cosfn != NULL_TREE)
10162 {
10163 tree tmp = TREE_OPERAND (arg0, 1);
10164 tmp = build_function_call_expr (cosfn, tmp);
10165 return fold_build2 (RDIV_EXPR, type,
10166 build_real (type, dconst1),
10167 tmp);
10168 }
10169 }
10170 }
10171
10172 /* Optimize pow(x,c)/x as pow(x,c-1). */
10173 if (fcode0 == BUILT_IN_POW
10174 || fcode0 == BUILT_IN_POWF
10175 || fcode0 == BUILT_IN_POWL)
10176 {
10177 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10178 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10179 if (TREE_CODE (arg01) == REAL_CST
10180 && !TREE_OVERFLOW (arg01)
10181 && operand_equal_p (arg1, arg00, 0))
10182 {
10183 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10184 REAL_VALUE_TYPE c;
10185 tree arg, arglist;
10186
10187 c = TREE_REAL_CST (arg01);
10188 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10189 arg = build_real (type, c);
10190 arglist = build_tree_list (NULL_TREE, arg);
10191 arglist = tree_cons (NULL_TREE, arg1, arglist);
10192 return build_function_call_expr (powfn, arglist);
10193 }
10194 }
10195
10196 /* Optimize x/expN(y) into x*expN(-y). */
10197 if (BUILTIN_EXPONENT_P (fcode1))
10198 {
10199 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10200 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10201 tree arglist = build_tree_list (NULL_TREE,
10202 fold_convert (type, arg));
10203 arg1 = build_function_call_expr (expfn, arglist);
10204 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10205 }
10206
10207 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10208 if (fcode1 == BUILT_IN_POW
10209 || fcode1 == BUILT_IN_POWF
10210 || fcode1 == BUILT_IN_POWL)
10211 {
10212 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10213 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10214 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10215 tree neg11 = fold_convert (type, negate_expr (arg11));
10216 tree arglist = tree_cons(NULL_TREE, arg10,
10217 build_tree_list (NULL_TREE, neg11));
10218 arg1 = build_function_call_expr (powfn, arglist);
10219 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10220 }
10221 }
10222 return NULL_TREE;
10223
10224 case TRUNC_DIV_EXPR:
10225 case FLOOR_DIV_EXPR:
10226 /* Simplify A / (B << N) where A and B are positive and B is
10227 a power of 2, to A >> (N + log2(B)). */
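      /* E.g. with A and B nonnegative and B == 4, A / (4 << N) becomes
	 A >> (N + 2), since log2(4) == 2.  */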
10228 if (TREE_CODE (arg1) == LSHIFT_EXPR
10229 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10230 {
10231 tree sval = TREE_OPERAND (arg1, 0);
10232 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10233 {
10234 tree sh_cnt = TREE_OPERAND (arg1, 1);
10235 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10236
10237 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10238 sh_cnt, build_int_cst (NULL_TREE, pow2));
10239 return fold_build2 (RSHIFT_EXPR, type,
10240 fold_convert (type, arg0), sh_cnt);
10241 }
10242 }
10243 /* Fall thru */
10244
10245 case ROUND_DIV_EXPR:
10246 case CEIL_DIV_EXPR:
10247 case EXACT_DIV_EXPR:
10248 if (integer_onep (arg1))
10249 return non_lvalue (fold_convert (type, arg0));
10250 if (integer_zerop (arg1))
10251 return NULL_TREE;
10252 /* X / -1 is -X. */
10253 if (!TYPE_UNSIGNED (type)
10254 && TREE_CODE (arg1) == INTEGER_CST
10255 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10256 && TREE_INT_CST_HIGH (arg1) == -1)
10257 return fold_convert (type, negate_expr (arg0));
10258
10259 /* Convert -A / -B to A / B when the type is signed and overflow is
10260 undefined. */
10261 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10262 && TREE_CODE (arg0) == NEGATE_EXPR
10263 && negate_expr_p (arg1))
10264 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10265 negate_expr (arg1));
10266 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10267 && TREE_CODE (arg1) == NEGATE_EXPR
10268 && negate_expr_p (arg0))
10269 return fold_build2 (code, type, negate_expr (arg0),
10270 TREE_OPERAND (arg1, 0));
10271
10272 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10273 operation, EXACT_DIV_EXPR.
10274
10275 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10276 At one time others generated faster code; it's not clear whether they
10277 still do after the last round of changes to the DIV code in expmed.c. */
10278 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10279 && multiple_of_p (type, arg0, arg1))
10280 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10281
10282 if (TREE_CODE (arg1) == INTEGER_CST
10283 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10284 return fold_convert (type, tem);
10285
10286 return NULL_TREE;
10287
10288 case CEIL_MOD_EXPR:
10289 case FLOOR_MOD_EXPR:
10290 case ROUND_MOD_EXPR:
10291 case TRUNC_MOD_EXPR:
10292 /* X % 1 is always zero, but be sure to preserve any side
10293 effects in X. */
10294 if (integer_onep (arg1))
10295 return omit_one_operand (type, integer_zero_node, arg0);
10296
10297 /* For X % 0, return X % 0 unchanged so that we can get the
10298 proper warnings and errors. */
10299 if (integer_zerop (arg1))
10300 return NULL_TREE;
10301
10302 /* 0 % X is always zero, but be sure to preserve any side
10303 effects in X. Place this after checking for X == 0. */
10304 if (integer_zerop (arg0))
10305 return omit_one_operand (type, integer_zero_node, arg1);
10306
10307 /* X % -1 is zero. */
10308 if (!TYPE_UNSIGNED (type)
10309 && TREE_CODE (arg1) == INTEGER_CST
10310 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10311 && TREE_INT_CST_HIGH (arg1) == -1)
10312 return omit_one_operand (type, integer_zero_node, arg0);
10313
10314 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10315 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10316 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10317 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10318 {
10319 tree c = arg1;
10320 /* Also optimize A % (C << N) where C is a power of 2,
10321 to A & ((C << N) - 1). */
10322 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10323 c = TREE_OPERAND (arg1, 0);
10324
10325 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10326 {
10327 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10328 build_int_cst (TREE_TYPE (arg1), 1));
10329 return fold_build2 (BIT_AND_EXPR, type,
10330 fold_convert (type, arg0),
10331 fold_convert (type, mask));
10332 }
10333 }
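/* For instance, x % 8 becomes x & 7, and x % (2 << n) becomes
   x & ((2 << n) - 1), in both cases only when x is known to be
   non-negative (or the type is unsigned).  */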
10334
10335 /* X % -C is the same as X % C. */
10336 if (code == TRUNC_MOD_EXPR
10337 && !TYPE_UNSIGNED (type)
10338 && TREE_CODE (arg1) == INTEGER_CST
10339 && !TREE_OVERFLOW (arg1)
10340 && TREE_INT_CST_HIGH (arg1) < 0
10341 && !flag_trapv
10342 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10343 && !sign_bit_p (arg1, arg1))
10344 return fold_build2 (code, type, fold_convert (type, arg0),
10345 fold_convert (type, negate_expr (arg1)));
10346
10347 /* X % -Y is the same as X % Y. */
10348 if (code == TRUNC_MOD_EXPR
10349 && !TYPE_UNSIGNED (type)
10350 && TREE_CODE (arg1) == NEGATE_EXPR
10351 && !flag_trapv)
10352 return fold_build2 (code, type, fold_convert (type, arg0),
10353 fold_convert (type, TREE_OPERAND (arg1, 0)));
10354
10355 if (TREE_CODE (arg1) == INTEGER_CST
10356 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10357 return fold_convert (type, tem);
10358
10359 return NULL_TREE;
10360
10361 case LROTATE_EXPR:
10362 case RROTATE_EXPR:
10363 if (integer_all_onesp (arg0))
10364 return omit_one_operand (type, arg0, arg1);
10365 goto shift;
10366
10367 case RSHIFT_EXPR:
10368 /* Optimize -1 >> x for arithmetic right shifts. */
10369 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10370 return omit_one_operand (type, arg0, arg1);
10371 /* ... fall through ... */
10372
10373 case LSHIFT_EXPR:
10374 shift:
10375 if (integer_zerop (arg1))
10376 return non_lvalue (fold_convert (type, arg0));
10377 if (integer_zerop (arg0))
10378 return omit_one_operand (type, arg0, arg1);
10379
10380 /* Since a negative shift count is not well-defined,
10381 don't try to compute it in the compiler. */
10382 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10383 return NULL_TREE;
10384
10385 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10386 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10387 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10388 && host_integerp (TREE_OPERAND (arg0, 1), false)
10389 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10390 {
10391 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10392 + TREE_INT_CST_LOW (arg1));
10393
10394 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10395 being well defined. */
10396 if (low >= TYPE_PRECISION (type))
10397 {
10398 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10399 low = low % TYPE_PRECISION (type);
10400 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10401 return build_int_cst (type, 0);
10402 else
10403 low = TYPE_PRECISION (type) - 1;
10404 }
10405
10406 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10407 build_int_cst (type, low));
10408 }
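/* E.g. (x << 3) << 5 becomes x << 8.  For a 32-bit type,
   (x << 20) << 15 has a combined count of 35 >= the precision, so
   a left shift collapses to 0, while a rotate reduces the count
   modulo the precision to 3.  */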
10409
10410 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10411 into x & ((unsigned)-1 >> c) for unsigned types. */
10412 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10413 || (TYPE_UNSIGNED (type)
10414 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10415 && host_integerp (arg1, false)
10416 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10417 && host_integerp (TREE_OPERAND (arg0, 1), false)
10418 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10419 {
10420 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10421 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10422 tree lshift;
10423 tree arg00;
10424
10425 if (low0 == low1)
10426 {
10427 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10428
10429 lshift = build_int_cst (type, -1);
10430 lshift = int_const_binop (code, lshift, arg1, 0);
10431
10432 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10433 }
10434 }
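/* E.g. for unsigned 32-bit x, (x >> 4) << 4 becomes
   x & (-1 << 4), i.e. x & 0xfffffff0.  */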
10435
10436 /* Rewrite an LROTATE_EXPR by a constant into an
10437 RROTATE_EXPR by a new constant. */
10438 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10439 {
10440 tree tem = build_int_cst (TREE_TYPE (arg1),
10441 GET_MODE_BITSIZE (TYPE_MODE (type)));
10442 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10443 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10444 }
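/* E.g. in a 32-bit mode, rotating left by 3 is canonicalized as
   rotating right by 32 - 3 == 29.  */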
10445
10446 /* If we have a rotate of a bit operation with the rotate count and
10447 the second operand of the bit operation both constant,
10448 permute the two operations. */
10449 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10450 && (TREE_CODE (arg0) == BIT_AND_EXPR
10451 || TREE_CODE (arg0) == BIT_IOR_EXPR
10452 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10453 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10454 return fold_build2 (TREE_CODE (arg0), type,
10455 fold_build2 (code, type,
10456 TREE_OPERAND (arg0, 0), arg1),
10457 fold_build2 (code, type,
10458 TREE_OPERAND (arg0, 1), arg1));
10459
10460 /* Two consecutive rotates adding up to the width of the mode can
10461 be ignored. */
10462 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10463 && TREE_CODE (arg0) == RROTATE_EXPR
10464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10465 && TREE_INT_CST_HIGH (arg1) == 0
10466 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10467 && ((TREE_INT_CST_LOW (arg1)
10468 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10469 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10470 return TREE_OPERAND (arg0, 0);
10471
10472 return NULL_TREE;
10473
10474 case MIN_EXPR:
10475 if (operand_equal_p (arg0, arg1, 0))
10476 return omit_one_operand (type, arg0, arg1);
10477 if (INTEGRAL_TYPE_P (type)
10478 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10479 return omit_one_operand (type, arg1, arg0);
10480 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10481 if (tem)
10482 return tem;
10483 goto associate;
10484
10485 case MAX_EXPR:
10486 if (operand_equal_p (arg0, arg1, 0))
10487 return omit_one_operand (type, arg0, arg1);
10488 if (INTEGRAL_TYPE_P (type)
10489 && TYPE_MAX_VALUE (type)
10490 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10491 return omit_one_operand (type, arg1, arg0);
10492 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10493 if (tem)
10494 return tem;
10495 goto associate;
10496
10497 case TRUTH_ANDIF_EXPR:
10498 /* Note that the operands of this must be ints
10499 and their values must be 0 or 1.
10500 ("true" is a fixed value perhaps depending on the language.) */
10501 /* If first arg is constant zero, return it. */
10502 if (integer_zerop (arg0))
10503 return fold_convert (type, arg0);
10504 case TRUTH_AND_EXPR:
10505 /* If either arg is constant true, drop it. */
10506 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10507 return non_lvalue (fold_convert (type, arg1));
10508 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10509 /* Preserve sequence points. */
10510 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10511 return non_lvalue (fold_convert (type, arg0));
10512 /* If second arg is constant zero, result is zero, but first arg
10513 must be evaluated. */
10514 if (integer_zerop (arg1))
10515 return omit_one_operand (type, arg1, arg0);
10516 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10517 case will be handled here. */
10518 if (integer_zerop (arg0))
10519 return omit_one_operand (type, arg0, arg1);
10520
10521 /* !X && X is always false. */
10522 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10523 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10524 return omit_one_operand (type, integer_zero_node, arg1);
10525 /* X && !X is always false. */
10526 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10527 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10528 return omit_one_operand (type, integer_zero_node, arg0);
10529
10530 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10531 means A >= Y && A != MAX, but in this case we know that
10532 A < X <= MAX. */
10533
10534 if (!TREE_SIDE_EFFECTS (arg0)
10535 && !TREE_SIDE_EFFECTS (arg1))
10536 {
10537 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10538 if (tem && !operand_equal_p (tem, arg0, 0))
10539 return fold_build2 (code, type, tem, arg1);
10540
10541 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10542 if (tem && !operand_equal_p (tem, arg1, 0))
10543 return fold_build2 (code, type, arg0, tem);
10544 }
10545
10546 truth_andor:
10547 /* We only do these simplifications if we are optimizing. */
10548 if (!optimize)
10549 return NULL_TREE;
10550
10551 /* Check for things like (A || B) && (A || C). We can convert this
10552 to A || (B && C). Note that either operator can be any of the four
10553 truth and/or operations and the transformation will still be
10554 valid. Also note that we only care about order for the
10555 ANDIF and ORIF operators. If B contains side effects, this
10556 might change the truth-value of A. */
10557 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10558 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10559 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10560 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10561 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10562 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10563 {
10564 tree a00 = TREE_OPERAND (arg0, 0);
10565 tree a01 = TREE_OPERAND (arg0, 1);
10566 tree a10 = TREE_OPERAND (arg1, 0);
10567 tree a11 = TREE_OPERAND (arg1, 1);
10568 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10569 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10570 && (code == TRUTH_AND_EXPR
10571 || code == TRUTH_OR_EXPR));
10572
10573 if (operand_equal_p (a00, a10, 0))
10574 return fold_build2 (TREE_CODE (arg0), type, a00,
10575 fold_build2 (code, type, a01, a11));
10576 else if (commutative && operand_equal_p (a00, a11, 0))
10577 return fold_build2 (TREE_CODE (arg0), type, a00,
10578 fold_build2 (code, type, a01, a10));
10579 else if (commutative && operand_equal_p (a01, a10, 0))
10580 return fold_build2 (TREE_CODE (arg0), type, a01,
10581 fold_build2 (code, type, a00, a11));
10582
10583 /* This case is tricky because we must either have commutative
10584 operators or else A10 must not have side-effects. */
10585
10586 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10587 && operand_equal_p (a01, a11, 0))
10588 return fold_build2 (TREE_CODE (arg0), type,
10589 fold_build2 (code, type, a00, a10),
10590 a01);
10591 }
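/* As a concrete case, (a || b) && (a || c) folds to
   a || (b && c) via the first operand_equal_p test above,
   provided b has no side effects.  */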
10592
10593 /* See if we can build a range comparison. */
10594 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10595 return tem;
10596
10597 /* Check for the possibility of merging component references. If our
10598 lhs is another similar operation, try to merge its rhs with our
10599 rhs. Then try to merge our lhs and rhs. */
10600 if (TREE_CODE (arg0) == code
10601 && 0 != (tem = fold_truthop (code, type,
10602 TREE_OPERAND (arg0, 1), arg1)))
10603 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10604
10605 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10606 return tem;
10607
10608 return NULL_TREE;
10609
10610 case TRUTH_ORIF_EXPR:
10611 /* Note that the operands of this must be ints
10612 and their values must be 0 or 1.
10613 ("true" is a fixed value perhaps depending on the language.) */
10614 /* If first arg is constant true, return it. */
10615 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10616 return fold_convert (type, arg0);
10617 case TRUTH_OR_EXPR:
10618 /* If either arg is constant zero, drop it. */
10619 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10620 return non_lvalue (fold_convert (type, arg1));
10621 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10622 /* Preserve sequence points. */
10623 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10624 return non_lvalue (fold_convert (type, arg0));
10625 /* If second arg is constant true, result is true, but we must
10626 evaluate first arg. */
10627 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10628 return omit_one_operand (type, arg1, arg0);
10629 /* Likewise for first arg, but note this only occurs here for
10630 TRUTH_OR_EXPR. */
10631 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10632 return omit_one_operand (type, arg0, arg1);
10633
10634 /* !X || X is always true. */
10635 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10636 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10637 return omit_one_operand (type, integer_one_node, arg1);
10638 /* X || !X is always true. */
10639 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10640 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10641 return omit_one_operand (type, integer_one_node, arg0);
10642
10643 goto truth_andor;
10644
10645 case TRUTH_XOR_EXPR:
10646 /* If the second arg is constant zero, drop it. */
10647 if (integer_zerop (arg1))
10648 return non_lvalue (fold_convert (type, arg0));
10649 /* If the second arg is constant true, this is a logical inversion. */
10650 if (integer_onep (arg1))
10651 {
10652 /* Only call invert_truthvalue if operand is a truth value. */
10653 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10654 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10655 else
10656 tem = invert_truthvalue (arg0);
10657 return non_lvalue (fold_convert (type, tem));
10658 }
10659 /* Identical arguments cancel to zero. */
10660 if (operand_equal_p (arg0, arg1, 0))
10661 return omit_one_operand (type, integer_zero_node, arg0);
10662
10663 /* !X ^ X is always true. */
10664 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10665 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10666 return omit_one_operand (type, integer_one_node, arg1);
10667
10668 /* X ^ !X is always true. */
10669 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10670 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10671 return omit_one_operand (type, integer_one_node, arg0);
10672
10673 return NULL_TREE;
10674
10675 case EQ_EXPR:
10676 case NE_EXPR:
10677 tem = fold_comparison (code, type, op0, op1);
10678 if (tem != NULL_TREE)
10679 return tem;
10680
10681 /* bool_var != 0 becomes bool_var. */
10682 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10683 && code == NE_EXPR)
10684 return non_lvalue (fold_convert (type, arg0));
10685
10686 /* bool_var == 1 becomes bool_var. */
10687 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10688 && code == EQ_EXPR)
10689 return non_lvalue (fold_convert (type, arg0));
10690
10691 /* bool_var != 1 becomes !bool_var. */
10692 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10693 && code == NE_EXPR)
10694 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10695
10696 /* bool_var == 0 becomes !bool_var. */
10697 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10698 && code == EQ_EXPR)
10699 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10700
10701 /* If this is an equality comparison of the address of a non-weak
10702 object against zero, then we know the result. */
10703 if (TREE_CODE (arg0) == ADDR_EXPR
10704 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10705 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10706 && integer_zerop (arg1))
10707 return constant_boolean_node (code != EQ_EXPR, type);
10708
10709 /* If this is an equality comparison of the address of two non-weak,
10710 unaliased symbols neither of which are extern (since we do not
10711 have access to attributes for externs), then we know the result. */
10712 if (TREE_CODE (arg0) == ADDR_EXPR
10713 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10714 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10715 && ! lookup_attribute ("alias",
10716 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10717 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10718 && TREE_CODE (arg1) == ADDR_EXPR
10719 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10720 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10721 && ! lookup_attribute ("alias",
10722 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10723 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10724 {
10725 /* We know that we're looking at the address of two
10726 non-weak, unaliased, static _DECL nodes.
10727
10728 It is both wasteful and incorrect to call operand_equal_p
10729 to compare the two ADDR_EXPR nodes. It is wasteful in that
10730 all we need to do is test pointer equality for the arguments
10731 to the two ADDR_EXPR nodes. It is incorrect to use
10732 operand_equal_p as that function is NOT equivalent to a
10733 C equality test. It can in fact return false for two
10734 objects which would test as equal using the C equality
10735 operator. */
10736 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10737 return constant_boolean_node (equal
10738 ? code == EQ_EXPR : code != EQ_EXPR,
10739 type);
10740 }
10741
10742 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10743 a MINUS_EXPR of a constant, we can convert it into a comparison with
10744 a revised constant as long as no overflow occurs. */
10745 if (TREE_CODE (arg1) == INTEGER_CST
10746 && (TREE_CODE (arg0) == PLUS_EXPR
10747 || TREE_CODE (arg0) == MINUS_EXPR)
10748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10749 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10750 ? MINUS_EXPR : PLUS_EXPR,
10751 fold_convert (TREE_TYPE (arg0), arg1),
10752 TREE_OPERAND (arg0, 1), 0))
10753 && !TREE_OVERFLOW (tem))
10754 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10755
10756 /* Similarly for a NEGATE_EXPR. */
10757 if (TREE_CODE (arg0) == NEGATE_EXPR
10758 && TREE_CODE (arg1) == INTEGER_CST
10759 && 0 != (tem = negate_expr (arg1))
10760 && TREE_CODE (tem) == INTEGER_CST
10761 && !TREE_OVERFLOW (tem))
10762 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10763
10764 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10765 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10766 && TREE_CODE (arg1) == INTEGER_CST
10767 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10768 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10769 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10770 fold_convert (TREE_TYPE (arg0), arg1),
10771 TREE_OPERAND (arg0, 1)));
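/* E.g. (x ^ 5) == 3 folds to x == 6, since 5 ^ 3 == 6.  */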
10772
10773 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10774 for !=. Don't do this for ordered comparisons due to overflow. */
10775 if (TREE_CODE (arg0) == MINUS_EXPR
10776 && integer_zerop (arg1))
10777 return fold_build2 (code, type,
10778 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10779
10780 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10781 if (TREE_CODE (arg0) == ABS_EXPR
10782 && (integer_zerop (arg1) || real_zerop (arg1)))
10783 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10784
10785 /* If this is an EQ or NE comparison with zero and ARG0 is
10786 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10787 two operations, but the latter can be done in one less insn
10788 on machines that have only two-operand insns or on which a
10789 constant cannot be the first operand. */
10790 if (TREE_CODE (arg0) == BIT_AND_EXPR
10791 && integer_zerop (arg1))
10792 {
10793 tree arg00 = TREE_OPERAND (arg0, 0);
10794 tree arg01 = TREE_OPERAND (arg0, 1);
10795 if (TREE_CODE (arg00) == LSHIFT_EXPR
10796 && integer_onep (TREE_OPERAND (arg00, 0)))
10797 return
10798 fold_build2 (code, type,
10799 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10800 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10801 arg01, TREE_OPERAND (arg00, 1)),
10802 fold_convert (TREE_TYPE (arg0),
10803 integer_one_node)),
10804 arg1);
10805 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10806 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10807 return
10808 fold_build2 (code, type,
10809 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10810 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10811 arg00, TREE_OPERAND (arg01, 1)),
10812 fold_convert (TREE_TYPE (arg0),
10813 integer_one_node)),
10814 arg1);
10815 }
10816
10817 /* If this is an NE or EQ comparison of zero against the result of a
10818 signed MOD operation whose second operand is a power of 2, make
10819 the MOD operation unsigned since it is simpler and equivalent. */
10820 if (integer_zerop (arg1)
10821 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10822 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10823 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10824 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10825 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10826 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10827 {
10828 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10829 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10830 fold_convert (newtype,
10831 TREE_OPERAND (arg0, 0)),
10832 fold_convert (newtype,
10833 TREE_OPERAND (arg0, 1)));
10834
10835 return fold_build2 (code, type, newmod,
10836 fold_convert (newtype, arg1));
10837 }
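/* E.g. for signed int x, x % 4 == 0 is rewritten here as
   (unsigned int) x % 4U == 0U; the two tests agree because 4 is a
   power of 2, and the unsigned modulus is cheaper to expand.  */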
10838
10839 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10840 C1 is a valid shift constant, and C2 is a power of two, i.e.
10841 a single bit. */
10842 if (TREE_CODE (arg0) == BIT_AND_EXPR
10843 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10844 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10845 == INTEGER_CST
10846 && integer_pow2p (TREE_OPERAND (arg0, 1))
10847 && integer_zerop (arg1))
10848 {
10849 tree itype = TREE_TYPE (arg0);
10850 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10851 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10852
10853 /* Check for a valid shift count. */
10854 if (TREE_INT_CST_HIGH (arg001) == 0
10855 && TREE_INT_CST_LOW (arg001) < prec)
10856 {
10857 tree arg01 = TREE_OPERAND (arg0, 1);
10858 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10859 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10860 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10861 can be rewritten as (X & (C2 << C1)) != 0. */
10862 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10863 {
10864 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10865 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10866 return fold_build2 (code, type, tem, arg1);
10867 }
10868 /* Otherwise, for signed (arithmetic) shifts,
10869 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10870 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10871 else if (!TYPE_UNSIGNED (itype))
10872 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10873 arg000, build_int_cst (itype, 0));
10874 /* Otherwise, for unsigned (logical) shifts,
10875 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10876 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10877 else
10878 return omit_one_operand (type,
10879 code == EQ_EXPR ? integer_one_node
10880 : integer_zero_node,
10881 arg000);
10882 }
10883 }
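/* Concretely, ((x >> 2) & 4) != 0 becomes (x & 16) != 0 via the
   first branch, since 4 << 2 == 16 still fits in the precision.  */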
10884
10885 /* If this is an NE comparison of zero with an AND of one, remove the
10886 comparison since the AND will give the correct value. */
10887 if (code == NE_EXPR
10888 && integer_zerop (arg1)
10889 && TREE_CODE (arg0) == BIT_AND_EXPR
10890 && integer_onep (TREE_OPERAND (arg0, 1)))
10891 return fold_convert (type, arg0);
10892
10893 /* If we have (A & C) == C where C is a power of 2, convert this into
10894 (A & C) != 0. Similarly for NE_EXPR. */
10895 if (TREE_CODE (arg0) == BIT_AND_EXPR
10896 && integer_pow2p (TREE_OPERAND (arg0, 1))
10897 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10898 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10899 arg0, fold_convert (TREE_TYPE (arg0),
10900 integer_zero_node));
10901
10902 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10903 bit, then fold the expression into A < 0 or A >= 0. */
10904 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10905 if (tem)
10906 return tem;
10907
10908 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10909 Similarly for NE_EXPR. */
10910 if (TREE_CODE (arg0) == BIT_AND_EXPR
10911 && TREE_CODE (arg1) == INTEGER_CST
10912 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10913 {
10914 tree notc = fold_build1 (BIT_NOT_EXPR,
10915 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10916 TREE_OPERAND (arg0, 1));
10917 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10918 arg1, notc);
10919 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10920 if (integer_nonzerop (dandnotc))
10921 return omit_one_operand (type, rslt, arg0);
10922 }
10923
10924 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10925 Similarly for NE_EXPR. */
10926 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10927 && TREE_CODE (arg1) == INTEGER_CST
10928 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10929 {
10930 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10931 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10932 TREE_OPERAND (arg0, 1), notd);
10933 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10934 if (integer_nonzerop (candnotd))
10935 return omit_one_operand (type, rslt, arg0);
10936 }
10937
10938 /* If this is a comparison of a field, we may be able to simplify it. */
10939 if ((TREE_CODE (arg0) == COMPONENT_REF
10940 || TREE_CODE (arg0) == BIT_FIELD_REF)
10941 /* Handle the constant case even without -O
10942 to make sure the warnings are given. */
10943 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10944 {
10945 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10946 if (t1)
10947 return t1;
10948 }
10949
10950 /* Optimize comparisons of strlen vs zero to a compare of the
10951 first character of the string vs zero. To wit,
10952 strlen(ptr) == 0 => *ptr == 0
10953 strlen(ptr) != 0 => *ptr != 0
10954 Other cases should reduce to one of these two (or a constant)
10955 due to the return value of strlen being unsigned. */
10956 if (TREE_CODE (arg0) == CALL_EXPR
10957 && integer_zerop (arg1))
10958 {
10959 tree fndecl = get_callee_fndecl (arg0);
10960 tree arglist;
10961
10962 if (fndecl
10963 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10964 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10965 && (arglist = TREE_OPERAND (arg0, 1))
10966 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10967 && ! TREE_CHAIN (arglist))
10968 {
10969 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10970 return fold_build2 (code, type, iref,
10971 build_int_cst (TREE_TYPE (iref), 0));
10972 }
10973 }
10974
10975 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10976 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10977 if (TREE_CODE (arg0) == RSHIFT_EXPR
10978 && integer_zerop (arg1)
10979 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10980 {
10981 tree arg00 = TREE_OPERAND (arg0, 0);
10982 tree arg01 = TREE_OPERAND (arg0, 1);
10983 tree itype = TREE_TYPE (arg00);
10984 if (TREE_INT_CST_HIGH (arg01) == 0
10985 && TREE_INT_CST_LOW (arg01)
10986 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10987 {
10988 if (TYPE_UNSIGNED (itype))
10989 {
10990 itype = lang_hooks.types.signed_type (itype);
10991 arg00 = fold_convert (itype, arg00);
10992 }
10993 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10994 type, arg00, build_int_cst (itype, 0));
10995 }
10996 }
10997
10998 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10999 if (integer_zerop (arg1)
11000 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11001 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11002 TREE_OPERAND (arg0, 1));
11003
11004 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11005 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11006 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11007 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11008 build_int_cst (TREE_TYPE (arg1), 0));
11009 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11010 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11011 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11012 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11013 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11014 build_int_cst (TREE_TYPE (arg1), 0));
11015
11016 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11017 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11018 && TREE_CODE (arg1) == INTEGER_CST
11019 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11020 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11021 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11022 TREE_OPERAND (arg0, 1), arg1));
11023
11024 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11025 (X & C) == 0 when C is a single bit. */
11026 if (TREE_CODE (arg0) == BIT_AND_EXPR
11027 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11028 && integer_zerop (arg1)
11029 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11030 {
11031 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11032 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11033 TREE_OPERAND (arg0, 1));
11034 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11035 type, tem, arg1);
11036 }
11037
11038 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11039 constant C is a power of two, i.e. a single bit. */
11040 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11041 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11042 && integer_zerop (arg1)
11043 && integer_pow2p (TREE_OPERAND (arg0, 1))
11044 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11045 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11046 {
11047 tree arg00 = TREE_OPERAND (arg0, 0);
11048 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11049 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11050 }
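/* E.g. ((x & 8) ^ 8) == 0 folds to (x & 8) != 0 here.  */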
11051
11052 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11053 when C is a power of two, i.e. a single bit. */
11054 if (TREE_CODE (arg0) == BIT_AND_EXPR
11055 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11056 && integer_zerop (arg1)
11057 && integer_pow2p (TREE_OPERAND (arg0, 1))
11058 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11059 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11060 {
11061 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11062 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11063 arg000, TREE_OPERAND (arg0, 1));
11064 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11065 tem, build_int_cst (TREE_TYPE (tem), 0));
11066 }
11067
11068 if (integer_zerop (arg1)
11069 && tree_expr_nonzero_p (arg0))
11070 {
11071 tree res = constant_boolean_node (code == NE_EXPR, type);
11072 return omit_one_operand (type, res, arg0);
11073 }
11074
11075 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11076 if (TREE_CODE (arg0) == NEGATE_EXPR
11077 && TREE_CODE (arg1) == NEGATE_EXPR)
11078 return fold_build2 (code, type,
11079 TREE_OPERAND (arg0, 0),
11080 TREE_OPERAND (arg1, 0));
11081
11082 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11083 if (TREE_CODE (arg0) == BIT_AND_EXPR
11084 && TREE_CODE (arg1) == BIT_AND_EXPR)
11085 {
11086 tree arg00 = TREE_OPERAND (arg0, 0);
11087 tree arg01 = TREE_OPERAND (arg0, 1);
11088 tree arg10 = TREE_OPERAND (arg1, 0);
11089 tree arg11 = TREE_OPERAND (arg1, 1);
11090 tree itype = TREE_TYPE (arg0);
11091
11092 if (operand_equal_p (arg01, arg11, 0))
11093 return fold_build2 (code, type,
11094 fold_build2 (BIT_AND_EXPR, itype,
11095 fold_build2 (BIT_XOR_EXPR, itype,
11096 arg00, arg10),
11097 arg01),
11098 build_int_cst (itype, 0));
11099
11100 if (operand_equal_p (arg01, arg10, 0))
11101 return fold_build2 (code, type,
11102 fold_build2 (BIT_AND_EXPR, itype,
11103 fold_build2 (BIT_XOR_EXPR, itype,
11104 arg00, arg11),
11105 arg01),
11106 build_int_cst (itype, 0));
11107
11108 if (operand_equal_p (arg00, arg11, 0))
11109 return fold_build2 (code, type,
11110 fold_build2 (BIT_AND_EXPR, itype,
11111 fold_build2 (BIT_XOR_EXPR, itype,
11112 arg01, arg10),
11113 arg00),
11114 build_int_cst (itype, 0));
11115
11116 if (operand_equal_p (arg00, arg10, 0))
11117 return fold_build2 (code, type,
11118 fold_build2 (BIT_AND_EXPR, itype,
11119 fold_build2 (BIT_XOR_EXPR, itype,
11120 arg01, arg11),
11121 arg00),
11122 build_int_cst (itype, 0));
11123 }
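/* For example, (x & m) == (y & m) folds to ((x ^ y) & m) == 0,
   and likewise for the commuted operand orders handled above.  */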
11124
11125 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11126 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11127 {
11128 tree arg00 = TREE_OPERAND (arg0, 0);
11129 tree arg01 = TREE_OPERAND (arg0, 1);
11130 tree arg10 = TREE_OPERAND (arg1, 0);
11131 tree arg11 = TREE_OPERAND (arg1, 1);
11132 tree itype = TREE_TYPE (arg0);
11133
11134 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11135 operand_equal_p guarantees no side-effects so we don't need
11136 to use omit_one_operand on Z. */
11137 if (operand_equal_p (arg01, arg11, 0))
11138 return fold_build2 (code, type, arg00, arg10);
11139 if (operand_equal_p (arg01, arg10, 0))
11140 return fold_build2 (code, type, arg00, arg11);
11141 if (operand_equal_p (arg00, arg11, 0))
11142 return fold_build2 (code, type, arg01, arg10);
11143 if (operand_equal_p (arg00, arg10, 0))
11144 return fold_build2 (code, type, arg01, arg11);
11145
11146 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11147 if (TREE_CODE (arg01) == INTEGER_CST
11148 && TREE_CODE (arg11) == INTEGER_CST)
11149 return fold_build2 (code, type,
11150 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11151 fold_build2 (BIT_XOR_EXPR, itype,
11152 arg01, arg11)),
11153 arg10);
11154 }
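/* E.g. (x ^ z) == (y ^ z) folds to x == y, and (x ^ 5) != (y ^ 3)
   folds to (x ^ 6) != y, since 5 ^ 3 == 6.  */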
11155 return NULL_TREE;
11156
11157 case LT_EXPR:
11158 case GT_EXPR:
11159 case LE_EXPR:
11160 case GE_EXPR:
11161 tem = fold_comparison (code, type, op0, op1);
11162 if (tem != NULL_TREE)
11163 return tem;
11164
11165 /* Transform comparisons of the form X +- C CMP X. */
11166 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11167 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11168 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11169 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11170 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11171 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
11172 && !(flag_wrapv || flag_trapv))))
11173 {
11174 tree arg01 = TREE_OPERAND (arg0, 1);
11175 enum tree_code code0 = TREE_CODE (arg0);
11176 int is_positive;
11177
11178 if (TREE_CODE (arg01) == REAL_CST)
11179 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11180 else
11181 is_positive = tree_int_cst_sgn (arg01);
11182
11183 /* (X - c) > X becomes false. */
11184 if (code == GT_EXPR
11185 && ((code0 == MINUS_EXPR && is_positive >= 0)
11186 || (code0 == PLUS_EXPR && is_positive <= 0)))
11187 return constant_boolean_node (0, type);
11188
11189 /* Likewise (X + c) < X becomes false. */
11190 if (code == LT_EXPR
11191 && ((code0 == PLUS_EXPR && is_positive >= 0)
11192 || (code0 == MINUS_EXPR && is_positive <= 0)))
11193 return constant_boolean_node (0, type);
11194
11195 /* Convert (X - c) <= X to true. */
11196 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11197 && code == LE_EXPR
11198 && ((code0 == MINUS_EXPR && is_positive >= 0)
11199 || (code0 == PLUS_EXPR && is_positive <= 0)))
11200 return constant_boolean_node (1, type);
11201
11202 /* Convert (X + c) >= X to true. */
11203 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11204 && code == GE_EXPR
11205 && ((code0 == PLUS_EXPR && is_positive >= 0)
11206 || (code0 == MINUS_EXPR && is_positive <= 0)))
11207 return constant_boolean_node (1, type);
11208
11209 if (TREE_CODE (arg01) == INTEGER_CST)
11210 {
11211 /* Convert X + c > X and X - c < X to true for integers. */
11212 if (code == GT_EXPR
11213 && ((code0 == PLUS_EXPR && is_positive > 0)
11214 || (code0 == MINUS_EXPR && is_positive < 0)))
11215 return constant_boolean_node (1, type);
11216
11217 if (code == LT_EXPR
11218 && ((code0 == MINUS_EXPR && is_positive > 0)
11219 || (code0 == PLUS_EXPR && is_positive < 0)))
11220 return constant_boolean_node (1, type);
11221
11222 /* Convert X + c <= X and X - c >= X to false for integers. */
11223 if (code == LE_EXPR
11224 && ((code0 == PLUS_EXPR && is_positive > 0)
11225 || (code0 == MINUS_EXPR && is_positive < 0)))
11226 return constant_boolean_node (0, type);
11227
11228 if (code == GE_EXPR
11229 && ((code0 == MINUS_EXPR && is_positive > 0)
11230 || (code0 == PLUS_EXPR && is_positive < 0)))
11231 return constant_boolean_node (0, type);
11232 }
11233 }
11234
11235 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11236 This transformation affects the cases which are handled in later
11237 optimizations involving comparisons with non-negative constants. */
11238 if (TREE_CODE (arg1) == INTEGER_CST
11239 && TREE_CODE (arg0) != INTEGER_CST
11240 && tree_int_cst_sgn (arg1) > 0)
11241 {
11242 if (code == GE_EXPR)
11243 {
11244 arg1 = const_binop (MINUS_EXPR, arg1,
11245 build_int_cst (TREE_TYPE (arg1), 1), 0);
11246 return fold_build2 (GT_EXPR, type, arg0,
11247 fold_convert (TREE_TYPE (arg0), arg1));
11248 }
11249 if (code == LT_EXPR)
11250 {
11251 arg1 = const_binop (MINUS_EXPR, arg1,
11252 build_int_cst (TREE_TYPE (arg1), 1), 0);
11253 return fold_build2 (LE_EXPR, type, arg0,
11254 fold_convert (TREE_TYPE (arg0), arg1));
11255 }
11256 }
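/* E.g. x >= 5 is canonicalized to x > 4, and x < 5 to x <= 4.  */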
11257
11258 /* Comparisons with the highest or lowest possible integer of
11259 the specified precision will have known values. */
11260 {
11261 tree arg1_type = TREE_TYPE (arg1);
11262 unsigned int width = TYPE_PRECISION (arg1_type);
11263
11264 if (TREE_CODE (arg1) == INTEGER_CST
11265 && !TREE_OVERFLOW (arg1)
11266 && width <= 2 * HOST_BITS_PER_WIDE_INT
11267 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11268 {
11269 HOST_WIDE_INT signed_max_hi;
11270 unsigned HOST_WIDE_INT signed_max_lo;
11271 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11272
11273 if (width <= HOST_BITS_PER_WIDE_INT)
11274 {
11275 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11276 - 1;
11277 signed_max_hi = 0;
11278 max_hi = 0;
11279
11280 if (TYPE_UNSIGNED (arg1_type))
11281 {
11282 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11283 min_lo = 0;
11284 min_hi = 0;
11285 }
11286 else
11287 {
11288 max_lo = signed_max_lo;
11289 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11290 min_hi = -1;
11291 }
11292 }
11293 else
11294 {
11295 width -= HOST_BITS_PER_WIDE_INT;
11296 signed_max_lo = -1;
11297 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11298 - 1;
11299 max_lo = -1;
11300 min_lo = 0;
11301
11302 if (TYPE_UNSIGNED (arg1_type))
11303 {
11304 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11305 min_hi = 0;
11306 }
11307 else
11308 {
11309 max_hi = signed_max_hi;
11310 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11311 }
11312 }
11313
11314 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11315 && TREE_INT_CST_LOW (arg1) == max_lo)
11316 switch (code)
11317 {
11318 case GT_EXPR:
11319 return omit_one_operand (type, integer_zero_node, arg0);
11320
11321 case GE_EXPR:
11322 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11323
11324 case LE_EXPR:
11325 return omit_one_operand (type, integer_one_node, arg0);
11326
11327 case LT_EXPR:
11328 return fold_build2 (NE_EXPR, type, arg0, arg1);
11329
11330 /* The GE_EXPR and LT_EXPR cases above are not normally
11331 reached because of previous transformations. */
11332
11333 default:
11334 break;
11335 }
11336 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11337 == max_hi
11338 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11339 switch (code)
11340 {
11341 case GT_EXPR:
11342 arg1 = const_binop (PLUS_EXPR, arg1,
11343 build_int_cst (TREE_TYPE (arg1), 1), 0);
11344 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11345 case LE_EXPR:
11346 arg1 = const_binop (PLUS_EXPR, arg1,
11347 build_int_cst (TREE_TYPE (arg1), 1), 0);
11348 return fold_build2 (NE_EXPR, type, arg0, arg1);
11349 default:
11350 break;
11351 }
11352 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11353 == min_hi
11354 && TREE_INT_CST_LOW (arg1) == min_lo)
11355 switch (code)
11356 {
11357 case LT_EXPR:
11358 return omit_one_operand (type, integer_zero_node, arg0);
11359
11360 case LE_EXPR:
11361 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11362
11363 case GE_EXPR:
11364 return omit_one_operand (type, integer_one_node, arg0);
11365
11366 case GT_EXPR:
11367 return fold_build2 (NE_EXPR, type, op0, op1);
11368
11369 default:
11370 break;
11371 }
11372 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11373 == min_hi
11374 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11375 switch (code)
11376 {
11377 case GE_EXPR:
11378 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11379 return fold_build2 (NE_EXPR, type, arg0, arg1);
11380 case LT_EXPR:
11381 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11382 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11383 default:
11384 break;
11385 }
11386
11387 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11388 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11389 && TYPE_UNSIGNED (arg1_type)
11390 /* We will flip the signedness of the comparison operator
11391 associated with the mode of arg1, so the sign bit is
11392 specified by this mode. Check that arg1 is the signed
11393 max associated with this sign bit. */
11394 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11395 /* signed_type does not work on pointer types. */
11396 && INTEGRAL_TYPE_P (arg1_type))
11397 {
11398 /* The following case also applies to X < signed_max+1
11399 and X >= signed_max+1 because of previous transformations. */
11400 if (code == LE_EXPR || code == GT_EXPR)
11401 {
11402 tree st0, st1;
11403 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11404 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11405 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11406 type, fold_convert (st0, arg0),
11407 build_int_cst (st1, 0));
11408 }
11409 }
11410 }
11411 }
11412
11413 /* If we are comparing an ABS_EXPR with a constant, we can
11414 convert all the cases into explicit comparisons, but they may
11415 well not be faster than doing the ABS and one comparison.
11416 But ABS (X) <= C is a range comparison, which becomes a subtraction
11417 and a comparison, and is probably faster. */
11418 if (code == LE_EXPR
11419 && TREE_CODE (arg1) == INTEGER_CST
11420 && TREE_CODE (arg0) == ABS_EXPR
11421 && ! TREE_SIDE_EFFECTS (arg0)
11422 && (0 != (tem = negate_expr (arg1)))
11423 && TREE_CODE (tem) == INTEGER_CST
11424 && !TREE_OVERFLOW (tem))
11425 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11426 build2 (GE_EXPR, type,
11427 TREE_OPERAND (arg0, 0), tem),
11428 build2 (LE_EXPR, type,
11429 TREE_OPERAND (arg0, 0), arg1));
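/* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5, which expands as a
   range check instead of an ABS followed by a comparison.  */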
11430
11431 /* Convert ABS_EXPR<x> >= 0 to true. */
11432 if (code == GE_EXPR
11433 && tree_expr_nonnegative_p (arg0)
11434 && (integer_zerop (arg1)
11435 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11436 && real_zerop (arg1))))
11437 return omit_one_operand (type, integer_one_node, arg0);
11438
11439 /* Convert ABS_EXPR<x> < 0 to false. */
11440 if (code == LT_EXPR
11441 && tree_expr_nonnegative_p (arg0)
11442 && (integer_zerop (arg1) || real_zerop (arg1)))
11443 return omit_one_operand (type, integer_zero_node, arg0);
11444
11445 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11446 and similarly for >= into !=. */
11447 if ((code == LT_EXPR || code == GE_EXPR)
11448 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11449 && TREE_CODE (arg1) == LSHIFT_EXPR
11450 && integer_onep (TREE_OPERAND (arg1, 0)))
11451 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11452 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11453 TREE_OPERAND (arg1, 1)),
11454 build_int_cst (TREE_TYPE (arg0), 0));
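/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0, and
   x >= (1 << y) becomes (x >> y) != 0.  */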
11455
11456 if ((code == LT_EXPR || code == GE_EXPR)
11457 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11458 && (TREE_CODE (arg1) == NOP_EXPR
11459 || TREE_CODE (arg1) == CONVERT_EXPR)
11460 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11461 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11462 return
11463 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11464 fold_convert (TREE_TYPE (arg0),
11465 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11466 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11467 1))),
11468 build_int_cst (TREE_TYPE (arg0), 0));
11469
11470 return NULL_TREE;
11471
11472 case UNORDERED_EXPR:
11473 case ORDERED_EXPR:
11474 case UNLT_EXPR:
11475 case UNLE_EXPR:
11476 case UNGT_EXPR:
11477 case UNGE_EXPR:
11478 case UNEQ_EXPR:
11479 case LTGT_EXPR:
11480 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11481 {
11482 t1 = fold_relational_const (code, type, arg0, arg1);
11483 if (t1 != NULL_TREE)
11484 return t1;
11485 }
11486
11487 /* If the first operand is NaN, the result is constant. */
11488 if (TREE_CODE (arg0) == REAL_CST
11489 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11490 && (code != LTGT_EXPR || ! flag_trapping_math))
11491 {
11492 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11493 ? integer_zero_node
11494 : integer_one_node;
11495 return omit_one_operand (type, t1, arg1);
11496 }
11497
11498 /* If the second operand is NaN, the result is constant. */
11499 if (TREE_CODE (arg1) == REAL_CST
11500 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11501 && (code != LTGT_EXPR || ! flag_trapping_math))
11502 {
11503 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11504 ? integer_zero_node
11505 : integer_one_node;
11506 return omit_one_operand (type, t1, arg0);
11507 }
11508
11509 /* Simplify unordered comparison of something with itself. */
11510 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11511 && operand_equal_p (arg0, arg1, 0))
11512 return constant_boolean_node (1, type);
11513
11514 if (code == LTGT_EXPR
11515 && !flag_trapping_math
11516 && operand_equal_p (arg0, arg1, 0))
11517 return constant_boolean_node (0, type);
11518
11519 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11520 {
11521 tree targ0 = strip_float_extensions (arg0);
11522 tree targ1 = strip_float_extensions (arg1);
11523 tree newtype = TREE_TYPE (targ0);
11524
11525 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11526 newtype = TREE_TYPE (targ1);
11527
11528 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11529 return fold_build2 (code, type, fold_convert (newtype, targ0),
11530 fold_convert (newtype, targ1));
11531 }
11532
11533 return NULL_TREE;
11534
11535 case COMPOUND_EXPR:
11536 /* When pedantic, a compound expression can be neither an lvalue
11537 nor an integer constant expression. */
11538 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11539 return NULL_TREE;
11540 /* Don't let (0, 0) be a null pointer constant. */
11541 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11542 : fold_convert (type, arg1);
11543 return pedantic_non_lvalue (tem);
11544
11545 case COMPLEX_EXPR:
11546 if ((TREE_CODE (arg0) == REAL_CST
11547 && TREE_CODE (arg1) == REAL_CST)
11548 || (TREE_CODE (arg0) == INTEGER_CST
11549 && TREE_CODE (arg1) == INTEGER_CST))
11550 return build_complex (type, arg0, arg1);
11551 return NULL_TREE;
11552
11553 case ASSERT_EXPR:
11554 /* An ASSERT_EXPR should never be passed to fold_binary. */
11555 gcc_unreachable ();
11556
11557 default:
11558 return NULL_TREE;
11559 } /* switch (code) */
11560 }
11561
11562 /* Callback for walk_tree, looking for a LABEL_EXPR.
11563 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
11564 Does not walk into the sub-tree of a GOTO_EXPR. */
11565
11566 static tree
11567 contains_label_1 (tree *tp,
11568 int *walk_subtrees,
11569 void *data ATTRIBUTE_UNUSED)
11570 {
11571 switch (TREE_CODE (*tp))
11572 {
11573 case LABEL_EXPR:
11574 return *tp;
11575 case GOTO_EXPR:
11576 *walk_subtrees = 0;
11577 /* no break */
11578 default:
11579 return NULL_TREE;
11580 }
11581 }
11582
11583 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11584 accessible from outside the sub-tree. Returns true if such a
11585 label is found, false otherwise. */
11586
11587 static bool
11588 contains_label_p (tree st)
11589 {
11590 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11591 }
11592
11593 /* Fold a ternary expression of code CODE and type TYPE with operands
11594 OP0, OP1, and OP2. Return the folded expression if folding is
11595 successful. Otherwise, return NULL_TREE. */
11596
11597 tree
11598 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11599 {
11600 tree tem;
11601 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11602 enum tree_code_class kind = TREE_CODE_CLASS (code);
11603
11604 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11605 && TREE_CODE_LENGTH (code) == 3);
11606
11607 /* Strip any conversions that don't change the mode. This is safe
11608 for every expression, except for a comparison expression because
11609 its signedness is derived from its operands. So, in the latter
11610 case, only strip conversions that don't change the signedness.
11611
11612 Note that this is done as an internal manipulation within the
11613 constant folder, in order to find the simplest representation of
11614 the arguments so that their form can be studied. In any case,
11615 the appropriate type conversions should be put back in the tree
11616 that will get out of the constant folder. */
11617 if (op0)
11618 {
11619 arg0 = op0;
11620 STRIP_NOPS (arg0);
11621 }
11622
11623 if (op1)
11624 {
11625 arg1 = op1;
11626 STRIP_NOPS (arg1);
11627 }
11628
11629 switch (code)
11630 {
11631 case COMPONENT_REF:
11632 if (TREE_CODE (arg0) == CONSTRUCTOR
11633 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11634 {
11635 unsigned HOST_WIDE_INT idx;
11636 tree field, value;
11637 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11638 if (field == arg1)
11639 return value;
11640 }
11641 return NULL_TREE;
11642
11643 case COND_EXPR:
11644 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11645 so all simple results must be passed through pedantic_non_lvalue. */
11646 if (TREE_CODE (arg0) == INTEGER_CST)
11647 {
11648 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11649 tem = integer_zerop (arg0) ? op2 : op1;
11650 /* Only optimize constant conditions when the selected branch
11651 has the same type as the COND_EXPR. This avoids optimizing
11652 away "c ? x : throw", where the throw has a void type.
11653 Avoid throwing away an operand that contains a label. */
11654 if ((!TREE_SIDE_EFFECTS (unused_op)
11655 || !contains_label_p (unused_op))
11656 && (! VOID_TYPE_P (TREE_TYPE (tem))
11657 || VOID_TYPE_P (type)))
11658 return pedantic_non_lvalue (tem);
11659 return NULL_TREE;
11660 }
11661 if (operand_equal_p (arg1, op2, 0))
11662 return pedantic_omit_one_operand (type, arg1, arg0);
11663
11664 /* If we have A op B ? A : C, we may be able to convert this to a
11665 simpler expression, depending on the operation and the values
11666 of B and C. Signed zeros prevent all of these transformations,
11667 for reasons given above each one.
11668
11669 Also try swapping the arguments and inverting the conditional. */
11670 if (COMPARISON_CLASS_P (arg0)
11671 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11672 arg1, TREE_OPERAND (arg0, 1))
11673 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11674 {
11675 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11676 if (tem)
11677 return tem;
11678 }
11679
11680 if (COMPARISON_CLASS_P (arg0)
11681 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11682 op2,
11683 TREE_OPERAND (arg0, 1))
11684 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11685 {
11686 tem = fold_truth_not_expr (arg0);
11687 if (tem && COMPARISON_CLASS_P (tem))
11688 {
11689 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11690 if (tem)
11691 return tem;
11692 }
11693 }
11694
11695 /* If the second operand is simpler than the third, swap them
11696 since that produces better jump optimization results. */
11697 if (truth_value_p (TREE_CODE (arg0))
11698 && tree_swap_operands_p (op1, op2, false))
11699 {
11700 /* See if this can be inverted. If it can't, possibly because
11701 it was a floating-point inequality comparison, don't do
11702 anything. */
11703 tem = fold_truth_not_expr (arg0);
11704 if (tem)
11705 return fold_build3 (code, type, tem, op2, op1);
11706 }
11707
11708 /* Convert A ? 1 : 0 to simply A. */
11709 if (integer_onep (op1)
11710 && integer_zerop (op2)
11711 /* If we try to convert OP0 to our type, the
11712 call to fold will try to move the conversion inside
11713 a COND, which will recurse. In that case, the COND_EXPR
11714 is probably the best choice, so leave it alone. */
11715 && type == TREE_TYPE (arg0))
11716 return pedantic_non_lvalue (arg0);
11717
11718 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11719 over COND_EXPR in cases such as floating point comparisons. */
11720 if (integer_zerop (op1)
11721 && integer_onep (op2)
11722 && truth_value_p (TREE_CODE (arg0)))
11723 return pedantic_non_lvalue (fold_convert (type,
11724 invert_truthvalue (arg0)));
11725
11726 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11727 if (TREE_CODE (arg0) == LT_EXPR
11728 && integer_zerop (TREE_OPERAND (arg0, 1))
11729 && integer_zerop (op2)
11730 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11731 {
11732 /* sign_bit_p only checks ARG1 bits within A's precision.
11733 If <sign bit of A> has wider type than A, bits outside
11734 of A's precision in <sign bit of A> need to be checked.
11735 If they are all 0, this optimization needs to be done
11736 in unsigned A's type; if they are all 1, in signed A's type;
11737 otherwise this can't be done. */
11738 if (TYPE_PRECISION (TREE_TYPE (tem))
11739 < TYPE_PRECISION (TREE_TYPE (arg1))
11740 && TYPE_PRECISION (TREE_TYPE (tem))
11741 < TYPE_PRECISION (type))
11742 {
11743 unsigned HOST_WIDE_INT mask_lo;
11744 HOST_WIDE_INT mask_hi;
11745 int inner_width, outer_width;
11746 tree tem_type;
11747
11748 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11749 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11750 if (outer_width > TYPE_PRECISION (type))
11751 outer_width = TYPE_PRECISION (type);
11752
11753 if (outer_width > HOST_BITS_PER_WIDE_INT)
11754 {
11755 mask_hi = ((unsigned HOST_WIDE_INT) -1
11756 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11757 mask_lo = -1;
11758 }
11759 else
11760 {
11761 mask_hi = 0;
11762 mask_lo = ((unsigned HOST_WIDE_INT) -1
11763 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11764 }
11765 if (inner_width > HOST_BITS_PER_WIDE_INT)
11766 {
11767 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11768 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11769 mask_lo = 0;
11770 }
11771 else
11772 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11773 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11774
11775 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11776 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11777 {
11778 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11779 tem = fold_convert (tem_type, tem);
11780 }
11781 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11782 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11783 {
11784 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11785 tem = fold_convert (tem_type, tem);
11786 }
11787 else
11788 tem = NULL;
11789 }
11790
11791 if (tem)
11792 return fold_convert (type,
11793 fold_build2 (BIT_AND_EXPR,
11794 TREE_TYPE (tem), tem,
11795 fold_convert (TREE_TYPE (tem),
11796 arg1)));
11797 }
11798
11799 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11800 already handled above. */
11801 if (TREE_CODE (arg0) == BIT_AND_EXPR
11802 && integer_onep (TREE_OPERAND (arg0, 1))
11803 && integer_zerop (op2)
11804 && integer_pow2p (arg1))
11805 {
11806 tree tem = TREE_OPERAND (arg0, 0);
11807 STRIP_NOPS (tem);
11808 if (TREE_CODE (tem) == RSHIFT_EXPR
11809 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11810 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11811 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11812 return fold_build2 (BIT_AND_EXPR, type,
11813 TREE_OPERAND (tem, 0), arg1);
11814 }
11815
11816 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11817 is probably obsolete because the first operand should be a
11818 truth value (that's why we have the two cases above), but let's
11819 leave it in until we can confirm this for all front-ends. */
11820 if (integer_zerop (op2)
11821 && TREE_CODE (arg0) == NE_EXPR
11822 && integer_zerop (TREE_OPERAND (arg0, 1))
11823 && integer_pow2p (arg1)
11824 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11825 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11826 arg1, OEP_ONLY_CONST))
11827 return pedantic_non_lvalue (fold_convert (type,
11828 TREE_OPERAND (arg0, 0)));
11829
11830 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11831 if (integer_zerop (op2)
11832 && truth_value_p (TREE_CODE (arg0))
11833 && truth_value_p (TREE_CODE (arg1)))
11834 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11835 fold_convert (type, arg0),
11836 arg1);
11837
11838 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11839 if (integer_onep (op2)
11840 && truth_value_p (TREE_CODE (arg0))
11841 && truth_value_p (TREE_CODE (arg1)))
11842 {
11843 /* Only perform transformation if ARG0 is easily inverted. */
11844 tem = fold_truth_not_expr (arg0);
11845 if (tem)
11846 return fold_build2 (TRUTH_ORIF_EXPR, type,
11847 fold_convert (type, tem),
11848 arg1);
11849 }
11850
11851 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11852 if (integer_zerop (arg1)
11853 && truth_value_p (TREE_CODE (arg0))
11854 && truth_value_p (TREE_CODE (op2)))
11855 {
11856 /* Only perform transformation if ARG0 is easily inverted. */
11857 tem = fold_truth_not_expr (arg0);
11858 if (tem)
11859 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11860 fold_convert (type, tem),
11861 op2);
11862 }
11863
11864 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11865 if (integer_onep (arg1)
11866 && truth_value_p (TREE_CODE (arg0))
11867 && truth_value_p (TREE_CODE (op2)))
11868 return fold_build2 (TRUTH_ORIF_EXPR, type,
11869 fold_convert (type, arg0),
11870 op2);
11871
11872 return NULL_TREE;
11873
11874 case CALL_EXPR:
11875 /* Check for a built-in function. */
11876 if (TREE_CODE (op0) == ADDR_EXPR
11877 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11878 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11879 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11880 return NULL_TREE;
11881
11882 case BIT_FIELD_REF:
11883 if (TREE_CODE (arg0) == VECTOR_CST
11884 && type == TREE_TYPE (TREE_TYPE (arg0))
11885 && host_integerp (arg1, 1)
11886 && host_integerp (op2, 1))
11887 {
11888 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11889 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11890
11891 if (width != 0
11892 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11893 && (idx % width) == 0
11894 && (idx = idx / width)
11895 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11896 {
11897 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11898 while (idx-- > 0 && elements)
11899 elements = TREE_CHAIN (elements);
11900 if (elements)
11901 return TREE_VALUE (elements);
11902 else
11903 return fold_convert (type, integer_zero_node);
11904 }
11905 }
11906 return NULL_TREE;
11907
11908 default:
11909 return NULL_TREE;
11910 } /* switch (code) */
11911 }
11912
11913 /* Perform constant folding and related simplification of EXPR.
11914 The related simplifications include x*1 => x, x*0 => 0, etc.,
11915 and application of the associative law.
11916 NOP_EXPR conversions may be removed freely (as long as we
11917 are careful not to change the type of the overall expression).
11918 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11919 but we can constant-fold them if they have constant operands. */
11920
11921 #ifdef ENABLE_FOLD_CHECKING
11922 # define fold(x) fold_1 (x)
11923 static tree fold_1 (tree);
11924 static
11925 #endif
11926 tree
11927 fold (tree expr)
11928 {
11929 const tree t = expr;
11930 enum tree_code code = TREE_CODE (t);
11931 enum tree_code_class kind = TREE_CODE_CLASS (code);
11932 tree tem;
11933
11934 /* Return right away if a constant. */
11935 if (kind == tcc_constant)
11936 return t;
11937
11938 if (IS_EXPR_CODE_CLASS (kind)
11939 || IS_GIMPLE_STMT_CODE_CLASS (kind))
11940 {
11941 tree type = TREE_TYPE (t);
11942 tree op0, op1, op2;
11943
11944 switch (TREE_CODE_LENGTH (code))
11945 {
11946 case 1:
11947 op0 = TREE_OPERAND (t, 0);
11948 tem = fold_unary (code, type, op0);
11949 return tem ? tem : expr;
11950 case 2:
11951 op0 = TREE_OPERAND (t, 0);
11952 op1 = TREE_OPERAND (t, 1);
11953 tem = fold_binary (code, type, op0, op1);
11954 return tem ? tem : expr;
11955 case 3:
11956 op0 = TREE_OPERAND (t, 0);
11957 op1 = TREE_OPERAND (t, 1);
11958 op2 = TREE_OPERAND (t, 2);
11959 tem = fold_ternary (code, type, op0, op1, op2);
11960 return tem ? tem : expr;
11961 default:
11962 break;
11963 }
11964 }
11965
11966 switch (code)
11967 {
11968 case CONST_DECL:
11969 return fold (DECL_INITIAL (t));
11970
11971 default:
11972 return t;
11973 } /* switch (code) */
11974 }
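
#if 0
/* A minimal sketch (hypothetical, not part of GCC proper) of the
   dispatch above: a constant returns immediately through the
   tcc_constant check, while a two-operand expression goes through
   fold_binary.  */
static tree
fold_example_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Folds to an INTEGER_CST of value 5 via the TREE_CODE_LENGTH == 2
     case above.  */
  return fold (build2 (PLUS_EXPR, integer_type_node, two, three));
}
#endif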
11975
11976 #ifdef ENABLE_FOLD_CHECKING
11977 #undef fold
11978
11979 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11980 static void fold_check_failed (tree, tree);
11981 void print_fold_checksum (tree);
11982
11983 /* When --enable-checking=fold, compute a digest of expr before
11984 and after actual fold call to see if fold did not accidentally
11985 change original expr. */
11986
11987 tree
11988 fold (tree expr)
11989 {
11990 tree ret;
11991 struct md5_ctx ctx;
11992 unsigned char checksum_before[16], checksum_after[16];
11993 htab_t ht;
11994
11995 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11996 md5_init_ctx (&ctx);
11997 fold_checksum_tree (expr, &ctx, ht);
11998 md5_finish_ctx (&ctx, checksum_before);
11999 htab_empty (ht);
12000
12001 ret = fold_1 (expr);
12002
12003 md5_init_ctx (&ctx);
12004 fold_checksum_tree (expr, &ctx, ht);
12005 md5_finish_ctx (&ctx, checksum_after);
12006 htab_delete (ht);
12007
12008 if (memcmp (checksum_before, checksum_after, 16))
12009 fold_check_failed (expr, ret);
12010
12011 return ret;
12012 }
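
/* Illustrative sketch only: a transformation that wrongly mutated its
   argument in place, for example doing

     TREE_SET_CODE (expr, PLUS_EXPR);
     return expr;

   would change the MD5 digest computed over EXPR above and trip
   fold_check_failed, whereas building a fresh node with fold_build2
   leaves the digest of the original tree intact.  */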
12013
12014 void
12015 print_fold_checksum (tree expr)
12016 {
12017 struct md5_ctx ctx;
12018 unsigned char checksum[16], cnt;
12019 htab_t ht;
12020
12021 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12022 md5_init_ctx (&ctx);
12023 fold_checksum_tree (expr, &ctx, ht);
12024 md5_finish_ctx (&ctx, checksum);
12025 htab_delete (ht);
12026 for (cnt = 0; cnt < 16; ++cnt)
12027 fprintf (stderr, "%02x", checksum[cnt]);
12028 putc ('\n', stderr);
12029 }
12030
12031 static void
12032 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12033 {
12034 internal_error ("fold check: original tree changed by fold");
12035 }
12036
12037 static void
12038 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12039 {
12040 void **slot;
12041 enum tree_code code;
12042 struct tree_function_decl buf;
12043 int i, len;
12044
12045 recursive_label:
12046
12047 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12048 <= sizeof (struct tree_function_decl))
12049 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12050 if (expr == NULL)
12051 return;
12052 slot = htab_find_slot (ht, expr, INSERT);
12053 if (*slot != NULL)
12054 return;
12055 *slot = expr;
12056 code = TREE_CODE (expr);
12057 if (TREE_CODE_CLASS (code) == tcc_declaration
12058 && DECL_ASSEMBLER_NAME_SET_P (expr))
12059 {
12060 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12061 memcpy ((char *) &buf, expr, tree_size (expr));
12062 expr = (tree) &buf;
12063 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12064 }
12065 else if (TREE_CODE_CLASS (code) == tcc_type
12066 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12067 || TYPE_CACHED_VALUES_P (expr)
12068 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12069 {
12070 /* Allow these fields to be modified. */
12071 memcpy ((char *) &buf, expr, tree_size (expr));
12072 expr = (tree) &buf;
12073 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12074 TYPE_POINTER_TO (expr) = NULL;
12075 TYPE_REFERENCE_TO (expr) = NULL;
12076 if (TYPE_CACHED_VALUES_P (expr))
12077 {
12078 TYPE_CACHED_VALUES_P (expr) = 0;
12079 TYPE_CACHED_VALUES (expr) = NULL;
12080 }
12081 }
12082 md5_process_bytes (expr, tree_size (expr), ctx);
12083 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12084 if (TREE_CODE_CLASS (code) != tcc_type
12085 && TREE_CODE_CLASS (code) != tcc_declaration
12086 && code != TREE_LIST)
12087 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12088 switch (TREE_CODE_CLASS (code))
12089 {
12090 case tcc_constant:
12091 switch (code)
12092 {
12093 case STRING_CST:
12094 md5_process_bytes (TREE_STRING_POINTER (expr),
12095 TREE_STRING_LENGTH (expr), ctx);
12096 break;
12097 case COMPLEX_CST:
12098 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12099 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12100 break;
12101 case VECTOR_CST:
12102 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12103 break;
12104 default:
12105 break;
12106 }
12107 break;
12108 case tcc_exceptional:
12109 switch (code)
12110 {
12111 case TREE_LIST:
12112 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12113 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12114 expr = TREE_CHAIN (expr);
12115 goto recursive_label;
12116 break;
12117 case TREE_VEC:
12118 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12119 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12120 break;
12121 default:
12122 break;
12123 }
12124 break;
12125 case tcc_expression:
12126 case tcc_reference:
12127 case tcc_comparison:
12128 case tcc_unary:
12129 case tcc_binary:
12130 case tcc_statement:
12131 len = TREE_CODE_LENGTH (code);
12132 for (i = 0; i < len; ++i)
12133 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12134 break;
12135 case tcc_declaration:
12136 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12137 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12138 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12139 {
12140 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12141 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12142 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12143 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12144 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12145 }
12146 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12147 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12148
12149 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12150 {
12151 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12152 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12153 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12154 }
12155 break;
12156 case tcc_type:
12157 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12158 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12159 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12160 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12161 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12162 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12163 if (INTEGRAL_TYPE_P (expr)
12164 || SCALAR_FLOAT_TYPE_P (expr))
12165 {
12166 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12167 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12168 }
12169 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12170 if (TREE_CODE (expr) == RECORD_TYPE
12171 || TREE_CODE (expr) == UNION_TYPE
12172 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12173 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12174 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12175 break;
12176 default:
12177 break;
12178 }
12179 }
12180
12181 #endif
12182
12183 /* Fold a unary tree expression with code CODE of type TYPE with an
12184 operand OP0. Return a folded expression if successful. Otherwise,
12185 return a tree expression with code CODE of type TYPE with an
12186 operand OP0. */
12187
12188 tree
12189 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12190 {
12191 tree tem;
12192 #ifdef ENABLE_FOLD_CHECKING
12193 unsigned char checksum_before[16], checksum_after[16];
12194 struct md5_ctx ctx;
12195 htab_t ht;
12196
12197 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12198 md5_init_ctx (&ctx);
12199 fold_checksum_tree (op0, &ctx, ht);
12200 md5_finish_ctx (&ctx, checksum_before);
12201 htab_empty (ht);
12202 #endif
12203
12204 tem = fold_unary (code, type, op0);
12205 if (!tem)
12206 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12207
12208 #ifdef ENABLE_FOLD_CHECKING
12209 md5_init_ctx (&ctx);
12210 fold_checksum_tree (op0, &ctx, ht);
12211 md5_finish_ctx (&ctx, checksum_after);
12212 htab_delete (ht);
12213
12214 if (memcmp (checksum_before, checksum_after, 16))
12215 fold_check_failed (op0, tem);
12216 #endif
12217 return tem;
12218 }
12219
12220 /* Fold a binary tree expression with code CODE of type TYPE with
12221 operands OP0 and OP1. Return a folded expression if successful.
12222 Otherwise, return a tree expression with code CODE of type TYPE
12223 with operands OP0 and OP1. */
12224
12225 tree
12226 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12227 MEM_STAT_DECL)
12228 {
12229 tree tem;
12230 #ifdef ENABLE_FOLD_CHECKING
12231 unsigned char checksum_before_op0[16],
12232 checksum_before_op1[16],
12233 checksum_after_op0[16],
12234 checksum_after_op1[16];
12235 struct md5_ctx ctx;
12236 htab_t ht;
12237
12238 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12239 md5_init_ctx (&ctx);
12240 fold_checksum_tree (op0, &ctx, ht);
12241 md5_finish_ctx (&ctx, checksum_before_op0);
12242 htab_empty (ht);
12243
12244 md5_init_ctx (&ctx);
12245 fold_checksum_tree (op1, &ctx, ht);
12246 md5_finish_ctx (&ctx, checksum_before_op1);
12247 htab_empty (ht);
12248 #endif
12249
12250 tem = fold_binary (code, type, op0, op1);
12251 if (!tem)
12252 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12253
12254 #ifdef ENABLE_FOLD_CHECKING
12255 md5_init_ctx (&ctx);
12256 fold_checksum_tree (op0, &ctx, ht);
12257 md5_finish_ctx (&ctx, checksum_after_op0);
12258 htab_empty (ht);
12259
12260 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12261 fold_check_failed (op0, tem);
12262
12263 md5_init_ctx (&ctx);
12264 fold_checksum_tree (op1, &ctx, ht);
12265 md5_finish_ctx (&ctx, checksum_after_op1);
12266 htab_delete (ht);
12267
12268 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12269 fold_check_failed (op1, tem);
12270 #endif
12271 return tem;
12272 }
12273
12274 /* Fold a ternary tree expression with code CODE of type TYPE with
12275 operands OP0, OP1, and OP2. Return a folded expression if
12276 successful. Otherwise, return a tree expression with code CODE of
12277 type TYPE with operands OP0, OP1, and OP2. */
12278
12279 tree
12280 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12281 MEM_STAT_DECL)
12282 {
12283 tree tem;
12284 #ifdef ENABLE_FOLD_CHECKING
12285 unsigned char checksum_before_op0[16],
12286 checksum_before_op1[16],
12287 checksum_before_op2[16],
12288 checksum_after_op0[16],
12289 checksum_after_op1[16],
12290 checksum_after_op2[16];
12291 struct md5_ctx ctx;
12292 htab_t ht;
12293
12294 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12295 md5_init_ctx (&ctx);
12296 fold_checksum_tree (op0, &ctx, ht);
12297 md5_finish_ctx (&ctx, checksum_before_op0);
12298 htab_empty (ht);
12299
12300 md5_init_ctx (&ctx);
12301 fold_checksum_tree (op1, &ctx, ht);
12302 md5_finish_ctx (&ctx, checksum_before_op1);
12303 htab_empty (ht);
12304
12305 md5_init_ctx (&ctx);
12306 fold_checksum_tree (op2, &ctx, ht);
12307 md5_finish_ctx (&ctx, checksum_before_op2);
12308 htab_empty (ht);
12309 #endif
12310
12311 tem = fold_ternary (code, type, op0, op1, op2);
12312 if (!tem)
12313 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12314
12315 #ifdef ENABLE_FOLD_CHECKING
12316 md5_init_ctx (&ctx);
12317 fold_checksum_tree (op0, &ctx, ht);
12318 md5_finish_ctx (&ctx, checksum_after_op0);
12319 htab_empty (ht);
12320
12321 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12322 fold_check_failed (op0, tem);
12323
12324 md5_init_ctx (&ctx);
12325 fold_checksum_tree (op1, &ctx, ht);
12326 md5_finish_ctx (&ctx, checksum_after_op1);
12327 htab_empty (ht);
12328
12329 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12330 fold_check_failed (op1, tem);
12331
12332 md5_init_ctx (&ctx);
12333 fold_checksum_tree (op2, &ctx, ht);
12334 md5_finish_ctx (&ctx, checksum_after_op2);
12335 htab_delete (ht);
12336
12337 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12338 fold_check_failed (op2, tem);
12339 #endif
12340 return tem;
12341 }
12342
12343 /* Perform constant folding and related simplification of initializer
12344 expression EXPR. These behave identically to "fold_buildN" but ignore
12345 potential run-time traps and exceptions that fold must preserve. */
12346
12347 #define START_FOLD_INIT \
12348 int saved_signaling_nans = flag_signaling_nans;\
12349 int saved_trapping_math = flag_trapping_math;\
12350 int saved_rounding_math = flag_rounding_math;\
12351 int saved_trapv = flag_trapv;\
12352 int saved_folding_initializer = folding_initializer;\
12353 flag_signaling_nans = 0;\
12354 flag_trapping_math = 0;\
12355 flag_rounding_math = 0;\
12356 flag_trapv = 0;\
12357 folding_initializer = 1;
12358
12359 #define END_FOLD_INIT \
12360 flag_signaling_nans = saved_signaling_nans;\
12361 flag_trapping_math = saved_trapping_math;\
12362 flag_rounding_math = saved_rounding_math;\
12363 flag_trapv = saved_trapv;\
12364 folding_initializer = saved_folding_initializer;
12365
12366 tree
12367 fold_build1_initializer (enum tree_code code, tree type, tree op)
12368 {
12369 tree result;
12370 START_FOLD_INIT;
12371
12372 result = fold_build1 (code, type, op);
12373
12374 END_FOLD_INIT;
12375 return result;
12376 }
12377
12378 tree
12379 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12380 {
12381 tree result;
12382 START_FOLD_INIT;
12383
12384 result = fold_build2 (code, type, op0, op1);
12385
12386 END_FOLD_INIT;
12387 return result;
12388 }
12389
12390 tree
12391 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12392 tree op2)
12393 {
12394 tree result;
12395 START_FOLD_INIT;
12396
12397 result = fold_build3 (code, type, op0, op1, op2);
12398
12399 END_FOLD_INIT;
12400 return result;
12401 }
12402
12403 #undef START_FOLD_INIT
12404 #undef END_FOLD_INIT
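
#if 0
/* Hypothetical sketch of initializer folding: under -frounding-math,
   plain fold_build2 must leave 1.0 / 3.0 for run time, but the
   initializer variant clears flag_rounding_math around the fold and
   can yield a REAL_CST.  ARG0 and ARG1 stand for the two REAL_CST
   operands.  */
static tree
fold_initializer_example (tree arg0, tree arg1)
{
  return fold_build2_initializer (RDIV_EXPR, double_type_node,
                                  arg0, arg1);
}
#endif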
12405
12406 /* Determine if the first argument is a multiple of the second argument.
12407    Return 0 if it is not, or if we cannot easily determine it to be.
12408
12409 An example of the sort of thing we care about (at this point; this routine
12410 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12411 fold cases do now) is discovering that
12412
12413 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12414
12415 is a multiple of
12416
12417 SAVE_EXPR (J * 8)
12418
12419 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12420
12421 This code also handles discovering that
12422
12423 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12424
12425 is a multiple of 8 so we don't have to worry about dealing with a
12426 possible remainder.
12427
12428 Note that we *look* inside a SAVE_EXPR only to determine how it was
12429 calculated; it is not safe for fold to do much of anything else with the
12430 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12431 at run time. For example, the latter example above *cannot* be implemented
12432 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12433 evaluation time of the original SAVE_EXPR is not necessarily the same at
12434 the time the new expression is evaluated. The only optimization of this
12435 sort that would be valid is changing
12436
12437 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12438
12439 divided by 8 to
12440
12441 SAVE_EXPR (I) * SAVE_EXPR (J)
12442
12443 (where the same SAVE_EXPR (J) is used in the original and the
12444 transformed version). */
12445
12446 static int
12447 multiple_of_p (tree type, tree top, tree bottom)
12448 {
12449 if (operand_equal_p (top, bottom, 0))
12450 return 1;
12451
12452 if (TREE_CODE (type) != INTEGER_TYPE)
12453 return 0;
12454
12455 switch (TREE_CODE (top))
12456 {
12457 case BIT_AND_EXPR:
12458 /* Bitwise and provides a power of two multiple. If the mask is
12459 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12460 if (!integer_pow2p (bottom))
12461 return 0;
12462 /* FALLTHRU */
12463
12464 case MULT_EXPR:
12465 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12466 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12467
12468 case PLUS_EXPR:
12469 case MINUS_EXPR:
12470 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12471 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12472
12473 case LSHIFT_EXPR:
12474 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12475 {
12476 tree op1, t1;
12477
12478 op1 = TREE_OPERAND (top, 1);
12479 /* const_binop may not detect overflow correctly,
12480 so check for it explicitly here. */
12481 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12482 > TREE_INT_CST_LOW (op1)
12483 && TREE_INT_CST_HIGH (op1) == 0
12484 && 0 != (t1 = fold_convert (type,
12485 const_binop (LSHIFT_EXPR,
12486 size_one_node,
12487 op1, 0)))
12488 && !TREE_OVERFLOW (t1))
12489 return multiple_of_p (type, t1, bottom);
12490 }
12491 return 0;
12492
12493 case NOP_EXPR:
12494 /* Can't handle conversions from non-integral or wider integral type. */
12495 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12496 || (TYPE_PRECISION (type)
12497 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12498 return 0;
12499
12500 /* .. fall through ... */
12501
12502 case SAVE_EXPR:
12503 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12504
12505 case INTEGER_CST:
12506 if (TREE_CODE (bottom) != INTEGER_CST
12507 || (TYPE_UNSIGNED (type)
12508 && (tree_int_cst_sgn (top) < 0
12509 || tree_int_cst_sgn (bottom) < 0)))
12510 return 0;
12511 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12512 top, bottom, 0));
12513
12514 default:
12515 return 0;
12516 }
12517 }
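
/* Worked example: with TOP of the form I * 8 (a MULT_EXPR) and BOTTOM
   the constant 4, the MULT_EXPR case succeeds because the constant
   operand 8 is itself a multiple of 4; with TOP of the form I + 4 and
   the same BOTTOM, the PLUS_EXPR case fails, since nothing is known
   about I.  Callers such as round_up below rely on this to skip
   redundant alignment arithmetic.  */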
12518
12519 /* Return true if `t' is known to be non-negative. */
12520
12521 bool
12522 tree_expr_nonnegative_p (tree t)
12523 {
12524 if (t == error_mark_node)
12525 return false;
12526
12527 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12528 return true;
12529
12530 switch (TREE_CODE (t))
12531 {
12532 case SSA_NAME:
12533 /* Query VRP to see if it has recorded any information about
12534 the range of this object. */
12535 return ssa_name_nonnegative_p (t);
12536
12537 case ABS_EXPR:
12538 /* We can't return 1 if flag_wrapv is set because
12539 ABS_EXPR<INT_MIN> = INT_MIN. */
12540 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12541 return true;
12542 break;
12543
12544 case INTEGER_CST:
12545 return tree_int_cst_sgn (t) >= 0;
12546
12547 case REAL_CST:
12548 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12549
12550 case PLUS_EXPR:
12551 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12552 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12553 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12554
12555 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12556 both unsigned and at least 2 bits shorter than the result. */
12557 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12558 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12559 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12560 {
12561 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12562 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12563 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12564 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12565 {
12566 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12567 TYPE_PRECISION (inner2)) + 1;
12568 return prec < TYPE_PRECISION (TREE_TYPE (t));
12569 }
12570 }
12571 break;
12572
12573 case MULT_EXPR:
12574 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12575 {
12576 /* x * x for floating point x is always non-negative. */
12577 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12578 return true;
12579 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12580 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12581 }
12582
12583       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12584 	 both unsigned and their total width is less than the result's.  */
12585 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12586 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12587 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12588 {
12589 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12590 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12591 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12592 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12593 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12594 < TYPE_PRECISION (TREE_TYPE (t));
12595 }
12596 return false;
12597
12598 case BIT_AND_EXPR:
12599 case MAX_EXPR:
12600 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12601 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12602
12603 case BIT_IOR_EXPR:
12604 case BIT_XOR_EXPR:
12605 case MIN_EXPR:
12606 case RDIV_EXPR:
12607 case TRUNC_DIV_EXPR:
12608 case CEIL_DIV_EXPR:
12609 case FLOOR_DIV_EXPR:
12610 case ROUND_DIV_EXPR:
12611 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12612 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12613
12614 case TRUNC_MOD_EXPR:
12615 case CEIL_MOD_EXPR:
12616 case FLOOR_MOD_EXPR:
12617 case ROUND_MOD_EXPR:
12618 case SAVE_EXPR:
12619 case NON_LVALUE_EXPR:
12620 case FLOAT_EXPR:
12621 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12622
12623 case COMPOUND_EXPR:
12624 case MODIFY_EXPR:
12625 case GIMPLE_MODIFY_STMT:
12626 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12627
12628 case BIND_EXPR:
12629 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12630
12631 case COND_EXPR:
12632 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12633 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12634
12635 case NOP_EXPR:
12636 {
12637 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12638 tree outer_type = TREE_TYPE (t);
12639
12640 if (TREE_CODE (outer_type) == REAL_TYPE)
12641 {
12642 if (TREE_CODE (inner_type) == REAL_TYPE)
12643 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12644 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12645 {
12646 if (TYPE_UNSIGNED (inner_type))
12647 return true;
12648 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12649 }
12650 }
12651 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12652 {
12653 if (TREE_CODE (inner_type) == REAL_TYPE)
12654 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12655 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12656 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12657 && TYPE_UNSIGNED (inner_type);
12658 }
12659 }
12660 break;
12661
12662 case TARGET_EXPR:
12663 {
12664 tree temp = TARGET_EXPR_SLOT (t);
12665 t = TARGET_EXPR_INITIAL (t);
12666
12667 /* If the initializer is non-void, then it's a normal expression
12668 that will be assigned to the slot. */
12669 if (!VOID_TYPE_P (t))
12670 return tree_expr_nonnegative_p (t);
12671
12672 /* Otherwise, the initializer sets the slot in some way. One common
12673 way is an assignment statement at the end of the initializer. */
12674 while (1)
12675 {
12676 if (TREE_CODE (t) == BIND_EXPR)
12677 t = expr_last (BIND_EXPR_BODY (t));
12678 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12679 || TREE_CODE (t) == TRY_CATCH_EXPR)
12680 t = expr_last (TREE_OPERAND (t, 0));
12681 else if (TREE_CODE (t) == STATEMENT_LIST)
12682 t = expr_last (t);
12683 else
12684 break;
12685 }
12686 if ((TREE_CODE (t) == MODIFY_EXPR
12687 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12688 && GENERIC_TREE_OPERAND (t, 0) == temp)
12689 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12690
12691 return false;
12692 }
12693
12694 case CALL_EXPR:
12695 {
12696 tree fndecl = get_callee_fndecl (t);
12697 tree arglist = TREE_OPERAND (t, 1);
12698 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12699 switch (DECL_FUNCTION_CODE (fndecl))
12700 {
12701 CASE_FLT_FN (BUILT_IN_ACOS):
12702 CASE_FLT_FN (BUILT_IN_ACOSH):
12703 CASE_FLT_FN (BUILT_IN_CABS):
12704 CASE_FLT_FN (BUILT_IN_COSH):
12705 CASE_FLT_FN (BUILT_IN_ERFC):
12706 CASE_FLT_FN (BUILT_IN_EXP):
12707 CASE_FLT_FN (BUILT_IN_EXP10):
12708 CASE_FLT_FN (BUILT_IN_EXP2):
12709 CASE_FLT_FN (BUILT_IN_FABS):
12710 CASE_FLT_FN (BUILT_IN_FDIM):
12711 CASE_FLT_FN (BUILT_IN_HYPOT):
12712 CASE_FLT_FN (BUILT_IN_POW10):
12713 CASE_INT_FN (BUILT_IN_FFS):
12714 CASE_INT_FN (BUILT_IN_PARITY):
12715 CASE_INT_FN (BUILT_IN_POPCOUNT):
12716 case BUILT_IN_BSWAP32:
12717 case BUILT_IN_BSWAP64:
12718 /* Always true. */
12719 return true;
12720
12721 CASE_FLT_FN (BUILT_IN_SQRT):
12722 /* sqrt(-0.0) is -0.0. */
12723 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12724 return true;
12725 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12726
12727 CASE_FLT_FN (BUILT_IN_ASINH):
12728 CASE_FLT_FN (BUILT_IN_ATAN):
12729 CASE_FLT_FN (BUILT_IN_ATANH):
12730 CASE_FLT_FN (BUILT_IN_CBRT):
12731 CASE_FLT_FN (BUILT_IN_CEIL):
12732 CASE_FLT_FN (BUILT_IN_ERF):
12733 CASE_FLT_FN (BUILT_IN_EXPM1):
12734 CASE_FLT_FN (BUILT_IN_FLOOR):
12735 CASE_FLT_FN (BUILT_IN_FMOD):
12736 CASE_FLT_FN (BUILT_IN_FREXP):
12737 CASE_FLT_FN (BUILT_IN_LCEIL):
12738 CASE_FLT_FN (BUILT_IN_LDEXP):
12739 CASE_FLT_FN (BUILT_IN_LFLOOR):
12740 CASE_FLT_FN (BUILT_IN_LLCEIL):
12741 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12742 CASE_FLT_FN (BUILT_IN_LLRINT):
12743 CASE_FLT_FN (BUILT_IN_LLROUND):
12744 CASE_FLT_FN (BUILT_IN_LRINT):
12745 CASE_FLT_FN (BUILT_IN_LROUND):
12746 CASE_FLT_FN (BUILT_IN_MODF):
12747 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12748 CASE_FLT_FN (BUILT_IN_RINT):
12749 CASE_FLT_FN (BUILT_IN_ROUND):
12750 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12751 CASE_FLT_FN (BUILT_IN_SINH):
12752 CASE_FLT_FN (BUILT_IN_TANH):
12753 CASE_FLT_FN (BUILT_IN_TRUNC):
12754 /* True if the 1st argument is nonnegative. */
12755 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12756
12757 CASE_FLT_FN (BUILT_IN_FMAX):
12758 /* True if the 1st OR 2nd arguments are nonnegative. */
12759 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12760 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12761
12762 CASE_FLT_FN (BUILT_IN_FMIN):
12763 /* True if the 1st AND 2nd arguments are nonnegative. */
12764 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12765 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12766
12767 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12768 /* True if the 2nd argument is nonnegative. */
12769 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12770
12771 CASE_FLT_FN (BUILT_IN_POWI):
12772 /* True if the 1st argument is nonnegative or the second
12773 argument is an even integer. */
12774 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12775 {
12776 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12777 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12778 return true;
12779 }
12780 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12781
12782 CASE_FLT_FN (BUILT_IN_POW):
12783 /* True if the 1st argument is nonnegative or the second
12784 argument is an even integer valued real. */
12785 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12786 {
12787 REAL_VALUE_TYPE c;
12788 HOST_WIDE_INT n;
12789
12790 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12791 n = real_to_integer (&c);
12792 if ((n & 1) == 0)
12793 {
12794 REAL_VALUE_TYPE cint;
12795 real_from_integer (&cint, VOIDmode, n,
12796 n < 0 ? -1 : 0, 0);
12797 if (real_identical (&c, &cint))
12798 return true;
12799 }
12800 }
12801 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12802
12803 default:
12804 break;
12805 }
12806 }
12807
12808 /* ... fall through ... */
12809
12810 default:
12811 if (truth_value_p (TREE_CODE (t)))
12812 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12813 return true;
12814 }
12815
12816   /* We don't know the sign of `t', so be conservative and return false.  */
12817 return false;
12818 }
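
/* A concrete instance of the PLUS_EXPR zero_extend rule above: for
   8-bit unsigned X and Y, (int) X + (int) Y needs at most
   MAX (8, 8) + 1 = 9 bits, which is less than the 32-bit result
   precision, so the sum is known non-negative; with 31-bit unsigned
   operands the precision test fails and we stay conservative.  */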
12819
12820 /* Return true when T is an address and is known to be nonzero.
12821 For floating point we further ensure that T is not denormal.
12822    Similar logic is present in nonzero_address_p in rtlanal.c.  */
12823
12824 bool
12825 tree_expr_nonzero_p (tree t)
12826 {
12827 tree type = TREE_TYPE (t);
12828
12829 /* Doing something useful for floating point would need more work. */
12830 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12831 return false;
12832
12833 switch (TREE_CODE (t))
12834 {
12835 case SSA_NAME:
12836 /* Query VRP to see if it has recorded any information about
12837 the range of this object. */
12838 return ssa_name_nonzero_p (t);
12839
12840 case ABS_EXPR:
12841 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12842
12843 case INTEGER_CST:
12844 return !integer_zerop (t);
12845
12846 case PLUS_EXPR:
12847 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12848 {
12849 	  /* In the presence of negative values it is hard
12850 	     to say anything definite.  */
12851 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12852 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12853 return false;
12854 	  /* One of the operands must be positive and the other non-negative.  */
12855 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12856 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12857 }
12858 break;
12859
12860 case MULT_EXPR:
12861 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12862 {
12863 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12864 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12865 }
12866 break;
12867
12868 case NOP_EXPR:
12869 {
12870 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12871 tree outer_type = TREE_TYPE (t);
12872
12873 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12874 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12875 }
12876 break;
12877
12878 case ADDR_EXPR:
12879 {
12880 tree base = get_base_address (TREE_OPERAND (t, 0));
12881
12882 if (!base)
12883 return false;
12884
12885 /* Weak declarations may link to NULL. */
12886 if (VAR_OR_FUNCTION_DECL_P (base))
12887 return !DECL_WEAK (base);
12888
12889 /* Constants are never weak. */
12890 if (CONSTANT_CLASS_P (base))
12891 return true;
12892
12893 return false;
12894 }
12895
12896 case COND_EXPR:
12897 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12898 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12899
12900 case MIN_EXPR:
12901 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12902 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12903
12904 case MAX_EXPR:
12905 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12906 {
12907 /* When both operands are nonzero, then MAX must be too. */
12908 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12909 return true;
12910
12911 /* MAX where operand 0 is positive is positive. */
12912 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12913 }
12914 /* MAX where operand 1 is positive is positive. */
12915 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12916 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12917 return true;
12918 break;
12919
12920 case COMPOUND_EXPR:
12921 case MODIFY_EXPR:
12922 case GIMPLE_MODIFY_STMT:
12923 case BIND_EXPR:
12924 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
12925
12926 case SAVE_EXPR:
12927 case NON_LVALUE_EXPR:
12928 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12929
12930 case BIT_IOR_EXPR:
12931 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12932 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12933
12934 case CALL_EXPR:
12935 return alloca_call_p (t);
12936
12937 default:
12938 break;
12939 }
12940 return false;
12941 }
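
/* For example, the ADDR_EXPR case above treats &some_local_var (a
   VAR_DECL that is not weak) and the address of a string literal (a
   CONSTANT_CLASS_P base) as nonzero, while the address of a function
   declared __attribute__((weak)) may resolve to NULL at link time and
   so is not assumed nonzero.  */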
12942
12943 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12944 attempt to fold the expression to a constant without modifying TYPE,
12945 OP0 or OP1.
12946
12947 If the expression could be simplified to a constant, then return
12948 the constant. If the expression would not be simplified to a
12949 constant, then return NULL_TREE. */
12950
12951 tree
12952 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12953 {
12954 tree tem = fold_binary (code, type, op0, op1);
12955 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12956 }
12957
12958 /* Given the components of a unary expression CODE, TYPE and OP0,
12959 attempt to fold the expression to a constant without modifying
12960 TYPE or OP0.
12961
12962 If the expression could be simplified to a constant, then return
12963 the constant. If the expression would not be simplified to a
12964 constant, then return NULL_TREE. */
12965
12966 tree
12967 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12968 {
12969 tree tem = fold_unary (code, type, op0);
12970 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12971 }
12972
12973 /* If EXP represents referencing an element in a constant string
12974 (either via pointer arithmetic or array indexing), return the
12975 tree representing the value accessed, otherwise return NULL. */
12976
12977 tree
12978 fold_read_from_constant_string (tree exp)
12979 {
12980 if ((TREE_CODE (exp) == INDIRECT_REF
12981 || TREE_CODE (exp) == ARRAY_REF)
12982 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12983 {
12984 tree exp1 = TREE_OPERAND (exp, 0);
12985 tree index;
12986 tree string;
12987
12988 if (TREE_CODE (exp) == INDIRECT_REF)
12989 string = string_constant (exp1, &index);
12990 else
12991 {
12992 tree low_bound = array_ref_low_bound (exp);
12993 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12994
12995 /* Optimize the special-case of a zero lower bound.
12996
12997 We convert the low_bound to sizetype to avoid some problems
12998 with constant folding. (E.g. suppose the lower bound is 1,
12999 	 and its mode is QI. Without the conversion, (ARRAY
13000 	 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13001 	 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)  */
13002 if (! integer_zerop (low_bound))
13003 index = size_diffop (index, fold_convert (sizetype, low_bound));
13004
13005 string = exp1;
13006 }
13007
13008 if (string
13009 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13010 && TREE_CODE (string) == STRING_CST
13011 && TREE_CODE (index) == INTEGER_CST
13012 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13013 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13014 == MODE_INT)
13015 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13016 return fold_convert (TREE_TYPE (exp),
13017 build_int_cst (NULL_TREE,
13018 (TREE_STRING_POINTER (string)
13019 [TREE_INT_CST_LOW (index)])));
13020 }
13021 return NULL;
13022 }
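
/* For instance, "abc"[1] folds to the character constant 'b' and
   *("abc" + 2) to 'c', provided the index is an INTEGER_CST smaller
   than TREE_STRING_LENGTH and the element type has a one-byte integer
   mode; a variable index or a wide-character string leaves the
   reference alone.  */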
13023
13024 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13025 an integer constant or real constant.
13026
13027 TYPE is the type of the result. */
13028
13029 static tree
13030 fold_negate_const (tree arg0, tree type)
13031 {
13032 tree t = NULL_TREE;
13033
13034 switch (TREE_CODE (arg0))
13035 {
13036 case INTEGER_CST:
13037 {
13038 unsigned HOST_WIDE_INT low;
13039 HOST_WIDE_INT high;
13040 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13041 TREE_INT_CST_HIGH (arg0),
13042 &low, &high);
13043 t = force_fit_type_double (type, low, high, 1,
13044 (overflow | TREE_OVERFLOW (arg0))
13045 && !TYPE_UNSIGNED (type));
13046 break;
13047 }
13048
13049 case REAL_CST:
13050 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13051 break;
13052
13053 default:
13054 gcc_unreachable ();
13055 }
13056
13057 return t;
13058 }
13059
13060 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13061 an integer constant or real constant.
13062
13063 TYPE is the type of the result. */
13064
13065 tree
13066 fold_abs_const (tree arg0, tree type)
13067 {
13068 tree t = NULL_TREE;
13069
13070 switch (TREE_CODE (arg0))
13071 {
13072 case INTEGER_CST:
13073 /* If the value is unsigned, then the absolute value is
13074 the same as the ordinary value. */
13075 if (TYPE_UNSIGNED (type))
13076 t = arg0;
13077 /* Similarly, if the value is non-negative. */
13078 else if (INT_CST_LT (integer_minus_one_node, arg0))
13079 t = arg0;
13080 /* If the value is negative, then the absolute value is
13081 its negation. */
13082 else
13083 {
13084 unsigned HOST_WIDE_INT low;
13085 HOST_WIDE_INT high;
13086 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13087 TREE_INT_CST_HIGH (arg0),
13088 &low, &high);
13089 t = force_fit_type_double (type, low, high, -1,
13090 overflow | TREE_OVERFLOW (arg0));
13091 }
13092 break;
13093
13094 case REAL_CST:
13095 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13096 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13097 else
13098 t = arg0;
13099 break;
13100
13101 default:
13102 gcc_unreachable ();
13103 }
13104
13105 return t;
13106 }
13107
13108 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13109 constant. TYPE is the type of the result. */
13110
13111 static tree
13112 fold_not_const (tree arg0, tree type)
13113 {
13114 tree t = NULL_TREE;
13115
13116 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13117
13118 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13119 ~TREE_INT_CST_HIGH (arg0), 0,
13120 TREE_OVERFLOW (arg0));
13121
13122 return t;
13123 }
13124
13125 /* Given CODE, a relational operator, the target type, TYPE and two
13126 constant operands OP0 and OP1, return the result of the
13127 relational operation. If the result is not a compile time
13128 constant, then return NULL_TREE. */
13129
13130 static tree
13131 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13132 {
13133 int result, invert;
13134
13135 /* From here on, the only cases we handle are when the result is
13136 known to be a constant. */
13137
13138 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13139 {
13140 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13141 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13142
13143 /* Handle the cases where either operand is a NaN. */
13144 if (real_isnan (c0) || real_isnan (c1))
13145 {
13146 switch (code)
13147 {
13148 case EQ_EXPR:
13149 case ORDERED_EXPR:
13150 result = 0;
13151 break;
13152
13153 case NE_EXPR:
13154 case UNORDERED_EXPR:
13155 case UNLT_EXPR:
13156 case UNLE_EXPR:
13157 case UNGT_EXPR:
13158 case UNGE_EXPR:
13159 case UNEQ_EXPR:
13160 result = 1;
13161 break;
13162
13163 case LT_EXPR:
13164 case LE_EXPR:
13165 case GT_EXPR:
13166 case GE_EXPR:
13167 case LTGT_EXPR:
13168 if (flag_trapping_math)
13169 return NULL_TREE;
13170 result = 0;
13171 break;
13172
13173 default:
13174 gcc_unreachable ();
13175 }
13176
13177 return constant_boolean_node (result, type);
13178 }
13179
13180 return constant_boolean_node (real_compare (code, c0, c1), type);
13181 }
13182
13183 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13184
13185 To compute GT, swap the arguments and do LT.
13186 To compute GE, do LT and invert the result.
13187 To compute LE, swap the arguments, do LT and invert the result.
13188 To compute NE, do EQ and invert the result.
13189
13190 Therefore, the code below must handle only EQ and LT. */
13191
13192 if (code == LE_EXPR || code == GT_EXPR)
13193 {
13194 tree tem = op0;
13195 op0 = op1;
13196 op1 = tem;
13197 code = swap_tree_comparison (code);
13198 }
13199
13200   /* Note that it is safe to invert for real values here because we
13201      have already handled the one case where it matters.  */
13202
13203 invert = 0;
13204 if (code == NE_EXPR || code == GE_EXPR)
13205 {
13206 invert = 1;
13207 code = invert_tree_comparison (code, false);
13208 }
13209
13210   /* Compute a result for LT or EQ if args permit;
13211      otherwise return NULL_TREE.  */
13212 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13213 {
13214 if (code == EQ_EXPR)
13215 result = tree_int_cst_equal (op0, op1);
13216 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13217 result = INT_CST_LT_UNSIGNED (op0, op1);
13218 else
13219 result = INT_CST_LT (op0, op1);
13220 }
13221 else
13222 return NULL_TREE;
13223
13224 if (invert)
13225 result ^= 1;
13226 return constant_boolean_node (result, type);
13227 }
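
/* Worked example of the scheme above: folding 3 >= 5 rewrites GE_EXPR
   as LT_EXPR with INVERT set, INT_CST_LT (3, 5) yields 1, and the
   inversion produces the boolean constant for false.  A comparison
   such as NaN < 1.0 is handled by the NaN cases earlier, which under
   flag_trapping_math refuse to fold because the comparison may raise
   an exception.  */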
13228
13229 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
13230    Don't build a CLEANUP_POINT_EXPR for EXPR which doesn't have side
13231    effects.  */
13232
13233 tree
13234 fold_build_cleanup_point_expr (tree type, tree expr)
13235 {
13236 /* If the expression does not have side effects then we don't have to wrap
13237 it with a cleanup point expression. */
13238 if (!TREE_SIDE_EFFECTS (expr))
13239 return expr;
13240
13241   /* If the expression is a return, check the expression inside the
13242      return and the right hand side of the modify expression inside the
13243      return for side effects.  If either lacks side effects, we don't
13244      need to wrap the expression in a cleanup point expression.  Note we
13245      don't check the left hand side of the modify; it's always a return decl.  */
13246 if (TREE_CODE (expr) == RETURN_EXPR)
13247 {
13248 tree op = TREE_OPERAND (expr, 0);
13249 if (!op || !TREE_SIDE_EFFECTS (op))
13250 return expr;
13251 op = TREE_OPERAND (op, 1);
13252 if (!TREE_SIDE_EFFECTS (op))
13253 return expr;
13254 }
13255
13256 return build1 (CLEANUP_POINT_EXPR, type, expr);
13257 }
13258
13259 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13260 avoid confusing the gimplify process. */
13261
13262 tree
13263 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13264 {
13265 /* The size of the object is not relevant when talking about its address. */
13266 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13267 t = TREE_OPERAND (t, 0);
13268
13269   /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
13270 if (TREE_CODE (t) == INDIRECT_REF
13271 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13272 {
13273 t = TREE_OPERAND (t, 0);
13274 if (TREE_TYPE (t) != ptrtype)
13275 t = build1 (NOP_EXPR, ptrtype, t);
13276 }
13277 else
13278 {
13279 tree base = t;
13280
13281 while (handled_component_p (base))
13282 base = TREE_OPERAND (base, 0);
13283 if (DECL_P (base))
13284 TREE_ADDRESSABLE (base) = 1;
13285
13286 t = build1 (ADDR_EXPR, ptrtype, t);
13287 }
13288
13289 return t;
13290 }
13291
13292 tree
13293 build_fold_addr_expr (tree t)
13294 {
13295 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13296 }
13297
13298 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13299 of an indirection through OP0, or NULL_TREE if no simplification is
13300 possible. */
13301
13302 tree
13303 fold_indirect_ref_1 (tree type, tree op0)
13304 {
13305 tree sub = op0;
13306 tree subtype;
13307
13308 STRIP_NOPS (sub);
13309 subtype = TREE_TYPE (sub);
13310 if (!POINTER_TYPE_P (subtype))
13311 return NULL_TREE;
13312
13313 if (TREE_CODE (sub) == ADDR_EXPR)
13314 {
13315 tree op = TREE_OPERAND (sub, 0);
13316 tree optype = TREE_TYPE (op);
13317 /* *&CONST_DECL -> to the value of the const decl. */
13318 if (TREE_CODE (op) == CONST_DECL)
13319 return DECL_INITIAL (op);
13320 /* *&p => p; make sure to handle *&"str"[cst] here. */
13321 if (type == optype)
13322 {
13323 tree fop = fold_read_from_constant_string (op);
13324 if (fop)
13325 return fop;
13326 else
13327 return op;
13328 }
13329 /* *(foo *)&fooarray => fooarray[0] */
13330 else if (TREE_CODE (optype) == ARRAY_TYPE
13331 && type == TREE_TYPE (optype))
13332 {
13333 tree type_domain = TYPE_DOMAIN (optype);
13334 tree min_val = size_zero_node;
13335 if (type_domain && TYPE_MIN_VALUE (type_domain))
13336 min_val = TYPE_MIN_VALUE (type_domain);
13337 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13338 }
13339 /* *(foo *)&complexfoo => __real__ complexfoo */
13340 else if (TREE_CODE (optype) == COMPLEX_TYPE
13341 && type == TREE_TYPE (optype))
13342 return fold_build1 (REALPART_EXPR, type, op);
13343 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13344 else if (TREE_CODE (optype) == VECTOR_TYPE
13345 && type == TREE_TYPE (optype))
13346 {
13347 tree part_width = TYPE_SIZE (type);
13348 tree index = bitsize_int (0);
13349 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13350 }
13351 }
13352
13353 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13354 if (TREE_CODE (sub) == PLUS_EXPR
13355 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13356 {
13357 tree op00 = TREE_OPERAND (sub, 0);
13358 tree op01 = TREE_OPERAND (sub, 1);
13359 tree op00type;
13360
13361 STRIP_NOPS (op00);
13362 op00type = TREE_TYPE (op00);
13363 if (TREE_CODE (op00) == ADDR_EXPR
13364 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13365 && type == TREE_TYPE (TREE_TYPE (op00type)))
13366 {
13367 tree size = TYPE_SIZE_UNIT (type);
13368 if (tree_int_cst_equal (size, op01))
13369 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13370 }
13371 }
13372
13373 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13374 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13375 && type == TREE_TYPE (TREE_TYPE (subtype)))
13376 {
13377 tree type_domain;
13378 tree min_val = size_zero_node;
13379 sub = build_fold_indirect_ref (sub);
13380 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13381 if (type_domain && TYPE_MIN_VALUE (type_domain))
13382 min_val = TYPE_MIN_VALUE (type_domain);
13383 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13384 }
13385
13386 return NULL_TREE;
13387 }
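
/* Examples of the rewrites above, in source terms:

     *(float *) &float_array         => float_array[0]
     *(float *) &a_complex_float     => __real__ a_complex_float
     *((float *) &a_complex_float + 1), seen here as an indirection
       through &a_complex_float plus sizeof (float)
                                     => __imag__ a_complex_float  */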
13388
13389 /* Builds an expression for an indirection through T, simplifying some
13390 cases. */
13391
13392 tree
13393 build_fold_indirect_ref (tree t)
13394 {
13395 tree type = TREE_TYPE (TREE_TYPE (t));
13396 tree sub = fold_indirect_ref_1 (type, t);
13397
13398 if (sub)
13399 return sub;
13400 else
13401 return build1 (INDIRECT_REF, type, t);
13402 }
13403
13404 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13405
13406 tree
13407 fold_indirect_ref (tree t)
13408 {
13409 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13410
13411 if (sub)
13412 return sub;
13413 else
13414 return t;
13415 }
13416
13417 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13418 whose result is ignored. The type of the returned tree need not be
13419 the same as the original expression. */
13420
13421 tree
13422 fold_ignored_result (tree t)
13423 {
13424 if (!TREE_SIDE_EFFECTS (t))
13425 return integer_zero_node;
13426
13427 for (;;)
13428 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13429 {
13430 case tcc_unary:
13431 t = TREE_OPERAND (t, 0);
13432 break;
13433
13434 case tcc_binary:
13435 case tcc_comparison:
13436 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13437 t = TREE_OPERAND (t, 0);
13438 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13439 t = TREE_OPERAND (t, 1);
13440 else
13441 return t;
13442 break;
13443
13444 case tcc_expression:
13445 switch (TREE_CODE (t))
13446 {
13447 case COMPOUND_EXPR:
13448 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13449 return t;
13450 t = TREE_OPERAND (t, 0);
13451 break;
13452
13453 case COND_EXPR:
13454 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13455 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13456 return t;
13457 t = TREE_OPERAND (t, 0);
13458 break;
13459
13460 default:
13461 return t;
13462 }
13463 break;
13464
13465 default:
13466 return t;
13467 }
13468 }
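
/* For example, when the value of X + F () is ignored, the tcc_binary
   case drops the side-effect-free operand X and keeps only the call
   F (); for COND ? F () : G (), both arms have side effects, so the
   whole COND_EXPR is returned unchanged.  */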
13469
13470 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13471 This can only be applied to objects of a sizetype. */
13472
13473 tree
13474 round_up (tree value, int divisor)
13475 {
13476 tree div = NULL_TREE;
13477
13478 gcc_assert (divisor > 0);
13479 if (divisor == 1)
13480 return value;
13481
13482 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13483 have to do anything. Only do this when we are not given a const,
13484 because in that case, this check is more expensive than just
13485 doing it. */
13486 if (TREE_CODE (value) != INTEGER_CST)
13487 {
13488 div = build_int_cst (TREE_TYPE (value), divisor);
13489
13490 if (multiple_of_p (TREE_TYPE (value), value, div))
13491 return value;
13492 }
13493
13494 /* If divisor is a power of two, simplify this to bit manipulation. */
13495 if (divisor == (divisor & -divisor))
13496 {
13497 tree t;
13498
13499 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13500 value = size_binop (PLUS_EXPR, value, t);
13501 t = build_int_cst (TREE_TYPE (value), -divisor);
13502 value = size_binop (BIT_AND_EXPR, value, t);
13503 }
13504 else
13505 {
13506 if (!div)
13507 div = build_int_cst (TREE_TYPE (value), divisor);
13508 value = size_binop (CEIL_DIV_EXPR, value, div);
13509 value = size_binop (MULT_EXPR, value, div);
13510 }
13511
13512 return value;
13513 }
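
#if 0
/* Host-integer sketch (hypothetical) of the identity the power-of-two
   branch builds at the tree level: rounding VALUE up to a power-of-two
   DIVISOR is (VALUE + DIVISOR - 1) & -DIVISOR.  A divisor such as 12
   instead goes through CEIL_DIV_EXPR and MULT_EXPR, conceptually
   ((VALUE + 11) / 12) * 12.  */
static int
round_up_example (int value, int divisor)
{
  return (value + divisor - 1) & -divisor;
}
#endif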
13514
13515 /* Likewise, but round down. */
13516
13517 tree
13518 round_down (tree value, int divisor)
13519 {
13520 tree div = NULL_TREE;
13521
13522 gcc_assert (divisor > 0);
13523 if (divisor == 1)
13524 return value;
13525
13526 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13527 have to do anything. Only do this when we are not given a const,
13528 because in that case, this check is more expensive than just
13529 doing it. */
13530 if (TREE_CODE (value) != INTEGER_CST)
13531 {
13532 div = build_int_cst (TREE_TYPE (value), divisor);
13533
13534 if (multiple_of_p (TREE_TYPE (value), value, div))
13535 return value;
13536 }
13537
13538 /* If divisor is a power of two, simplify this to bit manipulation. */
13539 if (divisor == (divisor & -divisor))
13540 {
13541 tree t;
13542
13543 t = build_int_cst (TREE_TYPE (value), -divisor);
13544 value = size_binop (BIT_AND_EXPR, value, t);
13545 }
13546 else
13547 {
13548 if (!div)
13549 div = build_int_cst (TREE_TYPE (value), divisor);
13550 value = size_binop (FLOOR_DIV_EXPR, value, div);
13551 value = size_binop (MULT_EXPR, value, div);
13552 }
13553
13554 return value;
13555 }
13556
13557 /* Returns a pointer to the base of the object addressed by EXP and
13558    extracts information about the offset of the access, storing it
13559    in *PBITPOS and *POFFSET.  */
13560
13561 static tree
13562 split_address_to_core_and_offset (tree exp,
13563 HOST_WIDE_INT *pbitpos, tree *poffset)
13564 {
13565 tree core;
13566 enum machine_mode mode;
13567 int unsignedp, volatilep;
13568 HOST_WIDE_INT bitsize;
13569
13570 if (TREE_CODE (exp) == ADDR_EXPR)
13571 {
13572 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13573 poffset, &mode, &unsignedp, &volatilep,
13574 false);
13575 core = build_fold_addr_expr (core);
13576 }
13577 else
13578 {
13579 core = exp;
13580 *pbitpos = 0;
13581 *poffset = NULL_TREE;
13582 }
13583
13584 return core;
13585 }
13586
13587 /* Returns true if addresses of E1 and E2 differ by a constant, false
13588 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13589
13590 bool
13591 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13592 {
13593 tree core1, core2;
13594 HOST_WIDE_INT bitpos1, bitpos2;
13595 tree toffset1, toffset2, tdiff, type;
13596
13597 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13598 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13599
13600 if (bitpos1 % BITS_PER_UNIT != 0
13601 || bitpos2 % BITS_PER_UNIT != 0
13602 || !operand_equal_p (core1, core2, 0))
13603 return false;
13604
13605 if (toffset1 && toffset2)
13606 {
13607 type = TREE_TYPE (toffset1);
13608 if (type != TREE_TYPE (toffset2))
13609 toffset2 = fold_convert (type, toffset2);
13610
13611 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13612 if (!cst_and_fits_in_hwi (tdiff))
13613 return false;
13614
13615 *diff = int_cst_value (tdiff);
13616 }
13617 else if (toffset1 || toffset2)
13618 {
13619 /* If only one of the offsets is non-constant, the difference cannot
13620 be a constant. */
13621 return false;
13622 }
13623 else
13624 *diff = 0;
13625
13626 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13627 return true;
13628 }
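
/* For instance, for E1 == &s.buf[5] and E2 == &s.buf[2] with one-byte
   elements, both addresses share the core &s, the bit positions differ
   by 3 * BITS_PER_UNIT, and *DIFF is set to 3.  If exactly one side
   gets a variable offset back from get_inner_reference, no constant
   difference exists and false is returned.  */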
13629
13630 /* Simplify the floating point expression EXP when the sign of the
13631 result is not significant. Return NULL_TREE if no simplification
13632 is possible. */
13633
13634 tree
13635 fold_strip_sign_ops (tree exp)
13636 {
13637 tree arg0, arg1;
13638
13639 switch (TREE_CODE (exp))
13640 {
13641 case ABS_EXPR:
13642 case NEGATE_EXPR:
13643 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13644 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13645
13646 case MULT_EXPR:
13647 case RDIV_EXPR:
13648 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13649 return NULL_TREE;
13650 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13651 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13652 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13653 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13654 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13655 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13656 break;
13657
13658 case COMPOUND_EXPR:
13659 arg0 = TREE_OPERAND (exp, 0);
13660 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13661 if (arg1)
13662 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13663 break;
13664
13665 case COND_EXPR:
13666 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13667 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13668 if (arg0 || arg1)
13669 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13670 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13671 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13672 break;
13673
13674 case CALL_EXPR:
13675 {
13676 const enum built_in_function fcode = builtin_mathfn_code (exp);
13677 switch (fcode)
13678 {
13679 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13680 /* Strip copysign function call, return the 1st argument. */
13681 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13682 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13683 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13684
13685 default:
13686 /* Strip sign ops from the argument of "odd" math functions. */
13687 if (negate_mathfn_p (fcode))
13688 {
13689 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13690 if (arg0)
13691 return build_function_call_expr (get_callee_fndecl (exp),
13692 build_tree_list (NULL_TREE,
13693 arg0));
13694 }
13695 break;
13696 }
13697 }
13698 break;
13699
13700 default:
13701 break;
13702 }
13703 return NULL_TREE;
13704 }
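
/* For example, when only the magnitude of the result matters (say the
   expression feeds a later fabs or an x * x computation), -a * b
   strips to a * b, and copysign (x, y) strips to x via the COPYSIGN
   case, with y retained only for its side effects through
   omit_one_operand.  */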