/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
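
/* For illustration: with a 32-bit HOST_WIDE_INT, a = 0x7fffffff and
   b = 1 give sum = 0x80000000; then ~(a ^ b) = 0x80000001 and
   a ^ sum = 0xffffffff, so the AND has the sign bit set and the macro
   yields nonzero, flagging the signed overflow.  With a = 1, b = 2
   (sum = 3), a and sum agree in sign and the macro yields zero.  */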
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two; this does
     the correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
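
/* For example, with integer constants ARG1 = 12 and ARG2 = 4,
   div_if_zero_remainder (TRUNC_DIV_EXPR, arg1, arg2) folds to the
   constant 3, while ARG1 = 13 leaves a nonzero remainder and so
   yields NULL_TREE.  */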
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
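
/* Illustrative usage sketch (the variable names here are hypothetical,
   not from any particular caller):

       fold_defer_overflow_warnings ();
       tem = fold (expr);
       fold_undefer_overflow_warnings (tem != NULL_TREE
                                       && result_is_used, stmt, 0);

   so that a deferred -Wstrict-overflow diagnostic is emitted only when
   the folded result is actually kept.  */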
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
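
/* For instance, sin is odd (-sin(x) == sin(-x)), so BUILT_IN_SIN is
   accepted above, whereas cos is even and is correctly rejected.  The
   rint family is odd under the default round-to-nearest mode, but a
   directional run-time rounding mode breaks the symmetry, hence the
   !flag_rounding_math guard.  */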

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
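
/* E.g. for a 32-bit signed type the only value that fails this check
   is INT_MIN (bit pattern 1 << 31): in two's complement -INT_MIN
   wraps back to INT_MIN, so negating it would overflow, while every
   other value negates cleanly.  */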

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
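
/* For example, given T = a - b in a float type where signed zeros and
   sign-dependent rounding are not honored, this returns true, since
   -(a - b) can be rewritten as b - a at no cost; for T = a + b it
   instead checks whether either operand is itself cheaply negatable.  */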

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
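
/* As an illustration, for a 32-bit int x the RSHIFT_EXPR case above
   rewrites -(x >> 31) as (unsigned) x >> 31: both forms yield 0 or 1
   according to the sign bit of x, but the unsigned form cannot
   overflow.  */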

/* Like fold_negate_expr, but returns a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "Constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
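
/* Worked example: splitting IN = x - 5 with CODE == PLUS_EXPR yields
   var = x and *minus_litp = 5 (the subtracted literal), with *litp and
   *conp left null; associate_trees below can then recombine the
   pieces.  */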

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (t1, 0)));
              goto associate_trees_exit;
            }
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (t2, 0)));
              goto associate_trees_exit;
            }
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
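
/* E.g. associating t1 = x and t2 = -y (a NEGATE_EXPR) under PLUS_EXPR
   produces x - y rather than x + (-y), either via the explicit
   MINUS_EXPR rewrite above or via the fold_build2_loc fallback.  */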
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
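
/* For instance, folding 7 << 2 arrives here as
   int_const_binop (LSHIFT_EXPR, arg1, arg2, 0) and produces the
   INTEGER_CST 28; a signed addition such as INT_MAX + 1 still
   produces a constant, but with TREE_OVERFLOW set by
   force_fit_type_double.  */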

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2, notrunc),
                               const_binop (MULT_EXPR, i2, i2, notrunc),
                               notrunc);
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2, notrunc),
                               const_binop (MULT_EXPR, i1, i2, notrunc),
                               notrunc);
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2, notrunc),
                               const_binop (MULT_EXPR, r1, i2, notrunc),
                               notrunc);

              real = const_binop (code, t1, magsquared, notrunc);
              imag = const_binop (code, t2, magsquared, notrunc);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2, notrunc);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio,
                                                       notrunc),
                                          notrunc);
                  real = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, i1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2, notrunc);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio,
                                                       notrunc),
                                          notrunc);

                  real = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, r1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated
             as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2, notrunc);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
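
/* Callers normally reach this through the convenience macros in
   tree.h (size_int, ssize_int, bitsize_int, sbitsize_int), each of
   which fixes the KIND argument; e.g. size_int (4) builds the
   sizetype constant 4.  */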
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
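
/* E.g. size_binop (PLUS_EXPR, size_int (4), size_int (8)) (via the
   size_binop macro in tree.h, which supplies the location) folds
   immediately to the sizetype constant 12 through int_const_binop.  */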

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
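
/* Worked example: for sizetype constants ARG0 = 4 and ARG1 = 12, the
   unsigned subtraction 4 - 12 would wrap, so we instead compute
   12 - 4 = 8 in sizetype, convert to ssizetype, and negate, yielding
   the ssizetype constant -8.  */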
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification:
     IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards, which simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val.low, val.high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}
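
/* E.g. converting the REAL_CST 1e30 to a 32-bit int saturates to the
   type's maximum 2147483647, with TREE_OVERFLOW set so callers can
   diagnose the out-of-range conversion; a NaN converts to zero,
   likewise with TREE_OVERFLOW set.  */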

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp whenever the discarded fractional
     bits are nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
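
/* The dispatch is purely on (target type kind, constant kind): e.g.
   fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, <REAL_CST 2.5>)
   routes to fold_convert_const_int_from_real above and yields the
   INTEGER_CST 2.  */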

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1832
1833 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1834
1835 bool
1836 fold_convertible_p (const_tree type, const_tree arg)
1837 {
1838 tree orig = TREE_TYPE (arg);
1839
1840 if (type == orig)
1841 return true;
1842
1843 if (TREE_CODE (arg) == ERROR_MARK
1844 || TREE_CODE (type) == ERROR_MARK
1845 || TREE_CODE (orig) == ERROR_MARK)
1846 return false;
1847
1848 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1849 return true;
1850
1851 switch (TREE_CODE (type))
1852 {
1853 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1854 case POINTER_TYPE: case REFERENCE_TYPE:
1855 case OFFSET_TYPE:
1856 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1857 || TREE_CODE (orig) == OFFSET_TYPE)
1858 return true;
1859 return (TREE_CODE (orig) == VECTOR_TYPE
1860 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1861
1862 case REAL_TYPE:
1863 case FIXED_POINT_TYPE:
1864 case COMPLEX_TYPE:
1865 case VECTOR_TYPE:
1866 case VOID_TYPE:
1867 return TREE_CODE (type) == TREE_CODE (orig);
1868
1869 default:
1870 return false;
1871 }
1872 }
1873
1874 /* Convert expression ARG to type TYPE. Used by the middle-end for
1875 simple conversions in preference to calling the front-end's convert. */
1876
1877 tree
1878 fold_convert_loc (location_t loc, tree type, tree arg)
1879 {
1880 tree orig = TREE_TYPE (arg);
1881 tree tem;
1882
1883 if (type == orig)
1884 return arg;
1885
1886 if (TREE_CODE (arg) == ERROR_MARK
1887 || TREE_CODE (type) == ERROR_MARK
1888 || TREE_CODE (orig) == ERROR_MARK)
1889 return error_mark_node;
1890
1891 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1892 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1893
1894 switch (TREE_CODE (type))
1895 {
1896 case POINTER_TYPE:
1897 case REFERENCE_TYPE:
1898 /* Handle conversions between pointers to different address spaces. */
1899 if (POINTER_TYPE_P (orig)
1900 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1901 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1902 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1903 /* fall through */
1904
1905 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1906 case OFFSET_TYPE:
1907 if (TREE_CODE (arg) == INTEGER_CST)
1908 {
1909 tem = fold_convert_const (NOP_EXPR, type, arg);
1910 if (tem != NULL_TREE)
1911 return tem;
1912 }
1913 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1914 || TREE_CODE (orig) == OFFSET_TYPE)
1915 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1916 if (TREE_CODE (orig) == COMPLEX_TYPE)
1917 return fold_convert_loc (loc, type,
1918 fold_build1_loc (loc, REALPART_EXPR,
1919 TREE_TYPE (orig), arg));
1920 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1921 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1922 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1923
1924 case REAL_TYPE:
1925 if (TREE_CODE (arg) == INTEGER_CST)
1926 {
1927 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1929 return tem;
1930 }
1931 else if (TREE_CODE (arg) == REAL_CST)
1932 {
1933 tem = fold_convert_const (NOP_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1935 return tem;
1936 }
1937 else if (TREE_CODE (arg) == FIXED_CST)
1938 {
1939 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1940 if (tem != NULL_TREE)
1941 return tem;
1942 }
1943
1944 switch (TREE_CODE (orig))
1945 {
1946 case INTEGER_TYPE:
1947 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1948 case POINTER_TYPE: case REFERENCE_TYPE:
1949 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1950
1951 case REAL_TYPE:
1952 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1953
1954 case FIXED_POINT_TYPE:
1955 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1956
1957 case COMPLEX_TYPE:
1958 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1959 return fold_convert_loc (loc, type, tem);
1960
1961 default:
1962 gcc_unreachable ();
1963 }
1964
1965 case FIXED_POINT_TYPE:
1966 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1967 || TREE_CODE (arg) == REAL_CST)
1968 {
1969 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1970 if (tem != NULL_TREE)
1971 goto fold_convert_exit;
1972 }
1973
1974 switch (TREE_CODE (orig))
1975 {
1976 case FIXED_POINT_TYPE:
1977 case INTEGER_TYPE:
1978 case ENUMERAL_TYPE:
1979 case BOOLEAN_TYPE:
1980 case REAL_TYPE:
1981 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1982
1983 case COMPLEX_TYPE:
1984 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1985 return fold_convert_loc (loc, type, tem);
1986
1987 default:
1988 gcc_unreachable ();
1989 }
1990
1991 case COMPLEX_TYPE:
1992 switch (TREE_CODE (orig))
1993 {
1994 case INTEGER_TYPE:
1995 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1996 case POINTER_TYPE: case REFERENCE_TYPE:
1997 case REAL_TYPE:
1998 case FIXED_POINT_TYPE:
1999 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2000 fold_convert_loc (loc, TREE_TYPE (type), arg),
2001 fold_convert_loc (loc, TREE_TYPE (type),
2002 integer_zero_node));
2003 case COMPLEX_TYPE:
2004 {
2005 tree rpart, ipart;
2006
2007 if (TREE_CODE (arg) == COMPLEX_EXPR)
2008 {
2009 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2010 TREE_OPERAND (arg, 0));
2011 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2012 TREE_OPERAND (arg, 1));
2013 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2014 }
2015
2016 arg = save_expr (arg);
2017 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2018 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2019 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2020 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2021 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2022 }
2023
2024 default:
2025 gcc_unreachable ();
2026 }
2027
2028 case VECTOR_TYPE:
2029 if (integer_zerop (arg))
2030 return build_zero_vector (type);
2031 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2032 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2033 || TREE_CODE (orig) == VECTOR_TYPE);
2034 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2035
2036 case VOID_TYPE:
2037 tem = fold_ignored_result (arg);
2038 if (TREE_CODE (tem) == MODIFY_EXPR)
2039 goto fold_convert_exit;
2040 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2041
2042 default:
2043 gcc_unreachable ();
2044 }
2045 fold_convert_exit:
2046 protected_set_expr_location (tem, loc);
2047 return tem;
2048 }
2049 \f
2050 /* Return false if expr can be assumed not to be an lvalue, true
2051 otherwise. */
2052
2053 static bool
2054 maybe_lvalue_p (const_tree x)
2055 {
2056 /* We only need to wrap lvalue tree codes. */
2057 switch (TREE_CODE (x))
2058 {
2059 case VAR_DECL:
2060 case PARM_DECL:
2061 case RESULT_DECL:
2062 case LABEL_DECL:
2063 case FUNCTION_DECL:
2064 case SSA_NAME:
2065
2066 case COMPONENT_REF:
2067 case INDIRECT_REF:
2068 case ALIGN_INDIRECT_REF:
2069 case MISALIGNED_INDIRECT_REF:
2070 case ARRAY_REF:
2071 case ARRAY_RANGE_REF:
2072 case BIT_FIELD_REF:
2073 case OBJ_TYPE_REF:
2074
2075 case REALPART_EXPR:
2076 case IMAGPART_EXPR:
2077 case PREINCREMENT_EXPR:
2078 case PREDECREMENT_EXPR:
2079 case SAVE_EXPR:
2080 case TRY_CATCH_EXPR:
2081 case WITH_CLEANUP_EXPR:
2082 case COMPOUND_EXPR:
2083 case MODIFY_EXPR:
2084 case TARGET_EXPR:
2085 case COND_EXPR:
2086 case BIND_EXPR:
2087 break;
2088
2089 default:
2090 /* Assume the worst for front-end tree codes. */
2091 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2092 break;
2093 return false;
2094 }
2095
2096 return true;
2097 }
2098
2099 /* Return an expr equal to X but certainly not valid as an lvalue. */
2100
2101 tree
2102 non_lvalue_loc (location_t loc, tree x)
2103 {
2104 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2105 us. */
2106 if (in_gimple_form)
2107 return x;
2108
2109 if (! maybe_lvalue_p (x))
2110 return x;
2111 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2112 SET_EXPR_LOCATION (x, loc);
2113 return x;
2114 }
2115
2116 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2117 Zero means allow extended lvalues. */
2118
2119 int pedantic_lvalues;
2120
2121 /* When pedantic, return an expr equal to X but certainly not valid as a
2122 pedantic lvalue. Otherwise, return X. */
2123
2124 static tree
2125 pedantic_non_lvalue_loc (location_t loc, tree x)
2126 {
2127 if (pedantic_lvalues)
2128 return non_lvalue_loc (loc, x);
2129 protected_set_expr_location (x, loc);
2130 return x;
2131 }
2132 \f
2133 /* Given a tree comparison code, return the code that is the logical inverse
2134 of the given code. It is not safe to do this for floating-point
2135 comparisons, except for NE_EXPR and EQ_EXPR, so HONOR_NANS is passed in
2136 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
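     /* For example, the inverse of LT_EXPR is GE_EXPR when NaNs cannot
        occur, but UNGE_EXPR when they can, since x < y and x >= y are
        both false if either operand is a NaN.  */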
2137
2138 enum tree_code
2139 invert_tree_comparison (enum tree_code code, bool honor_nans)
2140 {
2141 if (honor_nans && flag_trapping_math)
2142 return ERROR_MARK;
2143
2144 switch (code)
2145 {
2146 case EQ_EXPR:
2147 return NE_EXPR;
2148 case NE_EXPR:
2149 return EQ_EXPR;
2150 case GT_EXPR:
2151 return honor_nans ? UNLE_EXPR : LE_EXPR;
2152 case GE_EXPR:
2153 return honor_nans ? UNLT_EXPR : LT_EXPR;
2154 case LT_EXPR:
2155 return honor_nans ? UNGE_EXPR : GE_EXPR;
2156 case LE_EXPR:
2157 return honor_nans ? UNGT_EXPR : GT_EXPR;
2158 case LTGT_EXPR:
2159 return UNEQ_EXPR;
2160 case UNEQ_EXPR:
2161 return LTGT_EXPR;
2162 case UNGT_EXPR:
2163 return LE_EXPR;
2164 case UNGE_EXPR:
2165 return LT_EXPR;
2166 case UNLT_EXPR:
2167 return GE_EXPR;
2168 case UNLE_EXPR:
2169 return GT_EXPR;
2170 case ORDERED_EXPR:
2171 return UNORDERED_EXPR;
2172 case UNORDERED_EXPR:
2173 return ORDERED_EXPR;
2174 default:
2175 gcc_unreachable ();
2176 }
2177 }
2178
2179 /* Similar, but return the comparison that results if the operands are
2180 swapped. This is safe for floating-point. */
2181
2182 enum tree_code
2183 swap_tree_comparison (enum tree_code code)
2184 {
2185 switch (code)
2186 {
2187 case EQ_EXPR:
2188 case NE_EXPR:
2189 case ORDERED_EXPR:
2190 case UNORDERED_EXPR:
2191 case LTGT_EXPR:
2192 case UNEQ_EXPR:
2193 return code;
2194 case GT_EXPR:
2195 return LT_EXPR;
2196 case GE_EXPR:
2197 return LE_EXPR;
2198 case LT_EXPR:
2199 return GT_EXPR;
2200 case LE_EXPR:
2201 return GE_EXPR;
2202 case UNGT_EXPR:
2203 return UNLT_EXPR;
2204 case UNGE_EXPR:
2205 return UNLE_EXPR;
2206 case UNLT_EXPR:
2207 return UNGT_EXPR;
2208 case UNLE_EXPR:
2209 return UNGE_EXPR;
2210 default:
2211 gcc_unreachable ();
2212 }
2213 }
2214
2215
2216 /* Convert a comparison tree code from an enum tree_code representation
2217 into a compcode bit-based encoding. This function is the inverse of
2218 compcode_to_comparison. */
2219
2220 static enum comparison_code
2221 comparison_to_compcode (enum tree_code code)
2222 {
2223 switch (code)
2224 {
2225 case LT_EXPR:
2226 return COMPCODE_LT;
2227 case EQ_EXPR:
2228 return COMPCODE_EQ;
2229 case LE_EXPR:
2230 return COMPCODE_LE;
2231 case GT_EXPR:
2232 return COMPCODE_GT;
2233 case NE_EXPR:
2234 return COMPCODE_NE;
2235 case GE_EXPR:
2236 return COMPCODE_GE;
2237 case ORDERED_EXPR:
2238 return COMPCODE_ORD;
2239 case UNORDERED_EXPR:
2240 return COMPCODE_UNORD;
2241 case UNLT_EXPR:
2242 return COMPCODE_UNLT;
2243 case UNEQ_EXPR:
2244 return COMPCODE_UNEQ;
2245 case UNLE_EXPR:
2246 return COMPCODE_UNLE;
2247 case UNGT_EXPR:
2248 return COMPCODE_UNGT;
2249 case LTGT_EXPR:
2250 return COMPCODE_LTGT;
2251 case UNGE_EXPR:
2252 return COMPCODE_UNGE;
2253 default:
2254 gcc_unreachable ();
2255 }
2256 }
2257
2258 /* Convert a compcode bit-based encoding of a comparison operator back
2259 to GCC's enum tree_code representation. This function is the
2260 inverse of comparison_to_compcode. */
2261
2262 static enum tree_code
2263 compcode_to_comparison (enum comparison_code code)
2264 {
2265 switch (code)
2266 {
2267 case COMPCODE_LT:
2268 return LT_EXPR;
2269 case COMPCODE_EQ:
2270 return EQ_EXPR;
2271 case COMPCODE_LE:
2272 return LE_EXPR;
2273 case COMPCODE_GT:
2274 return GT_EXPR;
2275 case COMPCODE_NE:
2276 return NE_EXPR;
2277 case COMPCODE_GE:
2278 return GE_EXPR;
2279 case COMPCODE_ORD:
2280 return ORDERED_EXPR;
2281 case COMPCODE_UNORD:
2282 return UNORDERED_EXPR;
2283 case COMPCODE_UNLT:
2284 return UNLT_EXPR;
2285 case COMPCODE_UNEQ:
2286 return UNEQ_EXPR;
2287 case COMPCODE_UNLE:
2288 return UNLE_EXPR;
2289 case COMPCODE_UNGT:
2290 return UNGT_EXPR;
2291 case COMPCODE_LTGT:
2292 return LTGT_EXPR;
2293 case COMPCODE_UNGE:
2294 return UNGE_EXPR;
2295 default:
2296 gcc_unreachable ();
2297 }
2298 }
2299
2300 /* Return a tree for the comparison which is the combination of
2301 doing the AND or OR (depending on CODE) of the two operations LCODE
2302 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2303 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2304 if this makes the transformation invalid. */
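     /* For example, combining (x < y) with (x == y) under TRUTH_ORIF_EXPR
        ORs COMPCODE_LT with COMPCODE_EQ to obtain COMPCODE_LE, which maps
        back to the single comparison x <= y when NaNs need not be
        honored.  */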
2305
2306 tree
2307 combine_comparisons (location_t loc,
2308 enum tree_code code, enum tree_code lcode,
2309 enum tree_code rcode, tree truth_type,
2310 tree ll_arg, tree lr_arg)
2311 {
2312 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2313 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2314 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2315 int compcode;
2316
2317 switch (code)
2318 {
2319 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2320 compcode = lcompcode & rcompcode;
2321 break;
2322
2323 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2324 compcode = lcompcode | rcompcode;
2325 break;
2326
2327 default:
2328 return NULL_TREE;
2329 }
2330
2331 if (!honor_nans)
2332 {
2333 /* Eliminate unordered comparisons, as well as LTGT and ORD
2334 which are not used unless the mode has NaNs. */
2335 compcode &= ~COMPCODE_UNORD;
2336 if (compcode == COMPCODE_LTGT)
2337 compcode = COMPCODE_NE;
2338 else if (compcode == COMPCODE_ORD)
2339 compcode = COMPCODE_TRUE;
2340 }
2341 else if (flag_trapping_math)
2342 {
2343 /* Check that the original operation and the optimized ones will trap
2344 under the same condition. */
2345 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2346 && (lcompcode != COMPCODE_EQ)
2347 && (lcompcode != COMPCODE_ORD);
2348 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2349 && (rcompcode != COMPCODE_EQ)
2350 && (rcompcode != COMPCODE_ORD);
2351 bool trap = (compcode & COMPCODE_UNORD) == 0
2352 && (compcode != COMPCODE_EQ)
2353 && (compcode != COMPCODE_ORD);
2354
2355 /* In a short-circuited boolean expression the LHS might be
2356 such that the RHS, if evaluated, will never trap. For
2357 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2358 if neither x nor y is NaN. (This is a mixed blessing: for
2359 example, the expression above will never trap, hence
2360 optimizing it to x < y would be invalid). */
2361 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2362 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2363 rtrap = false;
2364
2365 /* If the comparison was short-circuited, and only the RHS
2366 trapped, we may now generate a spurious trap. */
2367 if (rtrap && !ltrap
2368 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2369 return NULL_TREE;
2370
2371 /* If we changed the conditions that cause a trap, we lose. */
2372 if ((ltrap || rtrap) != trap)
2373 return NULL_TREE;
2374 }
2375
2376 if (compcode == COMPCODE_TRUE)
2377 return constant_boolean_node (true, truth_type);
2378 else if (compcode == COMPCODE_FALSE)
2379 return constant_boolean_node (false, truth_type);
2380 else
2381 {
2382 enum tree_code tcode;
2383
2384 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2385 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2386 }
2387 }
2388 \f
2389 /* Return nonzero if two operands (typically of the same tree node)
2390 are necessarily equal. If either argument has side-effects this
2391 function returns zero. FLAGS modifies behavior as follows:
2392
2393 If OEP_ONLY_CONST is set, only return nonzero for constants.
2394 This function tests whether the operands are indistinguishable;
2395 it does not test whether they are equal using C's == operation.
2396 The distinction is important for IEEE floating point, because
2397 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2398 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2399
2400 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2401 even though it may hold multiple values during a function.
2402 This is because a GCC tree node guarantees that nothing else is
2403 executed between the evaluation of its "operands" (which may often
2404 be evaluated in arbitrary order). Hence if the operands themselves
2405 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2406 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2407 unset means assuming isochronic (or instantaneous) tree equivalence.
2408 Unless comparing arbitrary expression trees, such as from different
2409 statements, this flag can usually be left unset.
2410
2411 If OEP_PURE_SAME is set, then pure functions with identical arguments
2412 are considered the same. It is used when the caller has other ways
2413 to ensure that global memory is unchanged in between. */
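     /* For example, with OEP_ONLY_CONST unset the two occurrences of i
        in i + i compare equal, since nothing can modify i between the
        evaluation of the two operands of one PLUS_EXPR; with it set,
        only constants compare equal.  */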
2414
2415 int
2416 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2417 {
2418 /* If either is ERROR_MARK, they aren't equal. */
2419 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2420 || TREE_TYPE (arg0) == error_mark_node
2421 || TREE_TYPE (arg1) == error_mark_node)
2422 return 0;
2423
2424 /* Similarly, if either does not have a type (like a released SSA name),
2425 they aren't equal. */
2426 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2427 return 0;
2428
2429 /* Check equality of integer constants before bailing out due to
2430 precision differences. */
2431 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2432 return tree_int_cst_equal (arg0, arg1);
2433
2434 /* If both types don't have the same signedness, then we can't consider
2435 them equal. We must check this before the STRIP_NOPS calls
2436 because they may change the signedness of the arguments. As pointers
2437 strictly don't have a signedness, require either two pointers or
2438 two non-pointers as well. */
2439 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2440 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2441 return 0;
2442
2443 /* We cannot consider pointers to different address spaces equal. */
2444 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2445 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2446 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2447 return 0;
2448
2449 /* If both types don't have the same precision, then it is not safe
2450 to strip NOPs. */
2451 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2452 return 0;
2453
2454 STRIP_NOPS (arg0);
2455 STRIP_NOPS (arg1);
2456
2457 /* In case both args are comparisons but with different comparison
2458 code, try to swap the comparison operands of one arg to produce
2459 a match and compare that variant. */
2460 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2461 && COMPARISON_CLASS_P (arg0)
2462 && COMPARISON_CLASS_P (arg1))
2463 {
2464 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2465
2466 if (TREE_CODE (arg0) == swap_code)
2467 return operand_equal_p (TREE_OPERAND (arg0, 0),
2468 TREE_OPERAND (arg1, 1), flags)
2469 && operand_equal_p (TREE_OPERAND (arg0, 1),
2470 TREE_OPERAND (arg1, 0), flags);
2471 }
2472
2473 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2474 /* This is needed for conversions and for COMPONENT_REF.
2475 Might as well play it safe and always test this. */
2476 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2477 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2478 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2479 return 0;
2480
2481 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2482 We don't care about side effects in that case because the SAVE_EXPR
2483 takes care of that for us. In all other cases, two expressions are
2484 equal if they have no side effects. If we have two identical
2485 expressions with side effects that should be treated the same due
2486 to the only side effects being identical SAVE_EXPR's, that will
2487 be detected in the recursive calls below. */
2488 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2489 && (TREE_CODE (arg0) == SAVE_EXPR
2490 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2491 return 1;
2492
2493 /* Next handle constant cases, those for which we can return 1 even
2494 if ONLY_CONST is set. */
2495 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2496 switch (TREE_CODE (arg0))
2497 {
2498 case INTEGER_CST:
2499 return tree_int_cst_equal (arg0, arg1);
2500
2501 case FIXED_CST:
2502 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2503 TREE_FIXED_CST (arg1));
2504
2505 case REAL_CST:
2506 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2507 TREE_REAL_CST (arg1)))
2508 return 1;
2509
2510
2511 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2512 {
2513 /* If we do not distinguish between signed and unsigned zero,
2514 consider them equal. */
2515 if (real_zerop (arg0) && real_zerop (arg1))
2516 return 1;
2517 }
2518 return 0;
2519
2520 case VECTOR_CST:
2521 {
2522 tree v1, v2;
2523
2524 v1 = TREE_VECTOR_CST_ELTS (arg0);
2525 v2 = TREE_VECTOR_CST_ELTS (arg1);
2526 while (v1 && v2)
2527 {
2528 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2529 flags))
2530 return 0;
2531 v1 = TREE_CHAIN (v1);
2532 v2 = TREE_CHAIN (v2);
2533 }
2534
2535 return v1 == v2;
2536 }
2537
2538 case COMPLEX_CST:
2539 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2540 flags)
2541 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2542 flags));
2543
2544 case STRING_CST:
2545 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2546 && ! memcmp (TREE_STRING_POINTER (arg0),
2547 TREE_STRING_POINTER (arg1),
2548 TREE_STRING_LENGTH (arg0)));
2549
2550 case ADDR_EXPR:
2551 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2552 0);
2553 default:
2554 break;
2555 }
2556
2557 if (flags & OEP_ONLY_CONST)
2558 return 0;
2559
2560 /* Define macros to test an operand from arg0 and arg1 for equality and a
2561 variant that allows null and views null as being different from any
2562 non-null value. In the latter case, if either is null, they both
2563 must be; otherwise, do the normal comparison. */
2564 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2565 TREE_OPERAND (arg1, N), flags)
2566
2567 #define OP_SAME_WITH_NULL(N) \
2568 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2569 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2570
2571 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2572 {
2573 case tcc_unary:
2574 /* Two conversions are equal only if signedness and modes match. */
2575 switch (TREE_CODE (arg0))
2576 {
2577 CASE_CONVERT:
2578 case FIX_TRUNC_EXPR:
2579 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2580 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2581 return 0;
2582 break;
2583 default:
2584 break;
2585 }
2586
2587 return OP_SAME (0);
2588
2589
2590 case tcc_comparison:
2591 case tcc_binary:
2592 if (OP_SAME (0) && OP_SAME (1))
2593 return 1;
2594
2595 /* For commutative ops, allow the other order. */
2596 return (commutative_tree_code (TREE_CODE (arg0))
2597 && operand_equal_p (TREE_OPERAND (arg0, 0),
2598 TREE_OPERAND (arg1, 1), flags)
2599 && operand_equal_p (TREE_OPERAND (arg0, 1),
2600 TREE_OPERAND (arg1, 0), flags));
2601
2602 case tcc_reference:
2603 /* If either of the pointer (or reference) expressions we are
2604 dereferencing contain a side effect, these cannot be equal. */
2605 if (TREE_SIDE_EFFECTS (arg0)
2606 || TREE_SIDE_EFFECTS (arg1))
2607 return 0;
2608
2609 switch (TREE_CODE (arg0))
2610 {
2611 case INDIRECT_REF:
2612 case ALIGN_INDIRECT_REF:
2613 case MISALIGNED_INDIRECT_REF:
2614 case REALPART_EXPR:
2615 case IMAGPART_EXPR:
2616 return OP_SAME (0);
2617
2618 case ARRAY_REF:
2619 case ARRAY_RANGE_REF:
2620 /* Operands 2 and 3 may be null.
2621 Compare the array index by value first if it is constant, as we
2622 may have different types but the same value here. */
2623 return (OP_SAME (0)
2624 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2625 TREE_OPERAND (arg1, 1))
2626 || OP_SAME (1))
2627 && OP_SAME_WITH_NULL (2)
2628 && OP_SAME_WITH_NULL (3));
2629
2630 case COMPONENT_REF:
2631 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2632 may be NULL when we're called to compare MEM_EXPRs. */
2633 return OP_SAME_WITH_NULL (0)
2634 && OP_SAME (1)
2635 && OP_SAME_WITH_NULL (2);
2636
2637 case BIT_FIELD_REF:
2638 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2639
2640 default:
2641 return 0;
2642 }
2643
2644 case tcc_expression:
2645 switch (TREE_CODE (arg0))
2646 {
2647 case ADDR_EXPR:
2648 case TRUTH_NOT_EXPR:
2649 return OP_SAME (0);
2650
2651 case TRUTH_ANDIF_EXPR:
2652 case TRUTH_ORIF_EXPR:
2653 return OP_SAME (0) && OP_SAME (1);
2654
2655 case TRUTH_AND_EXPR:
2656 case TRUTH_OR_EXPR:
2657 case TRUTH_XOR_EXPR:
2658 if (OP_SAME (0) && OP_SAME (1))
2659 return 1;
2660
2661 /* Otherwise take into account this is a commutative operation. */
2662 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2663 TREE_OPERAND (arg1, 1), flags)
2664 && operand_equal_p (TREE_OPERAND (arg0, 1),
2665 TREE_OPERAND (arg1, 0), flags));
2666
2667 case COND_EXPR:
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2669
2670 default:
2671 return 0;
2672 }
2673
2674 case tcc_vl_exp:
2675 switch (TREE_CODE (arg0))
2676 {
2677 case CALL_EXPR:
2678 /* If the CALL_EXPRs call different functions, then they
2679 clearly cannot be equal. */
2680 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2681 flags))
2682 return 0;
2683
2684 {
2685 unsigned int cef = call_expr_flags (arg0);
2686 if (flags & OEP_PURE_SAME)
2687 cef &= ECF_CONST | ECF_PURE;
2688 else
2689 cef &= ECF_CONST;
2690 if (!cef)
2691 return 0;
2692 }
2693
2694 /* Now see if all the arguments are the same. */
2695 {
2696 const_call_expr_arg_iterator iter0, iter1;
2697 const_tree a0, a1;
2698 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2699 a1 = first_const_call_expr_arg (arg1, &iter1);
2700 a0 && a1;
2701 a0 = next_const_call_expr_arg (&iter0),
2702 a1 = next_const_call_expr_arg (&iter1))
2703 if (! operand_equal_p (a0, a1, flags))
2704 return 0;
2705
2706 /* If we get here and both argument lists are exhausted
2707 then the CALL_EXPRs are equal. */
2708 return ! (a0 || a1);
2709 }
2710 default:
2711 return 0;
2712 }
2713
2714 case tcc_declaration:
2715 /* Consider __builtin_sqrt equal to sqrt. */
2716 return (TREE_CODE (arg0) == FUNCTION_DECL
2717 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2718 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2719 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2720
2721 default:
2722 return 0;
2723 }
2724
2725 #undef OP_SAME
2726 #undef OP_SAME_WITH_NULL
2727 }
2728 \f
2729 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2730 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2731
2732 When in doubt, return 0. */
2733
2734 static int
2735 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2736 {
2737 int unsignedp1, unsignedpo;
2738 tree primarg0, primarg1, primother;
2739 unsigned int correct_width;
2740
2741 if (operand_equal_p (arg0, arg1, 0))
2742 return 1;
2743
2744 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2745 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2746 return 0;
2747
2748 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2749 and see if the inner values are the same. This removes any
2750 signedness comparison, which doesn't matter here. */
2751 primarg0 = arg0, primarg1 = arg1;
2752 STRIP_NOPS (primarg0);
2753 STRIP_NOPS (primarg1);
2754 if (operand_equal_p (primarg0, primarg1, 0))
2755 return 1;
2756
2757 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2758 actual comparison operand, ARG0.
2759
2760 First throw away any conversions to wider types
2761 already present in the operands. */
2762
2763 primarg1 = get_narrower (arg1, &unsignedp1);
2764 primother = get_narrower (other, &unsignedpo);
2765
2766 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2767 if (unsignedp1 == unsignedpo
2768 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2769 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2770 {
2771 tree type = TREE_TYPE (arg0);
2772
2773 /* Make sure shorter operand is extended the right way
2774 to match the longer operand. */
2775 primarg1 = fold_convert (signed_or_unsigned_type_for
2776 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2777
2778 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2779 return 1;
2780 }
2781
2782 return 0;
2783 }
2784 \f
2785 /* See if ARG is an expression that is either a comparison or is performing
2786 arithmetic on comparisons. The comparisons must only be comparing
2787 two different values, which will be stored in *CVAL1 and *CVAL2; if
2788 they are nonzero it means that some operands have already been found.
2789 No variables may be used anywhere else in the expression except in the
2790 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2791 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2792
2793 If this is true, return 1. Otherwise, return zero. */
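     /* For example, for (a < b) | (a == b) this records *CVAL1 = a and
        *CVAL2 = b, whereas (a < b) | (a == c) fails because three
        distinct values appear in the comparisons.  */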
2794
2795 static int
2796 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2797 {
2798 enum tree_code code = TREE_CODE (arg);
2799 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2800
2801 /* We can handle some of the tcc_expression cases here. */
2802 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2803 tclass = tcc_unary;
2804 else if (tclass == tcc_expression
2805 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2806 || code == COMPOUND_EXPR))
2807 tclass = tcc_binary;
2808
2809 else if (tclass == tcc_expression && code == SAVE_EXPR
2810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2811 {
2812 /* If we've already found a CVAL1 or CVAL2, this expression is
2813 too complex to handle. */
2814 if (*cval1 || *cval2)
2815 return 0;
2816
2817 tclass = tcc_unary;
2818 *save_p = 1;
2819 }
2820
2821 switch (tclass)
2822 {
2823 case tcc_unary:
2824 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2825
2826 case tcc_binary:
2827 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2829 cval1, cval2, save_p));
2830
2831 case tcc_constant:
2832 return 1;
2833
2834 case tcc_expression:
2835 if (code == COND_EXPR)
2836 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2837 cval1, cval2, save_p)
2838 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2841 cval1, cval2, save_p));
2842 return 0;
2843
2844 case tcc_comparison:
2845 /* First see if we can handle the first operand, then the second. For
2846 the second operand, we know *CVAL1 can't be zero. Each side of
2847 the comparison must be one of the two values; test for the case
2848 where this isn't true by failing if the two operands
2849 are the same. */
2850
2851 if (operand_equal_p (TREE_OPERAND (arg, 0),
2852 TREE_OPERAND (arg, 1), 0))
2853 return 0;
2854
2855 if (*cval1 == 0)
2856 *cval1 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2858 ;
2859 else if (*cval2 == 0)
2860 *cval2 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2862 ;
2863 else
2864 return 0;
2865
2866 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2867 ;
2868 else if (*cval2 == 0)
2869 *cval2 = TREE_OPERAND (arg, 1);
2870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2871 ;
2872 else
2873 return 0;
2874
2875 return 1;
2876
2877 default:
2878 return 0;
2879 }
2880 }
2881 \f
2882 /* ARG is a tree that is known to contain just arithmetic operations and
2883 comparisons. Evaluate the operations in the tree substituting NEW0 for
2884 any occurrence of OLD0 as an operand of a comparison and likewise for
2885 NEW1 and OLD1. */
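     /* For example, with OLD0 = x, NEW0 = a, OLD1 = y and NEW1 = b, the
        tree (x < y) && (y == x) is rebuilt as (a < b) && (b == a).  */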
2886
2887 static tree
2888 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2889 tree old1, tree new1)
2890 {
2891 tree type = TREE_TYPE (arg);
2892 enum tree_code code = TREE_CODE (arg);
2893 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2894
2895 /* We can handle some of the tcc_expression cases here. */
2896 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2897 tclass = tcc_unary;
2898 else if (tclass == tcc_expression
2899 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2900 tclass = tcc_binary;
2901
2902 switch (tclass)
2903 {
2904 case tcc_unary:
2905 return fold_build1_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1));
2908
2909 case tcc_binary:
2910 return fold_build2_loc (loc, code, type,
2911 eval_subst (loc, TREE_OPERAND (arg, 0),
2912 old0, new0, old1, new1),
2913 eval_subst (loc, TREE_OPERAND (arg, 1),
2914 old0, new0, old1, new1));
2915
2916 case tcc_expression:
2917 switch (code)
2918 {
2919 case SAVE_EXPR:
2920 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2921 old1, new1);
2922
2923 case COMPOUND_EXPR:
2924 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2925 old1, new1);
2926
2927 case COND_EXPR:
2928 return fold_build3_loc (loc, code, type,
2929 eval_subst (loc, TREE_OPERAND (arg, 0),
2930 old0, new0, old1, new1),
2931 eval_subst (loc, TREE_OPERAND (arg, 1),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 2),
2934 old0, new0, old1, new1));
2935 default:
2936 break;
2937 }
2938 /* Fall through - ??? */
2939
2940 case tcc_comparison:
2941 {
2942 tree arg0 = TREE_OPERAND (arg, 0);
2943 tree arg1 = TREE_OPERAND (arg, 1);
2944
2945 /* We need to check both for exact equality and tree equality. The
2946 former will be true if the operand has a side-effect. In that
2947 case, we know the operand occurred exactly once. */
2948
2949 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2950 arg0 = new0;
2951 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2952 arg0 = new1;
2953
2954 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2955 arg1 = new0;
2956 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2957 arg1 = new1;
2958
2959 return fold_build2_loc (loc, code, type, arg0, arg1);
2960 }
2961
2962 default:
2963 return arg;
2964 }
2965 }
2966 \f
2967 /* Return a tree for the case when the result of an expression is RESULT
2968 converted to TYPE and OMITTED was previously an operand of the expression
2969 but is now not needed (e.g., we folded OMITTED * 0).
2970
2971 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2972 the conversion of RESULT to TYPE. */
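     /* For example, when f () * 0 is folded to 0, the call cannot simply
        be dropped; the tree returned here is equivalent to (f (), 0), so
        the side effects of the call are preserved.  */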
2973
2974 tree
2975 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2976 {
2977 tree t = fold_convert_loc (loc, type, result);
2978
2979 /* If the resulting operand is an empty statement, just return the omitted
2980 statement cast to void. */
2981 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2982 {
2983 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2984 goto omit_one_operand_exit;
2985 }
2986
2987 if (TREE_SIDE_EFFECTS (omitted))
2988 {
2989 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2990 goto omit_one_operand_exit;
2991 }
2992
2993 return non_lvalue_loc (loc, t);
2994
2995 omit_one_operand_exit:
2996 protected_set_expr_location (t, loc);
2997 return t;
2998 }
2999
3000 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3001
3002 static tree
3003 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3004 tree omitted)
3005 {
3006 tree t = fold_convert_loc (loc, type, result);
3007
3008 /* If the resulting operand is an empty statement, just return the omitted
3009 statement cast to void. */
3010 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3011 {
3012 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3013 goto pedantic_omit_one_operand_exit;
3014 }
3015
3016 if (TREE_SIDE_EFFECTS (omitted))
3017 {
3018 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3019 goto pedantic_omit_one_operand_exit;
3020 }
3021
3022 return pedantic_non_lvalue_loc (loc, t);
3023
3024 pedantic_omit_one_operand_exit:
3025 protected_set_expr_location (t, loc);
3026 return t;
3027 }
3028
3029 /* Return a tree for the case when the result of an expression is RESULT
3030 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3031 of the expression but are now not needed.
3032
3033 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3034 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3035 evaluated before OMITTED2. Otherwise, if neither has side effects,
3036 just do the conversion of RESULT to TYPE. */
3037
3038 tree
3039 omit_two_operands_loc (location_t loc, tree type, tree result,
3040 tree omitted1, tree omitted2)
3041 {
3042 tree t = fold_convert_loc (loc, type, result);
3043
3044 if (TREE_SIDE_EFFECTS (omitted2))
3045 {
3046 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3047 SET_EXPR_LOCATION (t, loc);
3048 }
3049 if (TREE_SIDE_EFFECTS (omitted1))
3050 {
3051 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3052 SET_EXPR_LOCATION (t, loc);
3053 }
3054
3055 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3056 }
3057
3058 \f
3059 /* Return a simplified tree node for the truth-negation of ARG. This
3060 never alters ARG itself. We assume that ARG is an operation that
3061 returns a truth value (0 or 1).
3062
3063 FIXME: one would think we would fold the result, but it causes
3064 problems with the dominator optimizer. */
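     /* For example, !(a && b) is rewritten as !a || !b below, and !(a < b)
        becomes a >= b when the operands are not floating-point.  */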
3065
3066 tree
3067 fold_truth_not_expr (location_t loc, tree arg)
3068 {
3069 tree t, type = TREE_TYPE (arg);
3070 enum tree_code code = TREE_CODE (arg);
3071 location_t loc1, loc2;
3072
3073 /* If this is a comparison, we can simply invert it, except for
3074 floating-point non-equality comparisons, in which case we just
3075 enclose a TRUTH_NOT_EXPR around what we have. */
3076
3077 if (TREE_CODE_CLASS (code) == tcc_comparison)
3078 {
3079 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3080 if (FLOAT_TYPE_P (op_type)
3081 && flag_trapping_math
3082 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3083 && code != NE_EXPR && code != EQ_EXPR)
3084 return NULL_TREE;
3085
3086 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3087 if (code == ERROR_MARK)
3088 return NULL_TREE;
3089
3090 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3091 SET_EXPR_LOCATION (t, loc);
3092 return t;
3093 }
3094
3095 switch (code)
3096 {
3097 case INTEGER_CST:
3098 return constant_boolean_node (integer_zerop (arg), type);
3099
3100 case TRUTH_AND_EXPR:
3101 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3102 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3103 if (loc1 == UNKNOWN_LOCATION)
3104 loc1 = loc;
3105 if (loc2 == UNKNOWN_LOCATION)
3106 loc2 = loc;
3107 t = build2 (TRUTH_OR_EXPR, type,
3108 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3109 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3110 break;
3111
3112 case TRUTH_OR_EXPR:
3113 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3114 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3115 if (loc1 == UNKNOWN_LOCATION)
3116 loc1 = loc;
3117 if (loc2 == UNKNOWN_LOCATION)
3118 loc2 = loc;
3119 t = build2 (TRUTH_AND_EXPR, type,
3120 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3121 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3122 break;
3123
3124 case TRUTH_XOR_EXPR:
3125 /* Here we can invert either operand. We invert the first operand
3126 unless the second operand is a TRUTH_NOT_EXPR in which case our
3127 result is the XOR of the first operand with the inside of the
3128 negation of the second operand. */
3129
3130 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3131 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3132 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3133 else
3134 t = build2 (TRUTH_XOR_EXPR, type,
3135 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3136 TREE_OPERAND (arg, 1));
3137 break;
3138
3139 case TRUTH_ANDIF_EXPR:
3140 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3141 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3142 if (loc1 == UNKNOWN_LOCATION)
3143 loc1 = loc;
3144 if (loc2 == UNKNOWN_LOCATION)
3145 loc2 = loc;
3146 t = build2 (TRUTH_ORIF_EXPR, type,
3147 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3148 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3149 break;
3150
3151 case TRUTH_ORIF_EXPR:
3152 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3153 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3154 if (loc1 == UNKNOWN_LOCATION)
3155 loc1 = loc;
3156 if (loc2 == UNKNOWN_LOCATION)
3157 loc2 = loc;
3158 t = build2 (TRUTH_ANDIF_EXPR, type,
3159 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3160 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3161 break;
3162
3163 case TRUTH_NOT_EXPR:
3164 return TREE_OPERAND (arg, 0);
3165
3166 case COND_EXPR:
3167 {
3168 tree arg1 = TREE_OPERAND (arg, 1);
3169 tree arg2 = TREE_OPERAND (arg, 2);
3170
3171 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3172 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3173 if (loc1 == UNKNOWN_LOCATION)
3174 loc1 = loc;
3175 if (loc2 == UNKNOWN_LOCATION)
3176 loc2 = loc;
3177
3178 /* A COND_EXPR may have a throw as one operand, which
3179 then has void type. Just leave void operands
3180 as they are. */
3181 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3182 VOID_TYPE_P (TREE_TYPE (arg1))
3183 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3184 VOID_TYPE_P (TREE_TYPE (arg2))
3185 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3186 break;
3187 }
3188
3189 case COMPOUND_EXPR:
3190 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3191 if (loc1 == UNKNOWN_LOCATION)
3192 loc1 = loc;
3193 t = build2 (COMPOUND_EXPR, type,
3194 TREE_OPERAND (arg, 0),
3195 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3196 break;
3197
3198 case NON_LVALUE_EXPR:
3199 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3200 if (loc1 == UNKNOWN_LOCATION)
3201 loc1 = loc;
3202 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3203
3204 CASE_CONVERT:
3205 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3206 {
3207 t = build1 (TRUTH_NOT_EXPR, type, arg);
3208 break;
3209 }
3210
3211 /* ... fall through ... */
3212
3213 case FLOAT_EXPR:
3214 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3215 if (loc1 == UNKNOWN_LOCATION)
3216 loc1 = loc;
3217 t = build1 (TREE_CODE (arg), type,
3218 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3219 break;
3220
3221 case BIT_AND_EXPR:
3222 if (!integer_onep (TREE_OPERAND (arg, 1)))
3223 return NULL_TREE;
3224 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3225 break;
3226
3227 case SAVE_EXPR:
3228 t = build1 (TRUTH_NOT_EXPR, type, arg);
3229 break;
3230
3231 case CLEANUP_POINT_EXPR:
3232 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3233 if (loc1 == UNKNOWN_LOCATION)
3234 loc1 = loc;
3235 t = build1 (CLEANUP_POINT_EXPR, type,
3236 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3237 break;
3238
3239 default:
3240 t = NULL_TREE;
3241 break;
3242 }
3243
3244 if (t)
3245 SET_EXPR_LOCATION (t, loc);
3246
3247 return t;
3248 }
3249
3250 /* Return a simplified tree node for the truth-negation of ARG. This
3251 never alters ARG itself. We assume that ARG is an operation that
3252 returns a truth value (0 or 1).
3253
3254 FIXME: one would think we would fold the result, but it causes
3255 problems with the dominator optimizer. */
3256
3257 tree
3258 invert_truthvalue_loc (location_t loc, tree arg)
3259 {
3260 tree tem;
3261
3262 if (TREE_CODE (arg) == ERROR_MARK)
3263 return arg;
3264
3265 tem = fold_truth_not_expr (loc, arg);
3266 if (!tem)
3267 {
3268 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3269 SET_EXPR_LOCATION (tem, loc);
3270 }
3271
3272 return tem;
3273 }
3274
3275 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3276 operands are another bit-wise operation with a common input. If so,
3277 distribute the bit operations to save an operation and possibly two if
3278 constants are involved. For example, convert
3279 (A | B) & (A | C) into A | (B & C)
3280 Further simplification will occur if B and C are constants.
3281
3282 If this optimization cannot be done, 0 will be returned. */
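     /* For example, (x | 3) & (x | 5) becomes x | (3 & 5), which then
        folds further to x | 1.  */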
3283
3284 static tree
3285 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3286 tree arg0, tree arg1)
3287 {
3288 tree common;
3289 tree left, right;
3290
3291 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3292 || TREE_CODE (arg0) == code
3293 || (TREE_CODE (arg0) != BIT_AND_EXPR
3294 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3295 return 0;
3296
3297 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3298 {
3299 common = TREE_OPERAND (arg0, 0);
3300 left = TREE_OPERAND (arg0, 1);
3301 right = TREE_OPERAND (arg1, 1);
3302 }
3303 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3304 {
3305 common = TREE_OPERAND (arg0, 0);
3306 left = TREE_OPERAND (arg0, 1);
3307 right = TREE_OPERAND (arg1, 0);
3308 }
3309 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3310 {
3311 common = TREE_OPERAND (arg0, 1);
3312 left = TREE_OPERAND (arg0, 0);
3313 right = TREE_OPERAND (arg1, 1);
3314 }
3315 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3316 {
3317 common = TREE_OPERAND (arg0, 1);
3318 left = TREE_OPERAND (arg0, 0);
3319 right = TREE_OPERAND (arg1, 0);
3320 }
3321 else
3322 return 0;
3323
3324 common = fold_convert_loc (loc, type, common);
3325 left = fold_convert_loc (loc, type, left);
3326 right = fold_convert_loc (loc, type, right);
3327 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3328 fold_build2_loc (loc, code, type, left, right));
3329 }
3330
3331 /* Knowing that ARG0 and ARG1 are each a MULT_EXPR or RDIV_EXPR, simplify
3332 a binary operation with code CODE. This optimization is unsafe. */
3333 static tree
3334 distribute_real_division (location_t loc, enum tree_code code, tree type,
3335 tree arg0, tree arg1)
3336 {
3337 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3338 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3339
3340 /* (A / C) +- (B / C) -> (A +- B) / C. */
3341 if (mul0 == mul1
3342 && operand_equal_p (TREE_OPERAND (arg0, 1),
3343 TREE_OPERAND (arg1, 1), 0))
3344 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3345 fold_build2_loc (loc, code, type,
3346 TREE_OPERAND (arg0, 0),
3347 TREE_OPERAND (arg1, 0)),
3348 TREE_OPERAND (arg0, 1));
3349
3350 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3351 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3352 TREE_OPERAND (arg1, 0), 0)
3353 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3354 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3355 {
3356 REAL_VALUE_TYPE r0, r1;
3357 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3358 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3359 if (!mul0)
3360 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3361 if (!mul1)
3362 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3363 real_arithmetic (&r0, code, &r0, &r1);
3364 return fold_build2_loc (loc, MULT_EXPR, type,
3365 TREE_OPERAND (arg0, 0),
3366 build_real (type, r0));
3367 }
3368
3369 return NULL_TREE;
3370 }
3371 \f
3372 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3373 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
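     /* For example, extracting 8 bits at bit position 16 from a 32-bit
        INNER yields BIT_FIELD_REF <INNER, 8, 16>, converted to TYPE if
        the temporary bit-field type differs from it.  */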
3374
3375 static tree
3376 make_bit_field_ref (location_t loc, tree inner, tree type,
3377 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3378 {
3379 tree result, bftype;
3380
3381 if (bitpos == 0)
3382 {
3383 tree size = TYPE_SIZE (TREE_TYPE (inner));
3384 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3385 || POINTER_TYPE_P (TREE_TYPE (inner)))
3386 && host_integerp (size, 0)
3387 && tree_low_cst (size, 0) == bitsize)
3388 return fold_convert_loc (loc, type, inner);
3389 }
3390
3391 bftype = type;
3392 if (TYPE_PRECISION (bftype) != bitsize
3393 || TYPE_UNSIGNED (bftype) == !unsignedp)
3394 bftype = build_nonstandard_integer_type (bitsize, 0);
3395
3396 result = build3 (BIT_FIELD_REF, bftype, inner,
3397 size_int (bitsize), bitsize_int (bitpos));
3398 SET_EXPR_LOCATION (result, loc);
3399
3400 if (bftype != type)
3401 result = fold_convert_loc (loc, type, result);
3402
3403 return result;
3404 }
3405
3406 /* Optimize a bit-field compare.
3407
3408 There are two cases: First is a compare against a constant and the
3409 second is a comparison of two items where the fields are at the same
3410 bit position relative to the start of a chunk (byte, halfword, word)
3411 large enough to contain it. In these cases we can avoid the shift
3412 implicit in bitfield extractions.
3413
3414 For constants, we emit a compare of the shifted constant with the
3415 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3416 compared. For two fields at the same position, we do the ANDs with the
3417 similar mask and compare the result of the ANDs.
3418
3419 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3420 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3421 are the left and right operands of the comparison, respectively.
3422
3423 If the optimization described above can be done, we return the resulting
3424 tree. Otherwise we return zero. */
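     /* As an illustration, assuming a little-endian target and a
        bit-field of width 3 at bit offset 2 within a byte, s.b == 3 can
        become (BYTE & MASK) == (3 << 2), where BYTE is a byte-sized load
        of the containing object and MASK covers bits 2..4; the shift
        implicit in reading the bit-field is avoided.  */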
3425
3426 static tree
3427 optimize_bit_field_compare (location_t loc, enum tree_code code,
3428 tree compare_type, tree lhs, tree rhs)
3429 {
3430 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3431 tree type = TREE_TYPE (lhs);
3432 tree signed_type, unsigned_type;
3433 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3434 enum machine_mode lmode, rmode, nmode;
3435 int lunsignedp, runsignedp;
3436 int lvolatilep = 0, rvolatilep = 0;
3437 tree linner, rinner = NULL_TREE;
3438 tree mask;
3439 tree offset;
3440
3441 /* Get all the information about the extractions being done. If the bit size
3442 is the same as the size of the underlying object, we aren't doing an
3443 extraction at all and so can do nothing. We also don't want to
3444 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3445 then will no longer be able to replace it. */
3446 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3447 &lunsignedp, &lvolatilep, false);
3448 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3449 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3450 return 0;
3451
3452 if (!const_p)
3453 {
3454 /* If this is not a constant, we can only do something if bit positions,
3455 sizes, and signedness are the same. */
3456 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3457 &runsignedp, &rvolatilep, false);
3458
3459 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3460 || lunsignedp != runsignedp || offset != 0
3461 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3462 return 0;
3463 }
3464
3465 /* See if we can find a mode to refer to this field. We should be able to,
3466 but fail if we can't. */
3467 nmode = get_best_mode (lbitsize, lbitpos,
3468 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3469 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3470 TYPE_ALIGN (TREE_TYPE (rinner))),
3471 word_mode, lvolatilep || rvolatilep);
3472 if (nmode == VOIDmode)
3473 return 0;
3474
3475 /* Set signed and unsigned types of the precision of this mode for the
3476 shifts below. */
3477 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3478 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3479
3480 /* Compute the bit position and size for the new reference and our offset
3481 within it. If the new reference is the same size as the original, we
3482 won't optimize anything, so return zero. */
3483 nbitsize = GET_MODE_BITSIZE (nmode);
3484 nbitpos = lbitpos & ~ (nbitsize - 1);
3485 lbitpos -= nbitpos;
3486 if (nbitsize == lbitsize)
3487 return 0;
3488
3489 if (BYTES_BIG_ENDIAN)
3490 lbitpos = nbitsize - lbitsize - lbitpos;
3491
3492 /* Make the mask to be used against the extracted field. */
3493 mask = build_int_cst_type (unsigned_type, -1);
3494 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3495 mask = const_binop (RSHIFT_EXPR, mask,
3496 size_int (nbitsize - lbitsize - lbitpos), 0);
3497
3498 if (! const_p)
3499 /* If not comparing with constant, just rework the comparison
3500 and return. */
3501 return fold_build2_loc (loc, code, compare_type,
3502 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3503 make_bit_field_ref (loc, linner,
3504 unsigned_type,
3505 nbitsize, nbitpos,
3506 1),
3507 mask),
3508 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3509 make_bit_field_ref (loc, rinner,
3510 unsigned_type,
3511 nbitsize, nbitpos,
3512 1),
3513 mask));
3514
3515 /* Otherwise, we are handling the constant case. See if the constant is too
3516 big for the field. Warn and return a tree for 0 (false) if so. We do
3517 this not only for its own sake, but to avoid having to test for this
3518 error case below. If we didn't, we might generate wrong code.
3519
3520 For unsigned fields, the constant shifted right by the field length should
3521 be all zero. For signed fields, the high-order bits should agree with
3522 the sign bit. */
3523
3524 if (lunsignedp)
3525 {
3526 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3527 fold_convert_loc (loc,
3528 unsigned_type, rhs),
3529 size_int (lbitsize), 0)))
3530 {
3531 warning (0, "comparison is always %d due to width of bit-field",
3532 code == NE_EXPR);
3533 return constant_boolean_node (code == NE_EXPR, compare_type);
3534 }
3535 }
3536 else
3537 {
3538 tree tem = const_binop (RSHIFT_EXPR,
3539 fold_convert_loc (loc, signed_type, rhs),
3540 size_int (lbitsize - 1), 0);
3541 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3542 {
3543 warning (0, "comparison is always %d due to width of bit-field",
3544 code == NE_EXPR);
3545 return constant_boolean_node (code == NE_EXPR, compare_type);
3546 }
3547 }
3548
3549 /* Single-bit compares should always be against zero. */
3550 if (lbitsize == 1 && ! integer_zerop (rhs))
3551 {
3552 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3553 rhs = build_int_cst (type, 0);
3554 }
3555
3556 /* Make a new bitfield reference, shift the constant over the
3557 appropriate number of bits and mask it with the computed mask
3558 (in case this was a signed field). If we changed it, make a new one. */
3559 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3560 if (lvolatilep)
3561 {
3562 TREE_SIDE_EFFECTS (lhs) = 1;
3563 TREE_THIS_VOLATILE (lhs) = 1;
3564 }
3565
3566 rhs = const_binop (BIT_AND_EXPR,
3567 const_binop (LSHIFT_EXPR,
3568 fold_convert_loc (loc, unsigned_type, rhs),
3569 size_int (lbitpos), 0),
3570 mask, 0);
3571
3572 lhs = build2 (code, compare_type,
3573 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3574 rhs);
3575 SET_EXPR_LOCATION (lhs, loc);
3576 return lhs;
3577 }
3578 \f
3579 /* Subroutine for fold_truthop: decode a field reference.
3580
3581 If EXP is a comparison reference, we return the innermost reference.
3582
3583 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3584 set to the starting bit number.
3585
3586 If the innermost field can be completely contained in a mode-sized
3587 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3588
3589 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3590 otherwise it is not changed.
3591
3592 *PUNSIGNEDP is set to the signedness of the field.
3593
3594 *PMASK is set to the mask used. This is either contained in a
3595 BIT_AND_EXPR or derived from the width of the field.
3596
3597 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3598
3599 Return 0 if this is not a component reference or is one that we can't
3600 do anything with. */
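     /* For example, for EXP = s.f & 3, where f is an 8-bit unsigned
        field, the result is the reference to the containing object,
        *PBITSIZE is 8, *PAND_MASK is 3, and *PMASK is the field mask
        combined with it, i.e. 3.  */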
3601
3602 static tree
3603 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3604 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3605 int *punsignedp, int *pvolatilep,
3606 tree *pmask, tree *pand_mask)
3607 {
3608 tree outer_type = 0;
3609 tree and_mask = 0;
3610 tree mask, inner, offset;
3611 tree unsigned_type;
3612 unsigned int precision;
3613
3614 /* All the optimizations using this function assume integer fields.
3615 There are problems with FP fields since the type_for_size call
3616 below can fail for, e.g., XFmode. */
3617 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3618 return 0;
3619
3620 /* We are interested in the bare arrangement of bits, so strip everything
3621 that doesn't affect the machine mode. However, record the type of the
3622 outermost expression if it may matter below. */
3623 if (CONVERT_EXPR_P (exp)
3624 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3625 outer_type = TREE_TYPE (exp);
3626 STRIP_NOPS (exp);
3627
3628 if (TREE_CODE (exp) == BIT_AND_EXPR)
3629 {
3630 and_mask = TREE_OPERAND (exp, 1);
3631 exp = TREE_OPERAND (exp, 0);
3632 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3633 if (TREE_CODE (and_mask) != INTEGER_CST)
3634 return 0;
3635 }
3636
3637 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3638 punsignedp, pvolatilep, false);
3639 if ((inner == exp && and_mask == 0)
3640 || *pbitsize < 0 || offset != 0
3641 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3642 return 0;
3643
3644 /* If the number of bits in the reference is the same as the bitsize of
3645 the outer type, then the outer type gives the signedness. Otherwise
3646 (in case of a small bitfield) the signedness is unchanged. */
3647 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3648 *punsignedp = TYPE_UNSIGNED (outer_type);
3649
3650 /* Compute the mask to access the bitfield. */
3651 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3652 precision = TYPE_PRECISION (unsigned_type);
3653
3654 mask = build_int_cst_type (unsigned_type, -1);
3655
3656 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3657 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3658
3659 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3660 if (and_mask != 0)
3661 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3662 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3663
3664 *pmask = mask;
3665 *pand_mask = and_mask;
3666 return inner;
3667 }
3668
3669 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3670 bit positions. */
3671
3672 static int
3673 all_ones_mask_p (const_tree mask, int size)
3674 {
3675 tree type = TREE_TYPE (mask);
3676 unsigned int precision = TYPE_PRECISION (type);
3677 tree tmask;
3678
3679 tmask = build_int_cst_type (signed_type_for (type), -1);
3680
3681 return
3682 tree_int_cst_equal (mask,
3683 const_binop (RSHIFT_EXPR,
3684 const_binop (LSHIFT_EXPR, tmask,
3685 size_int (precision - size),
3686 0),
3687 size_int (precision - size), 0));
3688 }
3689
3690 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3691 represents the sign bit of EXP's type. If EXP represents a sign
3692 or zero extension, also test VAL against the unextended type.
3693 The return value is the (sub)expression whose sign bit is VAL,
3694 or NULL_TREE otherwise. */
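/* Illustrative note (editor's sketch): for a 32-bit type on a host
   with 64-bit HOST_WIDE_INT, the code below compares VAL against
   lo == 1 << 31 under mask_lo == 0xffffffff, so VAL matches exactly
   when it is the sign-bit constant 0x80000000 of that type once the
   bits beyond the type's precision are ignored.  */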
3695
3696 static tree
3697 sign_bit_p (tree exp, const_tree val)
3698 {
3699 unsigned HOST_WIDE_INT mask_lo, lo;
3700 HOST_WIDE_INT mask_hi, hi;
3701 int width;
3702 tree t;
3703
3704 /* Tree EXP must have an integral type. */
3705 t = TREE_TYPE (exp);
3706 if (! INTEGRAL_TYPE_P (t))
3707 return NULL_TREE;
3708
3709 /* Tree VAL must be an integer constant. */
3710 if (TREE_CODE (val) != INTEGER_CST
3711 || TREE_OVERFLOW (val))
3712 return NULL_TREE;
3713
3714 width = TYPE_PRECISION (t);
3715 if (width > HOST_BITS_PER_WIDE_INT)
3716 {
3717 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3718 lo = 0;
3719
3720 mask_hi = ((unsigned HOST_WIDE_INT) -1
3721 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3722 mask_lo = -1;
3723 }
3724 else
3725 {
3726 hi = 0;
3727 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3728
3729 mask_hi = 0;
3730 mask_lo = ((unsigned HOST_WIDE_INT) -1
3731 >> (HOST_BITS_PER_WIDE_INT - width));
3732 }
3733
3734 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3735 treat VAL as if it were unsigned. */
3736 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3737 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3738 return exp;
3739
3740 /* Handle extension from a narrower type. */
3741 if (TREE_CODE (exp) == NOP_EXPR
3742 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3743 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3744
3745 return NULL_TREE;
3746 }
3747
3748 /* Subroutine for fold_truthop: determine if an operand is simple enough
3749 to be evaluated unconditionally. */
3750
3751 static int
3752 simple_operand_p (const_tree exp)
3753 {
3754 /* Strip any conversions that don't change the machine mode. */
3755 STRIP_NOPS (exp);
3756
3757 return (CONSTANT_CLASS_P (exp)
3758 || TREE_CODE (exp) == SSA_NAME
3759 || (DECL_P (exp)
3760 && ! TREE_ADDRESSABLE (exp)
3761 && ! TREE_THIS_VOLATILE (exp)
3762 && ! DECL_NONLOCAL (exp)
3763 /* Don't regard global variables as simple. They may be
3764 allocated in ways unknown to the compiler (shared memory,
3765 #pragma weak, etc). */
3766 && ! TREE_PUBLIC (exp)
3767 && ! DECL_EXTERNAL (exp)
3768 /* Loading a static variable is unduly expensive, but global
3769 registers aren't expensive. */
3770 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3771 }
3772 \f
3773 /* The following functions are subroutines to fold_range_test and allow it to
3774 try to change a logical combination of comparisons into a range test.
3775
3776 For example, both
3777 X == 2 || X == 3 || X == 4 || X == 5
3778 and
3779 X >= 2 && X <= 5
3780 are converted to
3781 (unsigned) (X - 2) <= 3
3782
3783 We describe each set of comparisons as being either inside or outside
3784 a range, using a variable named like IN_P, and then describe the
3785 range with a lower and upper bound. If one of the bounds is omitted,
3786 it represents either the highest or lowest value of the type.
3787
3788 In the comments below, we represent a range by two numbers in brackets
3789 preceded by a "+" to designate being inside that range, or a "-" to
3790 designate being outside that range, so the condition can be inverted by
3791 flipping the prefix. An omitted bound is represented by a "-". For
3792 example, "- [-, 10]" means being outside the range starting at the lowest
3793 possible value and ending at 10, in other words, being greater than 10.
3794 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3795 always false.
3796
3797 We set up things so that the missing bounds are handled in a consistent
3798 manner so neither a missing bound nor "true" and "false" need to be
3799 handled using a special case. */
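/* Illustrative note (editor's sketch, not part of the original
   comment): the folded form above can be checked directly.  Assuming
   32-bit int, the two tests

     x == 2 || x == 3 || x == 4 || x == 5

   and

     (unsigned) x - 2 <= 3

   agree for every X: subtracting 2 maps [2, 5] onto [0, 3], while any
   X below 2 wraps around to a huge unsigned value and so fails the
   single comparison, covering both bounds at once.  */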
3800
3801 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3802 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3803 and UPPER1_P are nonzero if the respective argument is an upper bound
3804 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3805 must be specified for a comparison. ARG1 will be converted to ARG0's
3806 type if both are specified. */
3807
3808 static tree
3809 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3810 tree arg1, int upper1_p)
3811 {
3812 tree tem;
3813 int result;
3814 int sgn0, sgn1;
3815
3816 /* If neither arg represents infinity, do the normal operation.
3817 Else, if not a comparison, return infinity. Else handle the special
3818 comparison rules. Note that most of the cases below won't occur, but
3819 are handled for consistency. */
3820
3821 if (arg0 != 0 && arg1 != 0)
3822 {
3823 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3824 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3825 STRIP_NOPS (tem);
3826 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3827 }
3828
3829 if (TREE_CODE_CLASS (code) != tcc_comparison)
3830 return 0;
3831
3832 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3833 for neither. In real mathematics, we cannot assume open-ended ranges are
3834 the same. But this is computer arithmetic, where numbers are finite.
3835 We can therefore replace any unbounded end of a range with a value Z,
3836 Z being greater than any representable number. This permits us to
3837 treat unbounded ranges as equal. */
3838 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3839 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3840 switch (code)
3841 {
3842 case EQ_EXPR:
3843 result = sgn0 == sgn1;
3844 break;
3845 case NE_EXPR:
3846 result = sgn0 != sgn1;
3847 break;
3848 case LT_EXPR:
3849 result = sgn0 < sgn1;
3850 break;
3851 case LE_EXPR:
3852 result = sgn0 <= sgn1;
3853 break;
3854 case GT_EXPR:
3855 result = sgn0 > sgn1;
3856 break;
3857 case GE_EXPR:
3858 result = sgn0 >= sgn1;
3859 break;
3860 default:
3861 gcc_unreachable ();
3862 }
3863
3864 return constant_boolean_node (result, type);
3865 }
3866 \f
3867 /* Given EXP, a logical expression, set the range it is testing into
3868 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3869 actually being tested. *PLOW and *PHIGH will be made of the same
3870 type as the returned expression. If EXP is not a comparison, we
3871 will most likely not be returning a useful value and range. Set
3872 *STRICT_OVERFLOW_P to true if the return value is only valid
3873 because signed overflow is undefined; otherwise, do not change
3874 *STRICT_OVERFLOW_P. */
3875
3876 tree
3877 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3878 bool *strict_overflow_p)
3879 {
3880 enum tree_code code;
3881 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3882 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3883 int in_p, n_in_p;
3884 tree low, high, n_low, n_high;
3885 location_t loc = EXPR_LOCATION (exp);
3886
3887 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3888 and see if we can refine the range. Some of the cases below may not
3889 happen, but it doesn't seem worth worrying about this. We "continue"
3890 the outer loop when we've changed something; otherwise we "break"
3891 the switch, which will "break" the while. */
3892
3893 in_p = 0;
3894 low = high = build_int_cst (TREE_TYPE (exp), 0);
3895
3896 while (1)
3897 {
3898 code = TREE_CODE (exp);
3899 exp_type = TREE_TYPE (exp);
3900
3901 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3902 {
3903 if (TREE_OPERAND_LENGTH (exp) > 0)
3904 arg0 = TREE_OPERAND (exp, 0);
3905 if (TREE_CODE_CLASS (code) == tcc_comparison
3906 || TREE_CODE_CLASS (code) == tcc_unary
3907 || TREE_CODE_CLASS (code) == tcc_binary)
3908 arg0_type = TREE_TYPE (arg0);
3909 if (TREE_CODE_CLASS (code) == tcc_binary
3910 || TREE_CODE_CLASS (code) == tcc_comparison
3911 || (TREE_CODE_CLASS (code) == tcc_expression
3912 && TREE_OPERAND_LENGTH (exp) > 1))
3913 arg1 = TREE_OPERAND (exp, 1);
3914 }
3915
3916 switch (code)
3917 {
3918 case TRUTH_NOT_EXPR:
3919 in_p = ! in_p, exp = arg0;
3920 continue;
3921
3922 case EQ_EXPR: case NE_EXPR:
3923 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3924 /* We can only do something if the range is testing for zero
3925 and if the second operand is an integer constant. Note that
3926 saying something is "in" the range we make is done by
3927 complementing IN_P, since the initial state represents being
3928 not equal to zero; "out" means leaving IN_P alone. */
3929 if (low == 0 || high == 0
3930 || ! integer_zerop (low) || ! integer_zerop (high)
3931 || TREE_CODE (arg1) != INTEGER_CST)
3932 break;
3933
3934 switch (code)
3935 {
3936 case NE_EXPR: /* - [c, c] */
3937 low = high = arg1;
3938 break;
3939 case EQ_EXPR: /* + [c, c] */
3940 in_p = ! in_p, low = high = arg1;
3941 break;
3942 case GT_EXPR: /* - [-, c] */
3943 low = 0, high = arg1;
3944 break;
3945 case GE_EXPR: /* + [c, -] */
3946 in_p = ! in_p, low = arg1, high = 0;
3947 break;
3948 case LT_EXPR: /* - [c, -] */
3949 low = arg1, high = 0;
3950 break;
3951 case LE_EXPR: /* + [-, c] */
3952 in_p = ! in_p, low = 0, high = arg1;
3953 break;
3954 default:
3955 gcc_unreachable ();
3956 }
3957
3958 /* If this is an unsigned comparison, we also know that EXP is
3959 greater than or equal to zero. We base the range tests we make
3960 on that fact, so we record it here so we can parse existing
3961 range tests. We test arg0_type since often the return type
3962 of, e.g. EQ_EXPR, is boolean. */
3963 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3964 {
3965 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3966 in_p, low, high, 1,
3967 build_int_cst (arg0_type, 0),
3968 NULL_TREE))
3969 break;
3970
3971 in_p = n_in_p, low = n_low, high = n_high;
3972
3973 /* If the high bound is missing, but we have a nonzero low
3974 bound, reverse the range so it goes from zero to the low bound
3975 minus 1. */
3976 if (high == 0 && low && ! integer_zerop (low))
3977 {
3978 in_p = ! in_p;
3979 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3980 integer_one_node, 0);
3981 low = build_int_cst (arg0_type, 0);
3982 }
3983 }
3984
3985 exp = arg0;
3986 continue;
3987
3988 case NEGATE_EXPR:
3989 /* (-x) IN [a,b] -> x in [-b, -a] */
3990 n_low = range_binop (MINUS_EXPR, exp_type,
3991 build_int_cst (exp_type, 0),
3992 0, high, 1);
3993 n_high = range_binop (MINUS_EXPR, exp_type,
3994 build_int_cst (exp_type, 0),
3995 0, low, 0);
3996 low = n_low, high = n_high;
3997 exp = arg0;
3998 continue;
3999
4000 case BIT_NOT_EXPR:
4001 /* ~ X -> -X - 1 */
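/* (Editor's note: in two's complement, ~X == -X - 1 for every X, so
   after this rewrite the NEGATE_EXPR and MINUS_EXPR cases can finish
   computing the range.)  */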
4002 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4003 build_int_cst (exp_type, 1));
4004 SET_EXPR_LOCATION (exp, loc);
4005 continue;
4006
4007 case PLUS_EXPR: case MINUS_EXPR:
4008 if (TREE_CODE (arg1) != INTEGER_CST)
4009 break;
4010
4011 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4012 move a constant to the other side. */
4013 if (!TYPE_UNSIGNED (arg0_type)
4014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4015 break;
4016
4017 /* If EXP is signed, any overflow in the computation is undefined,
4018 so we don't worry about it so long as our computations on
4019 the bounds don't overflow. For unsigned, overflow is defined
4020 and this is exactly the right thing. */
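/* Illustrative note (editor's sketch): for "X + 3" tested against
   + [10, 20], the bounds are rewritten below so that X itself is
   tested against + [7, 17]; for MINUS_EXPR the constant is added
   back instead of subtracted.  */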
4021 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4022 arg0_type, low, 0, arg1, 0);
4023 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4024 arg0_type, high, 1, arg1, 0);
4025 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4026 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4027 break;
4028
4029 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4030 *strict_overflow_p = true;
4031
4032 /* Check for an unsigned range which has wrapped around the maximum
4033 value thus making n_high < n_low, and normalize it. */
4034 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4035 {
4036 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4037 integer_one_node, 0);
4038 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4039 integer_one_node, 0);
4040
4041 /* If the range is of the form +/- [ x+1, x ], we won't
4042 be able to normalize it. But then, it represents the
4043 whole range or the empty set, so make it
4044 +/- [ -, - ]. */
4045 if (tree_int_cst_equal (n_low, low)
4046 && tree_int_cst_equal (n_high, high))
4047 low = high = 0;
4048 else
4049 in_p = ! in_p;
4050 }
4051 else
4052 low = n_low, high = n_high;
4053
4054 exp = arg0;
4055 continue;
4056
4057 CASE_CONVERT: case NON_LVALUE_EXPR:
4058 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4059 break;
4060
4061 if (! INTEGRAL_TYPE_P (arg0_type)
4062 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4063 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4064 break;
4065
4066 n_low = low, n_high = high;
4067
4068 if (n_low != 0)
4069 n_low = fold_convert_loc (loc, arg0_type, n_low);
4070
4071 if (n_high != 0)
4072 n_high = fold_convert_loc (loc, arg0_type, n_high);
4073
4074
4075 /* If we're converting arg0 from an unsigned type to exp's
4076 signed type, we will be doing the comparison as unsigned.
4077 The tests above have already verified that LOW and HIGH
4078 are both positive.
4079
4080 So we have to ensure that we will handle large unsigned
4081 values the same way that the current signed bounds treat
4082 negative values. */
4083
4084 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4085 {
4086 tree high_positive;
4087 tree equiv_type;
4088 /* For fixed-point modes, we need to pass the saturating flag
4089 as the 2nd parameter. */
4090 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4091 equiv_type = lang_hooks.types.type_for_mode
4092 (TYPE_MODE (arg0_type),
4093 TYPE_SATURATING (arg0_type));
4094 else
4095 equiv_type = lang_hooks.types.type_for_mode
4096 (TYPE_MODE (arg0_type), 1);
4097
4098 /* A range without an upper bound is, naturally, unbounded.
4099 Since convert would have cropped a very large value, use
4100 the max value for the destination type. */
4101 high_positive
4102 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4103 : TYPE_MAX_VALUE (arg0_type);
4104
4105 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4106 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4107 fold_convert_loc (loc, arg0_type,
4108 high_positive),
4109 build_int_cst (arg0_type, 1));
4110
4111 /* If the low bound is specified, "and" the range with the
4112 range for which the original unsigned value will be
4113 positive. */
4114 if (low != 0)
4115 {
4116 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4117 1, n_low, n_high, 1,
4118 fold_convert_loc (loc, arg0_type,
4119 integer_zero_node),
4120 high_positive))
4121 break;
4122
4123 in_p = (n_in_p == in_p);
4124 }
4125 else
4126 {
4127 /* Otherwise, "or" the range with the range of the input
4128 that will be interpreted as negative. */
4129 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4130 0, n_low, n_high, 1,
4131 fold_convert_loc (loc, arg0_type,
4132 integer_zero_node),
4133 high_positive))
4134 break;
4135
4136 in_p = (in_p != n_in_p);
4137 }
4138 }
4139
4140 exp = arg0;
4141 low = n_low, high = n_high;
4142 continue;
4143
4144 default:
4145 break;
4146 }
4147
4148 break;
4149 }
4150
4151 /* If EXP is a constant, we can evaluate whether this is true or false. */
4152 if (TREE_CODE (exp) == INTEGER_CST)
4153 {
4154 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4155 exp, 0, low, 0))
4156 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4157 exp, 1, high, 1)));
4158 low = high = 0;
4159 exp = 0;
4160 }
4161
4162 *pin_p = in_p, *plow = low, *phigh = high;
4163 return exp;
4164 }
4165 \f
4166 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4167 type, TYPE, return an expression to test if EXP is in (or out of, depending
4168 on IN_P) the range. Return 0 if the test couldn't be created. */
4169
4170 tree
4171 build_range_check (location_t loc, tree type, tree exp, int in_p,
4172 tree low, tree high)
4173 {
4174 tree etype = TREE_TYPE (exp), value;
4175
4176 #ifdef HAVE_canonicalize_funcptr_for_compare
4177 /* Disable this optimization for function pointer expressions
4178 on targets that require function pointer canonicalization. */
4179 if (HAVE_canonicalize_funcptr_for_compare
4180 && TREE_CODE (etype) == POINTER_TYPE
4181 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4182 return NULL_TREE;
4183 #endif
4184
4185 if (! in_p)
4186 {
4187 value = build_range_check (loc, type, exp, 1, low, high);
4188 if (value != 0)
4189 return invert_truthvalue_loc (loc, value);
4190
4191 return 0;
4192 }
4193
4194 if (low == 0 && high == 0)
4195 return build_int_cst (type, 1);
4196
4197 if (low == 0)
4198 return fold_build2_loc (loc, LE_EXPR, type, exp,
4199 fold_convert_loc (loc, etype, high));
4200
4201 if (high == 0)
4202 return fold_build2_loc (loc, GE_EXPR, type, exp,
4203 fold_convert_loc (loc, etype, low));
4204
4205 if (operand_equal_p (low, high, 0))
4206 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4207 fold_convert_loc (loc, etype, low));
4208
4209 if (integer_zerop (low))
4210 {
4211 if (! TYPE_UNSIGNED (etype))
4212 {
4213 etype = unsigned_type_for (etype);
4214 high = fold_convert_loc (loc, etype, high);
4215 exp = fold_convert_loc (loc, etype, exp);
4216 }
4217 return build_range_check (loc, type, exp, 1, 0, high);
4218 }
4219
4220 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
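/* Illustrative note (editor's sketch): with ETYPE == unsigned char,
   LOW == 1 and HIGH == 127 match the test below, and the pair of
   comparisons collapses to (signed char) c > 0, which is positive
   for exactly the values 1 .. 127.  */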
4221 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4222 {
4223 unsigned HOST_WIDE_INT lo;
4224 HOST_WIDE_INT hi;
4225 int prec;
4226
4227 prec = TYPE_PRECISION (etype);
4228 if (prec <= HOST_BITS_PER_WIDE_INT)
4229 {
4230 hi = 0;
4231 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4232 }
4233 else
4234 {
4235 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4236 lo = (unsigned HOST_WIDE_INT) -1;
4237 }
4238
4239 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4240 {
4241 if (TYPE_UNSIGNED (etype))
4242 {
4243 tree signed_etype = signed_type_for (etype);
4244 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4245 etype
4246 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4247 else
4248 etype = signed_etype;
4249 exp = fold_convert_loc (loc, etype, exp);
4250 }
4251 return fold_build2_loc (loc, GT_EXPR, type, exp,
4252 build_int_cst (etype, 0));
4253 }
4254 }
4255
4256 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4257 This requires wrap-around arithmetic for the type of the expression.
4258 First make sure that arithmetic in this type is valid, then make sure
4259 that it wraps around. */
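/* Illustrative note (editor's sketch): with unsigned wrap-around
   arithmetic and LOW == 5, HIGH == 10, the pair of comparisons
   becomes the single test c - 5 <= 5; any c below 5 wraps to a value
   near the type maximum and fails it, just as the original c >= 5
   would.  */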
4260 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4261 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4262 TYPE_UNSIGNED (etype));
4263
4264 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4265 {
4266 tree utype, minv, maxv;
4267
4268 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4269 for the type in question, as we rely on this here. */
4270 utype = unsigned_type_for (etype);
4271 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4272 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4273 integer_one_node, 1);
4274 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4275
4276 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4277 minv, 1, maxv, 1)))
4278 etype = utype;
4279 else
4280 return 0;
4281 }
4282
4283 high = fold_convert_loc (loc, etype, high);
4284 low = fold_convert_loc (loc, etype, low);
4285 exp = fold_convert_loc (loc, etype, exp);
4286
4287 value = const_binop (MINUS_EXPR, high, low, 0);
4288
4289
4290 if (POINTER_TYPE_P (etype))
4291 {
4292 if (value != 0 && !TREE_OVERFLOW (value))
4293 {
4294 low = fold_convert_loc (loc, sizetype, low);
4295 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4296 return build_range_check (loc, type,
4297 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4298 etype, exp, low),
4299 1, build_int_cst (etype, 0), value);
4300 }
4301 return 0;
4302 }
4303
4304 if (value != 0 && !TREE_OVERFLOW (value))
4305 return build_range_check (loc, type,
4306 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4307 1, build_int_cst (etype, 0), value);
4308
4309 return 0;
4310 }
4311 \f
4312 /* Return the predecessor of VAL in its type, handling the infinite case. */
4313
4314 static tree
4315 range_predecessor (tree val)
4316 {
4317 tree type = TREE_TYPE (val);
4318
4319 if (INTEGRAL_TYPE_P (type)
4320 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4321 return 0;
4322 else
4323 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4324 }
4325
4326 /* Return the successor of VAL in its type, handling the infinite case. */
4327
4328 static tree
4329 range_successor (tree val)
4330 {
4331 tree type = TREE_TYPE (val);
4332
4333 if (INTEGRAL_TYPE_P (type)
4334 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4335 return 0;
4336 else
4337 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4338 }
4339
4340 /* Given two ranges, see if we can merge them into one. Return 1 if we
4341 can, 0 if we can't. Set the output range into the specified parameters. */
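/* Illustrative note (editor's sketch): merging + [2, 9] with
   + [5, 20] (both ranges included) yields the overlap + [5, 9];
   merging + [2, 9] with - [5, 20] instead yields + [2, 4], the part
   of the first range not excluded by the second.  */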
4342
4343 bool
4344 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4345 tree high0, int in1_p, tree low1, tree high1)
4346 {
4347 int no_overlap;
4348 int subset;
4349 int temp;
4350 tree tem;
4351 int in_p;
4352 tree low, high;
4353 int lowequal = ((low0 == 0 && low1 == 0)
4354 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4355 low0, 0, low1, 0)));
4356 int highequal = ((high0 == 0 && high1 == 0)
4357 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4358 high0, 1, high1, 1)));
4359
4360 /* Make range 0 be the range that starts first, or that ends last if
4361 they start at the same value. Swap them if that isn't already so. */
4362 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4363 low0, 0, low1, 0))
4364 || (lowequal
4365 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4366 high1, 1, high0, 1))))
4367 {
4368 temp = in0_p, in0_p = in1_p, in1_p = temp;
4369 tem = low0, low0 = low1, low1 = tem;
4370 tem = high0, high0 = high1, high1 = tem;
4371 }
4372
4373 /* Now flag two cases, whether the ranges are disjoint or whether the
4374 second range is totally subsumed in the first. Note that the tests
4375 below are simplified by the ones above. */
4376 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4377 high0, 1, low1, 0));
4378 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4379 high1, 1, high0, 1));
4380
4381 /* We now have four cases, depending on whether we are including or
4382 excluding the two ranges. */
4383 if (in0_p && in1_p)
4384 {
4385 /* If they don't overlap, the result is false. If the second range
4386 is a subset it is the result. Otherwise, the range is from the start
4387 of the second to the end of the first. */
4388 if (no_overlap)
4389 in_p = 0, low = high = 0;
4390 else if (subset)
4391 in_p = 1, low = low1, high = high1;
4392 else
4393 in_p = 1, low = low1, high = high0;
4394 }
4395
4396 else if (in0_p && ! in1_p)
4397 {
4398 /* If they don't overlap, the result is the first range. If they are
4399 equal, the result is false. If the second range is a subset of the
4400 first, and the ranges begin at the same place, we go from just after
4401 the end of the second range to the end of the first. If the second
4402 range is not a subset of the first, or if it is a subset and both
4403 ranges end at the same place, the range starts at the start of the
4404 first range and ends just before the second range.
4405 Otherwise, we can't describe this as a single range. */
4406 if (no_overlap)
4407 in_p = 1, low = low0, high = high0;
4408 else if (lowequal && highequal)
4409 in_p = 0, low = high = 0;
4410 else if (subset && lowequal)
4411 {
4412 low = range_successor (high1);
4413 high = high0;
4414 in_p = 1;
4415 if (low == 0)
4416 {
4417 /* We are in the weird situation where high0 > high1 but
4418 high1 has no successor. Punt. */
4419 return 0;
4420 }
4421 }
4422 else if (! subset || highequal)
4423 {
4424 low = low0;
4425 high = range_predecessor (low1);
4426 in_p = 1;
4427 if (high == 0)
4428 {
4429 /* low0 < low1 but low1 has no predecessor. Punt. */
4430 return 0;
4431 }
4432 }
4433 else
4434 return 0;
4435 }
4436
4437 else if (! in0_p && in1_p)
4438 {
4439 /* If they don't overlap, the result is the second range. If the second
4440 is a subset of the first, the result is false. Otherwise,
4441 the range starts just after the first range and ends at the
4442 end of the second. */
4443 if (no_overlap)
4444 in_p = 1, low = low1, high = high1;
4445 else if (subset || highequal)
4446 in_p = 0, low = high = 0;
4447 else
4448 {
4449 low = range_successor (high0);
4450 high = high1;
4451 in_p = 1;
4452 if (low == 0)
4453 {
4454 /* high1 > high0 but high0 has no successor. Punt. */
4455 return 0;
4456 }
4457 }
4458 }
4459
4460 else
4461 {
4462 /* The case where we are excluding both ranges. Here the complex case
4463 is if they don't overlap. In that case, the only time we have a
4464 range is if they are adjacent. If the second is a subset of the
4465 first, the result is the first. Otherwise, the range to exclude
4466 starts at the beginning of the first range and ends at the end of the
4467 second. */
4468 if (no_overlap)
4469 {
4470 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4471 range_successor (high0),
4472 1, low1, 0)))
4473 in_p = 0, low = low0, high = high1;
4474 else
4475 {
4476 /* Canonicalize - [min, x] into - [-, x]. */
4477 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4478 switch (TREE_CODE (TREE_TYPE (low0)))
4479 {
4480 case ENUMERAL_TYPE:
4481 if (TYPE_PRECISION (TREE_TYPE (low0))
4482 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4483 break;
4484 /* FALLTHROUGH */
4485 case INTEGER_TYPE:
4486 if (tree_int_cst_equal (low0,
4487 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4488 low0 = 0;
4489 break;
4490 case POINTER_TYPE:
4491 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4492 && integer_zerop (low0))
4493 low0 = 0;
4494 break;
4495 default:
4496 break;
4497 }
4498
4499 /* Canonicalize - [x, max] into - [x, -]. */
4500 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4501 switch (TREE_CODE (TREE_TYPE (high1)))
4502 {
4503 case ENUMERAL_TYPE:
4504 if (TYPE_PRECISION (TREE_TYPE (high1))
4505 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4506 break;
4507 /* FALLTHROUGH */
4508 case INTEGER_TYPE:
4509 if (tree_int_cst_equal (high1,
4510 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4511 high1 = 0;
4512 break;
4513 case POINTER_TYPE:
4514 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4515 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4516 high1, 1,
4517 integer_one_node, 1)))
4518 high1 = 0;
4519 break;
4520 default:
4521 break;
4522 }
4523
4524 /* The ranges might also be adjacent across the maximum and
4525 minimum values of the given type. For
4526 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4527 return + [x + 1, y - 1]. */
4528 if (low0 == 0 && high1 == 0)
4529 {
4530 low = range_successor (high0);
4531 high = range_predecessor (low1);
4532 if (low == 0 || high == 0)
4533 return 0;
4534
4535 in_p = 1;
4536 }
4537 else
4538 return 0;
4539 }
4540 }
4541 else if (subset)
4542 in_p = 0, low = low0, high = high0;
4543 else
4544 in_p = 0, low = low0, high = high1;
4545 }
4546
4547 *pin_p = in_p, *plow = low, *phigh = high;
4548 return 1;
4549 }
4550 \f
4551
4552 /* Subroutine of fold, looking inside expressions of the form
4553 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4554 of the COND_EXPR. This function is being used also to optimize
4555 A op B ? C : A, by reversing the comparison first.
4556
4557 Return a folded expression whose code is not a COND_EXPR
4558 anymore, or NULL_TREE if no folding opportunity is found. */
4559
4560 static tree
4561 fold_cond_expr_with_comparison (location_t loc, tree type,
4562 tree arg0, tree arg1, tree arg2)
4563 {
4564 enum tree_code comp_code = TREE_CODE (arg0);
4565 tree arg00 = TREE_OPERAND (arg0, 0);
4566 tree arg01 = TREE_OPERAND (arg0, 1);
4567 tree arg1_type = TREE_TYPE (arg1);
4568 tree tem;
4569
4570 STRIP_NOPS (arg1);
4571 STRIP_NOPS (arg2);
4572
4573 /* If we have A op 0 ? A : -A, consider applying the following
4574 transformations:
4575
4576 A == 0? A : -A same as -A
4577 A != 0? A : -A same as A
4578 A >= 0? A : -A same as abs (A)
4579 A > 0? A : -A same as abs (A)
4580 A <= 0? A : -A same as -abs (A)
4581 A < 0? A : -A same as -abs (A)
4582
4583 None of these transformations work for modes with signed
4584 zeros. If A is +/-0, the first two transformations will
4585 change the sign of the result (from +0 to -0, or vice
4586 versa). The last four will fix the sign of the result,
4587 even though the original expressions could be positive or
4588 negative, depending on the sign of A.
4589
4590 Note that all these transformations are correct if A is
4591 NaN, since the two alternatives (A and -A) are also NaNs. */
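/* Illustrative note (editor's sketch): with A == -0.0, the expression
   "A == 0 ? A : -A" evaluates to -0.0 (the comparison is true), while
   the suggested replacement -A is +0.0; that sign flip is why these
   transformations are guarded by HONOR_SIGNED_ZEROS below.  */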
4592 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4593 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4594 ? real_zerop (arg01)
4595 : integer_zerop (arg01))
4596 && ((TREE_CODE (arg2) == NEGATE_EXPR
4597 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4598 /* In the case that A is of the form X-Y, '-A' (arg2) may
4599 have already been folded to Y-X; check for that. */
4600 || (TREE_CODE (arg1) == MINUS_EXPR
4601 && TREE_CODE (arg2) == MINUS_EXPR
4602 && operand_equal_p (TREE_OPERAND (arg1, 0),
4603 TREE_OPERAND (arg2, 1), 0)
4604 && operand_equal_p (TREE_OPERAND (arg1, 1),
4605 TREE_OPERAND (arg2, 0), 0))))
4606 switch (comp_code)
4607 {
4608 case EQ_EXPR:
4609 case UNEQ_EXPR:
4610 tem = fold_convert_loc (loc, arg1_type, arg1);
4611 return pedantic_non_lvalue_loc (loc,
4612 fold_convert_loc (loc, type,
4613 negate_expr (tem)));
4614 case NE_EXPR:
4615 case LTGT_EXPR:
4616 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4617 case UNGE_EXPR:
4618 case UNGT_EXPR:
4619 if (flag_trapping_math)
4620 break;
4621 /* Fall through. */
4622 case GE_EXPR:
4623 case GT_EXPR:
4624 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4625 arg1 = fold_convert_loc (loc, signed_type_for
4626 (TREE_TYPE (arg1)), arg1);
4627 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4628 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4629 case UNLE_EXPR:
4630 case UNLT_EXPR:
4631 if (flag_trapping_math)
4632 break;
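/* Fall through.  */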
4633 case LE_EXPR:
4634 case LT_EXPR:
4635 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4636 arg1 = fold_convert_loc (loc, signed_type_for
4637 (TREE_TYPE (arg1)), arg1);
4638 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4639 return negate_expr (fold_convert_loc (loc, type, tem));
4640 default:
4641 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4642 break;
4643 }
4644
4645 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4646 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4647 both transformations are correct when A is NaN: A != 0
4648 is then true, and A == 0 is false. */
4649
4650 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4651 && integer_zerop (arg01) && integer_zerop (arg2))
4652 {
4653 if (comp_code == NE_EXPR)
4654 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4655 else if (comp_code == EQ_EXPR)
4656 return build_int_cst (type, 0);
4657 }
4658
4659 /* Try some transformations of A op B ? A : B.
4660
4661 A == B? A : B same as B
4662 A != B? A : B same as A
4663 A >= B? A : B same as max (A, B)
4664 A > B? A : B same as max (B, A)
4665 A <= B? A : B same as min (A, B)
4666 A < B? A : B same as min (B, A)
4667
4668 As above, these transformations don't work in the presence
4669 of signed zeros. For example, if A and B are zeros of
4670 opposite sign, the first two transformations will change
4671 the sign of the result. In the last four, the original
4672 expressions give different results for (A=+0, B=-0) and
4673 (A=-0, B=+0), but the transformed expressions do not.
4674
4675 The first two transformations are correct if either A or B
4676 is a NaN. In the first transformation, the condition will
4677 be false, and B will indeed be chosen. In the case of the
4678 second transformation, the condition A != B will be true,
4679 and A will be chosen.
4680
4681 The conversions to max() and min() are not correct if B is
4682 a number and A is not. The conditions in the original
4683 expressions will be false, so all four give B. The min()
4684 and max() versions would give a NaN instead. */
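/* Illustrative note (editor's sketch): with A == NaN and B == 1.0,
   "A < B ? A : B" yields B because the comparison is false, whereas a
   MIN_EXPR may yield the NaN; hence the HONOR_NANS guards in the
   min/max cases below.  */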
4685 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4686 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4687 /* Avoid these transformations if the COND_EXPR may be used
4688 as an lvalue in the C++ front-end. PR c++/19199. */
4689 && (in_gimple_form
4690 || (strcmp (lang_hooks.name, "GNU C++") != 0
4691 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4692 || ! maybe_lvalue_p (arg1)
4693 || ! maybe_lvalue_p (arg2)))
4694 {
4695 tree comp_op0 = arg00;
4696 tree comp_op1 = arg01;
4697 tree comp_type = TREE_TYPE (comp_op0);
4698
4699 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4700 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4701 {
4702 comp_type = type;
4703 comp_op0 = arg1;
4704 comp_op1 = arg2;
4705 }
4706
4707 switch (comp_code)
4708 {
4709 case EQ_EXPR:
4710 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4711 case NE_EXPR:
4712 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4713 case LE_EXPR:
4714 case LT_EXPR:
4715 case UNLE_EXPR:
4716 case UNLT_EXPR:
4717 /* In C++ a ?: expression can be an lvalue, so put the
4718 operand which will be used if they are equal first
4719 so that we can convert this back to the
4720 corresponding COND_EXPR. */
4721 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4722 {
4723 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4724 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4725 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4726 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4727 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4728 comp_op1, comp_op0);
4729 return pedantic_non_lvalue_loc (loc,
4730 fold_convert_loc (loc, type, tem));
4731 }
4732 break;
4733 case GE_EXPR:
4734 case GT_EXPR:
4735 case UNGE_EXPR:
4736 case UNGT_EXPR:
4737 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4738 {
4739 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4740 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4741 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4742 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4743 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4744 comp_op1, comp_op0);
4745 return pedantic_non_lvalue_loc (loc,
4746 fold_convert_loc (loc, type, tem));
4747 }
4748 break;
4749 case UNEQ_EXPR:
4750 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4751 return pedantic_non_lvalue_loc (loc,
4752 fold_convert_loc (loc, type, arg2));
4753 break;
4754 case LTGT_EXPR:
4755 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4756 return pedantic_non_lvalue_loc (loc,
4757 fold_convert_loc (loc, type, arg1));
4758 break;
4759 default:
4760 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4761 break;
4762 }
4763 }
4764
4765 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4766 we might still be able to simplify this. For example,
4767 if C1 is one less or one more than C2, this might have started
4768 out as a MIN or MAX and been transformed by this function.
4769 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
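/* Illustrative note (editor's sketch): "A < C2 + 1 ? A : C2" is
   min (A, C2), so the LT_EXPR case below fires when ARG01 equals
   ARG2 + 1; the LE, GT and GE cases cover the remaining off-by-one
   variants symmetrically.  */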
4770
4771 if (INTEGRAL_TYPE_P (type)
4772 && TREE_CODE (arg01) == INTEGER_CST
4773 && TREE_CODE (arg2) == INTEGER_CST)
4774 switch (comp_code)
4775 {
4776 case EQ_EXPR:
4777 if (TREE_CODE (arg1) == INTEGER_CST)
4778 break;
4779 /* We can replace A with C1 in this case. */
4780 arg1 = fold_convert_loc (loc, type, arg01);
4781 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4782
4783 case LT_EXPR:
4784 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4785 MIN_EXPR, to preserve the signedness of the comparison. */
4786 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4787 OEP_ONLY_CONST)
4788 && operand_equal_p (arg01,
4789 const_binop (PLUS_EXPR, arg2,
4790 build_int_cst (type, 1), 0),
4791 OEP_ONLY_CONST))
4792 {
4793 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4794 fold_convert_loc (loc, TREE_TYPE (arg00),
4795 arg2));
4796 return pedantic_non_lvalue_loc (loc,
4797 fold_convert_loc (loc, type, tem));
4798 }
4799 break;
4800
4801 case LE_EXPR:
4802 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4803 as above. */
4804 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4805 OEP_ONLY_CONST)
4806 && operand_equal_p (arg01,
4807 const_binop (MINUS_EXPR, arg2,
4808 build_int_cst (type, 1), 0),
4809 OEP_ONLY_CONST))
4810 {
4811 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4812 fold_convert_loc (loc, TREE_TYPE (arg00),
4813 arg2));
4814 return pedantic_non_lvalue_loc (loc,
4815 fold_convert_loc (loc, type, tem));
4816 }
4817 break;
4818
4819 case GT_EXPR:
4820 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4821 MAX_EXPR, to preserve the signedness of the comparison. */
4822 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4823 OEP_ONLY_CONST)
4824 && operand_equal_p (arg01,
4825 const_binop (MINUS_EXPR, arg2,
4826 build_int_cst (type, 1), 0),
4827 OEP_ONLY_CONST))
4828 {
4829 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4830 fold_convert_loc (loc, TREE_TYPE (arg00),
4831 arg2));
4832 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4833 }
4834 break;
4835
4836 case GE_EXPR:
4837 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4838 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4839 OEP_ONLY_CONST)
4840 && operand_equal_p (arg01,
4841 const_binop (PLUS_EXPR, arg2,
4842 build_int_cst (type, 1), 0),
4843 OEP_ONLY_CONST))
4844 {
4845 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4846 fold_convert_loc (loc, TREE_TYPE (arg00),
4847 arg2));
4848 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4849 }
4850 break;
4851 case NE_EXPR:
4852 break;
4853 default:
4854 gcc_unreachable ();
4855 }
4856
4857 return NULL_TREE;
4858 }
4859
4860
4861 \f
4862 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4863 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4864 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4865 false) >= 2)
4866 #endif
4867
4868 /* EXP is some logical combination of boolean tests. See if we can
4869 merge it into some range test. Return the new tree if so. */
4870
4871 static tree
4872 fold_range_test (location_t loc, enum tree_code code, tree type,
4873 tree op0, tree op1)
4874 {
4875 int or_op = (code == TRUTH_ORIF_EXPR
4876 || code == TRUTH_OR_EXPR);
4877 int in0_p, in1_p, in_p;
4878 tree low0, low1, low, high0, high1, high;
4879 bool strict_overflow_p = false;
4880 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4881 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4882 tree tem;
4883 const char * const warnmsg = G_("assuming signed overflow does not occur "
4884 "when simplifying range test");
4885
4886 /* If this is an OR operation, invert both sides; we will invert
4887 again at the end. */
4888 if (or_op)
4889 in0_p = ! in0_p, in1_p = ! in1_p;
4890
4891 /* If both expressions are the same, if we can merge the ranges, and we
4892 can build the range test, return it or it inverted. If one of the
4893 ranges is always true or always false, consider it to be the same
4894 expression as the other. */
4895 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4896 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4897 in1_p, low1, high1)
4898 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4899 lhs != 0 ? lhs
4900 : rhs != 0 ? rhs : integer_zero_node,
4901 in_p, low, high))))
4902 {
4903 if (strict_overflow_p)
4904 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4905 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4906 }
4907
4908 /* On machines where branches are expensive, if this is a
4909 short-circuited branch and the underlying object on both sides
4910 is the same, make a non-short-circuit operation. */
4911 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4912 && lhs != 0 && rhs != 0
4913 && (code == TRUTH_ANDIF_EXPR
4914 || code == TRUTH_ORIF_EXPR)
4915 && operand_equal_p (lhs, rhs, 0))
4916 {
4917 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4918 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4919 which cases we can't do this. */
4920 if (simple_operand_p (lhs))
4921 {
4922 tem = build2 (code == TRUTH_ANDIF_EXPR
4923 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4924 type, op0, op1);
4925 SET_EXPR_LOCATION (tem, loc);
4926 return tem;
4927 }
4928
4929 else if (lang_hooks.decls.global_bindings_p () == 0
4930 && ! CONTAINS_PLACEHOLDER_P (lhs))
4931 {
4932 tree common = save_expr (lhs);
4933
4934 if (0 != (lhs = build_range_check (loc, type, common,
4935 or_op ? ! in0_p : in0_p,
4936 low0, high0))
4937 && (0 != (rhs = build_range_check (loc, type, common,
4938 or_op ? ! in1_p : in1_p,
4939 low1, high1))))
4940 {
4941 if (strict_overflow_p)
4942 fold_overflow_warning (warnmsg,
4943 WARN_STRICT_OVERFLOW_COMPARISON);
4944 tem = build2 (code == TRUTH_ANDIF_EXPR
4945 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4946 type, lhs, rhs);
4947 SET_EXPR_LOCATION (tem, loc);
4948 return tem;
4949 }
4950 }
4951 }
4952
4953 return 0;
4954 }
4955 \f
4956 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4957 bit value. Arrange things so the extra bits will be set to zero if and
4958 only if C is sign-extended to its full width. If MASK is nonzero,
4959 it is an INTEGER_CST that should be AND'ed with the extra bits. */
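/* Illustrative note (editor's sketch): with P == 8 in a 32-bit mode,
   unextend maps the sign-extended constant 0xffffff80 to 0x80 (extra
   bits cleared) and the zero-extended 0x00000080 to 0xffffff80 (extra
   bits set), so a caller can tell from the extra bits whether C
   really was the sign-extension of its low P bits.  */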
4960
4961 static tree
4962 unextend (tree c, int p, int unsignedp, tree mask)
4963 {
4964 tree type = TREE_TYPE (c);
4965 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4966 tree temp;
4967
4968 if (p == modesize || unsignedp)
4969 return c;
4970
4971 /* We work by getting just the sign bit into the low-order bit, then
4972 into the high-order bit, then sign-extend. We then XOR that value
4973 with C. */
4974 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4975 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4976
4977 /* We must use a signed type in order to get an arithmetic right shift.
4978 However, we must also avoid introducing accidental overflows, so that
4979 a subsequent call to integer_zerop will work. Hence we must
4980 do the type conversion here. At this point, the constant is either
4981 zero or one, and the conversion to a signed type can never overflow.
4982 We could get an overflow if this conversion is done anywhere else. */
4983 if (TYPE_UNSIGNED (type))
4984 temp = fold_convert (signed_type_for (type), temp);
4985
4986 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4987 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4988 if (mask != 0)
4989 temp = const_binop (BIT_AND_EXPR, temp,
4990 fold_convert (TREE_TYPE (c), mask),
4991 0);
4992 /* If necessary, convert the type back to match the type of C. */
4993 if (TYPE_UNSIGNED (type))
4994 temp = fold_convert (type, temp);
4995
4996 return fold_convert (type,
4997 const_binop (BIT_XOR_EXPR, c, temp, 0));
4998 }
4999 \f
5000 /* For an expression that has the form
5001 (A && B) || ~B
5002 or
5003 (A || B) && ~B,
5004 we can drop one of the inner expressions and simplify to
5005 A || ~B
5006 or
5007 A && ~B
5008 LOC is the location of the resulting expression. OP is the inner
5009 logical operation, i.e. the left-hand side in the examples above, while CMPOP
5010 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5011 removing a condition that guards another, as in
5012 (A != NULL && A->...) || A == NULL
5013 which we must not transform. If RHS_ONLY is true, only eliminate the
5014 right-most operand of the inner logical operation. */
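/* Illustrative note (editor's sketch): given OP == (a < b && c != 0)
   and CMPOP == a >= b, the inversion of CMPOP matches the a < b
   operand of OP, so the overall expression
   (a < b && c != 0) || a >= b simplifies to (c != 0) || a >= b.  */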
5015
5016 static tree
5017 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5018 bool rhs_only)
5019 {
5020 tree type = TREE_TYPE (cmpop);
5021 enum tree_code code = TREE_CODE (cmpop);
5022 enum tree_code truthop_code = TREE_CODE (op);
5023 tree lhs = TREE_OPERAND (op, 0);
5024 tree rhs = TREE_OPERAND (op, 1);
5025 tree orig_lhs = lhs, orig_rhs = rhs;
5026 enum tree_code rhs_code = TREE_CODE (rhs);
5027 enum tree_code lhs_code = TREE_CODE (lhs);
5028 enum tree_code inv_code;
5029
5030 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5031 return NULL_TREE;
5032
5033 if (TREE_CODE_CLASS (code) != tcc_comparison)
5034 return NULL_TREE;
5035
5036 if (rhs_code == truthop_code)
5037 {
5038 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5039 if (newrhs != NULL_TREE)
5040 {
5041 rhs = newrhs;
5042 rhs_code = TREE_CODE (rhs);
5043 }
5044 }
5045 if (lhs_code == truthop_code && !rhs_only)
5046 {
5047 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5048 if (newlhs != NULL_TREE)
5049 {
5050 lhs = newlhs;
5051 lhs_code = TREE_CODE (lhs);
5052 }
5053 }
5054
5055 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5056 if (inv_code == rhs_code
5057 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5058 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5059 return lhs;
5060 if (!rhs_only && inv_code == lhs_code
5061 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5062 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5063 return rhs;
5064 if (rhs != orig_rhs || lhs != orig_lhs)
5065 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5066 lhs, rhs);
5067 return NULL_TREE;
5068 }
5069
5070 /* Find ways of folding logical expressions of LHS and RHS:
5071 Try to merge two comparisons to the same innermost item.
5072 Look for range tests like "ch >= '0' && ch <= '9'".
5073 Look for combinations of simple terms on machines with expensive branches
5074 and evaluate the RHS unconditionally.
5075
5076 For example, if we have p->a == 2 && p->b == 4 and we can make an
5077 object large enough to span both A and B, we can do this with a comparison
5078 against the object ANDed with a mask.
5079
5080 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5081 operations to do this with one comparison.
5082
5083 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5084 function and the one above.
5085
5086 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5087 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5088
5089 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5090 two operands.
5091
5092 We return the simplified tree or 0 if no optimization is possible. */
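/* Illustrative note (editor's sketch, not part of the original
   comment): for a hypothetical declaration

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test "p->a == 2 && p->b == 4" can be folded into one load and
   one comparison, conceptually

     (containing_byte & 0xff) == 0x42

   on a little-endian target where B occupies the high nibble (here
   containing_byte stands for the byte holding both fields); the code
   below computes the masks and shifted constants that make such a
   merge valid.  */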
5093
5094 static tree
5095 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5096 tree lhs, tree rhs)
5097 {
5098 /* If this is the "or" of two comparisons, we can do something if
5099 the comparisons are NE_EXPR. If this is the "and", we can do something
5100 if the comparisons are EQ_EXPR. I.e.,
5101 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5102
5103 WANTED_CODE is this operation code. For single bit fields, we can
5104 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5105 comparison for one-bit fields. */
5106
5107 enum tree_code wanted_code;
5108 enum tree_code lcode, rcode;
5109 tree ll_arg, lr_arg, rl_arg, rr_arg;
5110 tree ll_inner, lr_inner, rl_inner, rr_inner;
5111 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5112 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5113 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5114 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5115 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5116 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5117 enum machine_mode lnmode, rnmode;
5118 tree ll_mask, lr_mask, rl_mask, rr_mask;
5119 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5120 tree l_const, r_const;
5121 tree lntype, rntype, result;
5122 HOST_WIDE_INT first_bit, end_bit;
5123 int volatilep;
5124 tree orig_lhs = lhs, orig_rhs = rhs;
5125 enum tree_code orig_code = code;
5126
5127 /* Start by getting the comparison codes. Fail if anything is volatile.
5128 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5129 it were surrounded with a NE_EXPR. */
5130
5131 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5132 return 0;
5133
5134 lcode = TREE_CODE (lhs);
5135 rcode = TREE_CODE (rhs);
5136
5137 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5138 {
5139 lhs = build2 (NE_EXPR, truth_type, lhs,
5140 build_int_cst (TREE_TYPE (lhs), 0));
5141 lcode = NE_EXPR;
5142 }
5143
5144 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5145 {
5146 rhs = build2 (NE_EXPR, truth_type, rhs,
5147 build_int_cst (TREE_TYPE (rhs), 0));
5148 rcode = NE_EXPR;
5149 }
5150
5151 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5152 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5153 return 0;
5154
5155 ll_arg = TREE_OPERAND (lhs, 0);
5156 lr_arg = TREE_OPERAND (lhs, 1);
5157 rl_arg = TREE_OPERAND (rhs, 0);
5158 rr_arg = TREE_OPERAND (rhs, 1);
5159
5160 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5161 if (simple_operand_p (ll_arg)
5162 && simple_operand_p (lr_arg))
5163 {
5164 tree result;
5165 if (operand_equal_p (ll_arg, rl_arg, 0)
5166 && operand_equal_p (lr_arg, rr_arg, 0))
5167 {
5168 result = combine_comparisons (loc, code, lcode, rcode,
5169 truth_type, ll_arg, lr_arg);
5170 if (result)
5171 return result;
5172 }
5173 else if (operand_equal_p (ll_arg, rr_arg, 0)
5174 && operand_equal_p (lr_arg, rl_arg, 0))
5175 {
5176 result = combine_comparisons (loc, code, lcode,
5177 swap_tree_comparison (rcode),
5178 truth_type, ll_arg, lr_arg);
5179 if (result)
5180 return result;
5181 }
5182 }
5183
5184 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5185 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5186
5187 /* If the RHS can be evaluated unconditionally and its operands are
5188 simple, it wins to evaluate the RHS unconditionally on machines
5189 with expensive branches. In this case, this isn't a comparison
5190 that can be merged. Avoid doing this if the RHS is a floating-point
5191 comparison since those can trap. */
5192
5193 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5194 false) >= 2
5195 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5196 && simple_operand_p (rl_arg)
5197 && simple_operand_p (rr_arg))
5198 {
5199 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5200 if (code == TRUTH_OR_EXPR
5201 && lcode == NE_EXPR && integer_zerop (lr_arg)
5202 && rcode == NE_EXPR && integer_zerop (rr_arg)
5203 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5204 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5205 {
5206 result = build2 (NE_EXPR, truth_type,
5207 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5208 ll_arg, rl_arg),
5209 build_int_cst (TREE_TYPE (ll_arg), 0));
5210 goto fold_truthop_exit;
5211 }
5212
5213 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5214 if (code == TRUTH_AND_EXPR
5215 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5216 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5217 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5218 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5219 {
5220 result = build2 (EQ_EXPR, truth_type,
5221 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5222 ll_arg, rl_arg),
5223 build_int_cst (TREE_TYPE (ll_arg), 0));
5224 goto fold_truthop_exit;
5225 }
5226
5227 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5228 {
5229 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5230 {
5231 result = build2 (code, truth_type, lhs, rhs);
5232 goto fold_truthop_exit;
5233 }
5234 return NULL_TREE;
5235 }
5236 }
5237
5238 /* See if the comparisons can be merged. Then get all the parameters for
5239 each side. */
5240
5241 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5242 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5243 return 0;
5244
5245 volatilep = 0;
5246 ll_inner = decode_field_reference (loc, ll_arg,
5247 &ll_bitsize, &ll_bitpos, &ll_mode,
5248 &ll_unsignedp, &volatilep, &ll_mask,
5249 &ll_and_mask);
5250 lr_inner = decode_field_reference (loc, lr_arg,
5251 &lr_bitsize, &lr_bitpos, &lr_mode,
5252 &lr_unsignedp, &volatilep, &lr_mask,
5253 &lr_and_mask);
5254 rl_inner = decode_field_reference (loc, rl_arg,
5255 &rl_bitsize, &rl_bitpos, &rl_mode,
5256 &rl_unsignedp, &volatilep, &rl_mask,
5257 &rl_and_mask);
5258 rr_inner = decode_field_reference (loc, rr_arg,
5259 &rr_bitsize, &rr_bitpos, &rr_mode,
5260 &rr_unsignedp, &volatilep, &rr_mask,
5261 &rr_and_mask);
5262
5263 /* The inner operation on the lhs of each comparison must be the
5264 same if we are to be able to do anything.
5265 Then see if we have constants. If not, the same must be true for
5266 the rhs's. */
5267 if (volatilep || ll_inner == 0 || rl_inner == 0
5268 || ! operand_equal_p (ll_inner, rl_inner, 0))
5269 return 0;
5270
5271 if (TREE_CODE (lr_arg) == INTEGER_CST
5272 && TREE_CODE (rr_arg) == INTEGER_CST)
5273 l_const = lr_arg, r_const = rr_arg;
5274 else if (lr_inner == 0 || rr_inner == 0
5275 || ! operand_equal_p (lr_inner, rr_inner, 0))
5276 return 0;
5277 else
5278 l_const = r_const = 0;
5279
5280 /* If either comparison code is not correct for our logical operation,
5281 fail. However, we can convert a one-bit comparison against zero into
5282 the opposite comparison against that bit being set in the field. */
5283
5284 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5285 if (lcode != wanted_code)
5286 {
5287 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5288 {
5289 /* Make the left operand unsigned, since we are only interested
5290 in the value of one bit. Otherwise we are doing the wrong
5291 thing below. */
5292 ll_unsignedp = 1;
5293 l_const = ll_mask;
5294 }
5295 else
5296 return 0;
5297 }
5298
5299 /* This is analogous to the code for l_const above. */
5300 if (rcode != wanted_code)
5301 {
5302 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5303 {
5304 rl_unsignedp = 1;
5305 r_const = rl_mask;
5306 }
5307 else
5308 return 0;
5309 }
5310
5311 /* See if we can find a mode that contains both fields being compared on
5312 the left. If we can't, fail. Otherwise, update all constants and masks
5313 to be relative to a field of that size. */
5314 first_bit = MIN (ll_bitpos, rl_bitpos);
5315 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5316 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5317 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5318 volatilep);
5319 if (lnmode == VOIDmode)
5320 return 0;
5321
5322 lnbitsize = GET_MODE_BITSIZE (lnmode);
5323 lnbitpos = first_bit & ~ (lnbitsize - 1);
5324 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5325 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5326
5327 if (BYTES_BIG_ENDIAN)
5328 {
5329 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5330 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5331 }
5332
5333 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5334 size_int (xll_bitpos), 0);
5335 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5336 size_int (xrl_bitpos), 0);
5337
5338 if (l_const)
5339 {
5340 l_const = fold_convert_loc (loc, lntype, l_const);
5341 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5342 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5343 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5344 fold_build1_loc (loc, BIT_NOT_EXPR,
5345 lntype, ll_mask),
5346 0)))
5347 {
5348 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5349
5350 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5351 }
5352 }
5353 if (r_const)
5354 {
5355 r_const = fold_convert_loc (loc, lntype, r_const);
5356 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5357 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5358 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5359 fold_build1_loc (loc, BIT_NOT_EXPR,
5360 lntype, rl_mask),
5361 0)))
5362 {
5363 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5364
5365 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5366 }
5367 }
5368
5369 /* If the right sides are not constant, do the same for them. Also,
5370 disallow this optimization if a size or signedness mismatch occurs
5371 between the left and right sides. */
5372 if (l_const == 0)
5373 {
5374 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5375 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5376 /* Make sure the two fields on the right
5377 correspond to the left without being swapped. */
5378 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5379 return 0;
5380
5381 first_bit = MIN (lr_bitpos, rr_bitpos);
5382 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5383 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5384 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5385 volatilep);
5386 if (rnmode == VOIDmode)
5387 return 0;
5388
5389 rnbitsize = GET_MODE_BITSIZE (rnmode);
5390 rnbitpos = first_bit & ~ (rnbitsize - 1);
5391 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5392 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5393
5394 if (BYTES_BIG_ENDIAN)
5395 {
5396 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5397 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5398 }
5399
5400 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5401 rntype, lr_mask),
5402 size_int (xlr_bitpos), 0);
5403 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5404 rntype, rr_mask),
5405 size_int (xrr_bitpos), 0);
5406
5407 /* Make a mask that corresponds to both fields being compared.
5408 Do this for both items being compared. If the operands are the
5409 same size and the bits being compared are in the same position
5410 then we can do this by masking both and comparing the masked
5411 results. */
5412 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5413 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5414 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5415 {
5416 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5417 ll_unsignedp || rl_unsignedp);
5418 if (! all_ones_mask_p (ll_mask, lnbitsize))
5419 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5420
5421 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5422 lr_unsignedp || rr_unsignedp);
5423 if (! all_ones_mask_p (lr_mask, rnbitsize))
5424 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5425
5426 result = build2 (wanted_code, truth_type, lhs, rhs);
5427 goto fold_truthop_exit;
5428 }
5429
5430 /* There is still another way we can do something: If both pairs of
5431 fields being compared are adjacent, we may be able to make a wider
5432 field containing them both.
5433
5434 Note that we still must mask the lhs/rhs expressions. Furthermore,
5435 the mask must be shifted to account for the shift done by
5436 make_bit_field_ref. */
5437 if ((ll_bitsize + ll_bitpos == rl_bitpos
5438 && lr_bitsize + lr_bitpos == rr_bitpos)
5439 || (ll_bitpos == rl_bitpos + rl_bitsize
5440 && lr_bitpos == rr_bitpos + rr_bitsize))
5441 {
5442 tree type;
5443
5444 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5445 ll_bitsize + rl_bitsize,
5446 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5447 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5448 lr_bitsize + rr_bitsize,
5449 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5450
5451 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5452 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5453 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5454 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5455
5456 /* Convert to the smaller type before masking out unwanted bits. */
5457 type = lntype;
5458 if (lntype != rntype)
5459 {
5460 if (lnbitsize > rnbitsize)
5461 {
5462 lhs = fold_convert_loc (loc, rntype, lhs);
5463 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5464 type = rntype;
5465 }
5466 else if (lnbitsize < rnbitsize)
5467 {
5468 rhs = fold_convert_loc (loc, lntype, rhs);
5469 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5470 type = lntype;
5471 }
5472 }
5473
5474 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5475 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5476
5477 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5478 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5479
5480 result = build2 (wanted_code, truth_type, lhs, rhs);
5481 goto fold_truthop_exit;
5482 }
5483
5484 return 0;
5485 }
5486
5487 /* Handle the case of comparisons with constants. If there is something in
5488 common between the masks, those bits of the constants must be the same.
5489 If not, the whole condition is known: always false for "and", always
5490 true for "or". Test for this to avoid generating incorrect code below. */
5491 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5492 if (! integer_zerop (result)
5493 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5494 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5495 {
5496 if (wanted_code == NE_EXPR)
5497 {
5498 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5499 return constant_boolean_node (true, truth_type);
5500 }
5501 else
5502 {
5503 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5504 return constant_boolean_node (false, truth_type);
5505 }
5506 }
5507
5508 /* Construct the expression we will return. First get the component
5509 reference we will make. Unless the mask is all ones the width of
5510 that field, perform the mask operation. Then compare with the
5511 merged constant. */
5512 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5513 ll_unsignedp || rl_unsignedp);
5514
5515 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5516 if (! all_ones_mask_p (ll_mask, lnbitsize))
5517 {
5518 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5519 SET_EXPR_LOCATION (result, loc);
5520 }
5521
5522 result = build2 (wanted_code, truth_type, result,
5523 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5524
5525 fold_truthop_exit:
5526 SET_EXPR_LOCATION (result, loc);
5527 return result;
5528 }
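/* A worked example of the bit-field merge performed above (a
   little-endian layout with both fields packed into one byte is
   assumed, not taken from any particular target):

     struct s { unsigned a : 4; unsigned b : 4; } x;
     x.a == 3 && x.b == 5

   loads the byte holding both fields once and compares it against the
   merged constant 3 | (5 << 4) == 0x53, with the BIT_AND_EXPR omitted
   because the combined mask covers every bit of the field.  */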
5529 \f
5530 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5531 constant. */
5532
5533 static tree
5534 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5535 tree op0, tree op1)
5536 {
5537 tree arg0 = op0;
5538 enum tree_code op_code;
5539 tree comp_const;
5540 tree minmax_const;
5541 int consts_equal, consts_lt;
5542 tree inner;
5543
5544 STRIP_SIGN_NOPS (arg0);
5545
5546 op_code = TREE_CODE (arg0);
5547 minmax_const = TREE_OPERAND (arg0, 1);
5548 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5549 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5550 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5551 inner = TREE_OPERAND (arg0, 0);
5552
5553 /* If something does not permit us to optimize, return NULL_TREE. */
5554 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5555 || TREE_CODE (comp_const) != INTEGER_CST
5556 || TREE_OVERFLOW (comp_const)
5557 || TREE_CODE (minmax_const) != INTEGER_CST
5558 || TREE_OVERFLOW (minmax_const))
5559 return NULL_TREE;
5560
5561 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5562 and GT_EXPR, doing the rest with recursive calls using logical
5563 simplifications. */
5564 switch (code)
5565 {
5566 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5567 {
5568 tree tem
5569 = optimize_minmax_comparison (loc,
5570 invert_tree_comparison (code, false),
5571 type, op0, op1);
5572 if (tem)
5573 return invert_truthvalue_loc (loc, tem);
5574 return NULL_TREE;
5575 }
5576
5577 case GE_EXPR:
5578 return
5579 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5580 optimize_minmax_comparison
5581 (loc, EQ_EXPR, type, arg0, comp_const),
5582 optimize_minmax_comparison
5583 (loc, GT_EXPR, type, arg0, comp_const));
5584
5585 case EQ_EXPR:
5586 if (op_code == MAX_EXPR && consts_equal)
5587 /* MAX (X, 0) == 0 -> X <= 0 */
5588 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5589
5590 else if (op_code == MAX_EXPR && consts_lt)
5591 /* MAX (X, 0) == 5 -> X == 5 */
5592 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5593
5594 else if (op_code == MAX_EXPR)
5595 /* MAX (X, 0) == -1 -> false */
5596 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5597
5598 else if (consts_equal)
5599 /* MIN (X, 0) == 0 -> X >= 0 */
5600 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5601
5602 else if (consts_lt)
5603 /* MIN (X, 0) == 5 -> false */
5604 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5605
5606 else
5607 /* MIN (X, 0) == -1 -> X == -1 */
5608 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5609
5610 case GT_EXPR:
5611 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5612 /* MAX (X, 0) > 0 -> X > 0
5613 MAX (X, 0) > 5 -> X > 5 */
5614 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5615
5616 else if (op_code == MAX_EXPR)
5617 /* MAX (X, 0) > -1 -> true */
5618 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5619
5620 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5621 /* MIN (X, 0) > 0 -> false
5622 MIN (X, 0) > 5 -> false */
5623 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5624
5625 else
5626 /* MIN (X, 0) > -1 -> X > -1 */
5627 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5628
5629 default:
5630 return NULL_TREE;
5631 }
5632 }
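/* Illustrative trace of the recursion above (constants assumed):
   MIN (x, 9) != 7 enters the NE_EXPR case, which inverts the
   comparison and recurses on MIN (x, 9) == 7; since 7 is below the
   MIN bound, that yields x == 7, and re-inverting gives x != 7.  */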
5633 \f
5634 /* T is an integer expression that is being multiplied, divided, or taken a
5635 modulus (CODE says which and what kind of divide or modulus) by a
5636 constant C. See if we can eliminate that operation by folding it with
5637 other operations already in T. WIDE_TYPE, if non-null, is a type that
5638 should be used for the computation if wider than our type.
5639
5640 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5641 (X * 2) + (Y * 4). We must, however, be assured that either the original
5642 expression would not overflow or that overflow is undefined for the type
5643 in the language in question.
5644
5645 If we return a non-null expression, it is an equivalent form of the
5646 original computation, but need not be in the original type.
5647
5648 We set *STRICT_OVERFLOW_P to true if the return value depends on
5649 signed overflow being undefined. Otherwise we do not change
5650 *STRICT_OVERFLOW_P. */
5651
5652 static tree
5653 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5654 bool *strict_overflow_p)
5655 {
5656 /* To avoid exponential search depth, refuse to allow recursion past
5657 three levels. Beyond that (1) it's highly unlikely that we'll find
5658 something interesting and (2) we've probably processed it before
5659 when we built the inner expression. */
5660
5661 static int depth;
5662 tree ret;
5663
5664 if (depth > 3)
5665 return NULL;
5666
5667 depth++;
5668 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5669 depth--;
5670
5671 return ret;
5672 }
5673
5674 static tree
5675 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5676 bool *strict_overflow_p)
5677 {
5678 tree type = TREE_TYPE (t);
5679 enum tree_code tcode = TREE_CODE (t);
5680 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5681 > GET_MODE_SIZE (TYPE_MODE (type)))
5682 ? wide_type : type);
5683 tree t1, t2;
5684 int same_p = tcode == code;
5685 tree op0 = NULL_TREE, op1 = NULL_TREE;
5686 bool sub_strict_overflow_p;
5687
5688 /* Don't deal with constants of zero here; they confuse the code below. */
5689 if (integer_zerop (c))
5690 return NULL_TREE;
5691
5692 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5693 op0 = TREE_OPERAND (t, 0);
5694
5695 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5696 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5697
5698 /* Note that we need not handle conditional operations here since fold
5699 already handles those cases. So just do arithmetic here. */
5700 switch (tcode)
5701 {
5702 case INTEGER_CST:
5703 /* For a constant, we can always simplify if we are a multiply
5704 or (for divide and modulus) if it is a multiple of our constant. */
5705 if (code == MULT_EXPR
5706 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5707 return const_binop (code, fold_convert (ctype, t),
5708 fold_convert (ctype, c), 0);
5709 break;
5710
5711 CASE_CONVERT: case NON_LVALUE_EXPR:
5712 /* If op0 is an expression ... */
5713 if ((COMPARISON_CLASS_P (op0)
5714 || UNARY_CLASS_P (op0)
5715 || BINARY_CLASS_P (op0)
5716 || VL_EXP_CLASS_P (op0)
5717 || EXPRESSION_CLASS_P (op0))
5718 /* ... and has wrapping overflow, and its type is smaller
5719 than ctype, then we cannot pass through as widening. */
5720 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5721 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5722 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5723 && (TYPE_PRECISION (ctype)
5724 > TYPE_PRECISION (TREE_TYPE (op0))))
5725 /* ... or this is a truncation (t is narrower than op0),
5726 then we cannot pass through this narrowing. */
5727 || (TYPE_PRECISION (type)
5728 < TYPE_PRECISION (TREE_TYPE (op0)))
5729 /* ... or signedness changes for division or modulus,
5730 then we cannot pass through this conversion. */
5731 || (code != MULT_EXPR
5732 && (TYPE_UNSIGNED (ctype)
5733 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5734 /* ... or has undefined overflow while the converted to
5735 type has not, we cannot do the operation in the inner type
5736 as that would introduce undefined overflow. */
5737 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5738 && !TYPE_OVERFLOW_UNDEFINED (type))))
5739 break;
5740
5741 /* Pass the constant down and see if we can make a simplification. If
5742 we can, replace this expression with the inner simplification for
5743 possible later conversion to our or some other type. */
5744 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5745 && TREE_CODE (t2) == INTEGER_CST
5746 && !TREE_OVERFLOW (t2)
5747 && (0 != (t1 = extract_muldiv (op0, t2, code,
5748 code == MULT_EXPR
5749 ? ctype : NULL_TREE,
5750 strict_overflow_p))))
5751 return t1;
5752 break;
5753
5754 case ABS_EXPR:
5755 /* If widening the type changes it from signed to unsigned, then we
5756 must avoid building ABS_EXPR itself as unsigned. */
5757 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5758 {
5759 tree cstype = (*signed_type_for) (ctype);
5760 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5761 != 0)
5762 {
5763 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5764 return fold_convert (ctype, t1);
5765 }
5766 break;
5767 }
5768 /* If the constant is negative, we cannot simplify this. */
5769 if (tree_int_cst_sgn (c) == -1)
5770 break;
5771 /* FALLTHROUGH */
5772 case NEGATE_EXPR:
5773 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5774 != 0)
5775 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5776 break;
5777
5778 case MIN_EXPR: case MAX_EXPR:
5779 /* If widening the type changes the signedness, then we can't perform
5780 this optimization as that changes the result. */
5781 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5782 break;
5783
5784 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5785 sub_strict_overflow_p = false;
5786 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5787 &sub_strict_overflow_p)) != 0
5788 && (t2 = extract_muldiv (op1, c, code, wide_type,
5789 &sub_strict_overflow_p)) != 0)
5790 {
5791 if (tree_int_cst_sgn (c) < 0)
5792 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5793 if (sub_strict_overflow_p)
5794 *strict_overflow_p = true;
5795 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5796 fold_convert (ctype, t2));
5797 }
5798 break;
5799
5800 case LSHIFT_EXPR: case RSHIFT_EXPR:
5801 /* If the second operand is constant, this is a multiplication
5802 or floor division, by a power of two, so we can treat it that
5803 way unless the multiplier or divisor overflows. Signed
5804 left-shift overflow is implementation-defined rather than
5805 undefined in C90, so do not convert signed left shift into
5806 multiplication. */
5807 if (TREE_CODE (op1) == INTEGER_CST
5808 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5809 /* const_binop may not detect overflow correctly,
5810 so check for it explicitly here. */
5811 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5812 && TREE_INT_CST_HIGH (op1) == 0
5813 && 0 != (t1 = fold_convert (ctype,
5814 const_binop (LSHIFT_EXPR,
5815 size_one_node,
5816 op1, 0)))
5817 && !TREE_OVERFLOW (t1))
5818 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5819 ? MULT_EXPR : FLOOR_DIV_EXPR,
5820 ctype,
5821 fold_convert (ctype, op0),
5822 t1),
5823 c, code, wide_type, strict_overflow_p);
5824 break;
5825
5826 case PLUS_EXPR: case MINUS_EXPR:
5827 /* See if we can eliminate the operation on both sides. If we can, we
5828 can return a new PLUS or MINUS. If we can't, the only remaining
5829 cases where we can do anything are if the second operand is a
5830 constant. */
5831 sub_strict_overflow_p = false;
5832 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5833 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5834 if (t1 != 0 && t2 != 0
5835 && (code == MULT_EXPR
5836 /* If not multiplication, we can only do this if both operands
5837 are divisible by c. */
5838 || (multiple_of_p (ctype, op0, c)
5839 && multiple_of_p (ctype, op1, c))))
5840 {
5841 if (sub_strict_overflow_p)
5842 *strict_overflow_p = true;
5843 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5844 fold_convert (ctype, t2));
5845 }
5846
5847 /* If this was a subtraction, negate OP1 and set it to be an addition.
5848 This simplifies the logic below. */
5849 if (tcode == MINUS_EXPR)
5850 {
5851 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5852 /* If OP1 was not easily negatable, the constant may be OP0. */
5853 if (TREE_CODE (op0) == INTEGER_CST)
5854 {
5855 tree tem = op0;
5856 op0 = op1;
5857 op1 = tem;
5858 tem = t1;
5859 t1 = t2;
5860 t2 = tem;
5861 }
5862 }
5863
5864 if (TREE_CODE (op1) != INTEGER_CST)
5865 break;
5866
5867 /* If either OP1 or C is negative, this optimization is not safe for
5868 some of the division and remainder types while for others we need
5869 to change the code. */
5870 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5871 {
5872 if (code == CEIL_DIV_EXPR)
5873 code = FLOOR_DIV_EXPR;
5874 else if (code == FLOOR_DIV_EXPR)
5875 code = CEIL_DIV_EXPR;
5876 else if (code != MULT_EXPR
5877 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5878 break;
5879 }
5880
5881 /* If it's a multiply or a division/modulus operation of a multiple
5882 of our constant, do the operation and verify it doesn't overflow. */
5883 if (code == MULT_EXPR
5884 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5885 {
5886 op1 = const_binop (code, fold_convert (ctype, op1),
5887 fold_convert (ctype, c), 0);
5888 /* We allow the constant to overflow with wrapping semantics. */
5889 if (op1 == 0
5890 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5891 break;
5892 }
5893 else
5894 break;
5895
5896 /* If we have an unsigned type that is not a sizetype, we cannot widen
5897 the operation since it will change the result if the original
5898 computation overflowed. */
5899 if (TYPE_UNSIGNED (ctype)
5900 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5901 && ctype != type)
5902 break;
5903
5904 /* If we were able to eliminate our operation from the first side,
5905 apply our operation to the second side and reform the PLUS. */
5906 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5907 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5908
5909 /* The last case is if we are a multiply. In that case, we can
5910 apply the distributive law to commute the multiply and addition
5911 if the multiplication of the constants doesn't overflow. */
5912 if (code == MULT_EXPR)
5913 return fold_build2 (tcode, ctype,
5914 fold_build2 (code, ctype,
5915 fold_convert (ctype, op0),
5916 fold_convert (ctype, c)),
5917 op1);
5918
5919 break;
5920
5921 case MULT_EXPR:
5922 /* We have a special case here if we are doing something like
5923 (C * 8) % 4 since we know that's zero. */
5924 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5925 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5926 /* If the multiplication can overflow we cannot optimize this.
5927 ??? Until we can properly mark individual operations as
5928 not overflowing we need to treat sizetype specially here, as
5929 stor-layout relies on this optimization to make
5930 DECL_FIELD_BIT_OFFSET always a constant. */
5931 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5932 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5933 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5934 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5935 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5936 {
5937 *strict_overflow_p = true;
5938 return omit_one_operand (type, integer_zero_node, op0);
5939 }
5940
5941 /* ... fall through ... */
5942
5943 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5944 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5945 /* If we can extract our operation from the LHS, do so and return a
5946 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5947 do something only if the second operand is a constant. */
5948 if (same_p
5949 && (t1 = extract_muldiv (op0, c, code, wide_type,
5950 strict_overflow_p)) != 0)
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5952 fold_convert (ctype, op1));
5953 else if (tcode == MULT_EXPR && code == MULT_EXPR
5954 && (t1 = extract_muldiv (op1, c, code, wide_type,
5955 strict_overflow_p)) != 0)
5956 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5957 fold_convert (ctype, t1));
5958 else if (TREE_CODE (op1) != INTEGER_CST)
5959 return 0;
5960
5961 /* If these are the same operation types, we can associate them
5962 assuming no overflow. */
5963 if (tcode == code
5964 && 0 != (t1 = int_const_binop (MULT_EXPR,
5965 fold_convert (ctype, op1),
5966 fold_convert (ctype, c), 1))
5967 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5968 TREE_INT_CST_HIGH (t1),
5969 (TYPE_UNSIGNED (ctype)
5970 && tcode != MULT_EXPR) ? -1 : 1,
5971 TREE_OVERFLOW (t1)))
5972 && !TREE_OVERFLOW (t1))
5973 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5974
5975 /* If these operations "cancel" each other, we have the main
5976 optimizations of this pass, which occur when either constant is a
5977 multiple of the other, in which case we replace this with an
5978 operation of CODE or TCODE.
5979
5980 If we have an unsigned type that is not a sizetype, we cannot do
5981 this since it will change the result if the original computation
5982 overflowed. */
5983 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5984 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5985 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5986 || (tcode == MULT_EXPR
5987 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5988 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5989 && code != MULT_EXPR)))
5990 {
5991 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5992 {
5993 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5994 *strict_overflow_p = true;
5995 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5996 fold_convert (ctype,
5997 const_binop (TRUNC_DIV_EXPR,
5998 op1, c, 0)));
5999 }
6000 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6001 {
6002 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6003 *strict_overflow_p = true;
6004 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6005 fold_convert (ctype,
6006 const_binop (TRUNC_DIV_EXPR,
6007 c, op1, 0)));
6008 }
6009 }
6010 break;
6011
6012 default:
6013 break;
6014 }
6015
6016 return 0;
6017 }
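/* Worked example of the MULT_EXPR modulus shortcut above (sizetype or
   undefined signed overflow assumed): for (i * 8) % 4 we have op1 = 8
   and c = 4, and since 8 % 4 == 0 the whole expression folds to 0,
   with omit_one_operand keeping i alive for its side effects.  */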
6018 \f
6019 /* Return a node which has the indicated constant VALUE (either 0 or
6020 1), and is of the indicated TYPE. */
6021
6022 tree
6023 constant_boolean_node (int value, tree type)
6024 {
6025 if (type == integer_type_node)
6026 return value ? integer_one_node : integer_zero_node;
6027 else if (type == boolean_type_node)
6028 return value ? boolean_true_node : boolean_false_node;
6029 else
6030 return build_int_cst (type, value);
6031 }
6032
6033
6034 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6035 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6036 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6037 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6038 COND is the first argument to CODE; otherwise (as in the example
6039 given here), it is the second argument. TYPE is the type of the
6040 original expression. Return NULL_TREE if no simplification is
6041 possible. */
6042
6043 static tree
6044 fold_binary_op_with_conditional_arg (location_t loc,
6045 enum tree_code code,
6046 tree type, tree op0, tree op1,
6047 tree cond, tree arg, int cond_first_p)
6048 {
6049 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6050 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6051 tree test, true_value, false_value;
6052 tree lhs = NULL_TREE;
6053 tree rhs = NULL_TREE;
6054
6055 if (TREE_CODE (cond) == COND_EXPR)
6056 {
6057 test = TREE_OPERAND (cond, 0);
6058 true_value = TREE_OPERAND (cond, 1);
6059 false_value = TREE_OPERAND (cond, 2);
6060 /* If this operand is an expression that throws (and therefore has
6061 void type), it does not make sense to try to perform a logical
6062 or arithmetic operation involving it. */
6063 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6064 lhs = true_value;
6065 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6066 rhs = false_value;
6067 }
6068 else
6069 {
6070 tree testtype = TREE_TYPE (cond);
6071 test = cond;
6072 true_value = constant_boolean_node (true, testtype);
6073 false_value = constant_boolean_node (false, testtype);
6074 }
6075
6076 /* This transformation is only worthwhile if we don't have to wrap ARG
6077 in a SAVE_EXPR and the operation can be simplified on at least one
6078 of the branches once it's pushed inside the COND_EXPR. */
6079 if (!TREE_CONSTANT (arg)
6080 && (TREE_SIDE_EFFECTS (arg)
6081 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6082 return NULL_TREE;
6083
6084 arg = fold_convert_loc (loc, arg_type, arg);
6085 if (lhs == 0)
6086 {
6087 true_value = fold_convert_loc (loc, cond_type, true_value);
6088 if (cond_first_p)
6089 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6090 else
6091 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6092 }
6093 if (rhs == 0)
6094 {
6095 false_value = fold_convert_loc (loc, cond_type, false_value);
6096 if (cond_first_p)
6097 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6098 else
6099 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6100 }
6101
6102 /* Check that we have simplified at least one of the branches. */
6103 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6104 return NULL_TREE;
6105
6106 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6107 }
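/* Illustrative use of the transformation above (operand names
   assumed): with code == PLUS_EXPR, ARG == a and COND == (x < y), we
   build

     a + (x < y)  ->  (x < y) ? a + 1 : a + 0  ->  (x < y) ? a + 1 : a

   where the constant arms come from constant_boolean_node and the
   final simplification is done by the recursive fold_build2_loc
   calls.  */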
6108
6109 \f
6110 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6111
6112 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6113 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6114 ADDEND is the same as X.
6115
6116 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6117 and finite. The problematic cases are when X is zero, and its mode
6118 has signed zeros. In the case of rounding towards -infinity,
6119 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6120 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6121
6122 bool
6123 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6124 {
6125 if (!real_zerop (addend))
6126 return false;
6127
6128 /* Don't allow the fold with -fsignaling-nans. */
6129 if (HONOR_SNANS (TYPE_MODE (type)))
6130 return false;
6131
6132 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6133 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6134 return true;
6135
6136 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6137 if (TREE_CODE (addend) == REAL_CST
6138 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6139 negate = !negate;
6140
6141 /* The mode has signed zeros, and we have to honor their sign.
6142 In this situation, there is only one case we can return true for.
6143 X - 0 is the same as X unless rounding towards -infinity is
6144 supported. */
6145 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6146 }
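/* Sketch of the signed-zero reasoning above (IEEE arithmetic
   assumed): under the default rounding mode -0.0 + 0.0 is +0.0, so
   x + 0.0 is not x when x can be -0.0; x - 0.0 preserves both zeros,
   so the NEGATE case can return true as long as rounding toward
   -infinity need not be honored.  */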
6147
6148 /* Subroutine of fold() that checks comparisons of built-in math
6149 functions against real constants.
6150
6151 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6152 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6153 is the type of the result and ARG0 and ARG1 are the operands of the
6154 comparison. ARG1 must be a TREE_REAL_CST.
6155
6156 The function returns the constant folded tree if a simplification
6157 can be made, and NULL_TREE otherwise. */
6158
6159 static tree
6160 fold_mathfn_compare (location_t loc,
6161 enum built_in_function fcode, enum tree_code code,
6162 tree type, tree arg0, tree arg1)
6163 {
6164 REAL_VALUE_TYPE c;
6165
6166 if (BUILTIN_SQRT_P (fcode))
6167 {
6168 tree arg = CALL_EXPR_ARG (arg0, 0);
6169 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6170
6171 c = TREE_REAL_CST (arg1);
6172 if (REAL_VALUE_NEGATIVE (c))
6173 {
6174 /* sqrt(x) == y, < y, or <= y is always false when y is negative. */
6175 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6176 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6177
6178 /* sqrt(x) > y is always true, if y is negative and we
6179 don't care about NaNs, i.e. negative values of x. */
6180 if (code == NE_EXPR || !HONOR_NANS (mode))
6181 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6182
6183 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6184 return fold_build2_loc (loc, GE_EXPR, type, arg,
6185 build_real (TREE_TYPE (arg), dconst0));
6186 }
6187 else if (code == GT_EXPR || code == GE_EXPR)
6188 {
6189 REAL_VALUE_TYPE c2;
6190
6191 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6192 real_convert (&c2, mode, &c2);
6193
6194 if (REAL_VALUE_ISINF (c2))
6195 {
6196 /* sqrt(x) > y is x == +Inf, when y is very large. */
6197 if (HONOR_INFINITIES (mode))
6198 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6199 build_real (TREE_TYPE (arg), c2));
6200
6201 /* sqrt(x) > y is always false, when y is very large
6202 and we don't care about infinities. */
6203 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6204 }
6205
6206 /* sqrt(x) > c is the same as x > c*c. */
6207 return fold_build2_loc (loc, code, type, arg,
6208 build_real (TREE_TYPE (arg), c2));
6209 }
6210 else if (code == LT_EXPR || code == LE_EXPR)
6211 {
6212 REAL_VALUE_TYPE c2;
6213
6214 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6215 real_convert (&c2, mode, &c2);
6216
6217 if (REAL_VALUE_ISINF (c2))
6218 {
6219 /* sqrt(x) < y is always true, when y is a very large
6220 value and we don't care about NaNs or Infinities. */
6221 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6222 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6223
6224 /* sqrt(x) < y is x != +Inf when y is very large and we
6225 don't care about NaNs. */
6226 if (! HONOR_NANS (mode))
6227 return fold_build2_loc (loc, NE_EXPR, type, arg,
6228 build_real (TREE_TYPE (arg), c2));
6229
6230 /* sqrt(x) < y is x >= 0 when y is very large and we
6231 don't care about Infinities. */
6232 if (! HONOR_INFINITIES (mode))
6233 return fold_build2_loc (loc, GE_EXPR, type, arg,
6234 build_real (TREE_TYPE (arg), dconst0));
6235
6236 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6237 if (lang_hooks.decls.global_bindings_p () != 0
6238 || CONTAINS_PLACEHOLDER_P (arg))
6239 return NULL_TREE;
6240
6241 arg = save_expr (arg);
6242 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6243 fold_build2_loc (loc, GE_EXPR, type, arg,
6244 build_real (TREE_TYPE (arg),
6245 dconst0)),
6246 fold_build2_loc (loc, NE_EXPR, type, arg,
6247 build_real (TREE_TYPE (arg),
6248 c2)));
6249 }
6250
6251 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6252 if (! HONOR_NANS (mode))
6253 return fold_build2_loc (loc, code, type, arg,
6254 build_real (TREE_TYPE (arg), c2));
6255
6256 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6257 if (lang_hooks.decls.global_bindings_p () == 0
6258 && ! CONTAINS_PLACEHOLDER_P (arg))
6259 {
6260 arg = save_expr (arg);
6261 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6262 fold_build2_loc (loc, GE_EXPR, type, arg,
6263 build_real (TREE_TYPE (arg),
6264 dconst0)),
6265 fold_build2_loc (loc, code, type, arg,
6266 build_real (TREE_TYPE (arg),
6267 c2)));
6268 }
6269 }
6270 }
6271
6272 return NULL_TREE;
6273 }
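/* Worked instances of the sqrt folds above (double operands and a
   representable square assumed): sqrt (x) > 2.0 squares the constant
   to c2 = 4.0 and becomes x > 4.0, while sqrt (x) < 2.0 with NaNs
   honored becomes x >= 0.0 && x < 4.0, since a negative x makes
   sqrt return NaN and the original comparison false.  */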
6274
6275 /* Subroutine of fold() that optimizes comparisons against Infinities,
6276 either +Inf or -Inf.
6277
6278 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6279 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6280 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6281
6282 The function returns the constant folded tree if a simplification
6283 can be made, and NULL_TREE otherwise. */
6284
6285 static tree
6286 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6287 tree arg0, tree arg1)
6288 {
6289 enum machine_mode mode;
6290 REAL_VALUE_TYPE max;
6291 tree temp;
6292 bool neg;
6293
6294 mode = TYPE_MODE (TREE_TYPE (arg0));
6295
6296 /* For negative infinity swap the sense of the comparison. */
6297 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6298 if (neg)
6299 code = swap_tree_comparison (code);
6300
6301 switch (code)
6302 {
6303 case GT_EXPR:
6304 /* x > +Inf is always false, if we ignore sNaNs. */
6305 if (HONOR_SNANS (mode))
6306 return NULL_TREE;
6307 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6308
6309 case LE_EXPR:
6310 /* x <= +Inf is always true, if we don't care about NaNs. */
6311 if (! HONOR_NANS (mode))
6312 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6313
6314 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6315 if (lang_hooks.decls.global_bindings_p () == 0
6316 && ! CONTAINS_PLACEHOLDER_P (arg0))
6317 {
6318 arg0 = save_expr (arg0);
6319 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6320 }
6321 break;
6322
6323 case EQ_EXPR:
6324 case GE_EXPR:
6325 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6326 real_maxval (&max, neg, mode);
6327 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6328 arg0, build_real (TREE_TYPE (arg0), max));
6329
6330 case LT_EXPR:
6331 /* x < +Inf is always equal to x <= DBL_MAX. */
6332 real_maxval (&max, neg, mode);
6333 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6334 arg0, build_real (TREE_TYPE (arg0), max));
6335
6336 case NE_EXPR:
6337 /* x != +Inf is always equal to !(x > DBL_MAX). */
6338 real_maxval (&max, neg, mode);
6339 if (! HONOR_NANS (mode))
6340 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6341 arg0, build_real (TREE_TYPE (arg0), max));
6342
6343 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6344 arg0, build_real (TREE_TYPE (arg0), max));
6345 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6346
6347 default:
6348 break;
6349 }
6350
6351 return NULL_TREE;
6352 }
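/* Illustrative results of the folds above for double (the exact bound
   comes from real_maxval; DBL_MAX stands for it here):

     x >  +Inf  ->  false (unless signaling NaNs are honored)
     x <  +Inf  ->  x <= DBL_MAX
     x >= +Inf  ->  x >  DBL_MAX
     x != +Inf  ->  !(x > DBL_MAX), or x <= DBL_MAX if NaNs are ignored  */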
6353
6354 /* Subroutine of fold() that optimizes comparisons of a division by
6355 a nonzero integer constant against an integer constant, i.e.
6356 X/C1 op C2.
6357
6358 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6359 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6360 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6361
6362 The function returns the constant folded tree if a simplification
6363 can be made, and NULL_TREE otherwise. */
6364
6365 static tree
6366 fold_div_compare (location_t loc,
6367 enum tree_code code, tree type, tree arg0, tree arg1)
6368 {
6369 tree prod, tmp, hi, lo;
6370 tree arg00 = TREE_OPERAND (arg0, 0);
6371 tree arg01 = TREE_OPERAND (arg0, 1);
6372 unsigned HOST_WIDE_INT lpart;
6373 HOST_WIDE_INT hpart;
6374 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6375 bool neg_overflow;
6376 int overflow;
6377
6378 /* We have to do this the hard way to detect unsigned overflow.
6379 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6380 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6381 TREE_INT_CST_HIGH (arg01),
6382 TREE_INT_CST_LOW (arg1),
6383 TREE_INT_CST_HIGH (arg1),
6384 &lpart, &hpart, unsigned_p);
6385 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6386 -1, overflow);
6387 neg_overflow = false;
6388
6389 if (unsigned_p)
6390 {
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1), 0);
6393 lo = prod;
6394
6395 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6396 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6397 TREE_INT_CST_HIGH (prod),
6398 TREE_INT_CST_LOW (tmp),
6399 TREE_INT_CST_HIGH (tmp),
6400 &lpart, &hpart, unsigned_p);
6401 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6402 -1, overflow | TREE_OVERFLOW (prod));
6403 }
6404 else if (tree_int_cst_sgn (arg01) >= 0)
6405 {
6406 tmp = int_const_binop (MINUS_EXPR, arg01,
6407 build_int_cst (TREE_TYPE (arg01), 1), 0);
6408 switch (tree_int_cst_sgn (arg1))
6409 {
6410 case -1:
6411 neg_overflow = true;
6412 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6413 hi = prod;
6414 break;
6415
6416 case 0:
6417 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6418 hi = tmp;
6419 break;
6420
6421 case 1:
6422 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6423 lo = prod;
6424 break;
6425
6426 default:
6427 gcc_unreachable ();
6428 }
6429 }
6430 else
6431 {
6432 /* A negative divisor reverses the relational operators. */
6433 code = swap_tree_comparison (code);
6434
6435 tmp = int_const_binop (PLUS_EXPR, arg01,
6436 build_int_cst (TREE_TYPE (arg01), 1), 0);
6437 switch (tree_int_cst_sgn (arg1))
6438 {
6439 case -1:
6440 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6441 lo = prod;
6442 break;
6443
6444 case 0:
6445 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6446 lo = tmp;
6447 break;
6448
6449 case 1:
6450 neg_overflow = true;
6451 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6452 hi = prod;
6453 break;
6454
6455 default:
6456 gcc_unreachable ();
6457 }
6458 }
6459
6460 switch (code)
6461 {
6462 case EQ_EXPR:
6463 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6464 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6465 if (TREE_OVERFLOW (hi))
6466 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6467 if (TREE_OVERFLOW (lo))
6468 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6469 return build_range_check (loc, type, arg00, 1, lo, hi);
6470
6471 case NE_EXPR:
6472 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6473 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6474 if (TREE_OVERFLOW (hi))
6475 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6476 if (TREE_OVERFLOW (lo))
6477 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6478 return build_range_check (loc, type, arg00, 0, lo, hi);
6479
6480 case LT_EXPR:
6481 if (TREE_OVERFLOW (lo))
6482 {
6483 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6484 return omit_one_operand_loc (loc, type, tmp, arg00);
6485 }
6486 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6487
6488 case LE_EXPR:
6489 if (TREE_OVERFLOW (hi))
6490 {
6491 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6492 return omit_one_operand_loc (loc, type, tmp, arg00);
6493 }
6494 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6495
6496 case GT_EXPR:
6497 if (TREE_OVERFLOW (hi))
6498 {
6499 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6500 return omit_one_operand_loc (loc, type, tmp, arg00);
6501 }
6502 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6503
6504 case GE_EXPR:
6505 if (TREE_OVERFLOW (lo))
6506 {
6507 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6508 return omit_one_operand_loc (loc, type, tmp, arg00);
6509 }
6510 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6511
6512 default:
6513 break;
6514 }
6515
6516 return NULL_TREE;
6517 }
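/* Worked example of the bounds computed above (signed division with a
   positive divisor and constant assumed): for x / 4 == 3 we get
   prod = 12, tmp = 3, lo = 12 and hi = 15, so the comparison becomes
   the range check 12 <= x && x <= 15, and x / 4 != 3 becomes the
   complementary x < 12 || x > 15.  */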
6518
6519
6520 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6521 equality/inequality test, then return a simplified form of the test
6522 using a sign test. Otherwise return NULL. TYPE is the desired
6523 result type. */
6524
6525 static tree
6526 fold_single_bit_test_into_sign_test (location_t loc,
6527 enum tree_code code, tree arg0, tree arg1,
6528 tree result_type)
6529 {
6530 /* If this is testing a single bit, we can optimize the test. */
6531 if ((code == NE_EXPR || code == EQ_EXPR)
6532 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6533 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6534 {
6535 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6536 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6537 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6538
6539 if (arg00 != NULL_TREE
6540 /* This is only a win if casting to a signed type is cheap,
6541 i.e. when arg00's type is not a partial mode. */
6542 && TYPE_PRECISION (TREE_TYPE (arg00))
6543 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6544 {
6545 tree stype = signed_type_for (TREE_TYPE (arg00));
6546 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6547 result_type,
6548 fold_convert_loc (loc, stype, arg00),
6549 build_int_cst (stype, 0));
6550 }
6551 }
6552
6553 return NULL_TREE;
6554 }
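/* Illustrative instance of the sign test above (a 32-bit int x
   assumed): (x & 0x80000000) != 0 tests exactly the sign bit, so it
   folds to x < 0, and (x & 0x80000000) == 0 folds to x >= 0.  */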
6555
6556 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6557 equality/inequality test, then return a simplified form of
6558 the test using shifts and logical operations. Otherwise return
6559 NULL. TYPE is the desired result type. */
6560
6561 tree
6562 fold_single_bit_test (location_t loc, enum tree_code code,
6563 tree arg0, tree arg1, tree result_type)
6564 {
6565 /* If this is testing a single bit, we can optimize the test. */
6566 if ((code == NE_EXPR || code == EQ_EXPR)
6567 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6568 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6569 {
6570 tree inner = TREE_OPERAND (arg0, 0);
6571 tree type = TREE_TYPE (arg0);
6572 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6573 enum machine_mode operand_mode = TYPE_MODE (type);
6574 int ops_unsigned;
6575 tree signed_type, unsigned_type, intermediate_type;
6576 tree tem, one;
6577
6578 /* First, see if we can fold the single bit test into a sign-bit
6579 test. */
6580 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6581 result_type);
6582 if (tem)
6583 return tem;
6584
6585 /* Otherwise we have (A & C) != 0 where C is a single bit,
6586 convert that into ((A >> C2) & 1), where C2 = log2(C).
6587 Similarly for (A & C) == 0. */
6588
6589 /* If INNER is a right shift by a constant and it plus BITNUM does
6590 not overflow, adjust BITNUM and INNER. */
6591 if (TREE_CODE (inner) == RSHIFT_EXPR
6592 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6593 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6594 && bitnum < TYPE_PRECISION (type)
6595 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6596 bitnum - TYPE_PRECISION (type)))
6597 {
6598 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6599 inner = TREE_OPERAND (inner, 0);
6600 }
6601
6602 /* If we are going to be able to omit the AND below, we must do our
6603 operations as unsigned. If we must use the AND, we have a choice.
6604 Normally unsigned is faster, but for some machines signed is. */
6605 #ifdef LOAD_EXTEND_OP
6606 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6607 && !flag_syntax_only) ? 0 : 1;
6608 #else
6609 ops_unsigned = 1;
6610 #endif
6611
6612 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6613 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6614 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6615 inner = fold_convert_loc (loc, intermediate_type, inner);
6616
6617 if (bitnum != 0)
6618 inner = build2 (RSHIFT_EXPR, intermediate_type,
6619 inner, size_int (bitnum));
6620
6621 one = build_int_cst (intermediate_type, 1);
6622
6623 if (code == EQ_EXPR)
6624 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6625
6626 /* Put the AND last so it can combine with more things. */
6627 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6628
6629 /* Make sure to return the proper type. */
6630 inner = fold_convert_loc (loc, result_type, inner);
6631
6632 return inner;
6633 }
6634 return NULL_TREE;
6635 }
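/* Worked example of the shift form above (bit 3 of an int and an
   unsigned intermediate type assumed): (x & 8) != 0 becomes
   ((unsigned) x >> 3) & 1, and (x & 8) == 0 becomes
   (((unsigned) x >> 3) ^ 1) & 1, with the AND placed last so that it
   can combine with surrounding code.  */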
6636
6637 /* Check whether we are allowed to reorder operands arg0 and arg1,
6638 such that the evaluation of arg1 occurs before arg0. */
6639
6640 static bool
6641 reorder_operands_p (const_tree arg0, const_tree arg1)
6642 {
6643 if (! flag_evaluation_order)
6644 return true;
6645 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6646 return true;
6647 return ! TREE_SIDE_EFFECTS (arg0)
6648 && ! TREE_SIDE_EFFECTS (arg1);
6649 }
6650
6651 /* Test whether it is preferable to swap two operands, ARG0 and
6652 ARG1, for example because ARG0 is an integer constant and ARG1
6653 isn't. If REORDER is true, only recommend swapping if we can
6654 evaluate the operands in reverse order. */
6655
6656 bool
6657 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6658 {
6659 STRIP_SIGN_NOPS (arg0);
6660 STRIP_SIGN_NOPS (arg1);
6661
6662 if (TREE_CODE (arg1) == INTEGER_CST)
6663 return 0;
6664 if (TREE_CODE (arg0) == INTEGER_CST)
6665 return 1;
6666
6667 if (TREE_CODE (arg1) == REAL_CST)
6668 return 0;
6669 if (TREE_CODE (arg0) == REAL_CST)
6670 return 1;
6671
6672 if (TREE_CODE (arg1) == FIXED_CST)
6673 return 0;
6674 if (TREE_CODE (arg0) == FIXED_CST)
6675 return 1;
6676
6677 if (TREE_CODE (arg1) == COMPLEX_CST)
6678 return 0;
6679 if (TREE_CODE (arg0) == COMPLEX_CST)
6680 return 1;
6681
6682 if (TREE_CONSTANT (arg1))
6683 return 0;
6684 if (TREE_CONSTANT (arg0))
6685 return 1;
6686
6687 if (optimize_function_for_size_p (cfun))
6688 return 0;
6689
6690 if (reorder && flag_evaluation_order
6691 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6692 return 0;
6693
6694 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6695 for commutative and comparison operators. Ensuring a canonical
6696 form allows the optimizers to find additional redundancies without
6697 having to explicitly check for both orderings. */
6698 if (TREE_CODE (arg0) == SSA_NAME
6699 && TREE_CODE (arg1) == SSA_NAME
6700 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6701 return 1;
6702
6703 /* Put SSA_NAMEs last. */
6704 if (TREE_CODE (arg1) == SSA_NAME)
6705 return 0;
6706 if (TREE_CODE (arg0) == SSA_NAME)
6707 return 1;
6708
6709 /* Put variables last. */
6710 if (DECL_P (arg1))
6711 return 0;
6712 if (DECL_P (arg0))
6713 return 1;
6714
6715 return 0;
6716 }
6717
6718 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6719 ARG0 is extended to a wider type. */
6720
6721 static tree
6722 fold_widened_comparison (location_t loc, enum tree_code code,
6723 tree type, tree arg0, tree arg1)
6724 {
6725 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6726 tree arg1_unw;
6727 tree shorter_type, outer_type;
6728 tree min, max;
6729 bool above, below;
6730
6731 if (arg0_unw == arg0)
6732 return NULL_TREE;
6733 shorter_type = TREE_TYPE (arg0_unw);
6734
6735 #ifdef HAVE_canonicalize_funcptr_for_compare
6736 /* Disable this optimization if we're casting a function pointer
6737 type on targets that require function pointer canonicalization. */
6738 if (HAVE_canonicalize_funcptr_for_compare
6739 && TREE_CODE (shorter_type) == POINTER_TYPE
6740 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6741 return NULL_TREE;
6742 #endif
6743
6744 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6745 return NULL_TREE;
6746
6747 arg1_unw = get_unwidened (arg1, NULL_TREE);
6748
6749 /* If possible, express the comparison in the shorter mode. */
6750 if ((code == EQ_EXPR || code == NE_EXPR
6751 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6752 && (TREE_TYPE (arg1_unw) == shorter_type
6753 || ((TYPE_PRECISION (shorter_type)
6754 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6755 && (TYPE_UNSIGNED (shorter_type)
6756 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6757 || (TREE_CODE (arg1_unw) == INTEGER_CST
6758 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6759 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6760 && int_fits_type_p (arg1_unw, shorter_type))))
6761 return fold_build2_loc (loc, code, type, arg0_unw,
6762 fold_convert_loc (loc, shorter_type, arg1_unw));
6763
6764 if (TREE_CODE (arg1_unw) != INTEGER_CST
6765 || TREE_CODE (shorter_type) != INTEGER_TYPE
6766 || !int_fits_type_p (arg1_unw, shorter_type))
6767 return NULL_TREE;
6768
6769 /* If we are comparing with an integer that does not fit into the range
6770 of the shorter type, the result is known. */
6771 outer_type = TREE_TYPE (arg1_unw);
6772 min = lower_bound_in_type (outer_type, shorter_type);
6773 max = upper_bound_in_type (outer_type, shorter_type);
6774
6775 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6776 max, arg1_unw));
6777 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6778 arg1_unw, min));
6779
6780 switch (code)
6781 {
6782 case EQ_EXPR:
6783 if (above || below)
6784 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6785 break;
6786
6787 case NE_EXPR:
6788 if (above || below)
6789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6790 break;
6791
6792 case LT_EXPR:
6793 case LE_EXPR:
6794 if (above)
6795 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6796 else if (below)
6797 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6798 /* Fall through (no folding when neither bound applies). */
6799 case GT_EXPR:
6800 case GE_EXPR:
6801 if (above)
6802 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6803 else if (below)
6804 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6805 /* Fall through (no folding when neither bound applies). */
6806 default:
6807 break;
6808 }
6809
6810 return NULL_TREE;
6811 }
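/* Illustrative uses of the logic above (16-bit short, 32-bit int
   assumed): for short s, (int) s == 12 is done in the narrow type as
   s == (short) 12, while (int) s == 70000 folds to constant false
   because 70000 lies above the upper bound of the shorter type.  */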
6812
6813 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6814 ARG0 just the signedness is changed. */
6815
6816 static tree
6817 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6818 tree arg0, tree arg1)
6819 {
6820 tree arg0_inner;
6821 tree inner_type, outer_type;
6822
6823 if (!CONVERT_EXPR_P (arg0))
6824 return NULL_TREE;
6825
6826 outer_type = TREE_TYPE (arg0);
6827 arg0_inner = TREE_OPERAND (arg0, 0);
6828 inner_type = TREE_TYPE (arg0_inner);
6829
6830 #ifdef HAVE_canonicalize_funcptr_for_compare
6831 /* Disable this optimization if we're casting a function pointer
6832 type on targets that require function pointer canonicalization. */
6833 if (HAVE_canonicalize_funcptr_for_compare
6834 && TREE_CODE (inner_type) == POINTER_TYPE
6835 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6836 return NULL_TREE;
6837 #endif
6838
6839 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6840 return NULL_TREE;
6841
6842 if (TREE_CODE (arg1) != INTEGER_CST
6843 && !(CONVERT_EXPR_P (arg1)
6844 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6845 return NULL_TREE;
6846
6847 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6848 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6849 && code != NE_EXPR
6850 && code != EQ_EXPR)
6851 return NULL_TREE;
6852
6853 if (TREE_CODE (arg1) == INTEGER_CST)
6854 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6855 TREE_INT_CST_HIGH (arg1), 0,
6856 TREE_OVERFLOW (arg1));
6857 else
6858 arg1 = fold_convert_loc (loc, inner_type, arg1);
6859
6860 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6861 }
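/* Illustrative instance of the fold above (equal precision assumed):
   for int x, (unsigned) x == 5u compares the same bit pattern as
   x == 5, so the conversion is stripped and the constant refitted to
   the inner type via force_fit_type_double.  */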
6862
6863 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6864 the step of the array. Reconstructs s and delta in the case of s *
6865 delta being an integer constant (and thus already folded). ADDR is
6866 the address. OP1 is the multiplicative expression. If the
6867 function succeeds, the new address expression is returned.
6868 Otherwise NULL_TREE is returned. LOC is the location of the
6869 resulting expression. */
6870
6871 static tree
6872 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6873 {
6874 tree s, delta, step;
6875 tree ref = TREE_OPERAND (addr, 0), pref;
6876 tree ret, pos;
6877 tree itype;
6878 bool mdim = false;
6879
6880 /* Strip the nops that might be added when converting op1 to sizetype. */
6881 STRIP_NOPS (op1);
6882
6883 /* Canonicalize op1 into a possibly non-constant delta
6884 and an INTEGER_CST s. */
6885 if (TREE_CODE (op1) == MULT_EXPR)
6886 {
6887 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6888
6889 STRIP_NOPS (arg0);
6890 STRIP_NOPS (arg1);
6891
6892 if (TREE_CODE (arg0) == INTEGER_CST)
6893 {
6894 s = arg0;
6895 delta = arg1;
6896 }
6897 else if (TREE_CODE (arg1) == INTEGER_CST)
6898 {
6899 s = arg1;
6900 delta = arg0;
6901 }
6902 else
6903 return NULL_TREE;
6904 }
6905 else if (TREE_CODE (op1) == INTEGER_CST)
6906 {
6907 delta = op1;
6908 s = NULL_TREE;
6909 }
6910 else
6911 {
6912 /* Treat op1 as delta * 1. */
6913 delta = op1;
6914 s = integer_one_node;
6915 }
6916
6917 for (;; ref = TREE_OPERAND (ref, 0))
6918 {
6919 if (TREE_CODE (ref) == ARRAY_REF)
6920 {
6921 tree domain;
6922
6923 /* Remember if this was a multi-dimensional array. */
6924 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6925 mdim = true;
6926
6927 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6928 if (! domain)
6929 continue;
6930 itype = TREE_TYPE (domain);
6931
6932 step = array_ref_element_size (ref);
6933 if (TREE_CODE (step) != INTEGER_CST)
6934 continue;
6935
6936 if (s)
6937 {
6938 if (! tree_int_cst_equal (step, s))
6939 continue;
6940 }
6941 else
6942 {
6943 /* See whether delta is a multiple of the step. */
6944 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6945 if (! tmp)
6946 continue;
6947 delta = tmp;
6948 }
6949
6950 /* Only fold here if we can verify we do not overflow one
6951 dimension of a multi-dimensional array. */
6952 if (mdim)
6953 {
6954 tree tmp;
6955
6956 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6957 || !TYPE_MAX_VALUE (domain)
6958 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6959 continue;
6960
6961 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6962 fold_convert_loc (loc, itype,
6963 TREE_OPERAND (ref, 1)),
6964 fold_convert_loc (loc, itype, delta));
6965 if (!tmp
6966 || TREE_CODE (tmp) != INTEGER_CST
6967 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6968 continue;
6969 }
6970
6971 break;
6972 }
6973 else
6974 mdim = false;
6975
6976 if (!handled_component_p (ref))
6977 return NULL_TREE;
6978 }
6979
6980 /* We found a suitable array reference. So copy everything up to it,
6981 and replace the index. */
6982
6983 pref = TREE_OPERAND (addr, 0);
6984 ret = copy_node (pref);
6985 SET_EXPR_LOCATION (ret, loc);
6986 pos = ret;
6987
6988 while (pref != ref)
6989 {
6990 pref = TREE_OPERAND (pref, 0);
6991 TREE_OPERAND (pos, 0) = copy_node (pref);
6992 pos = TREE_OPERAND (pos, 0);
6993 }
6994
6995 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6996 fold_convert_loc (loc, itype,
6997 TREE_OPERAND (pos, 1)),
6998 fold_convert_loc (loc, itype, delta));
6999
7000 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7001 }
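
/* Illustrative sketch of the transformation above; this function is
   hypothetical and not part of the folder. For "int a[8]" the
   byte-offset form and the reindexed form compute the same address
   whenever the scaled offset is a whole number of elements, which is
   exactly the condition try_move_mult_to_index verifies. */

static int *
moved_mult_index_sketch (int a[8], int i)
{
  int *byte_form = (int *) ((char *) &a[1] + i * sizeof (int));
  int *index_form = &a[1 + i];  /* the shape the fold produces */
  return byte_form == index_form ? index_form : (int *) 0;
}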
7002
7003
7004 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7005 means A >= Y && A != MAX, but in this case we know that
7006 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7007
7008 static tree
7009 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7010 {
7011 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7012
7013 if (TREE_CODE (bound) == LT_EXPR)
7014 a = TREE_OPERAND (bound, 0);
7015 else if (TREE_CODE (bound) == GT_EXPR)
7016 a = TREE_OPERAND (bound, 1);
7017 else
7018 return NULL_TREE;
7019
7020 typea = TREE_TYPE (a);
7021 if (!INTEGRAL_TYPE_P (typea)
7022 && !POINTER_TYPE_P (typea))
7023 return NULL_TREE;
7024
7025 if (TREE_CODE (ineq) == LT_EXPR)
7026 {
7027 a1 = TREE_OPERAND (ineq, 1);
7028 y = TREE_OPERAND (ineq, 0);
7029 }
7030 else if (TREE_CODE (ineq) == GT_EXPR)
7031 {
7032 a1 = TREE_OPERAND (ineq, 0);
7033 y = TREE_OPERAND (ineq, 1);
7034 }
7035 else
7036 return NULL_TREE;
7037
7038 if (TREE_TYPE (a1) != typea)
7039 return NULL_TREE;
7040
7041 if (POINTER_TYPE_P (typea))
7042 {
7043 /* Convert the pointers to integers before taking the difference. */
7044 tree ta = fold_convert_loc (loc, ssizetype, a);
7045 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7046 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7047 }
7048 else
7049 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7050
7051 if (!diff || !integer_onep (diff))
7052 return NULL_TREE;
7053
7054 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7055 }
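
/* Illustrative sketch, not used by the folder: an exhaustive check of
   the identity above on a small unsigned domain. Since A < X rules out
   A being the maximum value, A + 1 cannot wrap, and A + 1 > Y is the
   same test as A >= Y. */

static int
nonsharp_ineq_sketch (void)
{
  unsigned int a, x, y;
  for (a = 0; a < 16; a++)
    for (x = 0; x < 16; x++)
      for (y = 0; y < 16; y++)
        if ((a < x && a + 1 > y) != (a < x && a >= y))
          return 0;  /* counterexample found */
  return 1;          /* identity holds on this domain */
}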
7056
7057 /* Fold a sum or difference of at least one multiplication.
7058 Returns the folded tree or NULL if no simplification could be made. */
7059
7060 static tree
7061 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7062 tree arg0, tree arg1)
7063 {
7064 tree arg00, arg01, arg10, arg11;
7065 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7066
7067 /* (A * C) +- (B * C) -> (A+-B) * C.
7068 (A * C) +- A -> A * (C+-1).
7069 We are most concerned about the case where C is a constant,
7070 but other combinations show up during loop reduction. Since
7071 it is not difficult, try all four possibilities. */
7072
7073 if (TREE_CODE (arg0) == MULT_EXPR)
7074 {
7075 arg00 = TREE_OPERAND (arg0, 0);
7076 arg01 = TREE_OPERAND (arg0, 1);
7077 }
7078 else if (TREE_CODE (arg0) == INTEGER_CST)
7079 {
7080 arg00 = build_one_cst (type);
7081 arg01 = arg0;
7082 }
7083 else
7084 {
7085 /* We cannot generate constant 1 for fract. */
7086 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7087 return NULL_TREE;
7088 arg00 = arg0;
7089 arg01 = build_one_cst (type);
7090 }
7091 if (TREE_CODE (arg1) == MULT_EXPR)
7092 {
7093 arg10 = TREE_OPERAND (arg1, 0);
7094 arg11 = TREE_OPERAND (arg1, 1);
7095 }
7096 else if (TREE_CODE (arg1) == INTEGER_CST)
7097 {
7098 arg10 = build_one_cst (type);
7099 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7100 the purpose of this canonicalization. */
7101 if (TREE_INT_CST_HIGH (arg1) == -1
7102 && negate_expr_p (arg1)
7103 && code == PLUS_EXPR)
7104 {
7105 arg11 = negate_expr (arg1);
7106 code = MINUS_EXPR;
7107 }
7108 else
7109 arg11 = arg1;
7110 }
7111 else
7112 {
7113 /* We cannot generate constant 1 for fract. */
7114 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7115 return NULL_TREE;
7116 arg10 = arg1;
7117 arg11 = build_one_cst (type);
7118 }
7119 same = NULL_TREE;
7120
7121 if (operand_equal_p (arg01, arg11, 0))
7122 same = arg01, alt0 = arg00, alt1 = arg10;
7123 else if (operand_equal_p (arg00, arg10, 0))
7124 same = arg00, alt0 = arg01, alt1 = arg11;
7125 else if (operand_equal_p (arg00, arg11, 0))
7126 same = arg00, alt0 = arg01, alt1 = arg10;
7127 else if (operand_equal_p (arg01, arg10, 0))
7128 same = arg01, alt0 = arg00, alt1 = arg11;
7129
7130 /* No identical multiplicands; see if we can find a common
7131 power-of-two factor in non-power-of-two multiplies. This
7132 can help in multi-dimensional array access. */
7133 else if (host_integerp (arg01, 0)
7134 && host_integerp (arg11, 0))
7135 {
7136 HOST_WIDE_INT int01, int11, tmp;
7137 bool swap = false;
7138 tree maybe_same;
7139 int01 = TREE_INT_CST_LOW (arg01);
7140 int11 = TREE_INT_CST_LOW (arg11);
7141
7142 /* Move min of absolute values to int11. */
7143 if ((int01 >= 0 ? int01 : -int01)
7144 < (int11 >= 0 ? int11 : -int11))
7145 {
7146 tmp = int01, int01 = int11, int11 = tmp;
7147 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7148 maybe_same = arg01;
7149 swap = true;
7150 }
7151 else
7152 maybe_same = arg11;
7153
7154 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7155 /* The remainder should not be a constant, otherwise we
7156 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7157 increases the number of multiplications necessary. */
7158 && TREE_CODE (arg10) != INTEGER_CST)
7159 {
7160 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7161 build_int_cst (TREE_TYPE (arg00),
7162 int01 / int11));
7163 alt1 = arg10;
7164 same = maybe_same;
7165 if (swap)
7166 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7167 }
7168 }
7169
7170 if (same)
7171 return fold_build2_loc (loc, MULT_EXPR, type,
7172 fold_build2_loc (loc, code, type,
7173 fold_convert_loc (loc, type, alt0),
7174 fold_convert_loc (loc, type, alt1)),
7175 fold_convert_loc (loc, type, same));
7176
7177 return NULL_TREE;
7178 }
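
/* Illustrative sketch of the two shapes handled above; the function is
   hypothetical and not called anywhere. The first expression folds by
   the common multiplicand, the second by the common power-of-two
   factor that the host_integerp path extracts. */

static long
plusminus_mult_sketch (long a, long b, long c)
{
  long common_operand = a * c + b * c;  /* folds to (a + b) * c */
  long common_pow2 = a * 12 + b * 4;    /* folds to (a * 3 + b) * 4 */
  return common_operand + common_pow2;
}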
7179
7180 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7181 specified by EXPR into the buffer PTR of length LEN bytes.
7182 Return the number of bytes placed in the buffer, or zero
7183 upon failure. */
7184
7185 static int
7186 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7187 {
7188 tree type = TREE_TYPE (expr);
7189 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7190 int byte, offset, word, words;
7191 unsigned char value;
7192
7193 if (total_bytes > len)
7194 return 0;
7195 words = total_bytes / UNITS_PER_WORD;
7196
7197 for (byte = 0; byte < total_bytes; byte++)
7198 {
7199 int bitpos = byte * BITS_PER_UNIT;
7200 if (bitpos < HOST_BITS_PER_WIDE_INT)
7201 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7202 else
7203 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7204 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7205
7206 if (total_bytes > UNITS_PER_WORD)
7207 {
7208 word = byte / UNITS_PER_WORD;
7209 if (WORDS_BIG_ENDIAN)
7210 word = (words - 1) - word;
7211 offset = word * UNITS_PER_WORD;
7212 if (BYTES_BIG_ENDIAN)
7213 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7214 else
7215 offset += byte % UNITS_PER_WORD;
7216 }
7217 else
7218 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7219 ptr[offset] = value;
7220 }
7221 return total_bytes;
7222 }
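
/* Illustrative sketch, not used by the encoder: the byte-to-offset
   mapping above with the target macros turned into parameters, for
   values wider than one word. The parameter names are hypothetical. */

static int
encode_byte_offset_sketch (int byte, int total_bytes, int units_per_word,
                           int words_big_endian, int bytes_big_endian)
{
  int words = total_bytes / units_per_word;
  int word = byte / units_per_word;
  int offset;

  if (words_big_endian)
    word = (words - 1) - word;
  offset = word * units_per_word;
  if (bytes_big_endian)
    offset += (units_per_word - 1) - (byte % units_per_word);
  else
    offset += byte % units_per_word;
  return offset;
}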
7223
7224
7225 /* Subroutine of native_encode_expr. Encode the REAL_CST
7226 specified by EXPR into the buffer PTR of length LEN bytes.
7227 Return the number of bytes placed in the buffer, or zero
7228 upon failure. */
7229
7230 static int
7231 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7232 {
7233 tree type = TREE_TYPE (expr);
7234 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7235 int byte, offset, word, words, bitpos;
7236 unsigned char value;
7237
7238 /* There are always 32 bits in each long, no matter the size of
7239 the host's long. We handle floating point representations with
7240 up to 192 bits. */
7241 long tmp[6];
7242
7243 if (total_bytes > len)
7244 return 0;
7245 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7246
7247 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7248
7249 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7250 bitpos += BITS_PER_UNIT)
7251 {
7252 byte = (bitpos / BITS_PER_UNIT) & 3;
7253 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7254
7255 if (UNITS_PER_WORD < 4)
7256 {
7257 word = byte / UNITS_PER_WORD;
7258 if (WORDS_BIG_ENDIAN)
7259 word = (words - 1) - word;
7260 offset = word * UNITS_PER_WORD;
7261 if (BYTES_BIG_ENDIAN)
7262 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7263 else
7264 offset += byte % UNITS_PER_WORD;
7265 }
7266 else
7267 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7268 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7269 }
7270 return total_bytes;
7271 }
7272
7273 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7274 specified by EXPR into the buffer PTR of length LEN bytes.
7275 Return the number of bytes placed in the buffer, or zero
7276 upon failure. */
7277
7278 static int
7279 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7280 {
7281 int rsize, isize;
7282 tree part;
7283
7284 part = TREE_REALPART (expr);
7285 rsize = native_encode_expr (part, ptr, len);
7286 if (rsize == 0)
7287 return 0;
7288 part = TREE_IMAGPART (expr);
7289 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7290 if (isize != rsize)
7291 return 0;
7292 return rsize + isize;
7293 }
7294
7295
7296 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7297 specified by EXPR into the buffer PTR of length LEN bytes.
7298 Return the number of bytes placed in the buffer, or zero
7299 upon failure. */
7300
7301 static int
7302 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7303 {
7304 int i, size, offset, count;
7305 tree itype, elem, elements;
7306
7307 offset = 0;
7308 elements = TREE_VECTOR_CST_ELTS (expr);
7309 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7310 itype = TREE_TYPE (TREE_TYPE (expr));
7311 size = GET_MODE_SIZE (TYPE_MODE (itype));
7312 for (i = 0; i < count; i++)
7313 {
7314 if (elements)
7315 {
7316 elem = TREE_VALUE (elements);
7317 elements = TREE_CHAIN (elements);
7318 }
7319 else
7320 elem = NULL_TREE;
7321
7322 if (elem)
7323 {
7324 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7325 return 0;
7326 }
7327 else
7328 {
7329 if (offset + size > len)
7330 return 0;
7331 memset (ptr+offset, 0, size);
7332 }
7333 offset += size;
7334 }
7335 return offset;
7336 }
7337
7338
7339 /* Subroutine of native_encode_expr. Encode the STRING_CST
7340 specified by EXPR into the buffer PTR of length LEN bytes.
7341 Return the number of bytes placed in the buffer, or zero
7342 upon failure. */
7343
7344 static int
7345 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7346 {
7347 tree type = TREE_TYPE (expr);
7348 HOST_WIDE_INT total_bytes;
7349
7350 if (TREE_CODE (type) != ARRAY_TYPE
7351 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7352 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7353 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7354 return 0;
7355 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7356 if (total_bytes > len)
7357 return 0;
7358 if (TREE_STRING_LENGTH (expr) < total_bytes)
7359 {
7360 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7361 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7362 total_bytes - TREE_STRING_LENGTH (expr));
7363 }
7364 else
7365 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7366 return total_bytes;
7367 }
7368
7369
7370 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7371 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7372 buffer PTR of length LEN bytes. Return the number of bytes
7373 placed in the buffer, or zero upon failure. */
7374
7375 int
7376 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7377 {
7378 switch (TREE_CODE (expr))
7379 {
7380 case INTEGER_CST:
7381 return native_encode_int (expr, ptr, len);
7382
7383 case REAL_CST:
7384 return native_encode_real (expr, ptr, len);
7385
7386 case COMPLEX_CST:
7387 return native_encode_complex (expr, ptr, len);
7388
7389 case VECTOR_CST:
7390 return native_encode_vector (expr, ptr, len);
7391
7392 case STRING_CST:
7393 return native_encode_string (expr, ptr, len);
7394
7395 default:
7396 return 0;
7397 }
7398 }
7399
7400
7401 /* Subroutine of native_interpret_expr. Interpret the contents of
7402 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7403 If the buffer cannot be interpreted, return NULL_TREE. */
7404
7405 static tree
7406 native_interpret_int (tree type, const unsigned char *ptr, int len)
7407 {
7408 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7409 int byte, offset, word, words;
7410 unsigned char value;
7411 unsigned int HOST_WIDE_INT lo = 0;
7412 HOST_WIDE_INT hi = 0;
7413
7414 if (total_bytes > len)
7415 return NULL_TREE;
7416 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7417 return NULL_TREE;
7418 words = total_bytes / UNITS_PER_WORD;
7419
7420 for (byte = 0; byte < total_bytes; byte++)
7421 {
7422 int bitpos = byte * BITS_PER_UNIT;
7423 if (total_bytes > UNITS_PER_WORD)
7424 {
7425 word = byte / UNITS_PER_WORD;
7426 if (WORDS_BIG_ENDIAN)
7427 word = (words - 1) - word;
7428 offset = word * UNITS_PER_WORD;
7429 if (BYTES_BIG_ENDIAN)
7430 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7431 else
7432 offset += byte % UNITS_PER_WORD;
7433 }
7434 else
7435 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7436 value = ptr[offset];
7437
7438 if (bitpos < HOST_BITS_PER_WIDE_INT)
7439 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7440 else
7441 hi |= (unsigned HOST_WIDE_INT) value
7442 << (bitpos - HOST_BITS_PER_WIDE_INT);
7443 }
7444
7445 return build_int_cst_wide_type (type, lo, hi);
7446 }
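
/* Illustrative sketch, not called anywhere: within this file the
   encode/interpret pair round-trips an INTEGER_CST through its target
   byte image, which is the basis of fold_view_convert_expr below. */

static tree
int_roundtrip_sketch (tree cst)
{
  unsigned char buf[16];  /* enough for 128-bit integers */
  int len = native_encode_int (cst, buf, sizeof (buf));
  return len ? native_interpret_int (TREE_TYPE (cst), buf, len) : NULL_TREE;
}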
7447
7448
7449 /* Subroutine of native_interpret_expr. Interpret the contents of
7450 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7451 If the buffer cannot be interpreted, return NULL_TREE. */
7452
7453 static tree
7454 native_interpret_real (tree type, const unsigned char *ptr, int len)
7455 {
7456 enum machine_mode mode = TYPE_MODE (type);
7457 int total_bytes = GET_MODE_SIZE (mode);
7458 int byte, offset, word, words, bitpos;
7459 unsigned char value;
7460 /* There are always 32 bits in each long, no matter the size of
7461 the host's long. We handle floating point representations with
7462 up to 192 bits. */
7463 REAL_VALUE_TYPE r;
7464 long tmp[6];
7465
7466 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7467 if (total_bytes > len || total_bytes > 24)
7468 return NULL_TREE;
7469 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7470
7471 memset (tmp, 0, sizeof (tmp));
7472 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7473 bitpos += BITS_PER_UNIT)
7474 {
7475 byte = (bitpos / BITS_PER_UNIT) & 3;
7476 if (UNITS_PER_WORD < 4)
7477 {
7478 word = byte / UNITS_PER_WORD;
7479 if (WORDS_BIG_ENDIAN)
7480 word = (words - 1) - word;
7481 offset = word * UNITS_PER_WORD;
7482 if (BYTES_BIG_ENDIAN)
7483 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7484 else
7485 offset += byte % UNITS_PER_WORD;
7486 }
7487 else
7488 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7489 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7490
7491 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7492 }
7493
7494 real_from_target (&r, tmp, mode);
7495 return build_real (type, r);
7496 }
7497
7498
7499 /* Subroutine of native_interpret_expr. Interpret the contents of
7500 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7501 If the buffer cannot be interpreted, return NULL_TREE. */
7502
7503 static tree
7504 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7505 {
7506 tree etype, rpart, ipart;
7507 int size;
7508
7509 etype = TREE_TYPE (type);
7510 size = GET_MODE_SIZE (TYPE_MODE (etype));
7511 if (size * 2 > len)
7512 return NULL_TREE;
7513 rpart = native_interpret_expr (etype, ptr, size);
7514 if (!rpart)
7515 return NULL_TREE;
7516 ipart = native_interpret_expr (etype, ptr+size, size);
7517 if (!ipart)
7518 return NULL_TREE;
7519 return build_complex (type, rpart, ipart);
7520 }
7521
7522
7523 /* Subroutine of native_interpret_expr. Interpret the contents of
7524 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7525 If the buffer cannot be interpreted, return NULL_TREE. */
7526
7527 static tree
7528 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7529 {
7530 tree etype, elem, elements;
7531 int i, size, count;
7532
7533 etype = TREE_TYPE (type);
7534 size = GET_MODE_SIZE (TYPE_MODE (etype));
7535 count = TYPE_VECTOR_SUBPARTS (type);
7536 if (size * count > len)
7537 return NULL_TREE;
7538
7539 elements = NULL_TREE;
7540 for (i = count - 1; i >= 0; i--)
7541 {
7542 elem = native_interpret_expr (etype, ptr+(i*size), size);
7543 if (!elem)
7544 return NULL_TREE;
7545 elements = tree_cons (NULL_TREE, elem, elements);
7546 }
7547 return build_vector (type, elements);
7548 }
7549
7550
7551 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7552 the buffer PTR of length LEN as a constant of type TYPE. For
7553 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7554 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7555 return NULL_TREE. */
7556
7557 tree
7558 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7559 {
7560 switch (TREE_CODE (type))
7561 {
7562 case INTEGER_TYPE:
7563 case ENUMERAL_TYPE:
7564 case BOOLEAN_TYPE:
7565 return native_interpret_int (type, ptr, len);
7566
7567 case REAL_TYPE:
7568 return native_interpret_real (type, ptr, len);
7569
7570 case COMPLEX_TYPE:
7571 return native_interpret_complex (type, ptr, len);
7572
7573 case VECTOR_TYPE:
7574 return native_interpret_vector (type, ptr, len);
7575
7576 default:
7577 return NULL_TREE;
7578 }
7579 }
7580
7581
7582 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7583 TYPE at compile-time. If we're unable to perform the conversion
7584 return NULL_TREE. */
7585
7586 static tree
7587 fold_view_convert_expr (tree type, tree expr)
7588 {
7589 /* We support up to 512-bit values (for V8DFmode). */
7590 unsigned char buffer[64];
7591 int len;
7592
7593 /* Check that the host and target are sane. */
7594 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7595 return NULL_TREE;
7596
7597 len = native_encode_expr (expr, buffer, sizeof (buffer));
7598 if (len == 0)
7599 return NULL_TREE;
7600
7601 return native_interpret_expr (type, buffer, len);
7602 }
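
/* Illustrative sketch of what the fold above computes, expressed in
   plain C; hypothetical and host-side only. Reinterpreting the bytes
   of a 32-bit integer as a float mirrors VIEW_CONVERT_EXPR when host
   and target agree on format and endianness. */

static float
view_convert_sketch (unsigned int u)
{
  float f;
  memcpy (&f, &u, sizeof (f));  /* bitwise reinterpretation */
  return f;
}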
7603
7604 /* Build an expression for the address of T. Folds away INDIRECT_REF
7605 to avoid confusing the gimplify process. */
7606
7607 tree
7608 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7609 {
7610 /* The size of the object is not relevant when talking about its address. */
7611 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7612 t = TREE_OPERAND (t, 0);
7613
7614 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7615 if (TREE_CODE (t) == INDIRECT_REF
7616 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7617 {
7618 t = TREE_OPERAND (t, 0);
7619
7620 if (TREE_TYPE (t) != ptrtype)
7621 {
7622 t = build1 (NOP_EXPR, ptrtype, t);
7623 SET_EXPR_LOCATION (t, loc);
7624 }
7625 }
7626 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7627 {
7628 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7629
7630 if (TREE_TYPE (t) != ptrtype)
7631 t = fold_convert_loc (loc, ptrtype, t);
7632 }
7633 else
7634 {
7635 t = build1 (ADDR_EXPR, ptrtype, t);
7636 SET_EXPR_LOCATION (t, loc);
7637 }
7638
7639 return t;
7640 }
7641
7642 /* Build an expression for the address of T. */
7643
7644 tree
7645 build_fold_addr_expr_loc (location_t loc, tree t)
7646 {
7647 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7648
7649 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7650 }
7651
7652 /* Fold a unary expression of code CODE and type TYPE with operand
7653 OP0. Return the folded expression if folding is successful.
7654 Otherwise, return NULL_TREE. */
7655
7656 tree
7657 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7658 {
7659 tree tem;
7660 tree arg0;
7661 enum tree_code_class kind = TREE_CODE_CLASS (code);
7662
7663 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7664 && TREE_CODE_LENGTH (code) == 1);
7665
7666 arg0 = op0;
7667 if (arg0)
7668 {
7669 if (CONVERT_EXPR_CODE_P (code)
7670 || code == FLOAT_EXPR || code == ABS_EXPR)
7671 {
7672 /* Don't use STRIP_NOPS, because signedness of argument type
7673 matters. */
7674 STRIP_SIGN_NOPS (arg0);
7675 }
7676 else
7677 {
7678 /* Strip any conversions that don't change the mode. This
7679 is safe for every expression, except for a comparison
7680 expression because its signedness is derived from its
7681 operands.
7682
7683 Note that this is done as an internal manipulation within
7684 the constant folder, in order to find the simplest
7685 representation of the arguments so that their form can be
7686 studied. In any case, the appropriate type conversions
7687 should be put back in the tree that will get out of the
7688 constant folder. */
7689 STRIP_NOPS (arg0);
7690 }
7691 }
7692
7693 if (TREE_CODE_CLASS (code) == tcc_unary)
7694 {
7695 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7696 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7697 fold_build1_loc (loc, code, type,
7698 fold_convert_loc (loc, TREE_TYPE (op0),
7699 TREE_OPERAND (arg0, 1))));
7700 else if (TREE_CODE (arg0) == COND_EXPR)
7701 {
7702 tree arg01 = TREE_OPERAND (arg0, 1);
7703 tree arg02 = TREE_OPERAND (arg0, 2);
7704 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7705 arg01 = fold_build1_loc (loc, code, type,
7706 fold_convert_loc (loc,
7707 TREE_TYPE (op0), arg01));
7708 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7709 arg02 = fold_build1_loc (loc, code, type,
7710 fold_convert_loc (loc,
7711 TREE_TYPE (op0), arg02));
7712 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7713 arg01, arg02);
7714
7715 /* If this was a conversion, and all we did was to move it
7716 inside the COND_EXPR, bring it back out. But leave it if
7717 it is a conversion from integer to integer and the
7718 result precision is no wider than a word since such a
7719 conversion is cheap and may be optimized away by combine,
7720 while it couldn't if it were outside the COND_EXPR. Then return
7721 so we don't get into an infinite recursion loop taking the
7722 conversion out and then back in. */
7723
7724 if ((CONVERT_EXPR_CODE_P (code)
7725 || code == NON_LVALUE_EXPR)
7726 && TREE_CODE (tem) == COND_EXPR
7727 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7728 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7729 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7730 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7731 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7732 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7733 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7734 && (INTEGRAL_TYPE_P
7735 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7736 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7737 || flag_syntax_only))
7738 {
7739 tem = build1 (code, type,
7740 build3 (COND_EXPR,
7741 TREE_TYPE (TREE_OPERAND
7742 (TREE_OPERAND (tem, 1), 0)),
7743 TREE_OPERAND (tem, 0),
7744 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7745 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7746 SET_EXPR_LOCATION (tem, loc);
7747 }
7748 return tem;
7749 }
7750 else if (COMPARISON_CLASS_P (arg0))
7751 {
7752 if (TREE_CODE (type) == BOOLEAN_TYPE)
7753 {
7754 arg0 = copy_node (arg0);
7755 TREE_TYPE (arg0) = type;
7756 return arg0;
7757 }
7758 else if (TREE_CODE (type) != INTEGER_TYPE)
7759 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7760 fold_build1_loc (loc, code, type,
7761 integer_one_node),
7762 fold_build1_loc (loc, code, type,
7763 integer_zero_node));
7764 }
7765 }
7766
7767 switch (code)
7768 {
7769 case PAREN_EXPR:
7770 /* Re-association barriers around constants and other re-association
7771 barriers can be removed. */
7772 if (CONSTANT_CLASS_P (op0)
7773 || TREE_CODE (op0) == PAREN_EXPR)
7774 return fold_convert_loc (loc, type, op0);
7775 return NULL_TREE;
7776
7777 CASE_CONVERT:
7778 case FLOAT_EXPR:
7779 case FIX_TRUNC_EXPR:
7780 if (TREE_TYPE (op0) == type)
7781 return op0;
7782
7783 /* If we have (type) (a CMP b) and type is an integral type, return
7784 new expression involving the new type. */
7785 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7786 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7787 TREE_OPERAND (op0, 1));
7788
7789 /* Handle cases of two conversions in a row. */
7790 if (CONVERT_EXPR_P (op0))
7791 {
7792 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7793 tree inter_type = TREE_TYPE (op0);
7794 int inside_int = INTEGRAL_TYPE_P (inside_type);
7795 int inside_ptr = POINTER_TYPE_P (inside_type);
7796 int inside_float = FLOAT_TYPE_P (inside_type);
7797 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7798 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7799 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7800 int inter_int = INTEGRAL_TYPE_P (inter_type);
7801 int inter_ptr = POINTER_TYPE_P (inter_type);
7802 int inter_float = FLOAT_TYPE_P (inter_type);
7803 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7804 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7805 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7806 int final_int = INTEGRAL_TYPE_P (type);
7807 int final_ptr = POINTER_TYPE_P (type);
7808 int final_float = FLOAT_TYPE_P (type);
7809 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7810 unsigned int final_prec = TYPE_PRECISION (type);
7811 int final_unsignedp = TYPE_UNSIGNED (type);
7812
7813 /* In addition to the cases of two conversions in a row
7814 handled below, if we are converting something to its own
7815 type via an object of identical or wider precision, neither
7816 conversion is needed. */
7817 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7818 && (((inter_int || inter_ptr) && final_int)
7819 || (inter_float && final_float))
7820 && inter_prec >= final_prec)
7821 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7822
7823 /* Likewise, if the intermediate and initial types are either both
7824 float or both integer, we don't need the middle conversion if the
7825 former is wider than the latter and doesn't change the signedness
7826 (for integers). Avoid this if the final type is a pointer since
7827 then we sometimes need the middle conversion. Likewise if the
7828 final type has a precision not equal to the size of its mode. */
7829 if (((inter_int && inside_int)
7830 || (inter_float && inside_float)
7831 || (inter_vec && inside_vec))
7832 && inter_prec >= inside_prec
7833 && (inter_float || inter_vec
7834 || inter_unsignedp == inside_unsignedp)
7835 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7836 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7837 && ! final_ptr
7838 && (! final_vec || inter_prec == inside_prec))
7839 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7840
7841 /* If we have a sign-extension of a zero-extended value, we can
7842 replace that by a single zero-extension. */
7843 if (inside_int && inter_int && final_int
7844 && inside_prec < inter_prec && inter_prec < final_prec
7845 && inside_unsignedp && !inter_unsignedp)
7846 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7847
7848 /* Two conversions in a row are not needed unless:
7849 - some conversion is floating-point (overstrict for now), or
7850 - some conversion is a vector (overstrict for now), or
7851 - the intermediate type is narrower than both initial and
7852 final, or
7853 - the intermediate type and innermost type differ in signedness,
7854 and the outermost type is wider than the intermediate, or
7855 - the initial type is a pointer type and the precisions of the
7856 intermediate and final types differ, or
7857 - the final type is a pointer type and the precisions of the
7858 initial and intermediate types differ. */
7859 if (! inside_float && ! inter_float && ! final_float
7860 && ! inside_vec && ! inter_vec && ! final_vec
7861 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7862 && ! (inside_int && inter_int
7863 && inter_unsignedp != inside_unsignedp
7864 && inter_prec < final_prec)
7865 && ((inter_unsignedp && inter_prec > inside_prec)
7866 == (final_unsignedp && final_prec > inter_prec))
7867 && ! (inside_ptr && inter_prec != final_prec)
7868 && ! (final_ptr && inside_prec != inter_prec)
7869 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7870 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7871 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7872 }
7873
7874 /* Handle (T *)&A.B.C for A being of type T and B and C
7875 living at offset zero. This occurs frequently in
7876 C++ upcasting and then accessing the base. */
7877 if (TREE_CODE (op0) == ADDR_EXPR
7878 && POINTER_TYPE_P (type)
7879 && handled_component_p (TREE_OPERAND (op0, 0)))
7880 {
7881 HOST_WIDE_INT bitsize, bitpos;
7882 tree offset;
7883 enum machine_mode mode;
7884 int unsignedp, volatilep;
7885 tree base = TREE_OPERAND (op0, 0);
7886 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7887 &mode, &unsignedp, &volatilep, false);
7888 /* If the reference was to a (constant) zero offset, we can use
7889 the address of the base if it has the same base type
7890 as the result type and the pointer type is unqualified. */
7891 if (! offset && bitpos == 0
7892 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7893 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7894 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7895 return fold_convert_loc (loc, type,
7896 build_fold_addr_expr_loc (loc, base));
7897 }
7898
7899 if (TREE_CODE (op0) == MODIFY_EXPR
7900 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7901 /* Detect assigning a bitfield. */
7902 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7903 && DECL_BIT_FIELD
7904 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7905 {
7906 /* Don't leave an assignment inside a conversion
7907 unless assigning a bitfield. */
7908 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7909 /* First do the assignment, then return converted constant. */
7910 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7911 TREE_NO_WARNING (tem) = 1;
7912 TREE_USED (tem) = 1;
7913 SET_EXPR_LOCATION (tem, loc);
7914 return tem;
7915 }
7916
7917 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7918 constant (if x has signed type, the sign bit cannot be set
7919 in c). This folds extension into the BIT_AND_EXPR.
7920 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7921 very likely don't have maximal range for their precision and this
7922 transformation effectively doesn't preserve non-maximal ranges. */
7923 if (TREE_CODE (type) == INTEGER_TYPE
7924 && TREE_CODE (op0) == BIT_AND_EXPR
7925 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7926 {
7927 tree and_expr = op0;
7928 tree and0 = TREE_OPERAND (and_expr, 0);
7929 tree and1 = TREE_OPERAND (and_expr, 1);
7930 int change = 0;
7931
7932 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7933 || (TYPE_PRECISION (type)
7934 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7935 change = 1;
7936 else if (TYPE_PRECISION (TREE_TYPE (and1))
7937 <= HOST_BITS_PER_WIDE_INT
7938 && host_integerp (and1, 1))
7939 {
7940 unsigned HOST_WIDE_INT cst;
7941
7942 cst = tree_low_cst (and1, 1);
7943 cst &= (HOST_WIDE_INT) -1
7944 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7945 change = (cst == 0);
7946 #ifdef LOAD_EXTEND_OP
7947 if (change
7948 && !flag_syntax_only
7949 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7950 == ZERO_EXTEND))
7951 {
7952 tree uns = unsigned_type_for (TREE_TYPE (and0));
7953 and0 = fold_convert_loc (loc, uns, and0);
7954 and1 = fold_convert_loc (loc, uns, and1);
7955 }
7956 #endif
7957 }
7958 if (change)
7959 {
7960 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7961 TREE_INT_CST_HIGH (and1), 0,
7962 TREE_OVERFLOW (and1));
7963 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7964 fold_convert_loc (loc, type, and0), tem);
7965 }
7966 }
7967
7968 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7969 when one of the new casts will fold away. Conservatively we assume
7970 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7971 if (POINTER_TYPE_P (type)
7972 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7973 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7974 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7975 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7976 {
7977 tree arg00 = TREE_OPERAND (arg0, 0);
7978 tree arg01 = TREE_OPERAND (arg0, 1);
7979
7980 return fold_build2_loc (loc,
7981 TREE_CODE (arg0), type,
7982 fold_convert_loc (loc, type, arg00),
7983 fold_convert_loc (loc, sizetype, arg01));
7984 }
7985
7986 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7987 of the same precision, and X has an integer type not narrower than
7988 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7989 if (INTEGRAL_TYPE_P (type)
7990 && TREE_CODE (op0) == BIT_NOT_EXPR
7991 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7992 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7993 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7994 {
7995 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7996 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7997 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7998 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7999 fold_convert_loc (loc, type, tem));
8000 }
8001
8002 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8003 type of X and Y (integer types only). */
8004 if (INTEGRAL_TYPE_P (type)
8005 && TREE_CODE (op0) == MULT_EXPR
8006 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8007 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8008 {
8009 /* Be careful not to introduce new overflows. */
8010 tree mult_type;
8011 if (TYPE_OVERFLOW_WRAPS (type))
8012 mult_type = type;
8013 else
8014 mult_type = unsigned_type_for (type);
8015
8016 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8017 {
8018 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8019 fold_convert_loc (loc, mult_type,
8020 TREE_OPERAND (op0, 0)),
8021 fold_convert_loc (loc, mult_type,
8022 TREE_OPERAND (op0, 1)));
8023 return fold_convert_loc (loc, type, tem);
8024 }
8025 }
8026
8027 tem = fold_convert_const (code, type, op0);
8028 return tem ? tem : NULL_TREE;
8029
8030 case ADDR_SPACE_CONVERT_EXPR:
8031 if (integer_zerop (arg0))
8032 return fold_convert_const (code, type, arg0);
8033 return NULL_TREE;
8034
8035 case FIXED_CONVERT_EXPR:
8036 tem = fold_convert_const (code, type, arg0);
8037 return tem ? tem : NULL_TREE;
8038
8039 case VIEW_CONVERT_EXPR:
8040 if (TREE_TYPE (op0) == type)
8041 return op0;
8042 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8043 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8044 type, TREE_OPERAND (op0, 0));
8045
8046 /* For integral conversions with the same precision or pointer
8047 conversions use a NOP_EXPR instead. */
8048 if ((INTEGRAL_TYPE_P (type)
8049 || POINTER_TYPE_P (type))
8050 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8051 || POINTER_TYPE_P (TREE_TYPE (op0)))
8052 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8053 return fold_convert_loc (loc, type, op0);
8054
8055 /* Strip inner integral conversions that do not change the precision. */
8056 if (CONVERT_EXPR_P (op0)
8057 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8058 || POINTER_TYPE_P (TREE_TYPE (op0)))
8059 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8060 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8061 && (TYPE_PRECISION (TREE_TYPE (op0))
8062 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8063 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8064 type, TREE_OPERAND (op0, 0));
8065
8066 return fold_view_convert_expr (type, op0);
8067
8068 case NEGATE_EXPR:
8069 tem = fold_negate_expr (loc, arg0);
8070 if (tem)
8071 return fold_convert_loc (loc, type, tem);
8072 return NULL_TREE;
8073
8074 case ABS_EXPR:
8075 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8076 return fold_abs_const (arg0, type);
8077 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8078 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8079 /* Convert fabs((double)float) into (double)fabsf(float). */
8080 else if (TREE_CODE (arg0) == NOP_EXPR
8081 && TREE_CODE (type) == REAL_TYPE)
8082 {
8083 tree targ0 = strip_float_extensions (arg0);
8084 if (targ0 != arg0)
8085 return fold_convert_loc (loc, type,
8086 fold_build1_loc (loc, ABS_EXPR,
8087 TREE_TYPE (targ0),
8088 targ0));
8089 }
8090 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8091 else if (TREE_CODE (arg0) == ABS_EXPR)
8092 return arg0;
8093 else if (tree_expr_nonnegative_p (arg0))
8094 return arg0;
8095
8096 /* Strip sign ops from argument. */
8097 if (TREE_CODE (type) == REAL_TYPE)
8098 {
8099 tem = fold_strip_sign_ops (arg0);
8100 if (tem)
8101 return fold_build1_loc (loc, ABS_EXPR, type,
8102 fold_convert_loc (loc, type, tem));
8103 }
8104 return NULL_TREE;
8105
8106 case CONJ_EXPR:
8107 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8108 return fold_convert_loc (loc, type, arg0);
8109 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8110 {
8111 tree itype = TREE_TYPE (type);
8112 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8113 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8114 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8115 negate_expr (ipart));
8116 }
8117 if (TREE_CODE (arg0) == COMPLEX_CST)
8118 {
8119 tree itype = TREE_TYPE (type);
8120 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8121 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8122 return build_complex (type, rpart, negate_expr (ipart));
8123 }
8124 if (TREE_CODE (arg0) == CONJ_EXPR)
8125 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8126 return NULL_TREE;
8127
8128 case BIT_NOT_EXPR:
8129 if (TREE_CODE (arg0) == INTEGER_CST)
8130 return fold_not_const (arg0, type);
8131 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8132 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8133 /* Convert ~ (-A) to A - 1. */
8134 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8135 return fold_build2_loc (loc, MINUS_EXPR, type,
8136 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8137 build_int_cst (type, 1));
8138 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8139 else if (INTEGRAL_TYPE_P (type)
8140 && ((TREE_CODE (arg0) == MINUS_EXPR
8141 && integer_onep (TREE_OPERAND (arg0, 1)))
8142 || (TREE_CODE (arg0) == PLUS_EXPR
8143 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8144 return fold_build1_loc (loc, NEGATE_EXPR, type,
8145 fold_convert_loc (loc, type,
8146 TREE_OPERAND (arg0, 0)));
8147 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8148 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8149 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8150 fold_convert_loc (loc, type,
8151 TREE_OPERAND (arg0, 0)))))
8152 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8153 fold_convert_loc (loc, type,
8154 TREE_OPERAND (arg0, 1)));
8155 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8156 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8157 fold_convert_loc (loc, type,
8158 TREE_OPERAND (arg0, 1)))))
8159 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8160 fold_convert_loc (loc, type,
8161 TREE_OPERAND (arg0, 0)), tem);
8162 /* Perform BIT_NOT_EXPR on each element individually. */
8163 else if (TREE_CODE (arg0) == VECTOR_CST)
8164 {
8165 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8166 int count = TYPE_VECTOR_SUBPARTS (type), i;
8167
8168 for (i = 0; i < count; i++)
8169 {
8170 if (elements)
8171 {
8172 elem = TREE_VALUE (elements);
8173 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8174 if (elem == NULL_TREE)
8175 break;
8176 elements = TREE_CHAIN (elements);
8177 }
8178 else
8179 elem = build_int_cst (TREE_TYPE (type), -1);
8180 list = tree_cons (NULL_TREE, elem, list);
8181 }
8182 if (i == count)
8183 return build_vector (type, nreverse (list));
8184 }
8185
8186 return NULL_TREE;
8187
8188 case TRUTH_NOT_EXPR:
8189 /* The argument to invert_truthvalue must have Boolean type. */
8190 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8191 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8192
8193 /* Note that the operand of this must be an int
8194 and its values must be 0 or 1.
8195 ("true" is a fixed value perhaps depending on the language,
8196 but we don't handle values other than 1 correctly yet.) */
8197 tem = fold_truth_not_expr (loc, arg0);
8198 if (!tem)
8199 return NULL_TREE;
8200 return fold_convert_loc (loc, type, tem);
8201
8202 case REALPART_EXPR:
8203 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8204 return fold_convert_loc (loc, type, arg0);
8205 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8206 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8207 TREE_OPERAND (arg0, 1));
8208 if (TREE_CODE (arg0) == COMPLEX_CST)
8209 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8210 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8211 {
8212 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8213 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8214 fold_build1_loc (loc, REALPART_EXPR, itype,
8215 TREE_OPERAND (arg0, 0)),
8216 fold_build1_loc (loc, REALPART_EXPR, itype,
8217 TREE_OPERAND (arg0, 1)));
8218 return fold_convert_loc (loc, type, tem);
8219 }
8220 if (TREE_CODE (arg0) == CONJ_EXPR)
8221 {
8222 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8223 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8224 TREE_OPERAND (arg0, 0));
8225 return fold_convert_loc (loc, type, tem);
8226 }
8227 if (TREE_CODE (arg0) == CALL_EXPR)
8228 {
8229 tree fn = get_callee_fndecl (arg0);
8230 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8231 switch (DECL_FUNCTION_CODE (fn))
8232 {
8233 CASE_FLT_FN (BUILT_IN_CEXPI):
8234 fn = mathfn_built_in (type, BUILT_IN_COS);
8235 if (fn)
8236 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8237 break;
8238
8239 default:
8240 break;
8241 }
8242 }
8243 return NULL_TREE;
8244
8245 case IMAGPART_EXPR:
8246 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8247 return fold_convert_loc (loc, type, integer_zero_node);
8248 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8249 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8250 TREE_OPERAND (arg0, 0));
8251 if (TREE_CODE (arg0) == COMPLEX_CST)
8252 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8253 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8254 {
8255 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8256 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8257 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8258 TREE_OPERAND (arg0, 0)),
8259 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8260 TREE_OPERAND (arg0, 1)));
8261 return fold_convert_loc (loc, type, tem);
8262 }
8263 if (TREE_CODE (arg0) == CONJ_EXPR)
8264 {
8265 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8266 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8267 return fold_convert_loc (loc, type, negate_expr (tem));
8268 }
8269 if (TREE_CODE (arg0) == CALL_EXPR)
8270 {
8271 tree fn = get_callee_fndecl (arg0);
8272 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8273 switch (DECL_FUNCTION_CODE (fn))
8274 {
8275 CASE_FLT_FN (BUILT_IN_CEXPI):
8276 fn = mathfn_built_in (type, BUILT_IN_SIN);
8277 if (fn)
8278 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8279 break;
8280
8281 default:
8282 break;
8283 }
8284 }
8285 return NULL_TREE;
8286
8287 case INDIRECT_REF:
8288 /* Fold *&X to X if X is an lvalue. */
8289 if (TREE_CODE (op0) == ADDR_EXPR)
8290 {
8291 tree op00 = TREE_OPERAND (op0, 0);
8292 if ((TREE_CODE (op00) == VAR_DECL
8293 || TREE_CODE (op00) == PARM_DECL
8294 || TREE_CODE (op00) == RESULT_DECL)
8295 && !TREE_READONLY (op00))
8296 return op00;
8297 }
8298 return NULL_TREE;
8299
8300 default:
8301 return NULL_TREE;
8302 } /* switch (code) */
8303 }
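
/* Illustrative sketch, not called anywhere: two of the BIT_NOT_EXPR
   identities folded above, checked with the two's-complement fact
   ~x == -x - 1. */

static int
bit_not_identity_sketch (void)
{
  int a;
  for (a = -8; a <= 8; a++)
    if (~(-a) != a - 1      /* ~(-A) -> A - 1 */
        || ~(a - 1) != -a)  /* ~(A - 1) -> -A */
      return 0;
  return 1;
}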
8304
8305
8306 /* If the operation was a conversion, do _not_ mark a resulting constant
8307 with TREE_OVERFLOW if the original constant was not. These conversions
8308 have implementation-defined behavior and retaining the TREE_OVERFLOW
8309 flag here would confuse later passes such as VRP. */
8310 tree
8311 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8312 tree type, tree op0)
8313 {
8314 tree res = fold_unary_loc (loc, code, type, op0);
8315 if (res
8316 && TREE_CODE (res) == INTEGER_CST
8317 && TREE_CODE (op0) == INTEGER_CST
8318 && CONVERT_EXPR_CODE_P (code))
8319 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8320
8321 return res;
8322 }
8323
8324 /* Fold a binary expression of code CODE and type TYPE with operands
8325 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8326 Return the folded expression if folding is successful. Otherwise,
8327 return NULL_TREE. */
8328
8329 static tree
8330 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8331 {
8332 enum tree_code compl_code;
8333
8334 if (code == MIN_EXPR)
8335 compl_code = MAX_EXPR;
8336 else if (code == MAX_EXPR)
8337 compl_code = MIN_EXPR;
8338 else
8339 gcc_unreachable ();
8340
8341 /* MIN (MAX (a, b), b) == b. */
8342 if (TREE_CODE (op0) == compl_code
8343 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8344 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8345
8346 /* MIN (MAX (b, a), b) == b. */
8347 if (TREE_CODE (op0) == compl_code
8348 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8349 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8350 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8351
8352 /* MIN (a, MAX (a, b)) == a. */
8353 if (TREE_CODE (op1) == compl_code
8354 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8355 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8356 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8357
8358 /* MIN (a, MAX (b, a)) == a. */
8359 if (TREE_CODE (op1) == compl_code
8360 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8361 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8362 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8363
8364 return NULL_TREE;
8365 }
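
/* Illustrative sketch, not called anywhere: brute-force check of the
   MIN/MAX absorption identities above, with C conditionals standing in
   for MIN_EXPR and MAX_EXPR. */

static int
minmax_identity_sketch (void)
{
  int a, b, mx;
  for (a = -4; a <= 4; a++)
    for (b = -4; b <= 4; b++)
      {
        mx = a > b ? a : b;              /* MAX (a, b) */
        if ((mx < b ? mx : b) != b       /* MIN (MAX (a, b), b) == b */
            || (a < mx ? a : mx) != a)   /* MIN (a, MAX (a, b)) == a */
          return 0;
      }
  return 1;
}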
8366
8367 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8368 by changing CODE to reduce the magnitude of constants involved in
8369 ARG0 of the comparison.
8370 Returns a canonicalized comparison tree if a simplification was
8371 possible, otherwise returns NULL_TREE.
8372 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8373 valid if signed overflow is undefined. */
8374
8375 static tree
8376 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8377 tree arg0, tree arg1,
8378 bool *strict_overflow_p)
8379 {
8380 enum tree_code code0 = TREE_CODE (arg0);
8381 tree t, cst0 = NULL_TREE;
8382 int sgn0;
8383 bool swap = false;
8384
8385 /* Match A +- CST code arg1 and CST code arg1. We can change the
8386 first form only if overflow is undefined. */
8387 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8388 /* In principle pointers also have undefined overflow behavior,
8389 but that causes problems elsewhere. */
8390 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8391 && (code0 == MINUS_EXPR
8392 || code0 == PLUS_EXPR)
8393 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8394 || code0 == INTEGER_CST))
8395 return NULL_TREE;
8396
8397 /* Identify the constant in arg0 and its sign. */
8398 if (code0 == INTEGER_CST)
8399 cst0 = arg0;
8400 else
8401 cst0 = TREE_OPERAND (arg0, 1);
8402 sgn0 = tree_int_cst_sgn (cst0);
8403
8404 /* Overflowed constants and zero will cause problems. */
8405 if (integer_zerop (cst0)
8406 || TREE_OVERFLOW (cst0))
8407 return NULL_TREE;
8408
8409 /* See if we can reduce the magnitude of the constant in
8410 arg0 by changing the comparison code. */
8411 if (code0 == INTEGER_CST)
8412 {
8413 /* CST <= arg1 -> CST-1 < arg1. */
8414 if (code == LE_EXPR && sgn0 == 1)
8415 code = LT_EXPR;
8416 /* -CST < arg1 -> -(CST-1) <= arg1. */
8417 else if (code == LT_EXPR && sgn0 == -1)
8418 code = LE_EXPR;
8419 /* CST > arg1 -> CST-1 >= arg1. */
8420 else if (code == GT_EXPR && sgn0 == 1)
8421 code = GE_EXPR;
8422 /* -CST >= arg1 -> -(CST-1) > arg1. */
8423 else if (code == GE_EXPR && sgn0 == -1)
8424 code = GT_EXPR;
8425 else
8426 return NULL_TREE;
8427 /* arg1 code' CST' might be more canonical. */
8428 swap = true;
8429 }
8430 else
8431 {
8432 /* A - CST < arg1 -> A - (CST-1) <= arg1. */
8433 if (code == LT_EXPR
8434 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8435 code = LE_EXPR;
8436 /* A + CST > arg1 -> A + (CST-1) >= arg1. */
8437 else if (code == GT_EXPR
8438 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8439 code = GE_EXPR;
8440 /* A + CST <= arg1 -> A + (CST-1) < arg1. */
8441 else if (code == LE_EXPR
8442 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8443 code = LT_EXPR;
8444 /* A - CST >= arg1 -> A - (CST-1) > arg1. */
8445 else if (code == GE_EXPR
8446 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8447 code = GT_EXPR;
8448 else
8449 return NULL_TREE;
8450 *strict_overflow_p = true;
8451 }
8452
8453 /* Now build the constant reduced in magnitude. But not if that
8454 would produce one outside of its type's range. */
8455 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8456 && ((sgn0 == 1
8457 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8458 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8459 || (sgn0 == -1
8460 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8461 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8462 /* We cannot swap the comparison here as that would cause us to
8463 endlessly recurse. */
8464 return NULL_TREE;
8465
8466 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8467 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8468 if (code0 != INTEGER_CST)
8469 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8470
8471 /* If swapping might yield a more canonical form, do so. */
8472 if (swap)
8473 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8474 else
8475 return fold_build2_loc (loc, code, type, t, arg1);
8476 }
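
/* Illustrative sketch, not called anywhere: the four constant-magnitude
   reductions above, checked exhaustively on a small signed domain where
   no arithmetic overflows. */

static int
reduce_magnitude_sketch (void)
{
  int a, c;
  for (a = -8; a <= 8; a++)
    for (c = 1; c <= 4; c++)
      if ((c <= a) != (c - 1 < a)         /* CST <= arg1 -> CST-1 < arg1 */
          || (-c < a) != (-(c - 1) <= a)  /* -CST < arg1 -> -(CST-1) <= arg1 */
          || (c > a) != (c - 1 >= a)      /* CST > arg1 -> CST-1 >= arg1 */
          || (-c >= a) != (-(c - 1) > a)) /* -CST >= arg1 -> -(CST-1) > arg1 */
        return 0;
  return 1;
}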
8477
8478 /* Further canonicalize the comparison ARG0 CODE ARG1 with type TYPE,
8479 assuming signed overflow is undefined. Try to decrease the magnitude of constants involved
8480 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8481 and put sole constants at the second argument position.
8482 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8483
8484 static tree
8485 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8486 tree arg0, tree arg1)
8487 {
8488 tree t;
8489 bool strict_overflow_p;
8490 const char * const warnmsg = G_("assuming signed overflow does not occur "
8491 "when reducing constant in comparison");
8492
8493 /* Try canonicalization by simplifying arg0. */
8494 strict_overflow_p = false;
8495 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8496 &strict_overflow_p);
8497 if (t)
8498 {
8499 if (strict_overflow_p)
8500 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8501 return t;
8502 }
8503
8504 /* Try canonicalization by simplifying arg1 using the swapped
8505 comparison. */
8506 code = swap_tree_comparison (code);
8507 strict_overflow_p = false;
8508 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8509 &strict_overflow_p);
8510 if (t && strict_overflow_p)
8511 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8512 return t;
8513 }
8514
8515 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8516 space. This is used to avoid issuing overflow warnings for
8517 expressions like &p->x which cannot wrap. */
8518
8519 static bool
8520 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8521 {
8522 unsigned HOST_WIDE_INT offset_low, total_low;
8523 HOST_WIDE_INT size, offset_high, total_high;
8524
8525 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8526 return true;
8527
8528 if (bitpos < 0)
8529 return true;
8530
8531 if (offset == NULL_TREE)
8532 {
8533 offset_low = 0;
8534 offset_high = 0;
8535 }
8536 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8537 return true;
8538 else
8539 {
8540 offset_low = TREE_INT_CST_LOW (offset);
8541 offset_high = TREE_INT_CST_HIGH (offset);
8542 }
8543
8544 if (add_double_with_sign (offset_low, offset_high,
8545 bitpos / BITS_PER_UNIT, 0,
8546 &total_low, &total_high,
8547 true))
8548 return true;
8549
8550 if (total_high != 0)
8551 return true;
8552
8553 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8554 if (size <= 0)
8555 return true;
8556
8557 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8558 array. */
8559 if (TREE_CODE (base) == ADDR_EXPR)
8560 {
8561 HOST_WIDE_INT base_size;
8562
8563 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8564 if (base_size > 0 && size < base_size)
8565 size = base_size;
8566 }
8567
8568 return total_low > (unsigned HOST_WIDE_INT) size;
8569 }
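
/* Illustrative sketch, not called anywhere: the single-word analogue of
   the wrap test above. The double-word arithmetic in pointer_may_wrap_p
   exists to make the same two checks reliable for offsets wider than
   one host word. */

static int
offset_may_wrap_sketch (unsigned long offset, unsigned long pos,
                        unsigned long size)
{
  unsigned long total = offset + pos;
  return total < offset  /* the addition itself wrapped */
         || total > size;  /* or the sum runs past the object */
}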
8570
8571 /* Subroutine of fold_binary. This routine performs all of the
8572 transformations that are common to the equality/inequality
8573 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8574 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8575 fold_binary itself should call fold_binary, not this routine. Fold a comparison with
8576 tree code CODE and type TYPE with operands OP0 and OP1. Return
8577 the folded comparison or NULL_TREE. */
8578
8579 static tree
8580 fold_comparison (location_t loc, enum tree_code code, tree type,
8581 tree op0, tree op1)
8582 {
8583 tree arg0, arg1, tem;
8584
8585 arg0 = op0;
8586 arg1 = op1;
8587
8588 STRIP_SIGN_NOPS (arg0);
8589 STRIP_SIGN_NOPS (arg1);
8590
8591 tem = fold_relational_const (code, type, arg0, arg1);
8592 if (tem != NULL_TREE)
8593 return tem;
8594
8595 /* If one arg is a real or integer constant, put it last. */
8596 if (tree_swap_operands_p (arg0, arg1, true))
8597 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8598
8599 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8600 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8601 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8602 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8603 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8604 && (TREE_CODE (arg1) == INTEGER_CST
8605 && !TREE_OVERFLOW (arg1)))
8606 {
8607 tree const1 = TREE_OPERAND (arg0, 1);
8608 tree const2 = arg1;
8609 tree variable = TREE_OPERAND (arg0, 0);
8610 tree lhs;
8611 int lhs_add;
8612 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8613
8614 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8615 TREE_TYPE (arg1), const2, const1);
8616
8617 /* If the constant operation overflowed, this can be
8618 simplified as a comparison against INT_MAX/INT_MIN. */
8619 if (TREE_CODE (lhs) == INTEGER_CST
8620 && TREE_OVERFLOW (lhs))
8621 {
8622 int const1_sgn = tree_int_cst_sgn (const1);
8623 enum tree_code code2 = code;
8624
8625 /* Get the sign of the constant on the lhs if the
8626 operation were VARIABLE + CONST1. */
8627 if (TREE_CODE (arg0) == MINUS_EXPR)
8628 const1_sgn = -const1_sgn;
8629
8630 /* The sign of the constant determines if we overflowed
8631 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8632 Canonicalize to the INT_MIN overflow by swapping the comparison
8633 if necessary. */
8634 if (const1_sgn == -1)
8635 code2 = swap_tree_comparison (code);
8636
8637 /* We now can look at the canonicalized case
8638 VARIABLE + 1 CODE2 INT_MIN
8639 and decide on the result. */
8640 if (code2 == LT_EXPR
8641 || code2 == LE_EXPR
8642 || code2 == EQ_EXPR)
8643 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8644 else if (code2 == NE_EXPR
8645 || code2 == GE_EXPR
8646 || code2 == GT_EXPR)
8647 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8648 }
8649
8650 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8651 && (TREE_CODE (lhs) != INTEGER_CST
8652 || !TREE_OVERFLOW (lhs)))
8653 {
8654 fold_overflow_warning (("assuming signed overflow does not occur "
8655 "when changing X +- C1 cmp C2 to "
8656 "X cmp C1 +- C2"),
8657 WARN_STRICT_OVERFLOW_COMPARISON);
8658 return fold_build2_loc (loc, code, type, variable, lhs);
8659 }
8660 }
8661
8662 /* For comparisons of pointers we can decompose them to a compile-time
8663 comparison of the base objects and the offsets into the object.
8664 This requires at least one operand being an ADDR_EXPR or a
8665 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8666 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8667 && (TREE_CODE (arg0) == ADDR_EXPR
8668 || TREE_CODE (arg1) == ADDR_EXPR
8669 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8670 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8671 {
8672 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8673 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8674 enum machine_mode mode;
8675 int volatilep, unsignedp;
8676 bool indirect_base0 = false, indirect_base1 = false;
8677
8678 /* Get base and offset for the access. Strip ADDR_EXPR for
8679 get_inner_reference, but put it back by stripping INDIRECT_REF
8680 off the base object if possible. indirect_baseN will be true
8681 if baseN is not an address but refers to the object itself. */
8682 base0 = arg0;
8683 if (TREE_CODE (arg0) == ADDR_EXPR)
8684 {
8685 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8686 &bitsize, &bitpos0, &offset0, &mode,
8687 &unsignedp, &volatilep, false);
8688 if (TREE_CODE (base0) == INDIRECT_REF)
8689 base0 = TREE_OPERAND (base0, 0);
8690 else
8691 indirect_base0 = true;
8692 }
8693 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8694 {
8695 base0 = TREE_OPERAND (arg0, 0);
8696 offset0 = TREE_OPERAND (arg0, 1);
8697 }
8698
8699 base1 = arg1;
8700 if (TREE_CODE (arg1) == ADDR_EXPR)
8701 {
8702 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8703 &bitsize, &bitpos1, &offset1, &mode,
8704 &unsignedp, &volatilep, false);
8705 if (TREE_CODE (base1) == INDIRECT_REF)
8706 base1 = TREE_OPERAND (base1, 0);
8707 else
8708 indirect_base1 = true;
8709 }
8710 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8711 {
8712 base1 = TREE_OPERAND (arg1, 0);
8713 offset1 = TREE_OPERAND (arg1, 1);
8714 }
8715
8716 /* A local variable can never be pointed to by
8717 the default SSA name of an incoming parameter. */
8718 if ((TREE_CODE (arg0) == ADDR_EXPR
8719 && indirect_base0
8720 && TREE_CODE (base0) == VAR_DECL
8721 && auto_var_in_fn_p (base0, current_function_decl)
8722 && !indirect_base1
8723 && TREE_CODE (base1) == SSA_NAME
8724 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8725 && SSA_NAME_IS_DEFAULT_DEF (base1))
8726 || (TREE_CODE (arg1) == ADDR_EXPR
8727 && indirect_base1
8728 && TREE_CODE (base1) == VAR_DECL
8729 && auto_var_in_fn_p (base1, current_function_decl)
8730 && !indirect_base0
8731 && TREE_CODE (base0) == SSA_NAME
8732 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8733 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8734 {
8735 if (code == NE_EXPR)
8736 return constant_boolean_node (1, type);
8737 else if (code == EQ_EXPR)
8738 return constant_boolean_node (0, type);
8739 }
8740 /* If we have equivalent bases we might be able to simplify. */
8741 else if (indirect_base0 == indirect_base1
8742 && operand_equal_p (base0, base1, 0))
8743 {
8744 /* We can fold this expression to a constant if the non-constant
8745 offset parts are equal. */
8746 if ((offset0 == offset1
8747 || (offset0 && offset1
8748 && operand_equal_p (offset0, offset1, 0)))
8749 && (code == EQ_EXPR
8750 || code == NE_EXPR
8751 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8752
8753 {
8754 if (code != EQ_EXPR
8755 && code != NE_EXPR
8756 && bitpos0 != bitpos1
8757 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8758 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8759 fold_overflow_warning (("assuming pointer wraparound does not "
8760 "occur when comparing P +- C1 with "
8761 "P +- C2"),
8762 WARN_STRICT_OVERFLOW_CONDITIONAL);
8763
8764 switch (code)
8765 {
8766 case EQ_EXPR:
8767 return constant_boolean_node (bitpos0 == bitpos1, type);
8768 case NE_EXPR:
8769 return constant_boolean_node (bitpos0 != bitpos1, type);
8770 case LT_EXPR:
8771 return constant_boolean_node (bitpos0 < bitpos1, type);
8772 case LE_EXPR:
8773 return constant_boolean_node (bitpos0 <= bitpos1, type);
8774 case GE_EXPR:
8775 return constant_boolean_node (bitpos0 >= bitpos1, type);
8776 case GT_EXPR:
8777 return constant_boolean_node (bitpos0 > bitpos1, type);
8778 default:;
8779 }
8780 }
8781 /* We can simplify the comparison to a comparison of the variable
8782 offset parts if the constant offset parts are equal.
8783 Be careful to use signed size type here because otherwise we
8784 mess with array offsets in the wrong way. This is possible
8785 because pointer arithmetic is restricted to remain within an
8786 object and overflow on pointer differences is undefined as of
8787 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
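/* For instance, comparing "p p+ i" with "p p+ j" at equal constant
offsets reduces to comparing i and j converted to ssizetype. */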
8788 else if (bitpos0 == bitpos1
8789 && ((code == EQ_EXPR || code == NE_EXPR)
8790 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8791 {
8792 /* By converting to the signed size type we cover middle-end pointer
8793 arithmetic, which operates on unsigned pointer types of the same
8794 width as the size type, and ARRAY_REF offsets, which are properly
8795 sign or zero extended from their type in case it is narrower than
8796 the size type. */
8797 if (offset0 == NULL_TREE)
8798 offset0 = build_int_cst (ssizetype, 0);
8799 else
8800 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8801 if (offset1 == NULL_TREE)
8802 offset1 = build_int_cst (ssizetype, 0);
8803 else
8804 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8805
8806 if (code != EQ_EXPR
8807 && code != NE_EXPR
8808 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8809 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8810 fold_overflow_warning (("assuming pointer wraparound does not "
8811 "occur when comparing P +- C1 with "
8812 "P +- C2"),
8813 WARN_STRICT_OVERFLOW_COMPARISON);
8814
8815 return fold_build2_loc (loc, code, type, offset0, offset1);
8816 }
8817 }
8818 /* For non-equal bases we can simplify if they are addresses
8819 of local binding decls or constants. */
8820 else if (indirect_base0 && indirect_base1
8821 /* We know that !operand_equal_p (base0, base1, 0)
8822 because the if condition was false. But make
8823 sure two decls are not the same. */
8824 && base0 != base1
8825 && TREE_CODE (arg0) == ADDR_EXPR
8826 && TREE_CODE (arg1) == ADDR_EXPR
8827 && (((TREE_CODE (base0) == VAR_DECL
8828 || TREE_CODE (base0) == PARM_DECL)
8829 && (targetm.binds_local_p (base0)
8830 || CONSTANT_CLASS_P (base1)))
8831 || CONSTANT_CLASS_P (base0))
8832 && (((TREE_CODE (base1) == VAR_DECL
8833 || TREE_CODE (base1) == PARM_DECL)
8834 && (targetm.binds_local_p (base1)
8835 || CONSTANT_CLASS_P (base0)))
8836 || CONSTANT_CLASS_P (base1)))
8837 {
8838 if (code == EQ_EXPR)
8839 return omit_two_operands_loc (loc, type, boolean_false_node,
8840 arg0, arg1);
8841 else if (code == NE_EXPR)
8842 return omit_two_operands_loc (loc, type, boolean_true_node,
8843 arg0, arg1);
8844 }
8845 /* For equal offsets we can simplify to a comparison of the
8846 base addresses. */
8847 else if (bitpos0 == bitpos1
8848 && (indirect_base0
8849 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8850 && (indirect_base1
8851 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8852 && ((offset0 == offset1)
8853 || (offset0 && offset1
8854 && operand_equal_p (offset0, offset1, 0))))
8855 {
8856 if (indirect_base0)
8857 base0 = build_fold_addr_expr_loc (loc, base0);
8858 if (indirect_base1)
8859 base1 = build_fold_addr_expr_loc (loc, base1);
8860 return fold_build2_loc (loc, code, type, base0, base1);
8861 }
8862 }
8863
8864 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8865 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8866 the resulting offset is smaller in absolute value than the
8867 original one. */
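/* For example, "x + 2 < y + 5" can become "x < y + 3", since the
combined constant 3 is smaller in absolute value than 5. */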
8868 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8869 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8870 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8871 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8872 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8873 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8874 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8875 {
8876 tree const1 = TREE_OPERAND (arg0, 1);
8877 tree const2 = TREE_OPERAND (arg1, 1);
8878 tree variable1 = TREE_OPERAND (arg0, 0);
8879 tree variable2 = TREE_OPERAND (arg1, 0);
8880 tree cst;
8881 const char * const warnmsg = G_("assuming signed overflow does not "
8882 "occur when combining constants around "
8883 "a comparison");
8884
8885 /* Put the constant on the side where it doesn't overflow and is
8886 of lower absolute value than before. */
8887 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8888 ? MINUS_EXPR : PLUS_EXPR,
8889 const2, const1, 0);
8890 if (!TREE_OVERFLOW (cst)
8891 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8892 {
8893 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8894 return fold_build2_loc (loc, code, type,
8895 variable1,
8896 fold_build2_loc (loc,
8897 TREE_CODE (arg1), TREE_TYPE (arg1),
8898 variable2, cst));
8899 }
8900
8901 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8902 ? MINUS_EXPR : PLUS_EXPR,
8903 const1, const2, 0);
8904 if (!TREE_OVERFLOW (cst)
8905 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8906 {
8907 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8908 return fold_build2_loc (loc, code, type,
8909 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8910 variable1, cst),
8911 variable2);
8912 }
8913 }
8914
8915 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8916 signed arithmetic case. That form is created by the compiler
8917 often enough for folding it to be of value. One example is in
8918 computing loop trip counts after Operator Strength Reduction. */
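/* For example, "x * 4 > 0" becomes "x > 0", and "x * -4 > 0" becomes
"x < 0" because the comparison sense is swapped for negative C1. */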
8919 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8920 && TREE_CODE (arg0) == MULT_EXPR
8921 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8922 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8923 && integer_zerop (arg1))
8924 {
8925 tree const1 = TREE_OPERAND (arg0, 1);
8926 tree const2 = arg1; /* zero */
8927 tree variable1 = TREE_OPERAND (arg0, 0);
8928 enum tree_code cmp_code = code;
8929
8930 /* Handle unfolded multiplication by zero. */
8931 if (integer_zerop (const1))
8932 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8933
8934 fold_overflow_warning (("assuming signed overflow does not occur when "
8935 "eliminating multiplication in comparison "
8936 "with zero"),
8937 WARN_STRICT_OVERFLOW_COMPARISON);
8938
8939 /* If const1 is negative we swap the sense of the comparison. */
8940 if (tree_int_cst_sgn (const1) < 0)
8941 cmp_code = swap_tree_comparison (cmp_code);
8942
8943 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8944 }
8945
8946 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8947 if (tem)
8948 return tem;
8949
8950 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8951 {
8952 tree targ0 = strip_float_extensions (arg0);
8953 tree targ1 = strip_float_extensions (arg1);
8954 tree newtype = TREE_TYPE (targ0);
8955
8956 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8957 newtype = TREE_TYPE (targ1);
8958
8959 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8960 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8961 return fold_build2_loc (loc, code, type,
8962 fold_convert_loc (loc, newtype, targ0),
8963 fold_convert_loc (loc, newtype, targ1));
8964
8965 /* (-a) CMP (-b) -> b CMP a */
8966 if (TREE_CODE (arg0) == NEGATE_EXPR
8967 && TREE_CODE (arg1) == NEGATE_EXPR)
8968 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8969 TREE_OPERAND (arg0, 0));
8970
8971 if (TREE_CODE (arg1) == REAL_CST)
8972 {
8973 REAL_VALUE_TYPE cst;
8974 cst = TREE_REAL_CST (arg1);
8975
8976 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8977 if (TREE_CODE (arg0) == NEGATE_EXPR)
8978 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8979 TREE_OPERAND (arg0, 0),
8980 build_real (TREE_TYPE (arg1),
8981 REAL_VALUE_NEGATE (cst)));
8982
8983 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8984 /* a CMP (-0) -> a CMP 0 */
8985 if (REAL_VALUE_MINUS_ZERO (cst))
8986 return fold_build2_loc (loc, code, type, arg0,
8987 build_real (TREE_TYPE (arg1), dconst0));
8988
8989 /* x != NaN is always true, other ops are always false. */
8990 if (REAL_VALUE_ISNAN (cst)
8991 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8992 {
8993 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8994 return omit_one_operand_loc (loc, type, tem, arg0);
8995 }
8996
8997 /* Fold comparisons against infinity. */
8998 if (REAL_VALUE_ISINF (cst)
8999 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9000 {
9001 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9002 if (tem != NULL_TREE)
9003 return tem;
9004 }
9005 }
9006
9007 /* If this is a comparison of a real constant with a PLUS_EXPR
9008 or a MINUS_EXPR of a real constant, we can convert it into a
9009 comparison with a revised real constant as long as no overflow
9010 occurs when unsafe_math_optimizations are enabled. */
9011 if (flag_unsafe_math_optimizations
9012 && TREE_CODE (arg1) == REAL_CST
9013 && (TREE_CODE (arg0) == PLUS_EXPR
9014 || TREE_CODE (arg0) == MINUS_EXPR)
9015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9016 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9017 ? MINUS_EXPR : PLUS_EXPR,
9018 arg1, TREE_OPERAND (arg0, 1), 0))
9019 && !TREE_OVERFLOW (tem))
9020 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9021
9022 /* Likewise, we can simplify a comparison of a real constant with
9023 a MINUS_EXPR whose first operand is also a real constant, i.e.
9024 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9025 floating-point types only if -fassociative-math is set. */
9026 if (flag_associative_math
9027 && TREE_CODE (arg1) == REAL_CST
9028 && TREE_CODE (arg0) == MINUS_EXPR
9029 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9030 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9031 arg1, 0))
9032 && !TREE_OVERFLOW (tem))
9033 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9034 TREE_OPERAND (arg0, 1), tem);
9035
9036 /* Fold comparisons against built-in math functions. */
9037 if (TREE_CODE (arg1) == REAL_CST
9038 && flag_unsafe_math_optimizations
9039 && ! flag_errno_math)
9040 {
9041 enum built_in_function fcode = builtin_mathfn_code (arg0);
9042
9043 if (fcode != END_BUILTINS)
9044 {
9045 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9046 if (tem != NULL_TREE)
9047 return tem;
9048 }
9049 }
9050 }
9051
9052 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9053 && CONVERT_EXPR_P (arg0))
9054 {
9055 /* If we are widening one operand of an integer comparison,
9056 see if the other operand is similarly being widened. Perhaps we
9057 can do the comparison in the narrower type. */
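/* For instance, "(long) c < (long) d" with c and d of a narrower
integer type can often be folded back to "c < d". */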
9058 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9059 if (tem)
9060 return tem;
9061
9062 /* Or if we are changing signedness. */
9063 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9064 if (tem)
9065 return tem;
9066 }
9067
9068 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9069 constant, we can simplify it. */
9070 if (TREE_CODE (arg1) == INTEGER_CST
9071 && (TREE_CODE (arg0) == MIN_EXPR
9072 || TREE_CODE (arg0) == MAX_EXPR)
9073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9074 {
9075 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9076 if (tem)
9077 return tem;
9078 }
9079
9080 /* Simplify comparison of something with itself. (For IEEE
9081 floating-point, we can only do some of these simplifications.) */
9082 if (operand_equal_p (arg0, arg1, 0))
9083 {
9084 switch (code)
9085 {
9086 case EQ_EXPR:
9087 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9088 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9089 return constant_boolean_node (1, type);
9090 break;
9091
9092 case GE_EXPR:
9093 case LE_EXPR:
9094 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9095 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9096 return constant_boolean_node (1, type);
9097 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9098
9099 case NE_EXPR:
9100 /* For NE, we can only do this simplification if the operands are
9101 integral or we don't honor IEEE floating point NaNs. */
9102 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9103 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9104 break;
9105 /* ... fall through ... */
9106 case GT_EXPR:
9107 case LT_EXPR:
9108 return constant_boolean_node (0, type);
9109 default:
9110 gcc_unreachable ();
9111 }
9112 }
9113
9114 /* If we are comparing an expression that just has comparisons
9115 of two integer values, arithmetic expressions of those comparisons,
9116 and constants, we can simplify it. There are only three cases
9117 to check: the two values can either be equal, the first can be
9118 greater, or the second can be greater. Fold the expression for
9119 those three values. Since each value must be 0 or 1, we have
9120 eight possibilities, each of which corresponds to the constant 0
9121 or 1 or one of the six possible comparisons.
9122
9123 This handles common cases like (a > b) == 0 but also handles
9124 expressions like ((x > y) - (y > x)) > 0, which supposedly
9125 occur in macroized code. */
9126
9127 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9128 {
9129 tree cval1 = 0, cval2 = 0;
9130 int save_p = 0;
9131
9132 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9133 /* Don't handle degenerate cases here; they should already
9134 have been handled anyway. */
9135 && cval1 != 0 && cval2 != 0
9136 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9137 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9138 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9139 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9140 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9141 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9142 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9143 {
9144 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9145 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9146
9147 /* We can't just pass T to eval_subst in case cval1 or cval2
9148 was the same as ARG1. */
9149
9150 tree high_result
9151 = fold_build2_loc (loc, code, type,
9152 eval_subst (loc, arg0, cval1, maxval,
9153 cval2, minval),
9154 arg1);
9155 tree equal_result
9156 = fold_build2_loc (loc, code, type,
9157 eval_subst (loc, arg0, cval1, maxval,
9158 cval2, maxval),
9159 arg1);
9160 tree low_result
9161 = fold_build2_loc (loc, code, type,
9162 eval_subst (loc, arg0, cval1, minval,
9163 cval2, maxval),
9164 arg1);
9165
9166 /* All three of these results should be 0 or 1. Confirm they are.
9167 Then use those values to select the proper code to use. */
9168
9169 if (TREE_CODE (high_result) == INTEGER_CST
9170 && TREE_CODE (equal_result) == INTEGER_CST
9171 && TREE_CODE (low_result) == INTEGER_CST)
9172 {
9173 /* Make a 3-bit mask with the high-order bit being the
9174 value for `>', the next for `=', and the low for `<'. */
9175 switch ((integer_onep (high_result) * 4)
9176 + (integer_onep (equal_result) * 2)
9177 + integer_onep (low_result))
9178 {
9179 case 0:
9180 /* Always false. */
9181 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9182 case 1:
9183 code = LT_EXPR;
9184 break;
9185 case 2:
9186 code = EQ_EXPR;
9187 break;
9188 case 3:
9189 code = LE_EXPR;
9190 break;
9191 case 4:
9192 code = GT_EXPR;
9193 break;
9194 case 5:
9195 code = NE_EXPR;
9196 break;
9197 case 6:
9198 code = GE_EXPR;
9199 break;
9200 case 7:
9201 /* Always true. */
9202 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9203 }
9204
9205 if (save_p)
9206 {
9207 tem = save_expr (build2 (code, type, cval1, cval2));
9208 SET_EXPR_LOCATION (tem, loc);
9209 return tem;
9210 }
9211 return fold_build2_loc (loc, code, type, cval1, cval2);
9212 }
9213 }
9214 }
9215
9216 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9217 into a single range test. */
9218 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9219 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9220 && TREE_CODE (arg1) == INTEGER_CST
9221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9222 && !integer_zerop (TREE_OPERAND (arg0, 1))
9223 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9224 && !TREE_OVERFLOW (arg1))
9225 {
9226 tem = fold_div_compare (loc, code, type, arg0, arg1);
9227 if (tem != NULL_TREE)
9228 return tem;
9229 }
9230
9231 /* Fold ~X op ~Y as Y op X. */
9232 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9233 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9234 {
9235 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9236 return fold_build2_loc (loc, code, type,
9237 fold_convert_loc (loc, cmp_type,
9238 TREE_OPERAND (arg1, 0)),
9239 TREE_OPERAND (arg0, 0));
9240 }
9241
9242 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9243 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9244 && TREE_CODE (arg1) == INTEGER_CST)
9245 {
9246 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9247 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9248 TREE_OPERAND (arg0, 0),
9249 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9250 fold_convert_loc (loc, cmp_type, arg1)));
9251 }
9252
9253 return NULL_TREE;
9254 }
9255
9256
9257 /* Subroutine of fold_binary. Optimize complex multiplications of the
9258 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9259 argument EXPR represents the expression "z" of type TYPE. */
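/* For z == a + b*i the product z * conj(z) equals a*a + b*b, so the
result is built as COMPLEX_EXPR (a*a + b*b, 0). */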
9260
9261 static tree
9262 fold_mult_zconjz (location_t loc, tree type, tree expr)
9263 {
9264 tree itype = TREE_TYPE (type);
9265 tree rpart, ipart, tem;
9266
9267 if (TREE_CODE (expr) == COMPLEX_EXPR)
9268 {
9269 rpart = TREE_OPERAND (expr, 0);
9270 ipart = TREE_OPERAND (expr, 1);
9271 }
9272 else if (TREE_CODE (expr) == COMPLEX_CST)
9273 {
9274 rpart = TREE_REALPART (expr);
9275 ipart = TREE_IMAGPART (expr);
9276 }
9277 else
9278 {
9279 expr = save_expr (expr);
9280 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9281 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9282 }
9283
9284 rpart = save_expr (rpart);
9285 ipart = save_expr (ipart);
9286 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9287 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9288 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9289 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9290 fold_convert_loc (loc, itype, integer_zero_node));
9291 }
9292
9293
9294 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9295 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9296 guarantees that P and N have the same least significant log2(M) bits.
9297 N is not otherwise constrained. In particular, N is not normalized to
9298 0 <= N < M as is common. In general, the precise value of P is unknown.
9299 M is chosen as large as possible such that constant N can be determined.
9300
9301 Returns M and sets *RESIDUE to N.
9302
9303 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9304 account. This is not always possible due to PR 35705.
9305 */
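/* For instance, if EXPR is "&buf p+ 5" and buf is known to be 8-byte
aligned, the result is M == 8 with *RESIDUE == 5, i.e. every value
P of EXPR satisfies P == 5 (mod 8). */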
9306
9307 static unsigned HOST_WIDE_INT
9308 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9309 bool allow_func_align)
9310 {
9311 enum tree_code code;
9312
9313 *residue = 0;
9314
9315 code = TREE_CODE (expr);
9316 if (code == ADDR_EXPR)
9317 {
9318 expr = TREE_OPERAND (expr, 0);
9319 if (handled_component_p (expr))
9320 {
9321 HOST_WIDE_INT bitsize, bitpos;
9322 tree offset;
9323 enum machine_mode mode;
9324 int unsignedp, volatilep;
9325
9326 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9327 &mode, &unsignedp, &volatilep, false);
9328 *residue = bitpos / BITS_PER_UNIT;
9329 if (offset)
9330 {
9331 if (TREE_CODE (offset) == INTEGER_CST)
9332 *residue += TREE_INT_CST_LOW (offset);
9333 else
9334 /* We don't handle more complicated offset expressions. */
9335 return 1;
9336 }
9337 }
9338
9339 if (DECL_P (expr)
9340 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9341 return DECL_ALIGN_UNIT (expr);
9342 }
9343 else if (code == POINTER_PLUS_EXPR)
9344 {
9345 tree op0, op1;
9346 unsigned HOST_WIDE_INT modulus;
9347 enum tree_code inner_code;
9348
9349 op0 = TREE_OPERAND (expr, 0);
9350 STRIP_NOPS (op0);
9351 modulus = get_pointer_modulus_and_residue (op0, residue,
9352 allow_func_align);
9353
9354 op1 = TREE_OPERAND (expr, 1);
9355 STRIP_NOPS (op1);
9356 inner_code = TREE_CODE (op1);
9357 if (inner_code == INTEGER_CST)
9358 {
9359 *residue += TREE_INT_CST_LOW (op1);
9360 return modulus;
9361 }
9362 else if (inner_code == MULT_EXPR)
9363 {
9364 op1 = TREE_OPERAND (op1, 1);
9365 if (TREE_CODE (op1) == INTEGER_CST)
9366 {
9367 unsigned HOST_WIDE_INT align;
9368
9369 /* Compute the greatest power-of-2 divisor of op1. */
9370 align = TREE_INT_CST_LOW (op1);
9371 align &= -align;
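/* The two's complement trick above keeps only the lowest set bit;
e.g. op1 == 24 (binary 11000) yields align == 8. */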
9372
9373 /* If align is non-zero and less than *modulus, replace
9374 *modulus with align. If align is 0, then either op1 is 0
9375 or the greatest power-of-2 divisor of op1 doesn't fit in an
9376 unsigned HOST_WIDE_INT. In either case, no additional
9377 constraint is imposed. */
9378 if (align)
9379 modulus = MIN (modulus, align);
9380
9381 return modulus;
9382 }
9383 }
9384 }
9385
9386 /* If we get here, we were unable to determine anything useful about the
9387 expression. */
9388 return 1;
9389 }
9390
9391
9392 /* Fold a binary expression of code CODE and type TYPE with operands
9393 OP0 and OP1. LOC is the location of the resulting expression.
9394 Return the folded expression if folding is successful. Otherwise,
9395 return NULL_TREE. */
9396
9397 tree
9398 fold_binary_loc (location_t loc,
9399 enum tree_code code, tree type, tree op0, tree op1)
9400 {
9401 enum tree_code_class kind = TREE_CODE_CLASS (code);
9402 tree arg0, arg1, tem;
9403 tree t1 = NULL_TREE;
9404 bool strict_overflow_p;
9405
9406 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9407 && TREE_CODE_LENGTH (code) == 2
9408 && op0 != NULL_TREE
9409 && op1 != NULL_TREE);
9410
9411 arg0 = op0;
9412 arg1 = op1;
9413
9414 /* Strip any conversions that don't change the mode. This is
9415 safe for every expression, except for a comparison expression
9416 because its signedness is derived from its operands. So, in
9417 the latter case, only strip conversions that don't change the
9418 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9419 preserved.
9420
9421 Note that this is done as an internal manipulation within the
9422 constant folder, in order to find the simplest representation
9423 of the arguments so that their form can be studied. In any
9424 case, the appropriate type conversions should be put back in
9425 the tree that will get out of the constant folder. */
9426
9427 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9428 {
9429 STRIP_SIGN_NOPS (arg0);
9430 STRIP_SIGN_NOPS (arg1);
9431 }
9432 else
9433 {
9434 STRIP_NOPS (arg0);
9435 STRIP_NOPS (arg1);
9436 }
9437
9438 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9439 constant but we can't do arithmetic on them. */
9440 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9441 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9442 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9443 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9444 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9445 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9446 {
9447 if (kind == tcc_binary)
9448 {
9449 /* Make sure type and arg0 have the same saturating flag. */
9450 gcc_assert (TYPE_SATURATING (type)
9451 == TYPE_SATURATING (TREE_TYPE (arg0)));
9452 tem = const_binop (code, arg0, arg1, 0);
9453 }
9454 else if (kind == tcc_comparison)
9455 tem = fold_relational_const (code, type, arg0, arg1);
9456 else
9457 tem = NULL_TREE;
9458
9459 if (tem != NULL_TREE)
9460 {
9461 if (TREE_TYPE (tem) != type)
9462 tem = fold_convert_loc (loc, type, tem);
9463 return tem;
9464 }
9465 }
9466
9467 /* If this is a commutative operation, and ARG0 is a constant, move it
9468 to ARG1 to reduce the number of tests below. */
9469 if (commutative_tree_code (code)
9470 && tree_swap_operands_p (arg0, arg1, true))
9471 return fold_build2_loc (loc, code, type, op1, op0);
9472
9473 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9474
9475 First check for cases where an arithmetic operation is applied to a
9476 compound, conditional, or comparison operation. Push the arithmetic
9477 operation inside the compound or conditional to see if any folding
9478 can then be done. Convert comparison to conditional for this purpose.
9479 This also optimizes non-constant cases that used to be done in
9480 expand_expr.
9481
9482 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9483 one of the operands is a comparison and the other is a comparison, a
9484 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9485 code below would make the expression more complex. Change it to a
9486 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9487 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9488
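/* For example, "(a < b) & (c > d)" becomes the TRUTH_AND_EXPR of the
two comparisons, and "(a < b) == (c > d)" becomes the inversion of
their TRUTH_XOR_EXPR. */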
9489 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9490 || code == EQ_EXPR || code == NE_EXPR)
9491 && ((truth_value_p (TREE_CODE (arg0))
9492 && (truth_value_p (TREE_CODE (arg1))
9493 || (TREE_CODE (arg1) == BIT_AND_EXPR
9494 && integer_onep (TREE_OPERAND (arg1, 1)))))
9495 || (truth_value_p (TREE_CODE (arg1))
9496 && (truth_value_p (TREE_CODE (arg0))
9497 || (TREE_CODE (arg0) == BIT_AND_EXPR
9498 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9499 {
9500 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9501 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9502 : TRUTH_XOR_EXPR,
9503 boolean_type_node,
9504 fold_convert_loc (loc, boolean_type_node, arg0),
9505 fold_convert_loc (loc, boolean_type_node, arg1));
9506
9507 if (code == EQ_EXPR)
9508 tem = invert_truthvalue_loc (loc, tem);
9509
9510 return fold_convert_loc (loc, type, tem);
9511 }
9512
9513 if (TREE_CODE_CLASS (code) == tcc_binary
9514 || TREE_CODE_CLASS (code) == tcc_comparison)
9515 {
9516 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9517 {
9518 tem = fold_build2_loc (loc, code, type,
9519 fold_convert_loc (loc, TREE_TYPE (op0),
9520 TREE_OPERAND (arg0, 1)), op1);
9521 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9522 goto fold_binary_exit;
9523 }
9524 if (TREE_CODE (arg1) == COMPOUND_EXPR
9525 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9526 {
9527 tem = fold_build2_loc (loc, code, type, op0,
9528 fold_convert_loc (loc, TREE_TYPE (op1),
9529 TREE_OPERAND (arg1, 1)));
9530 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9531 goto fold_binary_exit;
9532 }
9533
9534 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9535 {
9536 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9537 arg0, arg1,
9538 /*cond_first_p=*/1);
9539 if (tem != NULL_TREE)
9540 return tem;
9541 }
9542
9543 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9544 {
9545 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9546 arg1, arg0,
9547 /*cond_first_p=*/0);
9548 if (tem != NULL_TREE)
9549 return tem;
9550 }
9551 }
9552
9553 switch (code)
9554 {
9555 case POINTER_PLUS_EXPR:
9556 /* 0 +p index -> (type)index */
9557 if (integer_zerop (arg0))
9558 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9559
9560 /* PTR +p 0 -> PTR */
9561 if (integer_zerop (arg1))
9562 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9563
9564 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9565 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9566 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9567 return fold_convert_loc (loc, type,
9568 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9569 fold_convert_loc (loc, sizetype,
9570 arg1),
9571 fold_convert_loc (loc, sizetype,
9572 arg0)));
9573
9574 /* index +p PTR -> PTR +p index */
9575 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9576 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9577 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9578 fold_convert_loc (loc, type, arg1),
9579 fold_convert_loc (loc, sizetype, arg0));
9580
9581 /* (PTR +p B) +p A -> PTR +p (B + A) */
9582 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9583 {
9584 tree inner;
9585 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9586 tree arg00 = TREE_OPERAND (arg0, 0);
9587 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9588 arg01, fold_convert_loc (loc, sizetype, arg1));
9589 return fold_convert_loc (loc, type,
9590 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9591 TREE_TYPE (arg00),
9592 arg00, inner));
9593 }
9594
9595 /* PTR_CST +p CST -> CST1 */
9596 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9597 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9598 fold_convert_loc (loc, type, arg1));
9599
9600 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9601 of the array. The loop optimizer sometimes produces this type of
9602 expression. */
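/* For instance, with 4-byte array elements, "&a[1] p+ 4 * i" can
become "&a[1 + i]". */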
9603 if (TREE_CODE (arg0) == ADDR_EXPR)
9604 {
9605 tem = try_move_mult_to_index (loc, arg0,
9606 fold_convert_loc (loc, sizetype, arg1));
9607 if (tem)
9608 return fold_convert_loc (loc, type, tem);
9609 }
9610
9611 return NULL_TREE;
9612
9613 case PLUS_EXPR:
9614 /* A + (-B) -> A - B */
9615 if (TREE_CODE (arg1) == NEGATE_EXPR)
9616 return fold_build2_loc (loc, MINUS_EXPR, type,
9617 fold_convert_loc (loc, type, arg0),
9618 fold_convert_loc (loc, type,
9619 TREE_OPERAND (arg1, 0)));
9620 /* (-A) + B -> B - A */
9621 if (TREE_CODE (arg0) == NEGATE_EXPR
9622 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9623 return fold_build2_loc (loc, MINUS_EXPR, type,
9624 fold_convert_loc (loc, type, arg1),
9625 fold_convert_loc (loc, type,
9626 TREE_OPERAND (arg0, 0)));
9627
9628 if (INTEGRAL_TYPE_P (type))
9629 {
9630 /* Convert ~A + 1 to -A. */
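/* In two's complement arithmetic -A equals ~A + 1, so the pair
collapses to a single NEGATE_EXPR. */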
9631 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9632 && integer_onep (arg1))
9633 return fold_build1_loc (loc, NEGATE_EXPR, type,
9634 fold_convert_loc (loc, type,
9635 TREE_OPERAND (arg0, 0)));
9636
9637 /* ~X + X is -1. */
9638 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9639 && !TYPE_OVERFLOW_TRAPS (type))
9640 {
9641 tree tem = TREE_OPERAND (arg0, 0);
9642
9643 STRIP_NOPS (tem);
9644 if (operand_equal_p (tem, arg1, 0))
9645 {
9646 t1 = build_int_cst_type (type, -1);
9647 return omit_one_operand_loc (loc, type, t1, arg1);
9648 }
9649 }
9650
9651 /* X + ~X is -1. */
9652 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9653 && !TYPE_OVERFLOW_TRAPS (type))
9654 {
9655 tree tem = TREE_OPERAND (arg1, 0);
9656
9657 STRIP_NOPS (tem);
9658 if (operand_equal_p (arg0, tem, 0))
9659 {
9660 t1 = build_int_cst_type (type, -1);
9661 return omit_one_operand_loc (loc, type, t1, arg0);
9662 }
9663 }
9664
9665 /* X + (X / CST) * -CST is X % CST. */
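/* For example, "x + (x / 8) * -8" is "x % 8", since for truncating
division x == (x / 8) * 8 + x % 8. */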
9666 if (TREE_CODE (arg1) == MULT_EXPR
9667 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9668 && operand_equal_p (arg0,
9669 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9670 {
9671 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9672 tree cst1 = TREE_OPERAND (arg1, 1);
9673 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9674 cst1, cst0);
9675 if (sum && integer_zerop (sum))
9676 return fold_convert_loc (loc, type,
9677 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9678 TREE_TYPE (arg0), arg0,
9679 cst0));
9680 }
9681 }
9682
9683 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9684 same or one. Make sure type is not saturating.
9685 fold_plusminus_mult_expr will re-associate. */
9686 if ((TREE_CODE (arg0) == MULT_EXPR
9687 || TREE_CODE (arg1) == MULT_EXPR)
9688 && !TYPE_SATURATING (type)
9689 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9690 {
9691 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9692 if (tem)
9693 return tem;
9694 }
9695
9696 if (! FLOAT_TYPE_P (type))
9697 {
9698 if (integer_zerop (arg1))
9699 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9700
9701 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9702 with a constant, and the two constants have no bits in common,
9703 we should treat this as a BIT_IOR_EXPR since this may produce more
9704 simplifications. */
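/* For example, "(a & 0xf0) + (b & 0x0f)" cannot carry between the
two halves, so it is equivalent to "(a & 0xf0) | (b & 0x0f)". */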
9705 if (TREE_CODE (arg0) == BIT_AND_EXPR
9706 && TREE_CODE (arg1) == BIT_AND_EXPR
9707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9708 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9709 && integer_zerop (const_binop (BIT_AND_EXPR,
9710 TREE_OPERAND (arg0, 1),
9711 TREE_OPERAND (arg1, 1), 0)))
9712 {
9713 code = BIT_IOR_EXPR;
9714 goto bit_ior;
9715 }
9716
9717 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9718 (plus (plus (mult) (mult)) (foo)) so that we can
9719 take advantage of the factoring cases below. */
9720 if (((TREE_CODE (arg0) == PLUS_EXPR
9721 || TREE_CODE (arg0) == MINUS_EXPR)
9722 && TREE_CODE (arg1) == MULT_EXPR)
9723 || ((TREE_CODE (arg1) == PLUS_EXPR
9724 || TREE_CODE (arg1) == MINUS_EXPR)
9725 && TREE_CODE (arg0) == MULT_EXPR))
9726 {
9727 tree parg0, parg1, parg, marg;
9728 enum tree_code pcode;
9729
9730 if (TREE_CODE (arg1) == MULT_EXPR)
9731 parg = arg0, marg = arg1;
9732 else
9733 parg = arg1, marg = arg0;
9734 pcode = TREE_CODE (parg);
9735 parg0 = TREE_OPERAND (parg, 0);
9736 parg1 = TREE_OPERAND (parg, 1);
9737 STRIP_NOPS (parg0);
9738 STRIP_NOPS (parg1);
9739
9740 if (TREE_CODE (parg0) == MULT_EXPR
9741 && TREE_CODE (parg1) != MULT_EXPR)
9742 return fold_build2_loc (loc, pcode, type,
9743 fold_build2_loc (loc, PLUS_EXPR, type,
9744 fold_convert_loc (loc, type,
9745 parg0),
9746 fold_convert_loc (loc, type,
9747 marg)),
9748 fold_convert_loc (loc, type, parg1));
9749 if (TREE_CODE (parg0) != MULT_EXPR
9750 && TREE_CODE (parg1) == MULT_EXPR)
9751 return
9752 fold_build2_loc (loc, PLUS_EXPR, type,
9753 fold_convert_loc (loc, type, parg0),
9754 fold_build2_loc (loc, pcode, type,
9755 fold_convert_loc (loc, type, marg),
9756 fold_convert_loc (loc, type,
9757 parg1)));
9758 }
9759 }
9760 else
9761 {
9762 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9763 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9764 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9765
9766 /* Likewise if the operands are reversed. */
9767 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9768 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9769
9770 /* Convert X + -C into X - C. */
9771 if (TREE_CODE (arg1) == REAL_CST
9772 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9773 {
9774 tem = fold_negate_const (arg1, type);
9775 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9776 return fold_build2_loc (loc, MINUS_EXPR, type,
9777 fold_convert_loc (loc, type, arg0),
9778 fold_convert_loc (loc, type, tem));
9779 }
9780
9781 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9782 to __complex__ ( x, y ). This is not the same for SNaNs or
9783 if signed zeros are involved. */
9784 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9785 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9786 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9787 {
9788 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9789 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9790 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9791 bool arg0rz = false, arg0iz = false;
9792 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9793 || (arg0i && (arg0iz = real_zerop (arg0i))))
9794 {
9795 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9796 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9797 if (arg0rz && arg1i && real_zerop (arg1i))
9798 {
9799 tree rp = arg1r ? arg1r
9800 : build1 (REALPART_EXPR, rtype, arg1);
9801 tree ip = arg0i ? arg0i
9802 : build1 (IMAGPART_EXPR, rtype, arg0);
9803 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9804 }
9805 else if (arg0iz && arg1r && real_zerop (arg1r))
9806 {
9807 tree rp = arg0r ? arg0r
9808 : build1 (REALPART_EXPR, rtype, arg0);
9809 tree ip = arg1i ? arg1i
9810 : build1 (IMAGPART_EXPR, rtype, arg1);
9811 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9812 }
9813 }
9814 }
9815
9816 if (flag_unsafe_math_optimizations
9817 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9818 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9819 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9820 return tem;
9821
9822 /* Convert x+x into x*2.0. */
9823 if (operand_equal_p (arg0, arg1, 0)
9824 && SCALAR_FLOAT_TYPE_P (type))
9825 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9826 build_real (type, dconst2));
9827
9828 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9829 We associate floats only if the user has specified
9830 -fassociative-math. */
9831 if (flag_associative_math
9832 && TREE_CODE (arg1) == PLUS_EXPR
9833 && TREE_CODE (arg0) != MULT_EXPR)
9834 {
9835 tree tree10 = TREE_OPERAND (arg1, 0);
9836 tree tree11 = TREE_OPERAND (arg1, 1);
9837 if (TREE_CODE (tree11) == MULT_EXPR
9838 && TREE_CODE (tree10) == MULT_EXPR)
9839 {
9840 tree tree0;
9841 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9842 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9843 }
9844 }
9845 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9846 We associate floats only if the user has specified
9847 -fassociative-math. */
9848 if (flag_associative_math
9849 && TREE_CODE (arg0) == PLUS_EXPR
9850 && TREE_CODE (arg1) != MULT_EXPR)
9851 {
9852 tree tree00 = TREE_OPERAND (arg0, 0);
9853 tree tree01 = TREE_OPERAND (arg0, 1);
9854 if (TREE_CODE (tree01) == MULT_EXPR
9855 && TREE_CODE (tree00) == MULT_EXPR)
9856 {
9857 tree tree0;
9858 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9859 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9860 }
9861 }
9862 }
9863
9864 bit_rotate:
9865 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9866 is a rotate of A by C1 bits. */
9867 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9868 is a rotate of A by B bits. */
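/* For example, with 32-bit unsigned A, "(A << 3) + (A >> 29)" and
"(A << B) + (A >> (32 - B))" are both rewritten as left rotates. */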
9869 {
9870 enum tree_code code0, code1;
9871 tree rtype;
9872 code0 = TREE_CODE (arg0);
9873 code1 = TREE_CODE (arg1);
9874 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9875 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9876 && operand_equal_p (TREE_OPERAND (arg0, 0),
9877 TREE_OPERAND (arg1, 0), 0)
9878 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9879 TYPE_UNSIGNED (rtype))
9880 /* Only create rotates in complete modes. Other cases are not
9881 expanded properly. */
9882 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9883 {
9884 tree tree01, tree11;
9885 enum tree_code code01, code11;
9886
9887 tree01 = TREE_OPERAND (arg0, 1);
9888 tree11 = TREE_OPERAND (arg1, 1);
9889 STRIP_NOPS (tree01);
9890 STRIP_NOPS (tree11);
9891 code01 = TREE_CODE (tree01);
9892 code11 = TREE_CODE (tree11);
9893 if (code01 == INTEGER_CST
9894 && code11 == INTEGER_CST
9895 && TREE_INT_CST_HIGH (tree01) == 0
9896 && TREE_INT_CST_HIGH (tree11) == 0
9897 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9898 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9899 {
9900 tem = build2 (LROTATE_EXPR,
9901 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9902 TREE_OPERAND (arg0, 0),
9903 code0 == LSHIFT_EXPR
9904 ? tree01 : tree11);
9905 SET_EXPR_LOCATION (tem, loc);
9906 return fold_convert_loc (loc, type, tem);
9907 }
9908 else if (code11 == MINUS_EXPR)
9909 {
9910 tree tree110, tree111;
9911 tree110 = TREE_OPERAND (tree11, 0);
9912 tree111 = TREE_OPERAND (tree11, 1);
9913 STRIP_NOPS (tree110);
9914 STRIP_NOPS (tree111);
9915 if (TREE_CODE (tree110) == INTEGER_CST
9916 && 0 == compare_tree_int (tree110,
9917 TYPE_PRECISION
9918 (TREE_TYPE (TREE_OPERAND
9919 (arg0, 0))))
9920 && operand_equal_p (tree01, tree111, 0))
9921 return
9922 fold_convert_loc (loc, type,
9923 build2 ((code0 == LSHIFT_EXPR
9924 ? LROTATE_EXPR
9925 : RROTATE_EXPR),
9926 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9927 TREE_OPERAND (arg0, 0), tree01));
9928 }
9929 else if (code01 == MINUS_EXPR)
9930 {
9931 tree tree010, tree011;
9932 tree010 = TREE_OPERAND (tree01, 0);
9933 tree011 = TREE_OPERAND (tree01, 1);
9934 STRIP_NOPS (tree010);
9935 STRIP_NOPS (tree011);
9936 if (TREE_CODE (tree010) == INTEGER_CST
9937 && 0 == compare_tree_int (tree010,
9938 TYPE_PRECISION
9939 (TREE_TYPE (TREE_OPERAND
9940 (arg0, 0))))
9941 && operand_equal_p (tree11, tree011, 0))
9942 return fold_convert_loc
9943 (loc, type,
9944 build2 ((code0 != LSHIFT_EXPR
9945 ? LROTATE_EXPR
9946 : RROTATE_EXPR),
9947 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9948 TREE_OPERAND (arg0, 0), tree11));
9949 }
9950 }
9951 }
9952
9953 associate:
9954 /* In most languages, we can't associate operations on floats through
9955 parentheses. Rather than remember where the parentheses were, we
9956 don't associate floats at all, unless the user has specified
9957 -fassociative-math.
9958 And, we need to make sure type is not saturating. */
9959
9960 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9961 && !TYPE_SATURATING (type))
9962 {
9963 tree var0, con0, lit0, minus_lit0;
9964 tree var1, con1, lit1, minus_lit1;
9965 bool ok = true;
9966
9967 /* Split both trees into variables, constants, and literals. Then
9968 associate each group together, the constants with literals,
9969 then the result with variables. This increases the chances of
9970 literals being recombined later and of generating relocatable
9971 expressions for the sum of a constant and literal. */
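/* For example, "(x + 4) + (x + 8)" splits into variables x, x and
literals 4, 8, which recombine as "(x + x) + 12". */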
9972 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9973 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9974 code == MINUS_EXPR);
9975
9976 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9977 if (code == MINUS_EXPR)
9978 code = PLUS_EXPR;
9979
9980 /* With undefined overflow we can only associate constants with one
9981 variable, and constants whose association doesn't overflow. */
9982 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9983 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9984 {
9985 if (var0 && var1)
9986 {
9987 tree tmp0 = var0;
9988 tree tmp1 = var1;
9989
9990 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9991 tmp0 = TREE_OPERAND (tmp0, 0);
9992 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9993 tmp1 = TREE_OPERAND (tmp1, 0);
9994 /* The only case we can still associate with two variables
9995 is if they are the same, modulo negation. */
9996 if (!operand_equal_p (tmp0, tmp1, 0))
9997 ok = false;
9998 }
9999
10000 if (ok && lit0 && lit1)
10001 {
10002 tree tmp0 = fold_convert (type, lit0);
10003 tree tmp1 = fold_convert (type, lit1);
10004
10005 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10006 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10007 ok = false;
10008 }
10009 }
10010
10011 /* Only do something if we found more than two objects. Otherwise,
10012 nothing has changed and we risk infinite recursion. */
10013 if (ok
10014 && (2 < ((var0 != 0) + (var1 != 0)
10015 + (con0 != 0) + (con1 != 0)
10016 + (lit0 != 0) + (lit1 != 0)
10017 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10018 {
10019 var0 = associate_trees (loc, var0, var1, code, type);
10020 con0 = associate_trees (loc, con0, con1, code, type);
10021 lit0 = associate_trees (loc, lit0, lit1, code, type);
10022 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10023
10024 /* Preserve the MINUS_EXPR if the negative part of the literal is
10025 greater than the positive part. Otherwise, the multiplicative
10026 folding code (i.e. extract_muldiv) may be fooled in case
10027 unsigned constants are subtracted, like in the following
10028 example: ((X*2 + 4) - 8U)/2. */
10029 if (minus_lit0 && lit0)
10030 {
10031 if (TREE_CODE (lit0) == INTEGER_CST
10032 && TREE_CODE (minus_lit0) == INTEGER_CST
10033 && tree_int_cst_lt (lit0, minus_lit0))
10034 {
10035 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10036 MINUS_EXPR, type);
10037 lit0 = 0;
10038 }
10039 else
10040 {
10041 lit0 = associate_trees (loc, lit0, minus_lit0,
10042 MINUS_EXPR, type);
10043 minus_lit0 = 0;
10044 }
10045 }
10046 if (minus_lit0)
10047 {
10048 if (con0 == 0)
10049 return
10050 fold_convert_loc (loc, type,
10051 associate_trees (loc, var0, minus_lit0,
10052 MINUS_EXPR, type));
10053 else
10054 {
10055 con0 = associate_trees (loc, con0, minus_lit0,
10056 MINUS_EXPR, type);
10057 return
10058 fold_convert_loc (loc, type,
10059 associate_trees (loc, var0, con0,
10060 PLUS_EXPR, type));
10061 }
10062 }
10063
10064 con0 = associate_trees (loc, con0, lit0, code, type);
10065 return
10066 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10067 code, type));
10068 }
10069 }
10070
10071 return NULL_TREE;
10072
10073 case MINUS_EXPR:
10074 /* Pointer simplifications for subtraction, simple reassociations. */
10075 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10076 {
10077 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10078 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10079 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10080 {
10081 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10082 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10083 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10084 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10085 return fold_build2_loc (loc, PLUS_EXPR, type,
10086 fold_build2_loc (loc, MINUS_EXPR, type,
10087 arg00, arg10),
10088 fold_build2_loc (loc, MINUS_EXPR, type,
10089 arg01, arg11));
10090 }
10091 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10092 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10093 {
10094 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10095 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10096 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10097 fold_convert_loc (loc, type, arg1));
10098 if (tmp)
10099 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10100 }
10101 }
10102 /* A - (-B) -> A + B */
10103 if (TREE_CODE (arg1) == NEGATE_EXPR)
10104 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10105 fold_convert_loc (loc, type,
10106 TREE_OPERAND (arg1, 0)));
10107 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10108 if (TREE_CODE (arg0) == NEGATE_EXPR
10109 && (FLOAT_TYPE_P (type)
10110 || INTEGRAL_TYPE_P (type))
10111 && negate_expr_p (arg1)
10112 && reorder_operands_p (arg0, arg1))
10113 return fold_build2_loc (loc, MINUS_EXPR, type,
10114 fold_convert_loc (loc, type,
10115 negate_expr (arg1)),
10116 fold_convert_loc (loc, type,
10117 TREE_OPERAND (arg0, 0)));
10118 /* Convert -A - 1 to ~A. */
10119 if (INTEGRAL_TYPE_P (type)
10120 && TREE_CODE (arg0) == NEGATE_EXPR
10121 && integer_onep (arg1)
10122 && !TYPE_OVERFLOW_TRAPS (type))
10123 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10124 fold_convert_loc (loc, type,
10125 TREE_OPERAND (arg0, 0)));
10126
10127 /* Convert -1 - A to ~A. */
10128 if (INTEGRAL_TYPE_P (type)
10129 && integer_all_onesp (arg0))
10130 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10131
10132
10133 /* X - (X / CST) * CST is X % CST. */
10134 if (INTEGRAL_TYPE_P (type)
10135 && TREE_CODE (arg1) == MULT_EXPR
10136 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10137 && operand_equal_p (arg0,
10138 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10139 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10140 TREE_OPERAND (arg1, 1), 0))
10141 return
10142 fold_convert_loc (loc, type,
10143 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10144 arg0, TREE_OPERAND (arg1, 1)));
10145
10146 if (! FLOAT_TYPE_P (type))
10147 {
10148 if (integer_zerop (arg0))
10149 return negate_expr (fold_convert_loc (loc, type, arg1));
10150 if (integer_zerop (arg1))
10151 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10152
10153 /* Fold A - (A & B) into ~B & A. */
10154 if (!TREE_SIDE_EFFECTS (arg0)
10155 && TREE_CODE (arg1) == BIT_AND_EXPR)
10156 {
10157 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10158 {
10159 tree arg10 = fold_convert_loc (loc, type,
10160 TREE_OPERAND (arg1, 0));
10161 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10162 fold_build1_loc (loc, BIT_NOT_EXPR,
10163 type, arg10),
10164 fold_convert_loc (loc, type, arg0));
10165 }
10166 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10167 {
10168 tree arg11 = fold_convert_loc (loc,
10169 type, TREE_OPERAND (arg1, 1));
10170 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10171 fold_build1_loc (loc, BIT_NOT_EXPR,
10172 type, arg11),
10173 fold_convert_loc (loc, type, arg0));
10174 }
10175 }
10176
10177 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10178 any power of 2 minus 1. */
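/* For example, with B == 3, "(a & ~3) - (a & 3)" becomes
"(a ^ 3) - 3", i.e. the low two bits of a are subtracted rather
than added. */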
10179 if (TREE_CODE (arg0) == BIT_AND_EXPR
10180 && TREE_CODE (arg1) == BIT_AND_EXPR
10181 && operand_equal_p (TREE_OPERAND (arg0, 0),
10182 TREE_OPERAND (arg1, 0), 0))
10183 {
10184 tree mask0 = TREE_OPERAND (arg0, 1);
10185 tree mask1 = TREE_OPERAND (arg1, 1);
10186 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10187
10188 if (operand_equal_p (tem, mask1, 0))
10189 {
10190 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10191 TREE_OPERAND (arg0, 0), mask1);
10192 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10193 }
10194 }
10195 }
10196
10197 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10198 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10199 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10200
10201 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10202 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10203 (-ARG1 + ARG0) reduces to -ARG1. */
10204 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10205 return negate_expr (fold_convert_loc (loc, type, arg1));
10206
10207 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10208 __complex__ ( x, -y ). This is not the same for SNaNs or if
10209 signed zeros are involved. */
10210 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10211 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10212 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10213 {
10214 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10215 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10216 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10217 bool arg0rz = false, arg0iz = false;
10218 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10219 || (arg0i && (arg0iz = real_zerop (arg0i))))
10220 {
10221 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10222 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10223 if (arg0rz && arg1i && real_zerop (arg1i))
10224 {
10225 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10226 arg1r ? arg1r
10227 : build1 (REALPART_EXPR, rtype, arg1));
10228 tree ip = arg0i ? arg0i
10229 : build1 (IMAGPART_EXPR, rtype, arg0);
10230 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10231 }
10232 else if (arg0iz && arg1r && real_zerop (arg1r))
10233 {
10234 tree rp = arg0r ? arg0r
10235 : build1 (REALPART_EXPR, rtype, arg0);
10236 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10237 arg1i ? arg1i
10238 : build1 (IMAGPART_EXPR, rtype, arg1));
10239 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10240 }
10241 }
10242 }
10243
10244 /* Fold &x - &x. This can happen from &x.foo - &x.
10245 This is unsafe for certain floats even in non-IEEE formats.
10246 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10247 Also note that operand_equal_p is always false if an operand
10248 is volatile. */
10249
10250 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10251 && operand_equal_p (arg0, arg1, 0))
10252 return fold_convert_loc (loc, type, integer_zero_node);
10253
10254 /* A - B -> A + (-B) if B is easily negatable. */
10255 if (negate_expr_p (arg1)
10256 && ((FLOAT_TYPE_P (type)
10257 /* Avoid this transformation if B is a positive REAL_CST. */
10258 && (TREE_CODE (arg1) != REAL_CST
10259 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10260 || INTEGRAL_TYPE_P (type)))
10261 return fold_build2_loc (loc, PLUS_EXPR, type,
10262 fold_convert_loc (loc, type, arg0),
10263 fold_convert_loc (loc, type,
10264 negate_expr (arg1)));
10265
10266 /* Try folding difference of addresses. */
10267 {
10268 HOST_WIDE_INT diff;
10269
10270 if ((TREE_CODE (arg0) == ADDR_EXPR
10271 || TREE_CODE (arg1) == ADDR_EXPR)
10272 && ptr_difference_const (arg0, arg1, &diff))
10273 return build_int_cst_type (type, diff);
10274 }
10275
10276 /* Fold &a[i] - &a[j] to i-j. */
10277 if (TREE_CODE (arg0) == ADDR_EXPR
10278 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10279 && TREE_CODE (arg1) == ADDR_EXPR
10280 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10281 {
10282 tree aref0 = TREE_OPERAND (arg0, 0);
10283 tree aref1 = TREE_OPERAND (arg1, 0);
10284 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10285 TREE_OPERAND (aref1, 0), 0))
10286 {
10287 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10288 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10289 tree esz = array_ref_element_size (aref0);
10290 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10291 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10292 fold_convert_loc (loc, type, esz));
10293
10294 }
10295 }
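/* Illustrative sketch (assuming "int a[10]" with a 4-byte int): the
   byte-difference tree for &a[i] - &a[j] folds to (i - j) * 4 here; the
   enclosing division by the element size, which is how the front end
   represents pointer subtraction, then yields the C-level result i - j. */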
10296
10297 if (FLOAT_TYPE_P (type)
10298 && flag_unsafe_math_optimizations
10299 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10300 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10301 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10302 return tem;
10303
10304 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10305 same or one. Make sure type is not saturating.
10306 fold_plusminus_mult_expr will re-associate. */
10307 if ((TREE_CODE (arg0) == MULT_EXPR
10308 || TREE_CODE (arg1) == MULT_EXPR)
10309 && !TYPE_SATURATING (type)
10310 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10311 {
10312 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10313 if (tem)
10314 return tem;
10315 }
10316
10317 goto associate;
10318
10319 case MULT_EXPR:
10320 /* (-A) * (-B) -> A * B */
10321 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10322 return fold_build2_loc (loc, MULT_EXPR, type,
10323 fold_convert_loc (loc, type,
10324 TREE_OPERAND (arg0, 0)),
10325 fold_convert_loc (loc, type,
10326 negate_expr (arg1)));
10327 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10328 return fold_build2_loc (loc, MULT_EXPR, type,
10329 fold_convert_loc (loc, type,
10330 negate_expr (arg0)),
10331 fold_convert_loc (loc, type,
10332 TREE_OPERAND (arg1, 0)));
10333
10334 if (! FLOAT_TYPE_P (type))
10335 {
10336 if (integer_zerop (arg1))
10337 return omit_one_operand_loc (loc, type, arg1, arg0);
10338 if (integer_onep (arg1))
10339 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10340 /* Transform x * -1 into -x. Make sure to do the negation
10341 on the original operand with conversions not stripped
10342 because we can only strip non-sign-changing conversions. */
10343 if (integer_all_onesp (arg1))
10344 return fold_convert_loc (loc, type, negate_expr (op0));
10345 /* Transform x * -C into -x * C if x is easily negatable. */
10346 if (TREE_CODE (arg1) == INTEGER_CST
10347 && tree_int_cst_sgn (arg1) == -1
10348 && negate_expr_p (arg0)
10349 && (tem = negate_expr (arg1)) != arg1
10350 && !TREE_OVERFLOW (tem))
10351 return fold_build2_loc (loc, MULT_EXPR, type,
10352 fold_convert_loc (loc, type,
10353 negate_expr (arg0)),
10354 tem);
10355
10356 /* (a * (1 << b)) is (a << b) */
10357 if (TREE_CODE (arg1) == LSHIFT_EXPR
10358 && integer_onep (TREE_OPERAND (arg1, 0)))
10359 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10360 TREE_OPERAND (arg1, 1));
10361 if (TREE_CODE (arg0) == LSHIFT_EXPR
10362 && integer_onep (TREE_OPERAND (arg0, 0)))
10363 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10364 TREE_OPERAND (arg0, 1));
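/* Illustrative sketch (assuming unsigned n and b in range): "n * (1u << b)"
   folds to "n << b", and symmetrically "(1u << b) * n" also folds
   to "n << b". */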
10365
10366 /* (A + A) * C -> A * 2 * C */
10367 if (TREE_CODE (arg0) == PLUS_EXPR
10368 && TREE_CODE (arg1) == INTEGER_CST
10369 && operand_equal_p (TREE_OPERAND (arg0, 0),
10370 TREE_OPERAND (arg0, 1), 0))
10371 return fold_build2_loc (loc, MULT_EXPR, type,
10372 omit_one_operand_loc (loc, type,
10373 TREE_OPERAND (arg0, 0),
10374 TREE_OPERAND (arg0, 1)),
10375 fold_build2_loc (loc, MULT_EXPR, type,
10376 build_int_cst (type, 2), arg1));
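/* Illustrative sketch: "(x + x) * 5" becomes "x * (2 * 5)", which the
   recursive fold of the constant multiplication reduces to "x * 10". */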
10377
10378 strict_overflow_p = false;
10379 if (TREE_CODE (arg1) == INTEGER_CST
10380 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10381 &strict_overflow_p)))
10382 {
10383 if (strict_overflow_p)
10384 fold_overflow_warning (("assuming signed overflow does not "
10385 "occur when simplifying "
10386 "multiplication"),
10387 WARN_STRICT_OVERFLOW_MISC);
10388 return fold_convert_loc (loc, type, tem);
10389 }
10390
10391 /* Optimize z * conj(z) for integer complex numbers. */
10392 if (TREE_CODE (arg0) == CONJ_EXPR
10393 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10394 return fold_mult_zconjz (loc, type, arg1);
10395 if (TREE_CODE (arg1) == CONJ_EXPR
10396 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10397 return fold_mult_zconjz (loc, type, arg0);
10398 }
10399 else
10400 {
10401 /* Maybe fold x * 0 to 0. The expressions aren't the same
10402 when x is NaN, since x * 0 is also NaN. Nor are they the
10403 same in modes with signed zeros, since multiplying a
10404 negative value by 0 gives -0, not +0. */
10405 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10406 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10407 && real_zerop (arg1))
10408 return omit_one_operand_loc (loc, type, arg1, arg0);
10409 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10410 Likewise for complex arithmetic with signed zeros. */
10411 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10412 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10413 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10414 && real_onep (arg1))
10415 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10416
10417 /* Transform x * -1.0 into -x. */
10418 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10419 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10420 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10421 && real_minus_onep (arg1))
10422 return fold_convert_loc (loc, type, negate_expr (arg0));
10423
10424 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10425 the result for floating point types due to rounding so it is applied
10426 only if -fassociative-math was specified. */
10427 if (flag_associative_math
10428 && TREE_CODE (arg0) == RDIV_EXPR
10429 && TREE_CODE (arg1) == REAL_CST
10430 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10431 {
10432 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10433 arg1, 0);
10434 if (tem)
10435 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10436 TREE_OPERAND (arg0, 1));
10437 }
10438
10439 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10440 if (operand_equal_p (arg0, arg1, 0))
10441 {
10442 tree tem = fold_strip_sign_ops (arg0);
10443 if (tem != NULL_TREE)
10444 {
10445 tem = fold_convert_loc (loc, type, tem);
10446 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10447 }
10448 }
10449
10450 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10451 This is not the same for NaNs or if signed zeros are
10452 involved. */
10453 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10454 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10455 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10456 && TREE_CODE (arg1) == COMPLEX_CST
10457 && real_zerop (TREE_REALPART (arg1)))
10458 {
10459 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10460 if (real_onep (TREE_IMAGPART (arg1)))
10461 return
10462 fold_build2_loc (loc, COMPLEX_EXPR, type,
10463 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10464 rtype, arg0)),
10465 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10466 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10467 return
10468 fold_build2_loc (loc, COMPLEX_EXPR, type,
10469 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10470 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10471 rtype, arg0)));
10472 }
10473
10474 /* Optimize z * conj(z) for floating point complex numbers.
10475 Guarded by flag_unsafe_math_optimizations as non-finite
10476 imaginary components don't produce scalar results. */
10477 if (flag_unsafe_math_optimizations
10478 && TREE_CODE (arg0) == CONJ_EXPR
10479 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10480 return fold_mult_zconjz (loc, type, arg1);
10481 if (flag_unsafe_math_optimizations
10482 && TREE_CODE (arg1) == CONJ_EXPR
10483 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10484 return fold_mult_zconjz (loc, type, arg0);
10485
10486 if (flag_unsafe_math_optimizations)
10487 {
10488 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10489 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10490
10491 /* Optimizations of root(...)*root(...). */
10492 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10493 {
10494 tree rootfn, arg;
10495 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10496 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10497
10498 /* Optimize sqrt(x)*sqrt(x) as x. */
10499 if (BUILTIN_SQRT_P (fcode0)
10500 && operand_equal_p (arg00, arg10, 0)
10501 && ! HONOR_SNANS (TYPE_MODE (type)))
10502 return arg00;
10503
10504 /* Optimize root(x)*root(y) as root(x*y). */
10505 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10506 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10507 return build_call_expr_loc (loc, rootfn, 1, arg);
10508 }
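/* Illustrative sketch (valid only under -funsafe-math-optimizations,
   which guards this block): "sqrt (x) * sqrt (y)" becomes
   "sqrt (x * y)", and "sqrt (x) * sqrt (x)" reduces directly to x
   when SNaNs are not honored. */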
10509
10510 /* Optimize expN(x)*expN(y) as expN(x+y). */
10511 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10512 {
10513 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10514 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10515 CALL_EXPR_ARG (arg0, 0),
10516 CALL_EXPR_ARG (arg1, 0));
10517 return build_call_expr_loc (loc, expfn, 1, arg);
10518 }
10519
10520 /* Optimizations of pow(...)*pow(...). */
10521 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10522 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10523 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10524 {
10525 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10526 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10527 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10528 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10529
10530 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10531 if (operand_equal_p (arg01, arg11, 0))
10532 {
10533 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10534 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10535 arg00, arg10);
10536 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10537 }
10538
10539 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10540 if (operand_equal_p (arg00, arg10, 0))
10541 {
10542 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10543 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10544 arg01, arg11);
10545 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10546 }
10547 }
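/* Illustrative sketches: "pow (x, 2.0) * pow (y, 2.0)" becomes
   "pow (x * y, 2.0)", and "pow (x, 2.0) * pow (x, 3.0)" becomes
   "pow (x, 2.0 + 3.0)", i.e. "pow (x, 5.0)" after constant folding. */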
10548
10549 /* Optimize tan(x)*cos(x) as sin(x). */
10550 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10551 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10552 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10553 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10554 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10555 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10556 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10557 CALL_EXPR_ARG (arg1, 0), 0))
10558 {
10559 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10560
10561 if (sinfn != NULL_TREE)
10562 return build_call_expr_loc (loc, sinfn, 1,
10563 CALL_EXPR_ARG (arg0, 0));
10564 }
10565
10566 /* Optimize x*pow(x,c) as pow(x,c+1). */
10567 if (fcode1 == BUILT_IN_POW
10568 || fcode1 == BUILT_IN_POWF
10569 || fcode1 == BUILT_IN_POWL)
10570 {
10571 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10572 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10573 if (TREE_CODE (arg11) == REAL_CST
10574 && !TREE_OVERFLOW (arg11)
10575 && operand_equal_p (arg0, arg10, 0))
10576 {
10577 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10578 REAL_VALUE_TYPE c;
10579 tree arg;
10580
10581 c = TREE_REAL_CST (arg11);
10582 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10583 arg = build_real (type, c);
10584 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10585 }
10586 }
10587
10588 /* Optimize pow(x,c)*x as pow(x,c+1). */
10589 if (fcode0 == BUILT_IN_POW
10590 || fcode0 == BUILT_IN_POWF
10591 || fcode0 == BUILT_IN_POWL)
10592 {
10593 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10594 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10595 if (TREE_CODE (arg01) == REAL_CST
10596 && !TREE_OVERFLOW (arg01)
10597 && operand_equal_p (arg1, arg00, 0))
10598 {
10599 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10600 REAL_VALUE_TYPE c;
10601 tree arg;
10602
10603 c = TREE_REAL_CST (arg01);
10604 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10605 arg = build_real (type, c);
10606 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10607 }
10608 }
10609
10610 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10611 if (optimize_function_for_speed_p (cfun)
10612 && operand_equal_p (arg0, arg1, 0))
10613 {
10614 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10615
10616 if (powfn)
10617 {
10618 tree arg = build_real (type, dconst2);
10619 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10620 }
10621 }
10622 }
10623 }
10624 goto associate;
10625
10626 case BIT_IOR_EXPR:
10627 bit_ior:
10628 if (integer_all_onesp (arg1))
10629 return omit_one_operand_loc (loc, type, arg1, arg0);
10630 if (integer_zerop (arg1))
10631 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10632 if (operand_equal_p (arg0, arg1, 0))
10633 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10634
10635 /* ~X | X is -1. */
10636 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10637 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10638 {
10639 t1 = fold_convert_loc (loc, type, integer_zero_node);
10640 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10641 return omit_one_operand_loc (loc, type, t1, arg1);
10642 }
10643
10644 /* X | ~X is -1. */
10645 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10646 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10647 {
10648 t1 = fold_convert_loc (loc, type, integer_zero_node);
10649 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10650 return omit_one_operand_loc (loc, type, t1, arg0);
10651 }
10652
10653 /* Canonicalize (X & C1) | C2. */
10654 if (TREE_CODE (arg0) == BIT_AND_EXPR
10655 && TREE_CODE (arg1) == INTEGER_CST
10656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10657 {
10658 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10659 int width = TYPE_PRECISION (type), w;
10660 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10661 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10662 hi2 = TREE_INT_CST_HIGH (arg1);
10663 lo2 = TREE_INT_CST_LOW (arg1);
10664
10665 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10666 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10667 return omit_one_operand_loc (loc, type, arg1,
10668 TREE_OPERAND (arg0, 0));
10669
10670 if (width > HOST_BITS_PER_WIDE_INT)
10671 {
10672 mhi = (unsigned HOST_WIDE_INT) -1
10673 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10674 mlo = -1;
10675 }
10676 else
10677 {
10678 mhi = 0;
10679 mlo = (unsigned HOST_WIDE_INT) -1
10680 >> (HOST_BITS_PER_WIDE_INT - width);
10681 }
10682
10683 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10684 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10685 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10686 TREE_OPERAND (arg0, 0), arg1);
10687
10688 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10689 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10690 mode which allows further optimizations. */
10691 hi1 &= mhi;
10692 lo1 &= mlo;
10693 hi2 &= mhi;
10694 lo2 &= mlo;
10695 hi3 = hi1 & ~hi2;
10696 lo3 = lo1 & ~lo2;
10697 for (w = BITS_PER_UNIT;
10698 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10699 w <<= 1)
10700 {
10701 unsigned HOST_WIDE_INT mask
10702 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10703 if (((lo1 | lo2) & mask) == mask
10704 && (lo1 & ~mask) == 0 && hi1 == 0)
10705 {
10706 hi3 = 0;
10707 lo3 = mask;
10708 break;
10709 }
10710 }
10711 if (hi3 != hi1 || lo3 != lo1)
10712 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10713 fold_build2_loc (loc, BIT_AND_EXPR, type,
10714 TREE_OPERAND (arg0, 0),
10715 build_int_cst_wide (type,
10716 lo3, hi3)),
10717 arg1);
10718 }
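/* Illustrative sketches of the canonicalization above (assuming an
   8-bit unsigned type):
       (x & 0xf0) | 0x0f  becomes  x | 0x0f            (C1 | C2 == 0xff == ~0)
       (x & 0x0f) | 0x03  becomes  (x & 0x0c) | 0x03   (C1 := C1 & ~C2). */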
10719
10720 /* (X & Y) | Y is (X, Y). */
10721 if (TREE_CODE (arg0) == BIT_AND_EXPR
10722 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10723 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10724 /* (X & Y) | X is (Y, X). */
10725 if (TREE_CODE (arg0) == BIT_AND_EXPR
10726 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10727 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10728 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10729 /* X | (X & Y) is (Y, X). */
10730 if (TREE_CODE (arg1) == BIT_AND_EXPR
10731 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10732 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10733 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10734 /* X | (Y & X) is (Y, X). */
10735 if (TREE_CODE (arg1) == BIT_AND_EXPR
10736 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10737 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10738 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10739
10740 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10741 if (t1 != NULL_TREE)
10742 return t1;
10743
10744 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10745
10746 This results in more efficient code for machines without a NAND
10747 instruction. Combine will canonicalize to the first form
10748 which will allow use of NAND instructions provided by the
10749 backend if they exist. */
10750 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10751 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10752 {
10753 return
10754 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10755 build2 (BIT_AND_EXPR, type,
10756 fold_convert_loc (loc, type,
10757 TREE_OPERAND (arg0, 0)),
10758 fold_convert_loc (loc, type,
10759 TREE_OPERAND (arg1, 0))));
10760 }
10761
10762 /* See if this can be simplified into a rotate first. If that
10763 is unsuccessful continue in the association code. */
10764 goto bit_rotate;
10765
10766 case BIT_XOR_EXPR:
10767 if (integer_zerop (arg1))
10768 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10769 if (integer_all_onesp (arg1))
10770 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10771 if (operand_equal_p (arg0, arg1, 0))
10772 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10773
10774 /* ~X ^ X is -1. */
10775 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10776 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10777 {
10778 t1 = fold_convert_loc (loc, type, integer_zero_node);
10779 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10780 return omit_one_operand_loc (loc, type, t1, arg1);
10781 }
10782
10783 /* X ^ ~X is -1. */
10784 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10785 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10786 {
10787 t1 = fold_convert_loc (loc, type, integer_zero_node);
10788 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10789 return omit_one_operand_loc (loc, type, t1, arg0);
10790 }
10791
10792 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10793 with a constant, and the two constants have no bits in common,
10794 we should treat this as a BIT_IOR_EXPR since this may produce more
10795 simplifications. */
10796 if (TREE_CODE (arg0) == BIT_AND_EXPR
10797 && TREE_CODE (arg1) == BIT_AND_EXPR
10798 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10799 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10800 && integer_zerop (const_binop (BIT_AND_EXPR,
10801 TREE_OPERAND (arg0, 1),
10802 TREE_OPERAND (arg1, 1), 0)))
10803 {
10804 code = BIT_IOR_EXPR;
10805 goto bit_ior;
10806 }
10807
10808 /* (X | Y) ^ X -> Y & ~X. */
10809 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10810 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10811 {
10812 tree t2 = TREE_OPERAND (arg0, 1);
10813 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10814 arg1);
10815 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10816 fold_convert_loc (loc, type, t2),
10817 fold_convert_loc (loc, type, t1));
10818 return t1;
10819 }
10820
10821 /* (Y | X) ^ X -> Y & ~X. */
10822 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10823 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10824 {
10825 tree t2 = TREE_OPERAND (arg0, 0);
10826 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10827 arg1);
10828 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10829 fold_convert_loc (loc, type, t2),
10830 fold_convert_loc (loc, type, t1));
10831 return t1;
10832 }
10833
10834 /* X ^ (X | Y) -> Y & ~X. */
10835 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10836 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10837 {
10838 tree t2 = TREE_OPERAND (arg1, 1);
10839 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10840 arg0);
10841 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10842 fold_convert_loc (loc, type, t2),
10843 fold_convert_loc (loc, type, t1));
10844 return t1;
10845 }
10846
10847 /* X ^ (Y | X) -> Y & ~X. */
10848 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10849 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10850 {
10851 tree t2 = TREE_OPERAND (arg1, 0);
10852 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10853 arg0);
10854 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10855 fold_convert_loc (loc, type, t2),
10856 fold_convert_loc (loc, type, t1));
10857 return t1;
10858 }
10859
10860 /* Convert ~X ^ ~Y to X ^ Y. */
10861 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10862 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10863 return fold_build2_loc (loc, code, type,
10864 fold_convert_loc (loc, type,
10865 TREE_OPERAND (arg0, 0)),
10866 fold_convert_loc (loc, type,
10867 TREE_OPERAND (arg1, 0)));
10868
10869 /* Convert ~X ^ C to X ^ ~C. */
10870 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10871 && TREE_CODE (arg1) == INTEGER_CST)
10872 return fold_build2_loc (loc, code, type,
10873 fold_convert_loc (loc, type,
10874 TREE_OPERAND (arg0, 0)),
10875 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10876
10877 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10878 if (TREE_CODE (arg0) == BIT_AND_EXPR
10879 && integer_onep (TREE_OPERAND (arg0, 1))
10880 && integer_onep (arg1))
10881 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10882 build_int_cst (TREE_TYPE (arg0), 0));
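/* Illustrative sketch: "(x & 1) ^ 1" tests the low bit, so it folds to
   "(x & 1) == 0", i.e. an "x is even" test in boolean form. */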
10883
10884 /* Fold (X & Y) ^ Y as ~X & Y. */
10885 if (TREE_CODE (arg0) == BIT_AND_EXPR
10886 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10887 {
10888 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10889 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10890 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10891 fold_convert_loc (loc, type, arg1));
10892 }
10893 /* Fold (X & Y) ^ X as ~Y & X. */
10894 if (TREE_CODE (arg0) == BIT_AND_EXPR
10895 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10896 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10897 {
10898 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10899 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10900 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10901 fold_convert_loc (loc, type, arg1));
10902 }
10903 /* Fold X ^ (X & Y) as X & ~Y. */
10904 if (TREE_CODE (arg1) == BIT_AND_EXPR
10905 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10906 {
10907 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10908 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10909 fold_convert_loc (loc, type, arg0),
10910 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10911 }
10912 /* Fold X ^ (Y & X) as ~Y & X. */
10913 if (TREE_CODE (arg1) == BIT_AND_EXPR
10914 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10915 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10916 {
10917 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10918 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10919 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10920 fold_convert_loc (loc, type, arg0));
10921 }
10922
10923 /* See if this can be simplified into a rotate first. If that
10924 is unsuccessful continue in the association code. */
10925 goto bit_rotate;
10926
10927 case BIT_AND_EXPR:
10928 if (integer_all_onesp (arg1))
10929 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10930 if (integer_zerop (arg1))
10931 return omit_one_operand_loc (loc, type, arg1, arg0);
10932 if (operand_equal_p (arg0, arg1, 0))
10933 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10934
10935 /* ~X & X is always zero. */
10936 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10937 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10938 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10939
10940 /* X & ~X is always zero. */
10941 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10942 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10943 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10944
10945 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10946 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10947 && TREE_CODE (arg1) == INTEGER_CST
10948 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10949 {
10950 tree tmp1 = fold_convert_loc (loc, type, arg1);
10951 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10952 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10953 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10954 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10955 return
10956 fold_convert_loc (loc, type,
10957 fold_build2_loc (loc, BIT_IOR_EXPR,
10958 type, tmp2, tmp3));
10959 }
10960
10961 /* (X | Y) & Y is (X, Y). */
10962 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10963 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10964 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10965 /* (X | Y) & X is (Y, X). */
10966 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10967 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10968 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10969 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10970 /* X & (X | Y) is (Y, X). */
10971 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10972 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10973 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10974 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10975 /* X & (Y | X) is (Y, X). */
10976 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10977 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10978 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10979 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10980
10981 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10982 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10983 && integer_onep (TREE_OPERAND (arg0, 1))
10984 && integer_onep (arg1))
10985 {
10986 tem = TREE_OPERAND (arg0, 0);
10987 return fold_build2_loc (loc, EQ_EXPR, type,
10988 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10989 build_int_cst (TREE_TYPE (tem), 1)),
10990 build_int_cst (TREE_TYPE (tem), 0));
10991 }
10992 /* Fold ~X & 1 as (X & 1) == 0. */
10993 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10994 && integer_onep (arg1))
10995 {
10996 tem = TREE_OPERAND (arg0, 0);
10997 return fold_build2_loc (loc, EQ_EXPR, type,
10998 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10999 build_int_cst (TREE_TYPE (tem), 1)),
11000 build_int_cst (TREE_TYPE (tem), 0));
11001 }
11002
11003 /* Fold (X ^ Y) & Y as ~X & Y. */
11004 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11005 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11006 {
11007 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11008 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11009 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11010 fold_convert_loc (loc, type, arg1));
11011 }
11012 /* Fold (X ^ Y) & X as ~Y & X. */
11013 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11014 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11015 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11016 {
11017 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11018 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11019 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11020 fold_convert_loc (loc, type, arg1));
11021 }
11022 /* Fold X & (X ^ Y) as X & ~Y. */
11023 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11024 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11025 {
11026 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11027 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11028 fold_convert_loc (loc, type, arg0),
11029 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11030 }
11031 /* Fold X & (Y ^ X) as ~Y & X. */
11032 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11033 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11034 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11035 {
11036 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11037 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11038 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11039 fold_convert_loc (loc, type, arg0));
11040 }
11041
11042 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11043 if (t1 != NULL_TREE)
11044 return t1;
11045 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11046 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11047 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11048 {
11049 unsigned int prec
11050 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11051
11052 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11053 && (~TREE_INT_CST_LOW (arg1)
11054 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11055 return
11056 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11057 }
11058
11059 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11060
11061 This results in more efficient code for machines without a NOR
11062 instruction. Combine will canonicalize to the first form
11063 which will allow use of NOR instructions provided by the
11064 backend if they exist. */
11065 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11066 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11067 {
11068 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11069 build2 (BIT_IOR_EXPR, type,
11070 fold_convert_loc (loc, type,
11071 TREE_OPERAND (arg0, 0)),
11072 fold_convert_loc (loc, type,
11073 TREE_OPERAND (arg1, 0))));
11074 }
11075
11076 /* If arg0 is derived from the address of an object or function, we may
11077 be able to fold this expression using the object or function's
11078 alignment. */
11079 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11080 {
11081 unsigned HOST_WIDE_INT modulus, residue;
11082 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11083
11084 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11085 integer_onep (arg1));
11086
11087 /* This works because modulus is a power of 2. If this weren't the
11088 case, we'd have to replace it by its greatest power-of-2
11089 divisor: modulus & -modulus. */
11090 if (low < modulus)
11091 return build_int_cst (type, residue & low);
11092 }
11093
11094 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11095 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11096 if the new mask might be further optimized. */
11097 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11098 || TREE_CODE (arg0) == RSHIFT_EXPR)
11099 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11100 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11101 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11102 < TYPE_PRECISION (TREE_TYPE (arg0))
11103 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11104 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11105 {
11106 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11107 unsigned HOST_WIDE_INT mask
11108 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11109 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11110 tree shift_type = TREE_TYPE (arg0);
11111
11112 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11113 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11114 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11115 && TYPE_PRECISION (TREE_TYPE (arg0))
11116 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11117 {
11118 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11119 tree arg00 = TREE_OPERAND (arg0, 0);
11120 /* See if more bits can be proven as zero because of
11121 zero extension. */
11122 if (TREE_CODE (arg00) == NOP_EXPR
11123 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11124 {
11125 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11126 if (TYPE_PRECISION (inner_type)
11127 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11128 && TYPE_PRECISION (inner_type) < prec)
11129 {
11130 prec = TYPE_PRECISION (inner_type);
11131 /* See if we can shorten the right shift. */
11132 if (shiftc < prec)
11133 shift_type = inner_type;
11134 }
11135 }
11136 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11137 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11138 zerobits <<= prec - shiftc;
11139 /* For an arithmetic shift, if the sign bit could be set, zerobits
11140 can actually contain sign bits, so no transformation is
11141 possible, unless MASK masks them all away. In that
11142 case the shift needs to be converted into a logical shift. */
11143 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11144 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11145 {
11146 if ((mask & zerobits) == 0)
11147 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11148 else
11149 zerobits = 0;
11150 }
11151 }
11152
11153 /* ((X << 16) & 0xff00) is (X, 0). */
11154 if ((mask & zerobits) == mask)
11155 return omit_one_operand_loc (loc, type,
11156 build_int_cst (type, 0), arg0);
11157
11158 newmask = mask | zerobits;
11159 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11160 {
11161 unsigned int prec;
11162
11163 /* Only do the transformation if NEWMASK is some integer
11164 mode's mask. */
11165 for (prec = BITS_PER_UNIT;
11166 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11167 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11168 break;
11169 if (prec < HOST_BITS_PER_WIDE_INT
11170 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11171 {
11172 tree newmaskt;
11173
11174 if (shift_type != TREE_TYPE (arg0))
11175 {
11176 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11177 fold_convert_loc (loc, shift_type,
11178 TREE_OPERAND (arg0, 0)),
11179 TREE_OPERAND (arg0, 1));
11180 tem = fold_convert_loc (loc, type, tem);
11181 }
11182 else
11183 tem = op0;
11184 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11185 if (!tree_int_cst_equal (newmaskt, arg1))
11186 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11187 }
11188 }
11189 }
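/* Illustrative sketch (assuming 32-bit unsigned x): in "(x >> 24) & 0x1ff"
   bits 8 and above of the shifted value are already known to be zero, so
   the mask widens to ~0 and the expression folds to plain "x >> 24". */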
11190
11191 goto associate;
11192
11193 case RDIV_EXPR:
11194 /* Don't touch a floating-point divide by zero unless the mode
11195 of the constant can represent infinity. */
11196 if (TREE_CODE (arg1) == REAL_CST
11197 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11198 && real_zerop (arg1))
11199 return NULL_TREE;
11200
11201 /* Optimize A / A to 1.0 if we don't care about
11202 NaNs or Infinities. Skip the transformation
11203 for non-real operands. */
11204 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11205 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11206 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11207 && operand_equal_p (arg0, arg1, 0))
11208 {
11209 tree r = build_real (TREE_TYPE (arg0), dconst1);
11210
11211 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11212 }
11213
11214 /* The complex version of the above A / A optimization. */
11215 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11216 && operand_equal_p (arg0, arg1, 0))
11217 {
11218 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11219 if (! HONOR_NANS (TYPE_MODE (elem_type))
11220 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11221 {
11222 tree r = build_real (elem_type, dconst1);
11223 /* omit_two_operands will call fold_convert for us. */
11224 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11225 }
11226 }
11227
11228 /* (-A) / (-B) -> A / B */
11229 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11230 return fold_build2_loc (loc, RDIV_EXPR, type,
11231 TREE_OPERAND (arg0, 0),
11232 negate_expr (arg1));
11233 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11234 return fold_build2_loc (loc, RDIV_EXPR, type,
11235 negate_expr (arg0),
11236 TREE_OPERAND (arg1, 0));
11237
11238 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11239 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11240 && real_onep (arg1))
11241 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11242
11243 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11244 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11245 && real_minus_onep (arg1))
11246 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11247 negate_expr (arg0)));
11248
11249 /* If ARG1 is a constant, we can convert this to a multiply by the
11250 reciprocal. This does not have the same rounding properties,
11251 so only do this if -freciprocal-math. We can actually
11252 always safely do it if ARG1 is a power of two, but it's hard to
11253 tell if it is or not in a portable manner. */
11254 if (TREE_CODE (arg1) == REAL_CST)
11255 {
11256 if (flag_reciprocal_math
11257 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11258 arg1, 0)))
11259 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11260 /* Find the reciprocal if optimizing and the result is exact. */
11261 if (optimize)
11262 {
11263 REAL_VALUE_TYPE r;
11264 r = TREE_REAL_CST (arg1);
11265 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11266 {
11267 tem = build_real (type, r);
11268 return fold_build2_loc (loc, MULT_EXPR, type,
11269 fold_convert_loc (loc, type, arg0), tem);
11270 }
11271 }
11272 }
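/* Illustrative sketch: "x / 2.0" becomes "x * 0.5", either under
   -freciprocal-math for an arbitrary constant, or whenever optimizing
   when the reciprocal is exactly representable, as it is for any
   power of two. */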
11273 /* Convert A/B/C to A/(B*C). */
11274 if (flag_reciprocal_math
11275 && TREE_CODE (arg0) == RDIV_EXPR)
11276 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11277 fold_build2_loc (loc, MULT_EXPR, type,
11278 TREE_OPERAND (arg0, 1), arg1));
11279
11280 /* Convert A/(B/C) to (A/B)*C. */
11281 if (flag_reciprocal_math
11282 && TREE_CODE (arg1) == RDIV_EXPR)
11283 return fold_build2_loc (loc, MULT_EXPR, type,
11284 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11285 TREE_OPERAND (arg1, 0)),
11286 TREE_OPERAND (arg1, 1));
11287
11288 /* Convert C1/(X*C2) into (C1/C2)/X. */
11289 if (flag_reciprocal_math
11290 && TREE_CODE (arg1) == MULT_EXPR
11291 && TREE_CODE (arg0) == REAL_CST
11292 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11293 {
11294 tree tem = const_binop (RDIV_EXPR, arg0,
11295 TREE_OPERAND (arg1, 1), 0);
11296 if (tem)
11297 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11298 TREE_OPERAND (arg1, 0));
11299 }
11300
11301 if (flag_unsafe_math_optimizations)
11302 {
11303 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11304 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11305
11306 /* Optimize sin(x)/cos(x) as tan(x). */
11307 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11308 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11309 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11310 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11311 CALL_EXPR_ARG (arg1, 0), 0))
11312 {
11313 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11314
11315 if (tanfn != NULL_TREE)
11316 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11317 }
11318
11319 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11320 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11321 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11322 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11323 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11324 CALL_EXPR_ARG (arg1, 0), 0))
11325 {
11326 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11327
11328 if (tanfn != NULL_TREE)
11329 {
11330 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11331 CALL_EXPR_ARG (arg0, 0));
11332 return fold_build2_loc (loc, RDIV_EXPR, type,
11333 build_real (type, dconst1), tmp);
11334 }
11335 }
11336
11337 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11338 NaNs or Infinities. */
11339 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11340 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11341 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11342 {
11343 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11344 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11345
11346 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11347 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11348 && operand_equal_p (arg00, arg01, 0))
11349 {
11350 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11351
11352 if (cosfn != NULL_TREE)
11353 return build_call_expr_loc (loc, cosfn, 1, arg00);
11354 }
11355 }
11356
11357 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11358 NaNs or Infinities. */
11359 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11360 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11361 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11362 {
11363 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11364 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11365
11366 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11367 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11368 && operand_equal_p (arg00, arg01, 0))
11369 {
11370 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11371
11372 if (cosfn != NULL_TREE)
11373 {
11374 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11375 return fold_build2_loc (loc, RDIV_EXPR, type,
11376 build_real (type, dconst1),
11377 tmp);
11378 }
11379 }
11380 }
11381
11382 /* Optimize pow(x,c)/x as pow(x,c-1). */
11383 if (fcode0 == BUILT_IN_POW
11384 || fcode0 == BUILT_IN_POWF
11385 || fcode0 == BUILT_IN_POWL)
11386 {
11387 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11388 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11389 if (TREE_CODE (arg01) == REAL_CST
11390 && !TREE_OVERFLOW (arg01)
11391 && operand_equal_p (arg1, arg00, 0))
11392 {
11393 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11394 REAL_VALUE_TYPE c;
11395 tree arg;
11396
11397 c = TREE_REAL_CST (arg01);
11398 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11399 arg = build_real (type, c);
11400 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11401 }
11402 }
11403
11404 /* Optimize a/root(b/c) into a*root(c/b). */
11405 if (BUILTIN_ROOT_P (fcode1))
11406 {
11407 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11408
11409 if (TREE_CODE (rootarg) == RDIV_EXPR)
11410 {
11411 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11412 tree b = TREE_OPERAND (rootarg, 0);
11413 tree c = TREE_OPERAND (rootarg, 1);
11414
11415 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11416
11417 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11418 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11419 }
11420 }
11421
11422 /* Optimize x/expN(y) into x*expN(-y). */
11423 if (BUILTIN_EXPONENT_P (fcode1))
11424 {
11425 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11426 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11427 arg1 = build_call_expr_loc (loc,
11428 expfn, 1,
11429 fold_convert_loc (loc, type, arg));
11430 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11431 }
11432
11433 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11434 if (fcode1 == BUILT_IN_POW
11435 || fcode1 == BUILT_IN_POWF
11436 || fcode1 == BUILT_IN_POWL)
11437 {
11438 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11439 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11440 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11441 tree neg11 = fold_convert_loc (loc, type,
11442 negate_expr (arg11));
11443 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11444 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11445 }
11446 }
11447 return NULL_TREE;
11448
11449 case TRUNC_DIV_EXPR:
11450 case FLOOR_DIV_EXPR:
11451 /* Simplify A / (B << N) where A and B are positive and B is
11452 a power of 2, to A >> (N + log2(B)). */
11453 strict_overflow_p = false;
11454 if (TREE_CODE (arg1) == LSHIFT_EXPR
11455 && (TYPE_UNSIGNED (type)
11456 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11457 {
11458 tree sval = TREE_OPERAND (arg1, 0);
11459 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11460 {
11461 tree sh_cnt = TREE_OPERAND (arg1, 1);
11462 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11463
11464 if (strict_overflow_p)
11465 fold_overflow_warning (("assuming signed overflow does not "
11466 "occur when simplifying A / (B << N)"),
11467 WARN_STRICT_OVERFLOW_MISC);
11468
11469 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11470 sh_cnt, build_int_cst (NULL_TREE, pow2));
11471 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11472 fold_convert_loc (loc, type, arg0), sh_cnt);
11473 }
11474 }
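/* Illustrative sketch (assuming unsigned a): "a / (4u << n)" becomes
   "a >> (n + 2)", since 4 is a power of 2 with log2 (4) == 2. */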
11475
11476 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11477 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11478 if (INTEGRAL_TYPE_P (type)
11479 && TYPE_UNSIGNED (type)
11480 && code == FLOOR_DIV_EXPR)
11481 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11482
11483 /* Fall thru */
11484
11485 case ROUND_DIV_EXPR:
11486 case CEIL_DIV_EXPR:
11487 case EXACT_DIV_EXPR:
11488 if (integer_onep (arg1))
11489 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11490 if (integer_zerop (arg1))
11491 return NULL_TREE;
11492 /* X / -1 is -X. */
11493 if (!TYPE_UNSIGNED (type)
11494 && TREE_CODE (arg1) == INTEGER_CST
11495 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11496 && TREE_INT_CST_HIGH (arg1) == -1)
11497 return fold_convert_loc (loc, type, negate_expr (arg0));
11498
11499 /* Convert -A / -B to A / B when the type is signed and overflow is
11500 undefined. */
11501 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11502 && TREE_CODE (arg0) == NEGATE_EXPR
11503 && negate_expr_p (arg1))
11504 {
11505 if (INTEGRAL_TYPE_P (type))
11506 fold_overflow_warning (("assuming signed overflow does not occur "
11507 "when distributing negation across "
11508 "division"),
11509 WARN_STRICT_OVERFLOW_MISC);
11510 return fold_build2_loc (loc, code, type,
11511 fold_convert_loc (loc, type,
11512 TREE_OPERAND (arg0, 0)),
11513 fold_convert_loc (loc, type,
11514 negate_expr (arg1)));
11515 }
11516 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11517 && TREE_CODE (arg1) == NEGATE_EXPR
11518 && negate_expr_p (arg0))
11519 {
11520 if (INTEGRAL_TYPE_P (type))
11521 fold_overflow_warning (("assuming signed overflow does not occur "
11522 "when distributing negation across "
11523 "division"),
11524 WARN_STRICT_OVERFLOW_MISC);
11525 return fold_build2_loc (loc, code, type,
11526 fold_convert_loc (loc, type,
11527 negate_expr (arg0)),
11528 fold_convert_loc (loc, type,
11529 TREE_OPERAND (arg1, 0)));
11530 }
11531
11532 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11533 operation, EXACT_DIV_EXPR.
11534
11535 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11536 At one time others generated faster code; it's not clear if they do
11537 after the last round of changes to the DIV code in expmed.c. */
11538 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11539 && multiple_of_p (type, arg0, arg1))
11540 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11541
11542 strict_overflow_p = false;
11543 if (TREE_CODE (arg1) == INTEGER_CST
11544 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11545 &strict_overflow_p)))
11546 {
11547 if (strict_overflow_p)
11548 fold_overflow_warning (("assuming signed overflow does not occur "
11549 "when simplifying division"),
11550 WARN_STRICT_OVERFLOW_MISC);
11551 return fold_convert_loc (loc, type, tem);
11552 }
11553
11554 return NULL_TREE;
11555
11556 case CEIL_MOD_EXPR:
11557 case FLOOR_MOD_EXPR:
11558 case ROUND_MOD_EXPR:
11559 case TRUNC_MOD_EXPR:
11560 /* X % 1 is always zero, but be sure to preserve any side
11561 effects in X. */
11562 if (integer_onep (arg1))
11563 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11564
11565 /* X % 0, return X % 0 unchanged so that we can get the
11566 proper warnings and errors. */
11567 if (integer_zerop (arg1))
11568 return NULL_TREE;
11569
11570 /* 0 % X is always zero, but be sure to preserve any side
11571 effects in X. Place this after checking for X == 0. */
11572 if (integer_zerop (arg0))
11573 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11574
11575 /* X % -1 is zero. */
11576 if (!TYPE_UNSIGNED (type)
11577 && TREE_CODE (arg1) == INTEGER_CST
11578 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11579 && TREE_INT_CST_HIGH (arg1) == -1)
11580 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11581
11582 /* X % -C is the same as X % C. */
11583 if (code == TRUNC_MOD_EXPR
11584 && !TYPE_UNSIGNED (type)
11585 && TREE_CODE (arg1) == INTEGER_CST
11586 && !TREE_OVERFLOW (arg1)
11587 && TREE_INT_CST_HIGH (arg1) < 0
11588 && !TYPE_OVERFLOW_TRAPS (type)
11589 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11590 && !sign_bit_p (arg1, arg1))
11591 return fold_build2_loc (loc, code, type,
11592 fold_convert_loc (loc, type, arg0),
11593 fold_convert_loc (loc, type,
11594 negate_expr (arg1)));
11595
11596 /* X % -Y is the same as X % Y. */
11597 if (code == TRUNC_MOD_EXPR
11598 && !TYPE_UNSIGNED (type)
11599 && TREE_CODE (arg1) == NEGATE_EXPR
11600 && !TYPE_OVERFLOW_TRAPS (type))
11601 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11602 fold_convert_loc (loc, type,
11603 TREE_OPERAND (arg1, 0)));
11604
11605 strict_overflow_p = false;
11606 if (TREE_CODE (arg1) == INTEGER_CST
11607 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11608 &strict_overflow_p)))
11609 {
11610 if (strict_overflow_p)
11611 fold_overflow_warning (("assuming signed overflow does not occur "
11612 "when simplifying modulus"),
11613 WARN_STRICT_OVERFLOW_MISC);
11614 return fold_convert_loc (loc, type, tem);
11615 }
11616
11617 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11618 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11619 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11620 && (TYPE_UNSIGNED (type)
11621 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11622 {
11623 tree c = arg1;
11624 /* Also optimize A % (C << N) where C is a power of 2,
11625 to A & ((C << N) - 1). */
11626 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11627 c = TREE_OPERAND (arg1, 0);
11628
11629 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11630 {
11631 tree mask
11632 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11633 build_int_cst (TREE_TYPE (arg1), 1));
11634 if (strict_overflow_p)
11635 fold_overflow_warning (("assuming signed overflow does not "
11636 "occur when simplifying "
11637 "X % (power of two)"),
11638 WARN_STRICT_OVERFLOW_MISC);
11639 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11640 fold_convert_loc (loc, type, arg0),
11641 fold_convert_loc (loc, type, mask));
11642 }
11643 }
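/* Illustrative sketches (assuming unsigned x): "x % 16" becomes
   "x & 15", and "x % (2u << n)" becomes "x & ((2u << n) - 1)". */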
11644
11645 return NULL_TREE;
11646
11647 case LROTATE_EXPR:
11648 case RROTATE_EXPR:
11649 if (integer_all_onesp (arg0))
11650 return omit_one_operand_loc (loc, type, arg0, arg1);
11651 goto shift;
11652
11653 case RSHIFT_EXPR:
11654 /* Optimize -1 >> x for arithmetic right shifts. */
11655 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11656 && tree_expr_nonnegative_p (arg1))
11657 return omit_one_operand_loc (loc, type, arg0, arg1);
11658 /* ... fall through ... */
11659
11660 case LSHIFT_EXPR:
11661 shift:
11662 if (integer_zerop (arg1))
11663 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11664 if (integer_zerop (arg0))
11665 return omit_one_operand_loc (loc, type, arg0, arg1);
11666
11667 /* Since a negative shift count is not well-defined,
11668 don't try to compute it in the compiler. */
11669 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11670 return NULL_TREE;
11671
11672 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11673 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11674 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11675 && host_integerp (TREE_OPERAND (arg0, 1), false)
11676 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11677 {
11678 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11679 + TREE_INT_CST_LOW (arg1));
11680
11681 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11682 being well defined. */
11683 if (low >= TYPE_PRECISION (type))
11684 {
11685 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11686 low = low % TYPE_PRECISION (type);
11687 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11688 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11689 TREE_OPERAND (arg0, 0));
11690 else
11691 low = TYPE_PRECISION (type) - 1;
11692 }
11693
11694 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11695 build_int_cst (type, low));
11696 }
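/* Illustrative sketch: "(x << 3) << 5" becomes "x << 8". If the summed
   count reaches the type's precision, a left or unsigned right shift
   folds to 0, while a rotate count is reduced modulo the precision. */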
11697
11698 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11699 into x & ((unsigned)-1 >> c) for unsigned types. */
11700 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11701 || (TYPE_UNSIGNED (type)
11702 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11703 && host_integerp (arg1, false)
11704 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11705 && host_integerp (TREE_OPERAND (arg0, 1), false)
11706 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11707 {
11708 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11709 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11710 tree lshift;
11711 tree arg00;
11712
11713 if (low0 == low1)
11714 {
11715 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11716
11717 lshift = build_int_cst (type, -1);
11718 lshift = int_const_binop (code, lshift, arg1, 0);
11719
11720 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11721 }
11722 }
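/* Illustrative sketch (assuming 32-bit unsigned x): "(x >> 4) << 4"
   becomes "x & 0xfffffff0", i.e. x & (-1 << 4). */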
11723
11724 /* Rewrite an LROTATE_EXPR by a constant into an
11725 RROTATE_EXPR by a new constant. */
11726 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11727 {
11728 tree tem = build_int_cst (TREE_TYPE (arg1),
11729 TYPE_PRECISION (type));
11730 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11731 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11732 }
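/* Illustrative sketch (assuming a 32-bit type): a rotate-left by 8 is
   rewritten as a rotate-right by 32 - 8 == 24. */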
11733
11734 /* If we have a rotate of a bit operation with the rotate count and
11735 the second operand of the bit operation both constant,
11736 permute the two operations. */
11737 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11738 && (TREE_CODE (arg0) == BIT_AND_EXPR
11739 || TREE_CODE (arg0) == BIT_IOR_EXPR
11740 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11742 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11743 fold_build2_loc (loc, code, type,
11744 TREE_OPERAND (arg0, 0), arg1),
11745 fold_build2_loc (loc, code, type,
11746 TREE_OPERAND (arg0, 1), arg1));
11747
11748 /* Two consecutive rotates adding up to the precision of the
11749 type can be ignored. */
11750 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11751 && TREE_CODE (arg0) == RROTATE_EXPR
11752 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11753 && TREE_INT_CST_HIGH (arg1) == 0
11754 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11755 && ((TREE_INT_CST_LOW (arg1)
11756 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11757 == (unsigned int) TYPE_PRECISION (type)))
11758 return TREE_OPERAND (arg0, 0);
11759
11760 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11761 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11762 if the latter can be further optimized. */
11763 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11764 && TREE_CODE (arg0) == BIT_AND_EXPR
11765 && TREE_CODE (arg1) == INTEGER_CST
11766 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11767 {
11768 tree mask = fold_build2_loc (loc, code, type,
11769 fold_convert_loc (loc, type,
11770 TREE_OPERAND (arg0, 1)),
11771 arg1);
11772 tree shift = fold_build2_loc (loc, code, type,
11773 fold_convert_loc (loc, type,
11774 TREE_OPERAND (arg0, 0)),
11775 arg1);
11776 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11777 if (tem)
11778 return tem;
11779 }
11780
11781 return NULL_TREE;
11782
11783 case MIN_EXPR:
11784 if (operand_equal_p (arg0, arg1, 0))
11785 return omit_one_operand_loc (loc, type, arg0, arg1);
11786 if (INTEGRAL_TYPE_P (type)
11787 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11788 return omit_one_operand_loc (loc, type, arg1, arg0);
11789 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11790 if (tem)
11791 return tem;
11792 goto associate;
11793
11794 case MAX_EXPR:
11795 if (operand_equal_p (arg0, arg1, 0))
11796 return omit_one_operand_loc (loc, type, arg0, arg1);
11797 if (INTEGRAL_TYPE_P (type)
11798 && TYPE_MAX_VALUE (type)
11799 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11800 return omit_one_operand_loc (loc, type, arg1, arg0);
11801 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11802 if (tem)
11803 return tem;
11804 goto associate;
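      /* Examples of the MIN/MAX folds above, for 32-bit int: MIN (x, x)
	 and MAX (x, x) become x; MIN (x, INT_MIN) becomes INT_MIN and
	 MAX (x, INT_MAX) becomes INT_MAX, with x still evaluated for its
	 side effects via omit_one_operand.  */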
11805
11806 case TRUTH_ANDIF_EXPR:
11807 /* Note that the operands of this must be ints
11808 and their values must be 0 or 1.
11809 ("true" is a fixed value perhaps depending on the language.) */
11810 /* If first arg is constant zero, return it. */
11811 if (integer_zerop (arg0))
11812 return fold_convert_loc (loc, type, arg0);
11813 case TRUTH_AND_EXPR:
11814 /* If either arg is constant true, drop it. */
11815 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11816 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11817 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11818 /* Preserve sequence points. */
11819 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11820 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11821 /* If second arg is constant zero, result is zero, but first arg
11822 must be evaluated. */
11823 if (integer_zerop (arg1))
11824 return omit_one_operand_loc (loc, type, arg1, arg0);
11825 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11826 case will be handled here. */
11827 if (integer_zerop (arg0))
11828 return omit_one_operand_loc (loc, type, arg0, arg1);
11829
11830 /* !X && X is always false. */
11831 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11832 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11833 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11834 /* X && !X is always false. */
11835 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11836 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11837 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11838
11839 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11840 means A >= Y && A != MAX, but in this case we know that
11841 A < X <= MAX. */
11842
11843 if (!TREE_SIDE_EFFECTS (arg0)
11844 && !TREE_SIDE_EFFECTS (arg1))
11845 {
11846 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11847 if (tem && !operand_equal_p (tem, arg0, 0))
11848 return fold_build2_loc (loc, code, type, tem, arg1);
11849
11850 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11851 if (tem && !operand_equal_p (tem, arg1, 0))
11852 return fold_build2_loc (loc, code, type, arg0, tem);
11853 }
11854
11855 truth_andor:
11856 /* We only do these simplifications if we are optimizing. */
11857 if (!optimize)
11858 return NULL_TREE;
11859
11860 /* Check for things like (A || B) && (A || C). We can convert this
11861 to A || (B && C). Note that either operator can be any of the four
11862 truth and/or operations and the transformation will still be
11863 valid. Also note that we only care about order for the
11864 ANDIF and ORIF operators: if B contains side effects, reordering
11865 might change the truth-value of A. */
11866 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11867 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11868 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11869 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11870 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11871 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11872 {
11873 tree a00 = TREE_OPERAND (arg0, 0);
11874 tree a01 = TREE_OPERAND (arg0, 1);
11875 tree a10 = TREE_OPERAND (arg1, 0);
11876 tree a11 = TREE_OPERAND (arg1, 1);
11877 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11878 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11879 && (code == TRUTH_AND_EXPR
11880 || code == TRUTH_OR_EXPR));
11881
11882 if (operand_equal_p (a00, a10, 0))
11883 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11884 fold_build2_loc (loc, code, type, a01, a11));
11885 else if (commutative && operand_equal_p (a00, a11, 0))
11886 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11887 fold_build2_loc (loc, code, type, a01, a10));
11888 else if (commutative && operand_equal_p (a01, a10, 0))
11889 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11890 fold_build2_loc (loc, code, type, a00, a11));
11891
11892 /* This case is tricky because we must either have commutative
11893 operators or else A10 must not have side-effects. */
11894
11895 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11896 && operand_equal_p (a01, a11, 0))
11897 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11898 fold_build2_loc (loc, code, type, a00, a10),
11899 a01);
11900 }
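      /* E.g. (a || b) && (a || c) becomes a || (b && c), and
	 (a && b) || (a && c) becomes a && (b || c); the TREE_SIDE_EFFECTS
	 check on the inner right-hand operand keeps the reassociation
	 from changing how often that operand is evaluated.  */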
11901
11902 /* See if we can build a range comparison. */
11903 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11904 return tem;
11905
11906 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11907 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11908 {
11909 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11910 if (tem)
11911 return fold_build2_loc (loc, code, type, tem, arg1);
11912 }
11913
11914 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11915 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11916 {
11917 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11918 if (tem)
11919 return fold_build2_loc (loc, code, type, arg0, tem);
11920 }
11921
11922 /* Check for the possibility of merging component references. If our
11923 lhs is another similar operation, try to merge its rhs with our
11924 rhs. Then try to merge our lhs and rhs. */
11925 if (TREE_CODE (arg0) == code
11926 && 0 != (tem = fold_truthop (loc, code, type,
11927 TREE_OPERAND (arg0, 1), arg1)))
11928 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11929
11930 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
11931 return tem;
11932
11933 return NULL_TREE;
11934
11935 case TRUTH_ORIF_EXPR:
11936 /* Note that the operands of this must be ints
11937 and their values must be 0 or true.
11938 ("true" is a fixed value perhaps depending on the language.) */
11939 /* If first arg is constant true, return it. */
11940 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11941 return fold_convert_loc (loc, type, arg0);
11942 case TRUTH_OR_EXPR:
11943 /* If either arg is constant zero, drop it. */
11944 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11945 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11946 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11947 /* Preserve sequence points. */
11948 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11949 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11950 /* If second arg is constant true, result is true, but we must
11951 evaluate first arg. */
11952 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11953 return omit_one_operand_loc (loc, type, arg1, arg0);
11954 /* Likewise for first arg, but note this only occurs here for
11955 TRUTH_OR_EXPR. */
11956 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11957 return omit_one_operand_loc (loc, type, arg0, arg1);
11958
11959 /* !X || X is always true. */
11960 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11961 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11962 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11963 /* X || !X is always true. */
11964 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11965 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11966 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11967
11968 goto truth_andor;
11969
11970 case TRUTH_XOR_EXPR:
11971 /* If the second arg is constant zero, drop it. */
11972 if (integer_zerop (arg1))
11973 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11974 /* If the second arg is constant true, this is a logical inversion. */
11975 if (integer_onep (arg1))
11976 {
11977 /* Only call invert_truthvalue if operand is a truth value. */
11978 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11979 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11980 else
11981 tem = invert_truthvalue_loc (loc, arg0);
11982 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11983 }
11984 /* Identical arguments cancel to zero. */
11985 if (operand_equal_p (arg0, arg1, 0))
11986 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11987
11988 /* !X ^ X is always true. */
11989 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11990 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11991 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11992
11993 /* X ^ !X is always true. */
11994 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11995 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11996 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11997
11998 return NULL_TREE;
11999
12000 case EQ_EXPR:
12001 case NE_EXPR:
12002 tem = fold_comparison (loc, code, type, op0, op1);
12003 if (tem != NULL_TREE)
12004 return tem;
12005
12006 /* bool_var != 0 becomes bool_var. */
12007 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12008 && code == NE_EXPR)
12009 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12010
12011 /* bool_var == 1 becomes bool_var. */
12012 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12013 && code == EQ_EXPR)
12014 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12015
12016 /* bool_var != 1 becomes !bool_var. */
12017 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12018 && code == NE_EXPR)
12019 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12020 fold_convert_loc (loc, type, arg0));
12021
12022 /* bool_var == 0 becomes !bool_var. */
12023 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12024 && code == EQ_EXPR)
12025 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12026 fold_convert_loc (loc, type, arg0));
12027
12028 /* !exp != 0 becomes !exp. */
12029 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12030 && code == NE_EXPR)
12031 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12032
12033 /* If this is an equality comparison of the address of two non-weak,
12034 unaliased symbols, neither of which is extern (since we do not
12035 have access to attributes for externs), then we know the result. */
12036 if (TREE_CODE (arg0) == ADDR_EXPR
12037 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12038 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12039 && ! lookup_attribute ("alias",
12040 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12041 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12042 && TREE_CODE (arg1) == ADDR_EXPR
12043 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12044 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12045 && ! lookup_attribute ("alias",
12046 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12047 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12048 {
12049 /* We know that we're looking at the address of two
12050 non-weak, unaliased, static _DECL nodes.
12051
12052 It is both wasteful and incorrect to call operand_equal_p
12053 to compare the two ADDR_EXPR nodes. It is wasteful in that
12054 all we need to do is test pointer equality for the arguments
12055 to the two ADDR_EXPR nodes. It is incorrect to use
12056 operand_equal_p as that function is NOT equivalent to a
12057 C equality test. It can in fact return false for two
12058 objects which would test as equal using the C equality
12059 operator. */
12060 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12061 return constant_boolean_node (equal
12062 ? code == EQ_EXPR : code != EQ_EXPR,
12063 type);
12064 }
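      /* Sketch: given "static int x, y;", &x == &y folds to false and
	 &x != &y to true, because distinct non-weak, non-aliased
	 declarations have distinct addresses; pointer identity of the
	 two _DECL nodes is the whole test.  */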
12065
12066 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12067 a MINUS_EXPR of a constant, we can convert it into a comparison with
12068 a revised constant as long as no overflow occurs. */
12069 if (TREE_CODE (arg1) == INTEGER_CST
12070 && (TREE_CODE (arg0) == PLUS_EXPR
12071 || TREE_CODE (arg0) == MINUS_EXPR)
12072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12073 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12074 ? MINUS_EXPR : PLUS_EXPR,
12075 fold_convert_loc (loc, TREE_TYPE (arg0),
12076 arg1),
12077 TREE_OPERAND (arg0, 1), 0))
12078 && !TREE_OVERFLOW (tem))
12079 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
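      /* E.g. x + 5 == 7 becomes x == 2, and x - 3 != 10 becomes x != 13;
	 the rewrite is suppressed when computing the adjusted constant
	 would itself overflow.  */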
12080
12081 /* Similarly for a NEGATE_EXPR. */
12082 if (TREE_CODE (arg0) == NEGATE_EXPR
12083 && TREE_CODE (arg1) == INTEGER_CST
12084 && 0 != (tem = negate_expr (arg1))
12085 && TREE_CODE (tem) == INTEGER_CST
12086 && !TREE_OVERFLOW (tem))
12087 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12088
12089 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12090 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12091 && TREE_CODE (arg1) == INTEGER_CST
12092 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12093 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12094 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12095 fold_convert_loc (loc,
12096 TREE_TYPE (arg0),
12097 arg1),
12098 TREE_OPERAND (arg0, 1)));
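      /* E.g. (x ^ 3) == 5 becomes x == 6, since XOR is its own inverse:
	 x ^ 3 == 5 exactly when x == (3 ^ 5).  */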
12099
12100 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12101 if ((TREE_CODE (arg0) == PLUS_EXPR
12102 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12103 || TREE_CODE (arg0) == MINUS_EXPR)
12104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12105 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12106 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12107 {
12108 tree val = TREE_OPERAND (arg0, 1);
12109 return omit_two_operands_loc (loc, type,
12110 fold_build2_loc (loc, code, type,
12111 val,
12112 build_int_cst (TREE_TYPE (val),
12113 0)),
12114 TREE_OPERAND (arg0, 0), arg1);
12115 }
12116
12117 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12118 if (TREE_CODE (arg0) == MINUS_EXPR
12119 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12120 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12121 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12122 {
12123 return omit_two_operands_loc (loc, type,
12124 code == NE_EXPR
12125 ? boolean_true_node : boolean_false_node,
12126 TREE_OPERAND (arg0, 1), arg1);
12127 }
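      /* Reasoning: C - X == X would require 2*X == C, and 2*X is even in
	 any two's-complement width while C is odd, so the equality is
	 always false and the inequality always true.  */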
12128
12129 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12130 for !=. Don't do this for ordered comparisons due to overflow. */
12131 if (TREE_CODE (arg0) == MINUS_EXPR
12132 && integer_zerop (arg1))
12133 return fold_build2_loc (loc, code, type,
12134 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12135
12136 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12137 if (TREE_CODE (arg0) == ABS_EXPR
12138 && (integer_zerop (arg1) || real_zerop (arg1)))
12139 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12140
12141 /* If this is an EQ or NE comparison with zero and ARG0 is
12142 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12143 two operations, but the latter can be done in one less insn
12144 on machines that have only two-operand insns or on which a
12145 constant cannot be the first operand. */
12146 if (TREE_CODE (arg0) == BIT_AND_EXPR
12147 && integer_zerop (arg1))
12148 {
12149 tree arg00 = TREE_OPERAND (arg0, 0);
12150 tree arg01 = TREE_OPERAND (arg0, 1);
12151 if (TREE_CODE (arg00) == LSHIFT_EXPR
12152 && integer_onep (TREE_OPERAND (arg00, 0)))
12153 {
12154 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12155 arg01, TREE_OPERAND (arg00, 1));
12156 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12157 build_int_cst (TREE_TYPE (arg0), 1));
12158 return fold_build2_loc (loc, code, type,
12159 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12160 arg1);
12161 }
12162 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12163 && integer_onep (TREE_OPERAND (arg01, 0)))
12164 {
12165 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12166 arg00, TREE_OPERAND (arg01, 1));
12167 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12168 build_int_cst (TREE_TYPE (arg0), 1));
12169 return fold_build2_loc (loc, code, type,
12170 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12171 arg1);
12172 }
12173 }
12174
12175 /* If this is an NE or EQ comparison of zero against the result of a
12176 signed MOD operation whose second operand is a power of 2, make
12177 the MOD operation unsigned since it is simpler and equivalent. */
12178 if (integer_zerop (arg1)
12179 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12180 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12181 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12182 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12183 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12184 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12185 {
12186 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12187 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12188 fold_convert_loc (loc, newtype,
12189 TREE_OPERAND (arg0, 0)),
12190 fold_convert_loc (loc, newtype,
12191 TREE_OPERAND (arg0, 1)));
12192
12193 return fold_build2_loc (loc, code, type, newmod,
12194 fold_convert_loc (loc, newtype, arg1));
12195 }
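      /* E.g. for signed x, x % 4 == 0 becomes (unsigned) x % 4U == 0;
	 both forms test whether the low two bits are clear, and the
	 unsigned MOD is cheaper to expand.  */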
12196
12197 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12198 C1 is a valid shift constant, and C2 is a power of two, i.e.
12199 a single bit. */
12200 if (TREE_CODE (arg0) == BIT_AND_EXPR
12201 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12202 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12203 == INTEGER_CST
12204 && integer_pow2p (TREE_OPERAND (arg0, 1))
12205 && integer_zerop (arg1))
12206 {
12207 tree itype = TREE_TYPE (arg0);
12208 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12209 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12210
12211 /* Check for a valid shift count. */
12212 if (TREE_INT_CST_HIGH (arg001) == 0
12213 && TREE_INT_CST_LOW (arg001) < prec)
12214 {
12215 tree arg01 = TREE_OPERAND (arg0, 1);
12216 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12217 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12218 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12219 can be rewritten as (X & (C2 << C1)) != 0. */
12220 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12221 {
12222 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12223 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12224 return fold_build2_loc (loc, code, type, tem, arg1);
12225 }
12226 /* Otherwise, for signed (arithmetic) shifts,
12227 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12228 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12229 else if (!TYPE_UNSIGNED (itype))
12230 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12231 arg000, build_int_cst (itype, 0));
12232 /* Otherwise, for unsigned (logical) shifts,
12233 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12234 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12235 else
12236 return omit_one_operand_loc (loc, type,
12237 code == EQ_EXPR ? integer_one_node
12238 : integer_zero_node,
12239 arg000);
12240 }
12241 }
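      /* E.g. ((x >> 2) & 4) != 0 becomes (x & 16) != 0 since 4 << 2 does
	 not overflow; for 32-bit signed x, ((x >> 30) & 4) != 0 tests a
	 copy of the sign bit and becomes x < 0, while for unsigned x the
	 shifted-out bits are zero and the test folds to a constant.  */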
12242
12243 /* If this is an NE comparison of zero with an AND of one, remove the
12244 comparison since the AND will give the correct value. */
12245 if (code == NE_EXPR
12246 && integer_zerop (arg1)
12247 && TREE_CODE (arg0) == BIT_AND_EXPR
12248 && integer_onep (TREE_OPERAND (arg0, 1)))
12249 return fold_convert_loc (loc, type, arg0);
12250
12251 /* If we have (A & C) == C where C is a power of 2, convert this into
12252 (A & C) != 0. Similarly for NE_EXPR. */
12253 if (TREE_CODE (arg0) == BIT_AND_EXPR
12254 && integer_pow2p (TREE_OPERAND (arg0, 1))
12255 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12256 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12257 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12258 integer_zero_node));
12259
12260 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12261 bit, then fold the expression into A < 0 or A >= 0. */
12262 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12263 if (tem)
12264 return tem;
12265
12266 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12267 Similarly for NE_EXPR. */
12268 if (TREE_CODE (arg0) == BIT_AND_EXPR
12269 && TREE_CODE (arg1) == INTEGER_CST
12270 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12271 {
12272 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12273 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12274 TREE_OPERAND (arg0, 1));
12275 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12276 arg1, notc);
12277 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12278 if (integer_nonzerop (dandnotc))
12279 return omit_one_operand_loc (loc, type, rslt, arg0);
12280 }
12281
12282 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12283 Similarly for NE_EXPR. */
12284 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12285 && TREE_CODE (arg1) == INTEGER_CST
12286 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12287 {
12288 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12289 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12290 TREE_OPERAND (arg0, 1), notd);
12291 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12292 if (integer_nonzerop (candnotd))
12293 return omit_one_operand_loc (loc, type, rslt, arg0);
12294 }
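      /* E.g. (x & 3) == 4 folds to false (4 & ~3 != 0, so no value of x
	 can match), and likewise (x | 4) == 3 folds to false because the
	 OR forces a bit that 3 lacks.  */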
12295
12296 /* If this is a comparison of a field, we may be able to simplify it. */
12297 if ((TREE_CODE (arg0) == COMPONENT_REF
12298 || TREE_CODE (arg0) == BIT_FIELD_REF)
12299 /* Handle the constant case even without -O
12300 to make sure the warnings are given. */
12301 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12302 {
12303 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12304 if (t1)
12305 return t1;
12306 }
12307
12308 /* Optimize comparisons of strlen vs zero to a compare of the
12309 first character of the string vs zero. To wit,
12310 strlen(ptr) == 0 => *ptr == 0
12311 strlen(ptr) != 0 => *ptr != 0
12312 Other cases should reduce to one of these two (or a constant)
12313 due to the return value of strlen being unsigned. */
12314 if (TREE_CODE (arg0) == CALL_EXPR
12315 && integer_zerop (arg1))
12316 {
12317 tree fndecl = get_callee_fndecl (arg0);
12318
12319 if (fndecl
12320 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12321 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12322 && call_expr_nargs (arg0) == 1
12323 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12324 {
12325 tree iref = build_fold_indirect_ref_loc (loc,
12326 CALL_EXPR_ARG (arg0, 0));
12327 return fold_build2_loc (loc, code, type, iref,
12328 build_int_cst (TREE_TYPE (iref), 0));
12329 }
12330 }
12331
12332 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12333 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12334 if (TREE_CODE (arg0) == RSHIFT_EXPR
12335 && integer_zerop (arg1)
12336 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12337 {
12338 tree arg00 = TREE_OPERAND (arg0, 0);
12339 tree arg01 = TREE_OPERAND (arg0, 1);
12340 tree itype = TREE_TYPE (arg00);
12341 if (TREE_INT_CST_HIGH (arg01) == 0
12342 && TREE_INT_CST_LOW (arg01)
12343 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12344 {
12345 if (TYPE_UNSIGNED (itype))
12346 {
12347 itype = signed_type_for (itype);
12348 arg00 = fold_convert_loc (loc, itype, arg00);
12349 }
12350 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12351 type, arg00, build_int_cst (itype, 0));
12352 }
12353 }
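      /* E.g. for 32-bit int x, (x >> 31) != 0 becomes x < 0 and
	 (x >> 31) == 0 becomes x >= 0; an unsigned x is first converted
	 to the corresponding signed type so the sign test is meaningful.  */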
12354
12355 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12356 if (integer_zerop (arg1)
12357 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12358 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12359 TREE_OPERAND (arg0, 1));
12360
12361 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12362 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12363 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12364 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12365 build_int_cst (TREE_TYPE (arg1), 0));
12366 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12367 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12368 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12369 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12370 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12371 build_int_cst (TREE_TYPE (arg1), 0));
12372
12373 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12374 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12375 && TREE_CODE (arg1) == INTEGER_CST
12376 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12377 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12378 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12379 TREE_OPERAND (arg0, 1), arg1));
12380
12381 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12382 (X & C) == 0 when C is a single bit. */
12383 if (TREE_CODE (arg0) == BIT_AND_EXPR
12384 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12385 && integer_zerop (arg1)
12386 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12387 {
12388 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12389 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12390 TREE_OPERAND (arg0, 1));
12391 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12392 type, tem, arg1);
12393 }
12394
12395 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12396 constant C is a power of two, i.e. a single bit. */
12397 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12398 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12399 && integer_zerop (arg1)
12400 && integer_pow2p (TREE_OPERAND (arg0, 1))
12401 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12402 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12403 {
12404 tree arg00 = TREE_OPERAND (arg0, 0);
12405 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12406 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12407 }
12408
12409 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12410 when C is a power of two, i.e. a single bit. */
12411 if (TREE_CODE (arg0) == BIT_AND_EXPR
12412 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12413 && integer_zerop (arg1)
12414 && integer_pow2p (TREE_OPERAND (arg0, 1))
12415 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12416 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12417 {
12418 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12419 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12420 arg000, TREE_OPERAND (arg0, 1));
12421 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12422 tem, build_int_cst (TREE_TYPE (tem), 0));
12423 }
12424
12425 if (integer_zerop (arg1)
12426 && tree_expr_nonzero_p (arg0))
12427 {
12428 tree res = constant_boolean_node (code == NE_EXPR, type);
12429 return omit_one_operand_loc (loc, type, res, arg0);
12430 }
12431
12432 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12433 if (TREE_CODE (arg0) == NEGATE_EXPR
12434 && TREE_CODE (arg1) == NEGATE_EXPR)
12435 return fold_build2_loc (loc, code, type,
12436 TREE_OPERAND (arg0, 0),
12437 TREE_OPERAND (arg1, 0));
12438
12439 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12440 if (TREE_CODE (arg0) == BIT_AND_EXPR
12441 && TREE_CODE (arg1) == BIT_AND_EXPR)
12442 {
12443 tree arg00 = TREE_OPERAND (arg0, 0);
12444 tree arg01 = TREE_OPERAND (arg0, 1);
12445 tree arg10 = TREE_OPERAND (arg1, 0);
12446 tree arg11 = TREE_OPERAND (arg1, 1);
12447 tree itype = TREE_TYPE (arg0);
12448
12449 if (operand_equal_p (arg01, arg11, 0))
12450 return fold_build2_loc (loc, code, type,
12451 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12452 fold_build2_loc (loc,
12453 BIT_XOR_EXPR, itype,
12454 arg00, arg10),
12455 arg01),
12456 build_int_cst (itype, 0));
12457
12458 if (operand_equal_p (arg01, arg10, 0))
12459 return fold_build2_loc (loc, code, type,
12460 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12461 fold_build2_loc (loc,
12462 BIT_XOR_EXPR, itype,
12463 arg00, arg11),
12464 arg01),
12465 build_int_cst (itype, 0));
12466
12467 if (operand_equal_p (arg00, arg11, 0))
12468 return fold_build2_loc (loc, code, type,
12469 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12470 fold_build2_loc (loc,
12471 BIT_XOR_EXPR, itype,
12472 arg01, arg10),
12473 arg00),
12474 build_int_cst (itype, 0));
12475
12476 if (operand_equal_p (arg00, arg10, 0))
12477 return fold_build2_loc (loc, code, type,
12478 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12479 fold_build2_loc (loc,
12480 BIT_XOR_EXPR, itype,
12481 arg01, arg11),
12482 arg00),
12483 build_int_cst (itype, 0));
12484 }
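      /* E.g. (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0: the masked
	 values agree exactly when x and y do not differ in any of the
	 masked bits.  */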
12485
12486 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12487 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12488 {
12489 tree arg00 = TREE_OPERAND (arg0, 0);
12490 tree arg01 = TREE_OPERAND (arg0, 1);
12491 tree arg10 = TREE_OPERAND (arg1, 0);
12492 tree arg11 = TREE_OPERAND (arg1, 1);
12493 tree itype = TREE_TYPE (arg0);
12494
12495 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12496 operand_equal_p guarantees no side-effects so we don't need
12497 to use omit_one_operand on Z. */
12498 if (operand_equal_p (arg01, arg11, 0))
12499 return fold_build2_loc (loc, code, type, arg00, arg10);
12500 if (operand_equal_p (arg01, arg10, 0))
12501 return fold_build2_loc (loc, code, type, arg00, arg11);
12502 if (operand_equal_p (arg00, arg11, 0))
12503 return fold_build2_loc (loc, code, type, arg01, arg10);
12504 if (operand_equal_p (arg00, arg10, 0))
12505 return fold_build2_loc (loc, code, type, arg01, arg11);
12506
12507 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12508 if (TREE_CODE (arg01) == INTEGER_CST
12509 && TREE_CODE (arg11) == INTEGER_CST)
12510 return fold_build2_loc (loc, code, type,
12511 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12512 fold_build2_loc (loc,
12513 BIT_XOR_EXPR, itype,
12514 arg01, arg11)),
12515 arg10);
12516 }
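      /* E.g. (x ^ z) == (y ^ z) becomes x == y, and with constants
	 (x ^ 3) == (y ^ 5) becomes (x ^ 6) == y, folding 3 ^ 5 at
	 compile time.  */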
12517
12518 /* Attempt to simplify equality/inequality comparisons of complex
12519 values. Only lower the comparison if the result is known or
12520 can be simplified to a single scalar comparison. */
12521 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12522 || TREE_CODE (arg0) == COMPLEX_CST)
12523 && (TREE_CODE (arg1) == COMPLEX_EXPR
12524 || TREE_CODE (arg1) == COMPLEX_CST))
12525 {
12526 tree real0, imag0, real1, imag1;
12527 tree rcond, icond;
12528
12529 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12530 {
12531 real0 = TREE_OPERAND (arg0, 0);
12532 imag0 = TREE_OPERAND (arg0, 1);
12533 }
12534 else
12535 {
12536 real0 = TREE_REALPART (arg0);
12537 imag0 = TREE_IMAGPART (arg0);
12538 }
12539
12540 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12541 {
12542 real1 = TREE_OPERAND (arg1, 0);
12543 imag1 = TREE_OPERAND (arg1, 1);
12544 }
12545 else
12546 {
12547 real1 = TREE_REALPART (arg1);
12548 imag1 = TREE_IMAGPART (arg1);
12549 }
12550
12551 rcond = fold_binary_loc (loc, code, type, real0, real1);
12552 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12553 {
12554 if (integer_zerop (rcond))
12555 {
12556 if (code == EQ_EXPR)
12557 return omit_two_operands_loc (loc, type, boolean_false_node,
12558 imag0, imag1);
12559 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12560 }
12561 else
12562 {
12563 if (code == NE_EXPR)
12564 return omit_two_operands_loc (loc, type, boolean_true_node,
12565 imag0, imag1);
12566 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12567 }
12568 }
12569
12570 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12571 if (icond && TREE_CODE (icond) == INTEGER_CST)
12572 {
12573 if (integer_zerop (icond))
12574 {
12575 if (code == EQ_EXPR)
12576 return omit_two_operands_loc (loc, type, boolean_false_node,
12577 real0, real1);
12578 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12579 }
12580 else
12581 {
12582 if (code == NE_EXPR)
12583 return omit_two_operands_loc (loc, type, boolean_true_node,
12584 real0, real1);
12585 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12586 }
12587 }
12588 }
12589
12590 return NULL_TREE;
12591
12592 case LT_EXPR:
12593 case GT_EXPR:
12594 case LE_EXPR:
12595 case GE_EXPR:
12596 tem = fold_comparison (loc, code, type, op0, op1);
12597 if (tem != NULL_TREE)
12598 return tem;
12599
12600 /* Transform comparisons of the form X +- C CMP X. */
12601 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12603 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12604 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12605 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12606 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12607 {
12608 tree arg01 = TREE_OPERAND (arg0, 1);
12609 enum tree_code code0 = TREE_CODE (arg0);
12610 int is_positive;
12611
12612 if (TREE_CODE (arg01) == REAL_CST)
12613 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12614 else
12615 is_positive = tree_int_cst_sgn (arg01);
12616
12617 /* (X - c) > X becomes false. */
12618 if (code == GT_EXPR
12619 && ((code0 == MINUS_EXPR && is_positive >= 0)
12620 || (code0 == PLUS_EXPR && is_positive <= 0)))
12621 {
12622 if (TREE_CODE (arg01) == INTEGER_CST
12623 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12624 fold_overflow_warning (("assuming signed overflow does not "
12625 "occur when assuming that (X - c) > X "
12626 "is always false"),
12627 WARN_STRICT_OVERFLOW_ALL);
12628 return constant_boolean_node (0, type);
12629 }
12630
12631 /* Likewise (X + c) < X becomes false. */
12632 if (code == LT_EXPR
12633 && ((code0 == PLUS_EXPR && is_positive >= 0)
12634 || (code0 == MINUS_EXPR && is_positive <= 0)))
12635 {
12636 if (TREE_CODE (arg01) == INTEGER_CST
12637 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12638 fold_overflow_warning (("assuming signed overflow does not "
12639 "occur when assuming that "
12640 "(X + c) < X is always false"),
12641 WARN_STRICT_OVERFLOW_ALL);
12642 return constant_boolean_node (0, type);
12643 }
12644
12645 /* Convert (X - c) <= X to true. */
12646 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12647 && code == LE_EXPR
12648 && ((code0 == MINUS_EXPR && is_positive >= 0)
12649 || (code0 == PLUS_EXPR && is_positive <= 0)))
12650 {
12651 if (TREE_CODE (arg01) == INTEGER_CST
12652 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12653 fold_overflow_warning (("assuming signed overflow does not "
12654 "occur when assuming that "
12655 "(X - c) <= X is always true"),
12656 WARN_STRICT_OVERFLOW_ALL);
12657 return constant_boolean_node (1, type);
12658 }
12659
12660 /* Convert (X + c) >= X to true. */
12661 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12662 && code == GE_EXPR
12663 && ((code0 == PLUS_EXPR && is_positive >= 0)
12664 || (code0 == MINUS_EXPR && is_positive <= 0)))
12665 {
12666 if (TREE_CODE (arg01) == INTEGER_CST
12667 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12668 fold_overflow_warning (("assuming signed overflow does not "
12669 "occur when assuming that "
12670 "(X + c) >= X is always true"),
12671 WARN_STRICT_OVERFLOW_ALL);
12672 return constant_boolean_node (1, type);
12673 }
12674
12675 if (TREE_CODE (arg01) == INTEGER_CST)
12676 {
12677 /* Convert X + c > X and X - c < X to true for integers. */
12678 if (code == GT_EXPR
12679 && ((code0 == PLUS_EXPR && is_positive > 0)
12680 || (code0 == MINUS_EXPR && is_positive < 0)))
12681 {
12682 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12683 fold_overflow_warning (("assuming signed overflow does "
12684 "not occur when assuming that "
12685 "(X + c) > X is always true"),
12686 WARN_STRICT_OVERFLOW_ALL);
12687 return constant_boolean_node (1, type);
12688 }
12689
12690 if (code == LT_EXPR
12691 && ((code0 == MINUS_EXPR && is_positive > 0)
12692 || (code0 == PLUS_EXPR && is_positive < 0)))
12693 {
12694 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12695 fold_overflow_warning (("assuming signed overflow does "
12696 "not occur when assuming that "
12697 "(X - c) < X is always true"),
12698 WARN_STRICT_OVERFLOW_ALL);
12699 return constant_boolean_node (1, type);
12700 }
12701
12702 /* Convert X + c <= X and X - c >= X to false for integers. */
12703 if (code == LE_EXPR
12704 && ((code0 == PLUS_EXPR && is_positive > 0)
12705 || (code0 == MINUS_EXPR && is_positive < 0)))
12706 {
12707 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12708 fold_overflow_warning (("assuming signed overflow does "
12709 "not occur when assuming that "
12710 "(X + c) <= X is always false"),
12711 WARN_STRICT_OVERFLOW_ALL);
12712 return constant_boolean_node (0, type);
12713 }
12714
12715 if (code == GE_EXPR
12716 && ((code0 == MINUS_EXPR && is_positive > 0)
12717 || (code0 == PLUS_EXPR && is_positive < 0)))
12718 {
12719 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12720 fold_overflow_warning (("assuming signed overflow does "
12721 "not occur when assuming that "
12722 "(X - c) >= X is always false"),
12723 WARN_STRICT_OVERFLOW_ALL);
12724 return constant_boolean_node (0, type);
12725 }
12726 }
12727 }
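      /* E.g. for signed int x, x + 1 > x folds to true and x + 1 <= x to
	 false under the assumption that signed overflow is undefined; the
	 TYPE_OVERFLOW_UNDEFINED guard keeps wrapping (e.g. unsigned)
	 types out of these folds, and fold_overflow_warning supports
	 the -Wstrict-overflow diagnostics.  */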
12728
12729 /* Comparisons with the highest or lowest possible integer of
12730 the specified precision will have known values. */
12731 {
12732 tree arg1_type = TREE_TYPE (arg1);
12733 unsigned int width = TYPE_PRECISION (arg1_type);
12734
12735 if (TREE_CODE (arg1) == INTEGER_CST
12736 && width <= 2 * HOST_BITS_PER_WIDE_INT
12737 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12738 {
12739 HOST_WIDE_INT signed_max_hi;
12740 unsigned HOST_WIDE_INT signed_max_lo;
12741 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12742
12743 if (width <= HOST_BITS_PER_WIDE_INT)
12744 {
12745 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12746 - 1;
12747 signed_max_hi = 0;
12748 max_hi = 0;
12749
12750 if (TYPE_UNSIGNED (arg1_type))
12751 {
12752 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12753 min_lo = 0;
12754 min_hi = 0;
12755 }
12756 else
12757 {
12758 max_lo = signed_max_lo;
12759 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12760 min_hi = -1;
12761 }
12762 }
12763 else
12764 {
12765 width -= HOST_BITS_PER_WIDE_INT;
12766 signed_max_lo = -1;
12767 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12768 - 1;
12769 max_lo = -1;
12770 min_lo = 0;
12771
12772 if (TYPE_UNSIGNED (arg1_type))
12773 {
12774 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12775 min_hi = 0;
12776 }
12777 else
12778 {
12779 max_hi = signed_max_hi;
12780 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12781 }
12782 }
12783
12784 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12785 && TREE_INT_CST_LOW (arg1) == max_lo)
12786 switch (code)
12787 {
12788 case GT_EXPR:
12789 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12790
12791 case GE_EXPR:
12792 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12793
12794 case LE_EXPR:
12795 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12796
12797 case LT_EXPR:
12798 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12799
12800 /* The GE_EXPR and LT_EXPR cases above are not normally
12801 reached because of previous transformations. */
12802
12803 default:
12804 break;
12805 }
12806 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12807 == max_hi
12808 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12809 switch (code)
12810 {
12811 case GT_EXPR:
12812 arg1 = const_binop (PLUS_EXPR, arg1,
12813 build_int_cst (TREE_TYPE (arg1), 1), 0);
12814 return fold_build2_loc (loc, EQ_EXPR, type,
12815 fold_convert_loc (loc,
12816 TREE_TYPE (arg1), arg0),
12817 arg1);
12818 case LE_EXPR:
12819 arg1 = const_binop (PLUS_EXPR, arg1,
12820 build_int_cst (TREE_TYPE (arg1), 1), 0);
12821 return fold_build2_loc (loc, NE_EXPR, type,
12822 fold_convert_loc (loc, TREE_TYPE (arg1),
12823 arg0),
12824 arg1);
12825 default:
12826 break;
12827 }
12828 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12829 == min_hi
12830 && TREE_INT_CST_LOW (arg1) == min_lo)
12831 switch (code)
12832 {
12833 case LT_EXPR:
12834 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12835
12836 case LE_EXPR:
12837 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12838
12839 case GE_EXPR:
12840 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12841
12842 case GT_EXPR:
12843 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12844
12845 default:
12846 break;
12847 }
12848 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12849 == min_hi
12850 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12851 switch (code)
12852 {
12853 case GE_EXPR:
12854 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12855 return fold_build2_loc (loc, NE_EXPR, type,
12856 fold_convert_loc (loc,
12857 TREE_TYPE (arg1), arg0),
12858 arg1);
12859 case LT_EXPR:
12860 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12861 return fold_build2_loc (loc, EQ_EXPR, type,
12862 fold_convert_loc (loc, TREE_TYPE (arg1),
12863 arg0),
12864 arg1);
12865 default:
12866 break;
12867 }
12868
12869 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12870 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12871 && TYPE_UNSIGNED (arg1_type)
12872 /* We will flip the signedness of the comparison operator
12873 associated with the mode of arg1, so the sign bit is
12874 specified by this mode. Check that arg1 is the signed
12875 max associated with this sign bit. */
12876 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12877 /* signed_type does not work on pointer types. */
12878 && INTEGRAL_TYPE_P (arg1_type))
12879 {
12880 /* The following case also applies to X < signed_max+1
12881 and X >= signed_max+1 because of previous transformations. */
12882 if (code == LE_EXPR || code == GT_EXPR)
12883 {
12884 tree st;
12885 st = signed_type_for (TREE_TYPE (arg1));
12886 return fold_build2_loc (loc,
12887 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12888 type, fold_convert_loc (loc, st, arg0),
12889 build_int_cst (st, 0));
12890 }
12891 }
12892 }
12893 }
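      /* Examples of the extreme-value folds above, for 32-bit types:
	 unsigned x > UINT_MAX folds to false and x <= UINT_MAX to true;
	 signed x <= INT_MAX - 1 becomes x != INT_MAX; and unsigned
	 x > 0x7fffffff becomes (int) x < 0 by flipping the signedness
	 of the comparison.  */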
12894
12895 /* If we are comparing an ABS_EXPR with a constant, we can
12896 convert all the cases into explicit comparisons, but they may
12897 well not be faster than doing the ABS and one comparison.
12898 But ABS (X) <= C is a range comparison, which becomes a subtraction
12899 and a comparison, and is probably faster. */
12900 if (code == LE_EXPR
12901 && TREE_CODE (arg1) == INTEGER_CST
12902 && TREE_CODE (arg0) == ABS_EXPR
12903 && ! TREE_SIDE_EFFECTS (arg0)
12904 && (0 != (tem = negate_expr (arg1)))
12905 && TREE_CODE (tem) == INTEGER_CST
12906 && !TREE_OVERFLOW (tem))
12907 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12908 build2 (GE_EXPR, type,
12909 TREE_OPERAND (arg0, 0), tem),
12910 build2 (LE_EXPR, type,
12911 TREE_OPERAND (arg0, 0), arg1));
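      /* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5, the range form
	 mentioned above; the negated bound must be representable, hence
	 the overflow check on tem.  */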
12912
12913 /* Convert ABS_EXPR<x> >= 0 to true. */
12914 strict_overflow_p = false;
12915 if (code == GE_EXPR
12916 && (integer_zerop (arg1)
12917 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12918 && real_zerop (arg1)))
12919 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12920 {
12921 if (strict_overflow_p)
12922 fold_overflow_warning (("assuming signed overflow does not occur "
12923 "when simplifying comparison of "
12924 "absolute value and zero"),
12925 WARN_STRICT_OVERFLOW_CONDITIONAL);
12926 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12927 }
12928
12929 /* Convert ABS_EXPR<x> < 0 to false. */
12930 strict_overflow_p = false;
12931 if (code == LT_EXPR
12932 && (integer_zerop (arg1) || real_zerop (arg1))
12933 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12934 {
12935 if (strict_overflow_p)
12936 fold_overflow_warning (("assuming signed overflow does not occur "
12937 "when simplifying comparison of "
12938 "absolute value and zero"),
12939 WARN_STRICT_OVERFLOW_CONDITIONAL);
12940 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12941 }
12942
12943 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12944 and similarly for >= into !=. */
12945 if ((code == LT_EXPR || code == GE_EXPR)
12946 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12947 && TREE_CODE (arg1) == LSHIFT_EXPR
12948 && integer_onep (TREE_OPERAND (arg1, 0)))
12949 {
12950 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12951 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12952 TREE_OPERAND (arg1, 1)),
12953 build_int_cst (TREE_TYPE (arg0), 0));
12954 goto fold_binary_exit;
12955 }
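      /* E.g. for unsigned x, x < (1U << y) becomes (x >> y) == 0 and
	 x >= (1U << y) becomes (x >> y) != 0, assuming y is a valid
	 shift count.  */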
12956
12957 if ((code == LT_EXPR || code == GE_EXPR)
12958 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12959 && CONVERT_EXPR_P (arg1)
12960 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12961 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12962 {
12963 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12964 fold_convert_loc (loc, TREE_TYPE (arg0),
12965 build2 (RSHIFT_EXPR,
12966 TREE_TYPE (arg0), arg0,
12967 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12968 1))),
12969 build_int_cst (TREE_TYPE (arg0), 0));
12970 goto fold_binary_exit;
12971 }
12972
12973 return NULL_TREE;
12974
12975 case UNORDERED_EXPR:
12976 case ORDERED_EXPR:
12977 case UNLT_EXPR:
12978 case UNLE_EXPR:
12979 case UNGT_EXPR:
12980 case UNGE_EXPR:
12981 case UNEQ_EXPR:
12982 case LTGT_EXPR:
12983 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12984 {
12985 t1 = fold_relational_const (code, type, arg0, arg1);
12986 if (t1 != NULL_TREE)
12987 return t1;
12988 }
12989
12990 /* If the first operand is NaN, the result is constant. */
12991 if (TREE_CODE (arg0) == REAL_CST
12992 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12993 && (code != LTGT_EXPR || ! flag_trapping_math))
12994 {
12995 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12996 ? integer_zero_node
12997 : integer_one_node;
12998 return omit_one_operand_loc (loc, type, t1, arg1);
12999 }
13000
13001 /* If the second operand is NaN, the result is constant. */
13002 if (TREE_CODE (arg1) == REAL_CST
13003 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13004 && (code != LTGT_EXPR || ! flag_trapping_math))
13005 {
13006 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13007 ? integer_zero_node
13008 : integer_one_node;
13009 return omit_one_operand_loc (loc, type, t1, arg0);
13010 }
13011
13012 /* Simplify unordered comparison of something with itself. */
13013 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13014 && operand_equal_p (arg0, arg1, 0))
13015 return constant_boolean_node (1, type);
13016
13017 if (code == LTGT_EXPR
13018 && !flag_trapping_math
13019 && operand_equal_p (arg0, arg1, 0))
13020 return constant_boolean_node (0, type);
13021
13022 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13023 {
13024 tree targ0 = strip_float_extensions (arg0);
13025 tree targ1 = strip_float_extensions (arg1);
13026 tree newtype = TREE_TYPE (targ0);
13027
13028 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13029 newtype = TREE_TYPE (targ1);
13030
13031 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13032 return fold_build2_loc (loc, code, type,
13033 fold_convert_loc (loc, newtype, targ0),
13034 fold_convert_loc (loc, newtype, targ1));
13035 }
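      /* E.g. with float f, g, the test (double) f < (double) g becomes
	 f < g: widening a float to double is exact and order-preserving,
	 so comparing in the narrowest common type is equivalent and
	 cheaper.  */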
13036
13037 return NULL_TREE;
13038
13039 case COMPOUND_EXPR:
13040 /* When pedantic, a compound expression can be neither an lvalue
13041 nor an integer constant expression. */
13042 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13043 return NULL_TREE;
13044 /* Don't let (0, 0) be a null pointer constant. */
13045 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13046 : fold_convert_loc (loc, type, arg1);
13047 return pedantic_non_lvalue_loc (loc, tem);
13048
13049 case COMPLEX_EXPR:
13050 if ((TREE_CODE (arg0) == REAL_CST
13051 && TREE_CODE (arg1) == REAL_CST)
13052 || (TREE_CODE (arg0) == INTEGER_CST
13053 && TREE_CODE (arg1) == INTEGER_CST))
13054 return build_complex (type, arg0, arg1);
13055 return NULL_TREE;
13056
13057 case ASSERT_EXPR:
13058 /* An ASSERT_EXPR should never be passed to fold_binary. */
13059 gcc_unreachable ();
13060
13061 default:
13062 return NULL_TREE;
13063 } /* switch (code) */
13064 fold_binary_exit:
13065 protected_set_expr_location (tem, loc);
13066 return tem;
13067 }
13068
13069 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13070 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13071 of GOTO_EXPR. */
13072
13073 static tree
13074 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13075 {
13076 switch (TREE_CODE (*tp))
13077 {
13078 case LABEL_EXPR:
13079 return *tp;
13080
13081 case GOTO_EXPR:
13082 *walk_subtrees = 0;
13083
13084 /* ... fall through ... */
13085
13086 default:
13087 return NULL_TREE;
13088 }
13089 }
13090
13091 /* Return whether the sub-tree ST contains a label which is accessible from
13092 outside the sub-tree. */
13093
13094 static bool
13095 contains_label_p (tree st)
13096 {
13097 return
13098 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13099 }
13100
13101 /* Fold a ternary expression of code CODE and type TYPE with operands
13102 OP0, OP1, and OP2. Return the folded expression if folding is
13103 successful. Otherwise, return NULL_TREE. */
13104
13105 tree
13106 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13107 tree op0, tree op1, tree op2)
13108 {
13109 tree tem;
13110 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13111 enum tree_code_class kind = TREE_CODE_CLASS (code);
13112
13113 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13114 && TREE_CODE_LENGTH (code) == 3);
13115
13116 /* Strip any conversions that don't change the mode. This is safe
13117 for every expression, except for a comparison expression because
13118 its signedness is derived from its operands. So, in the latter
13119 case, only strip conversions that don't change the signedness.
13120
13121 Note that this is done as an internal manipulation within the
13122 constant folder, in order to find the simplest representation of
13123 the arguments so that their form can be studied. In any case,
13124 the appropriate type conversions should be put back in the tree
13125 that will get out of the constant folder. */
13126 if (op0)
13127 {
13128 arg0 = op0;
13129 STRIP_NOPS (arg0);
13130 }
13131
13132 if (op1)
13133 {
13134 arg1 = op1;
13135 STRIP_NOPS (arg1);
13136 }
13137
13138 switch (code)
13139 {
13140 case COMPONENT_REF:
13141 if (TREE_CODE (arg0) == CONSTRUCTOR
13142 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13143 {
13144 unsigned HOST_WIDE_INT idx;
13145 tree field, value;
13146 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13147 if (field == arg1)
13148 return value;
13149 }
13150 return NULL_TREE;
13151
13152 case COND_EXPR:
13153 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13154 so all simple results must be passed through pedantic_non_lvalue. */
13155 if (TREE_CODE (arg0) == INTEGER_CST)
13156 {
13157 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13158 tem = integer_zerop (arg0) ? op2 : op1;
13159 /* Only optimize constant conditions when the selected branch
13160 has the same type as the COND_EXPR. This avoids optimizing
13161 away "c ? x : throw", where the throw has a void type.
13162 Avoid throwing away an operand that contains a label. */
13163 if ((!TREE_SIDE_EFFECTS (unused_op)
13164 || !contains_label_p (unused_op))
13165 && (! VOID_TYPE_P (TREE_TYPE (tem))
13166 || VOID_TYPE_P (type)))
13167 return pedantic_non_lvalue_loc (loc, tem);
13168 return NULL_TREE;
13169 }
13170 if (operand_equal_p (arg1, op2, 0))
13171 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13172
13173 /* If we have A op B ? A : C, we may be able to convert this to a
13174 simpler expression, depending on the operation and the values
13175 of B and C. Signed zeros prevent all of these transformations,
13176 for reasons given above each one.
13177
13178 Also try swapping the arguments and inverting the conditional. */
13179 if (COMPARISON_CLASS_P (arg0)
13180 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13181 arg1, TREE_OPERAND (arg0, 1))
13182 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13183 {
13184 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13185 if (tem)
13186 return tem;
13187 }
13188
13189 if (COMPARISON_CLASS_P (arg0)
13190 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13191 op2,
13192 TREE_OPERAND (arg0, 1))
13193 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13194 {
13195 tem = fold_truth_not_expr (loc, arg0);
13196 if (tem && COMPARISON_CLASS_P (tem))
13197 {
13198 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13199 if (tem)
13200 return tem;
13201 }
13202 }
13203
13204 /* If the second operand is simpler than the third, swap them
13205 since that produces better jump optimization results. */
13206 if (truth_value_p (TREE_CODE (arg0))
13207 && tree_swap_operands_p (op1, op2, false))
13208 {
13209 /* See if this can be inverted. If it can't, possibly because
13210 it was a floating-point inequality comparison, don't do
13211 anything. */
13212 tem = fold_truth_not_expr (loc, arg0);
13213 if (tem)
13214 return fold_build3_loc (loc, code, type, tem, op2, op1);
13215 }
13216
13217 /* Convert A ? 1 : 0 to simply A. */
13218 if (integer_onep (op1)
13219 && integer_zerop (op2)
13220 /* If we try to convert OP0 to our type, the
13221 call to fold will try to move the conversion inside
13222 a COND, which will recurse. In that case, the COND_EXPR
13223 is probably the best choice, so leave it alone. */
13224 && type == TREE_TYPE (arg0))
13225 return pedantic_non_lvalue_loc (loc, arg0);
13226
13227 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13228 over COND_EXPR in cases such as floating point comparisons. */
13229 if (integer_zerop (op1)
13230 && integer_onep (op2)
13231 && truth_value_p (TREE_CODE (arg0)))
13232 return pedantic_non_lvalue_loc (loc,
13233 fold_convert_loc (loc, type,
13234 invert_truthvalue_loc (loc,
13235 arg0)));
13236
13237 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13238 if (TREE_CODE (arg0) == LT_EXPR
13239 && integer_zerop (TREE_OPERAND (arg0, 1))
13240 && integer_zerop (op2)
13241 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13242 {
13243 /* sign_bit_p only checks ARG1 bits within A's precision.
13244 If <sign bit of A> has wider type than A, bits outside
13245 of A's precision in <sign bit of A> need to be checked.
13246 If they are all 0, this optimization needs to be done
13247 in A's unsigned type; if they are all 1, in A's signed type;
13248 otherwise it can't be done. */
13249 if (TYPE_PRECISION (TREE_TYPE (tem))
13250 < TYPE_PRECISION (TREE_TYPE (arg1))
13251 && TYPE_PRECISION (TREE_TYPE (tem))
13252 < TYPE_PRECISION (type))
13253 {
13254 unsigned HOST_WIDE_INT mask_lo;
13255 HOST_WIDE_INT mask_hi;
13256 int inner_width, outer_width;
13257 tree tem_type;
13258
13259 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13260 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13261 if (outer_width > TYPE_PRECISION (type))
13262 outer_width = TYPE_PRECISION (type);
13263
13264 if (outer_width > HOST_BITS_PER_WIDE_INT)
13265 {
13266 mask_hi = ((unsigned HOST_WIDE_INT) -1
13267 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13268 mask_lo = -1;
13269 }
13270 else
13271 {
13272 mask_hi = 0;
13273 mask_lo = ((unsigned HOST_WIDE_INT) -1
13274 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13275 }
13276 if (inner_width > HOST_BITS_PER_WIDE_INT)
13277 {
13278 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13279 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13280 mask_lo = 0;
13281 }
13282 else
13283 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13284 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13285
13286 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13287 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13288 {
13289 tem_type = signed_type_for (TREE_TYPE (tem));
13290 tem = fold_convert_loc (loc, tem_type, tem);
13291 }
13292 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13293 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13294 {
13295 tem_type = unsigned_type_for (TREE_TYPE (tem));
13296 tem = fold_convert_loc (loc, tem_type, tem);
13297 }
13298 else
13299 tem = NULL;
13300 }
13301
13302 if (tem)
13303 return
13304 fold_convert_loc (loc, type,
13305 fold_build2_loc (loc, BIT_AND_EXPR,
13306 TREE_TYPE (tem), tem,
13307 fold_convert_loc (loc,
13308 TREE_TYPE (tem),
13309 arg1)));
13310 }
13311
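/* Editorial illustration (not from GCC): the sign-bit identity above,
   as stand-alone C, assuming a 32-bit int whose sign bit is 0x80000000:

     int a = -123;
     unsigned mask = 0x80000000u;
     unsigned lhs = a < 0 ? mask : 0u;
     unsigned rhs = (unsigned) a & mask;
     assert (lhs == rhs);

   A < 0 exactly when the sign bit of A is set, so selecting the mask on
   that comparison equals masking A directly.  */
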
13312 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13313 already handled above. */
13314 if (TREE_CODE (arg0) == BIT_AND_EXPR
13315 && integer_onep (TREE_OPERAND (arg0, 1))
13316 && integer_zerop (op2)
13317 && integer_pow2p (arg1))
13318 {
13319 tree tem = TREE_OPERAND (arg0, 0);
13320 STRIP_NOPS (tem);
13321 if (TREE_CODE (tem) == RSHIFT_EXPR
13322 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13323 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13324 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13325 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13326 TREE_OPERAND (tem, 0), arg1);
13327 }
13328
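/* Editorial illustration (not from GCC): the shift-and-test identity
   above, as stand-alone C:

     unsigned a = 0x2cu, n = 3;
     unsigned lhs = ((a >> n) & 1u) ? (1u << n) : 0u;
     unsigned rhs = a & (1u << n);
     assert (lhs == rhs);

   Testing bit N after the shift and then materializing 1 << N selects
   exactly the bit that A & (1 << N) extracts directly.  */
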
13329 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13330 is probably obsolete because the first operand should be a
13331 truth value (that's why we have the two cases above), but let's
13332 leave it in until we can confirm this for all front-ends. */
13333 if (integer_zerop (op2)
13334 && TREE_CODE (arg0) == NE_EXPR
13335 && integer_zerop (TREE_OPERAND (arg0, 1))
13336 && integer_pow2p (arg1)
13337 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13338 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13339 arg1, OEP_ONLY_CONST))
13340 return pedantic_non_lvalue_loc (loc,
13341 fold_convert_loc (loc, type,
13342 TREE_OPERAND (arg0, 0)));
13343
13344 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13345 if (integer_zerop (op2)
13346 && truth_value_p (TREE_CODE (arg0))
13347 && truth_value_p (TREE_CODE (arg1)))
13348 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13349 fold_convert_loc (loc, type, arg0),
13350 arg1);
13351
13352 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13353 if (integer_onep (op2)
13354 && truth_value_p (TREE_CODE (arg0))
13355 && truth_value_p (TREE_CODE (arg1)))
13356 {
13357 /* Only perform transformation if ARG0 is easily inverted. */
13358 tem = fold_truth_not_expr (loc, arg0);
13359 if (tem)
13360 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13361 fold_convert_loc (loc, type, tem),
13362 arg1);
13363 }
13364
13365 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13366 if (integer_zerop (arg1)
13367 && truth_value_p (TREE_CODE (arg0))
13368 && truth_value_p (TREE_CODE (op2)))
13369 {
13370 /* Only perform transformation if ARG0 is easily inverted. */
13371 tem = fold_truth_not_expr (loc, arg0);
13372 if (tem)
13373 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13374 fold_convert_loc (loc, type, tem),
13375 op2);
13376 }
13377
13378 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13379 if (integer_onep (arg1)
13380 && truth_value_p (TREE_CODE (arg0))
13381 && truth_value_p (TREE_CODE (op2)))
13382 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13383 fold_convert_loc (loc, type, arg0),
13384 op2);
13385
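/* Editorial illustration (not from GCC): the four truth-value
   conversions above, as stand-alone C.  For any A, B in {0, 1}:

     assert ((A ? B : 0) == (A && B));
     assert ((A ? B : 1) == (!A || B));
     assert ((A ? 0 : B) == (!A && B));
     assert ((A ? 1 : B) == (A || B));

   The middle two require ARG0 to be easily inverted, which is why the
   code above bails out when fold_truth_not_expr fails.  */
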
13386 return NULL_TREE;
13387
13388 case CALL_EXPR:
13389 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13390 of fold_ternary on them. */
13391 gcc_unreachable ();
13392
13393 case BIT_FIELD_REF:
13394 if ((TREE_CODE (arg0) == VECTOR_CST
13395 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13396 && type == TREE_TYPE (TREE_TYPE (arg0)))
13397 {
13398 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13399 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13400
13401 if (width != 0
13402 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13403 && (idx % width) == 0
13404 && (idx = idx / width)
13405 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13406 {
13407 tree elements = NULL_TREE;
13408
13409 if (TREE_CODE (arg0) == VECTOR_CST)
13410 elements = TREE_VECTOR_CST_ELTS (arg0);
13411 else
13412 {
13413 unsigned HOST_WIDE_INT idx;
13414 tree value;
13415
13416 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13417 elements = tree_cons (NULL_TREE, value, elements);
13418 }
13419 while (idx-- > 0 && elements)
13420 elements = TREE_CHAIN (elements);
13421 if (elements)
13422 return TREE_VALUE (elements);
13423 else
13424 return fold_convert_loc (loc, type, integer_zero_node);
13425 }
13426 }
13427
13428 /* A bit-field-ref that references the full argument can be stripped. */
13429 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13430 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13431 && integer_zerop (op2))
13432 return fold_convert_loc (loc, type, arg0);
13433
13434 return NULL_TREE;
13435
13436 default:
13437 return NULL_TREE;
13438 } /* switch (code) */
13439 }
13440
13441 /* Perform constant folding and related simplification of EXPR.
13442 The related simplifications include x*1 => x, x*0 => 0, etc.,
13443 and application of the associative law.
13444 NOP_EXPR conversions may be removed freely (as long as we
13445 are careful not to change the type of the overall expression).
13446 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13447 but we can constant-fold them if they have constant operands. */
13448
13449 #ifdef ENABLE_FOLD_CHECKING
13450 # define fold(x) fold_1 (x)
13451 static tree fold_1 (tree);
13452 static
13453 #endif
13454 tree
13455 fold (tree expr)
13456 {
13457 const tree t = expr;
13458 enum tree_code code = TREE_CODE (t);
13459 enum tree_code_class kind = TREE_CODE_CLASS (code);
13460 tree tem;
13461 location_t loc = EXPR_LOCATION (expr);
13462
13463 /* Return right away if a constant. */
13464 if (kind == tcc_constant)
13465 return t;
13466
13467 /* CALL_EXPR-like objects with variable numbers of operands are
13468 treated specially. */
13469 if (kind == tcc_vl_exp)
13470 {
13471 if (code == CALL_EXPR)
13472 {
13473 tem = fold_call_expr (loc, expr, false);
13474 return tem ? tem : expr;
13475 }
13476 return expr;
13477 }
13478
13479 if (IS_EXPR_CODE_CLASS (kind))
13480 {
13481 tree type = TREE_TYPE (t);
13482 tree op0, op1, op2;
13483
13484 switch (TREE_CODE_LENGTH (code))
13485 {
13486 case 1:
13487 op0 = TREE_OPERAND (t, 0);
13488 tem = fold_unary_loc (loc, code, type, op0);
13489 return tem ? tem : expr;
13490 case 2:
13491 op0 = TREE_OPERAND (t, 0);
13492 op1 = TREE_OPERAND (t, 1);
13493 tem = fold_binary_loc (loc, code, type, op0, op1);
13494 return tem ? tem : expr;
13495 case 3:
13496 op0 = TREE_OPERAND (t, 0);
13497 op1 = TREE_OPERAND (t, 1);
13498 op2 = TREE_OPERAND (t, 2);
13499 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13500 return tem ? tem : expr;
13501 default:
13502 break;
13503 }
13504 }
13505
13506 switch (code)
13507 {
13508 case ARRAY_REF:
13509 {
13510 tree op0 = TREE_OPERAND (t, 0);
13511 tree op1 = TREE_OPERAND (t, 1);
13512
13513 if (TREE_CODE (op1) == INTEGER_CST
13514 && TREE_CODE (op0) == CONSTRUCTOR
13515 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13516 {
13517 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13518 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13519 unsigned HOST_WIDE_INT begin = 0;
13520
13521 /* Find a matching index by means of a binary search. */
13522 while (begin != end)
13523 {
13524 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13525 tree index = VEC_index (constructor_elt, elts, middle)->index;
13526
13527 if (TREE_CODE (index) == INTEGER_CST
13528 && tree_int_cst_lt (index, op1))
13529 begin = middle + 1;
13530 else if (TREE_CODE (index) == INTEGER_CST
13531 && tree_int_cst_lt (op1, index))
13532 end = middle;
13533 else if (TREE_CODE (index) == RANGE_EXPR
13534 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13535 begin = middle + 1;
13536 else if (TREE_CODE (index) == RANGE_EXPR
13537 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13538 end = middle;
13539 else
13540 return VEC_index (constructor_elt, elts, middle)->value;
13541 }
13542 }
13543
13544 return t;
13545 }
13546
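/* Editorial sketch (not from GCC): the loop above is a classic binary
   search, relying on the CONSTRUCTOR elements being sorted by index.
   In stand-alone C, over a sorted array of integer indices:

     static int
     find (const int *idx, int n, int key)
     {
       int lo = 0, hi = n;
       while (lo != hi)
         {
           int mid = (lo + hi) / 2;
           if (idx[mid] < key)
             lo = mid + 1;
           else if (key < idx[mid])
             hi = mid;
           else
             return mid;
         }
       return -1;
     }

   RANGE_EXPR entries act as [low, high] intervals, hence the two extra
   comparisons against the range bounds above.  */
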
13547 case CONST_DECL:
13548 return fold (DECL_INITIAL (t));
13549
13550 default:
13551 return t;
13552 } /* switch (code) */
13553 }
13554
13555 #ifdef ENABLE_FOLD_CHECKING
13556 #undef fold
13557
13558 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13559 static void fold_check_failed (const_tree, const_tree);
13560 void print_fold_checksum (const_tree);
13561
13562 /* When --enable-checking=fold is in effect, compute a digest of EXPR
13563 before and after the actual fold call, to verify that fold did not
13564 accidentally change the original EXPR. */
13565
13566 tree
13567 fold (tree expr)
13568 {
13569 tree ret;
13570 struct md5_ctx ctx;
13571 unsigned char checksum_before[16], checksum_after[16];
13572 htab_t ht;
13573
13574 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13575 md5_init_ctx (&ctx);
13576 fold_checksum_tree (expr, &ctx, ht);
13577 md5_finish_ctx (&ctx, checksum_before);
13578 htab_empty (ht);
13579
13580 ret = fold_1 (expr);
13581
13582 md5_init_ctx (&ctx);
13583 fold_checksum_tree (expr, &ctx, ht);
13584 md5_finish_ctx (&ctx, checksum_after);
13585 htab_delete (ht);
13586
13587 if (memcmp (checksum_before, checksum_after, 16))
13588 fold_check_failed (expr, ret);
13589
13590 return ret;
13591 }
13592
13593 void
13594 print_fold_checksum (const_tree expr)
13595 {
13596 struct md5_ctx ctx;
13597 unsigned char checksum[16], cnt;
13598 htab_t ht;
13599
13600 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13601 md5_init_ctx (&ctx);
13602 fold_checksum_tree (expr, &ctx, ht);
13603 md5_finish_ctx (&ctx, checksum);
13604 htab_delete (ht);
13605 for (cnt = 0; cnt < 16; ++cnt)
13606 fprintf (stderr, "%02x", checksum[cnt]);
13607 putc ('\n', stderr);
13608 }
13609
13610 static void
13611 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13612 {
13613 internal_error ("fold check: original tree changed by fold");
13614 }
13615
13616 static void
13617 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13618 {
13619 const void **slot;
13620 enum tree_code code;
13621 union tree_node buf;
13622 int i, len;
13623
13624 recursive_label:
13625
13626 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13627 <= sizeof (struct tree_function_decl))
13628 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13629 if (expr == NULL)
13630 return;
13631 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13632 if (*slot != NULL)
13633 return;
13634 *slot = expr;
13635 code = TREE_CODE (expr);
13636 if (TREE_CODE_CLASS (code) == tcc_declaration
13637 && DECL_ASSEMBLER_NAME_SET_P (expr))
13638 {
13639 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13640 memcpy ((char *) &buf, expr, tree_size (expr));
13641 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13642 expr = (tree) &buf;
13643 }
13644 else if (TREE_CODE_CLASS (code) == tcc_type
13645 && (TYPE_POINTER_TO (expr)
13646 || TYPE_REFERENCE_TO (expr)
13647 || TYPE_CACHED_VALUES_P (expr)
13648 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13649 || TYPE_NEXT_VARIANT (expr)))
13650 {
13651 /* Allow these fields to be modified. */
13652 tree tmp;
13653 memcpy ((char *) &buf, expr, tree_size (expr));
13654 expr = tmp = (tree) &buf;
13655 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13656 TYPE_POINTER_TO (tmp) = NULL;
13657 TYPE_REFERENCE_TO (tmp) = NULL;
13658 TYPE_NEXT_VARIANT (tmp) = NULL;
13659 if (TYPE_CACHED_VALUES_P (tmp))
13660 {
13661 TYPE_CACHED_VALUES_P (tmp) = 0;
13662 TYPE_CACHED_VALUES (tmp) = NULL;
13663 }
13664 }
13665 md5_process_bytes (expr, tree_size (expr), ctx);
13666 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13667 if (TREE_CODE_CLASS (code) != tcc_type
13668 && TREE_CODE_CLASS (code) != tcc_declaration
13669 && code != TREE_LIST
13670 && code != SSA_NAME)
13671 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13672 switch (TREE_CODE_CLASS (code))
13673 {
13674 case tcc_constant:
13675 switch (code)
13676 {
13677 case STRING_CST:
13678 md5_process_bytes (TREE_STRING_POINTER (expr),
13679 TREE_STRING_LENGTH (expr), ctx);
13680 break;
13681 case COMPLEX_CST:
13682 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13683 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13684 break;
13685 case VECTOR_CST:
13686 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13687 break;
13688 default:
13689 break;
13690 }
13691 break;
13692 case tcc_exceptional:
13693 switch (code)
13694 {
13695 case TREE_LIST:
13696 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13697 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13698 expr = TREE_CHAIN (expr);
13699 goto recursive_label;
13700 break;
13701 case TREE_VEC:
13702 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13703 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13704 break;
13705 default:
13706 break;
13707 }
13708 break;
13709 case tcc_expression:
13710 case tcc_reference:
13711 case tcc_comparison:
13712 case tcc_unary:
13713 case tcc_binary:
13714 case tcc_statement:
13715 case tcc_vl_exp:
13716 len = TREE_OPERAND_LENGTH (expr);
13717 for (i = 0; i < len; ++i)
13718 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13719 break;
13720 case tcc_declaration:
13721 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13722 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13723 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13724 {
13725 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13726 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13727 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13728 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13729 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13730 }
13731 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13732 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13733
13734 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13735 {
13736 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13737 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13738 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13739 }
13740 break;
13741 case tcc_type:
13742 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13743 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13744 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13745 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13746 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13747 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13748 if (INTEGRAL_TYPE_P (expr)
13749 || SCALAR_FLOAT_TYPE_P (expr))
13750 {
13751 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13752 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13753 }
13754 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13755 if (TREE_CODE (expr) == RECORD_TYPE
13756 || TREE_CODE (expr) == UNION_TYPE
13757 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13758 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13759 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13760 break;
13761 default:
13762 break;
13763 }
13764 }
13765
13766 /* Helper function for outputting the checksum of a tree T. When
13767 debugging with gdb, you can "define mynext" to be "next" followed
13768 by "call debug_fold_checksum (op0)", then just trace down until the
13769 outputs differ. */
13770
13771 void
13772 debug_fold_checksum (const_tree t)
13773 {
13774 int i;
13775 unsigned char checksum[16];
13776 struct md5_ctx ctx;
13777 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13778
13779 md5_init_ctx (&ctx);
13780 fold_checksum_tree (t, &ctx, ht);
13781 md5_finish_ctx (&ctx, checksum);
13782 htab_empty (ht);
13783
13784 for (i = 0; i < 16; i++)
13785 fprintf (stderr, "%d ", checksum[i]);
13786
13787 fprintf (stderr, "\n");
13788 }
13789
13790 #endif
13791
13792 /* Fold a unary tree expression with code CODE of type TYPE with an
13793 operand OP0. LOC is the location of the resulting expression.
13794 Return a folded expression if successful. Otherwise, return a tree
13795 expression with code CODE of type TYPE with an operand OP0. */
13796
13797 tree
13798 fold_build1_stat_loc (location_t loc,
13799 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13800 {
13801 tree tem;
13802 #ifdef ENABLE_FOLD_CHECKING
13803 unsigned char checksum_before[16], checksum_after[16];
13804 struct md5_ctx ctx;
13805 htab_t ht;
13806
13807 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13808 md5_init_ctx (&ctx);
13809 fold_checksum_tree (op0, &ctx, ht);
13810 md5_finish_ctx (&ctx, checksum_before);
13811 htab_empty (ht);
13812 #endif
13813
13814 tem = fold_unary_loc (loc, code, type, op0);
13815 if (!tem)
13816 {
13817 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13818 SET_EXPR_LOCATION (tem, loc);
13819 }
13820
13821 #ifdef ENABLE_FOLD_CHECKING
13822 md5_init_ctx (&ctx);
13823 fold_checksum_tree (op0, &ctx, ht);
13824 md5_finish_ctx (&ctx, checksum_after);
13825 htab_delete (ht);
13826
13827 if (memcmp (checksum_before, checksum_after, 16))
13828 fold_check_failed (op0, tem);
13829 #endif
13830 return tem;
13831 }
13832
13833 /* Fold a binary tree expression with code CODE of type TYPE with
13834 operands OP0 and OP1. LOC is the location of the resulting
13835 expression. Return a folded expression if successful. Otherwise,
13836 return a tree expression with code CODE of type TYPE with operands
13837 OP0 and OP1. */
13838
13839 tree
13840 fold_build2_stat_loc (location_t loc,
13841 enum tree_code code, tree type, tree op0, tree op1
13842 MEM_STAT_DECL)
13843 {
13844 tree tem;
13845 #ifdef ENABLE_FOLD_CHECKING
13846 unsigned char checksum_before_op0[16],
13847 checksum_before_op1[16],
13848 checksum_after_op0[16],
13849 checksum_after_op1[16];
13850 struct md5_ctx ctx;
13851 htab_t ht;
13852
13853 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13854 md5_init_ctx (&ctx);
13855 fold_checksum_tree (op0, &ctx, ht);
13856 md5_finish_ctx (&ctx, checksum_before_op0);
13857 htab_empty (ht);
13858
13859 md5_init_ctx (&ctx);
13860 fold_checksum_tree (op1, &ctx, ht);
13861 md5_finish_ctx (&ctx, checksum_before_op1);
13862 htab_empty (ht);
13863 #endif
13864
13865 tem = fold_binary_loc (loc, code, type, op0, op1);
13866 if (!tem)
13867 {
13868 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13869 SET_EXPR_LOCATION (tem, loc);
13870 }
13871
13872 #ifdef ENABLE_FOLD_CHECKING
13873 md5_init_ctx (&ctx);
13874 fold_checksum_tree (op0, &ctx, ht);
13875 md5_finish_ctx (&ctx, checksum_after_op0);
13876 htab_empty (ht);
13877
13878 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13879 fold_check_failed (op0, tem);
13880
13881 md5_init_ctx (&ctx);
13882 fold_checksum_tree (op1, &ctx, ht);
13883 md5_finish_ctx (&ctx, checksum_after_op1);
13884 htab_delete (ht);
13885
13886 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13887 fold_check_failed (op1, tem);
13888 #endif
13889 return tem;
13890 }
13891
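/* Editorial illustration (not from GCC's own text): a typical use of
   fold_build2_loc, building 2 + 3 and getting back the folded constant
   5.  Assumes a compilation context in which integer_type_node has
   been initialized:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
                                 integer_type_node, two, three);
     gcc_assert (TREE_CODE (sum) == INTEGER_CST
                 && TREE_INT_CST_LOW (sum) == 5);  */
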
13892 /* Fold a ternary tree expression with code CODE of type TYPE with
13893 operands OP0, OP1, and OP2. Return a folded expression if
13894 successful. Otherwise, return a tree expression with code CODE of
13895 type TYPE with operands OP0, OP1, and OP2. */
13896
13897 tree
13898 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13899 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13900 {
13901 tree tem;
13902 #ifdef ENABLE_FOLD_CHECKING
13903 unsigned char checksum_before_op0[16],
13904 checksum_before_op1[16],
13905 checksum_before_op2[16],
13906 checksum_after_op0[16],
13907 checksum_after_op1[16],
13908 checksum_after_op2[16];
13909 struct md5_ctx ctx;
13910 htab_t ht;
13911
13912 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13913 md5_init_ctx (&ctx);
13914 fold_checksum_tree (op0, &ctx, ht);
13915 md5_finish_ctx (&ctx, checksum_before_op0);
13916 htab_empty (ht);
13917
13918 md5_init_ctx (&ctx);
13919 fold_checksum_tree (op1, &ctx, ht);
13920 md5_finish_ctx (&ctx, checksum_before_op1);
13921 htab_empty (ht);
13922
13923 md5_init_ctx (&ctx);
13924 fold_checksum_tree (op2, &ctx, ht);
13925 md5_finish_ctx (&ctx, checksum_before_op2);
13926 htab_empty (ht);
13927 #endif
13928
13929 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13930 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13931 if (!tem)
13932 {
13933 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13934 SET_EXPR_LOCATION (tem, loc);
13935 }
13936
13937 #ifdef ENABLE_FOLD_CHECKING
13938 md5_init_ctx (&ctx);
13939 fold_checksum_tree (op0, &ctx, ht);
13940 md5_finish_ctx (&ctx, checksum_after_op0);
13941 htab_empty (ht);
13942
13943 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13944 fold_check_failed (op0, tem);
13945
13946 md5_init_ctx (&ctx);
13947 fold_checksum_tree (op1, &ctx, ht);
13948 md5_finish_ctx (&ctx, checksum_after_op1);
13949 htab_empty (ht);
13950
13951 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13952 fold_check_failed (op1, tem);
13953
13954 md5_init_ctx (&ctx);
13955 fold_checksum_tree (op2, &ctx, ht);
13956 md5_finish_ctx (&ctx, checksum_after_op2);
13957 htab_delete (ht);
13958
13959 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13960 fold_check_failed (op2, tem);
13961 #endif
13962 return tem;
13963 }
13964
13965 /* Fold a CALL_EXPR expression of type TYPE with operand FN, NARGS
13966 arguments in ARGARRAY, and a null static chain.
13967 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13968 of type TYPE from the given operands as constructed by build_call_array. */
13969
13970 tree
13971 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13972 int nargs, tree *argarray)
13973 {
13974 tree tem;
13975 #ifdef ENABLE_FOLD_CHECKING
13976 unsigned char checksum_before_fn[16],
13977 checksum_before_arglist[16],
13978 checksum_after_fn[16],
13979 checksum_after_arglist[16];
13980 struct md5_ctx ctx;
13981 htab_t ht;
13982 int i;
13983
13984 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13985 md5_init_ctx (&ctx);
13986 fold_checksum_tree (fn, &ctx, ht);
13987 md5_finish_ctx (&ctx, checksum_before_fn);
13988 htab_empty (ht);
13989
13990 md5_init_ctx (&ctx);
13991 for (i = 0; i < nargs; i++)
13992 fold_checksum_tree (argarray[i], &ctx, ht);
13993 md5_finish_ctx (&ctx, checksum_before_arglist);
13994 htab_empty (ht);
13995 #endif
13996
13997 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13998
13999 #ifdef ENABLE_FOLD_CHECKING
14000 md5_init_ctx (&ctx);
14001 fold_checksum_tree (fn, &ctx, ht);
14002 md5_finish_ctx (&ctx, checksum_after_fn);
14003 htab_empty (ht);
14004
14005 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14006 fold_check_failed (fn, tem);
14007
14008 md5_init_ctx (&ctx);
14009 for (i = 0; i < nargs; i++)
14010 fold_checksum_tree (argarray[i], &ctx, ht);
14011 md5_finish_ctx (&ctx, checksum_after_arglist);
14012 htab_delete (ht);
14013
14014 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14015 fold_check_failed (NULL_TREE, tem);
14016 #endif
14017 return tem;
14018 }
14019
14020 /* Perform constant folding and related simplification of initializer
14021 expression EXPR. These behave identically to "fold_buildN" but ignore
14022 potential run-time traps and exceptions that fold must preserve. */
14023
14024 #define START_FOLD_INIT \
14025 int saved_signaling_nans = flag_signaling_nans;\
14026 int saved_trapping_math = flag_trapping_math;\
14027 int saved_rounding_math = flag_rounding_math;\
14028 int saved_trapv = flag_trapv;\
14029 int saved_folding_initializer = folding_initializer;\
14030 flag_signaling_nans = 0;\
14031 flag_trapping_math = 0;\
14032 flag_rounding_math = 0;\
14033 flag_trapv = 0;\
14034 folding_initializer = 1;
14035
14036 #define END_FOLD_INIT \
14037 flag_signaling_nans = saved_signaling_nans;\
14038 flag_trapping_math = saved_trapping_math;\
14039 flag_rounding_math = saved_rounding_math;\
14040 flag_trapv = saved_trapv;\
14041 folding_initializer = saved_folding_initializer;
14042
14043 tree
14044 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14045 tree type, tree op)
14046 {
14047 tree result;
14048 START_FOLD_INIT;
14049
14050 result = fold_build1_loc (loc, code, type, op);
14051
14052 END_FOLD_INIT;
14053 return result;
14054 }
14055
14056 tree
14057 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14058 tree type, tree op0, tree op1)
14059 {
14060 tree result;
14061 START_FOLD_INIT;
14062
14063 result = fold_build2_loc (loc, code, type, op0, op1);
14064
14065 END_FOLD_INIT;
14066 return result;
14067 }
14068
14069 tree
14070 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14071 tree type, tree op0, tree op1, tree op2)
14072 {
14073 tree result;
14074 START_FOLD_INIT;
14075
14076 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14077
14078 END_FOLD_INIT;
14079 return result;
14080 }
14081
14082 tree
14083 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14084 int nargs, tree *argarray)
14085 {
14086 tree result;
14087 START_FOLD_INIT;
14088
14089 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14090
14091 END_FOLD_INIT;
14092 return result;
14093 }
14094
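/* Editorial illustration (not from GCC): why the _initializer variants
   exist.  With trapping math enabled, fold must keep 1.0 / 0.0 as a
   run-time division because it may trap; a static initializer has no
   run time, so under the flags cleared above the same division can be
   folded to +Inf.  Conceptually:

     double at_run_time = x / y;            kept: may trap
     static double in_init = 1.0 / 0.0;     foldable to +Inf here  */
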
14095 #undef START_FOLD_INIT
14096 #undef END_FOLD_INIT
14097
14098 /* Determine if the first argument is a multiple of the second argument.
14099 Return 0 if it is not, or if we cannot easily determine it to be.
14100
14101 An example of the sort of thing we care about (at this point; this routine
14102 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14103 fold cases do now) is discovering that
14104
14105 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14106
14107 is a multiple of
14108
14109 SAVE_EXPR (J * 8)
14110
14111 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14112
14113 This code also handles discovering that
14114
14115 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14116
14117 is a multiple of 8 so we don't have to worry about dealing with a
14118 possible remainder.
14119
14120 Note that we *look* inside a SAVE_EXPR only to determine how it was
14121 calculated; it is not safe for fold to do much of anything else with the
14122 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14123 at run time. For example, the latter example above *cannot* be implemented
14124 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14125 evaluation time of the original SAVE_EXPR is not necessarily the same as at
14126 the time the new expression is evaluated. The only optimization of this
14127 sort that would be valid is changing
14128
14129 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14130
14131 divided by 8 to
14132
14133 SAVE_EXPR (I) * SAVE_EXPR (J)
14134
14135 (where the same SAVE_EXPR (J) is used in the original and the
14136 transformed version). */
14137
14138 int
14139 multiple_of_p (tree type, const_tree top, const_tree bottom)
14140 {
14141 if (operand_equal_p (top, bottom, 0))
14142 return 1;
14143
14144 if (TREE_CODE (type) != INTEGER_TYPE)
14145 return 0;
14146
14147 switch (TREE_CODE (top))
14148 {
14149 case BIT_AND_EXPR:
14150 /* A bitwise AND preserves a power-of-two multiple: if either
14151 operand is a multiple of BOTTOM, then so is TOP. */
14152 if (!integer_pow2p (bottom))
14153 return 0;
14154 /* FALLTHRU */
14155
14156 case MULT_EXPR:
14157 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14158 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14159
14160 case PLUS_EXPR:
14161 case MINUS_EXPR:
14162 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14163 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14164
14165 case LSHIFT_EXPR:
14166 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14167 {
14168 tree op1, t1;
14169
14170 op1 = TREE_OPERAND (top, 1);
14171 /* const_binop may not detect overflow correctly,
14172 so check for it explicitly here. */
14173 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14174 > TREE_INT_CST_LOW (op1)
14175 && TREE_INT_CST_HIGH (op1) == 0
14176 && 0 != (t1 = fold_convert (type,
14177 const_binop (LSHIFT_EXPR,
14178 size_one_node,
14179 op1, 0)))
14180 && !TREE_OVERFLOW (t1))
14181 return multiple_of_p (type, t1, bottom);
14182 }
14183 return 0;
14184
14185 case NOP_EXPR:
14186 /* Can't handle conversions from a non-integral or wider integral type. */
14187 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14188 || (TYPE_PRECISION (type)
14189 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14190 return 0;
14191
14192 /* ... fall through ... */
14193
14194 case SAVE_EXPR:
14195 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14196
14197 case COND_EXPR:
14198 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14199 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14200
14201 case INTEGER_CST:
14202 if (TREE_CODE (bottom) != INTEGER_CST
14203 || integer_zerop (bottom)
14204 || (TYPE_UNSIGNED (type)
14205 && (tree_int_cst_sgn (top) < 0
14206 || tree_int_cst_sgn (bottom) < 0)))
14207 return 0;
14208 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14209 top, bottom, 0));
14210
14211 default:
14212 return 0;
14213 }
14214 }
14215
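/* Editorial illustration (not from GCC): what the recursion above
   concludes, phrased over plain integers.  For any i and j:

     (i * (j * 8)) % 8 == 0          MULT_EXPR: one factor suffices
     ((i * 8) + (j * 8)) % 8 == 0    PLUS_EXPR: both addends must match
     ((i * 8) + (j * 4)) % 8         unknown, so the function returns 0

   The SAVE_EXPR case looks only at how the saved value was computed,
   per the caveats in the comment above.  */
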
14216 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
14217
14218 static bool
14219 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14220 {
14221 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14222 && truth_value_p (code))
14223 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14224 have a signed:1 type (where the values are -1 and 0). */
14225 return true;
14226 return false;
14227 }
14228
14229 /* Return true if (CODE OP0) is known to be non-negative. If the return
14230 value is based on the assumption that signed overflow is undefined,
14231 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14232 *STRICT_OVERFLOW_P. */
14233
14234 bool
14235 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14236 bool *strict_overflow_p)
14237 {
14238 if (TYPE_UNSIGNED (type))
14239 return true;
14240
14241 switch (code)
14242 {
14243 case ABS_EXPR:
14244 /* We can't return 1 if flag_wrapv is set because
14245 ABS_EXPR<INT_MIN> = INT_MIN. */
14246 if (!INTEGRAL_TYPE_P (type))
14247 return true;
14248 if (TYPE_OVERFLOW_UNDEFINED (type))
14249 {
14250 *strict_overflow_p = true;
14251 return true;
14252 }
14253 break;
14254
14255 case NON_LVALUE_EXPR:
14256 case FLOAT_EXPR:
14257 case FIX_TRUNC_EXPR:
14258 return tree_expr_nonnegative_warnv_p (op0,
14259 strict_overflow_p);
14260
14261 case NOP_EXPR:
14262 {
14263 tree inner_type = TREE_TYPE (op0);
14264 tree outer_type = type;
14265
14266 if (TREE_CODE (outer_type) == REAL_TYPE)
14267 {
14268 if (TREE_CODE (inner_type) == REAL_TYPE)
14269 return tree_expr_nonnegative_warnv_p (op0,
14270 strict_overflow_p);
14271 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14272 {
14273 if (TYPE_UNSIGNED (inner_type))
14274 return true;
14275 return tree_expr_nonnegative_warnv_p (op0,
14276 strict_overflow_p);
14277 }
14278 }
14279 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14280 {
14281 if (TREE_CODE (inner_type) == REAL_TYPE)
14282 return tree_expr_nonnegative_warnv_p (op0,
14283 strict_overflow_p);
14284 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14285 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14286 && TYPE_UNSIGNED (inner_type);
14287 }
14288 }
14289 break;
14290
14291 default:
14292 return tree_simple_nonnegative_warnv_p (code, type);
14293 }
14294
14295 /* We don't know sign of `t', so be conservative and return false. */
14296 return false;
14297 }
14298
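/* Editorial illustration (not from GCC): why ABS_EXPR is only known
   nonnegative when signed overflow is undefined.  Under wrapping
   (-fwrapv) two's-complement semantics, with a 32-bit int:

     int m = INT_MIN;
     assert (-m == INT_MIN);

   Negating INT_MIN wraps back to INT_MIN, so "abs" can stay negative;
   hence *strict_overflow_p is set before answering true.  */
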
14299 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14300 value is based on the assumption that signed overflow is undefined,
14301 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14302 *STRICT_OVERFLOW_P. */
14303
14304 bool
14305 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14306 tree op1, bool *strict_overflow_p)
14307 {
14308 if (TYPE_UNSIGNED (type))
14309 return true;
14310
14311 switch (code)
14312 {
14313 case POINTER_PLUS_EXPR:
14314 case PLUS_EXPR:
14315 if (FLOAT_TYPE_P (type))
14316 return (tree_expr_nonnegative_warnv_p (op0,
14317 strict_overflow_p)
14318 && tree_expr_nonnegative_warnv_p (op1,
14319 strict_overflow_p));
14320
14321 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14322 both unsigned and at least 2 bits shorter than the result. */
14323 if (TREE_CODE (type) == INTEGER_TYPE
14324 && TREE_CODE (op0) == NOP_EXPR
14325 && TREE_CODE (op1) == NOP_EXPR)
14326 {
14327 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14328 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14329 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14330 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14331 {
14332 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14333 TYPE_PRECISION (inner2)) + 1;
14334 return prec < TYPE_PRECISION (type);
14335 }
14336 }
14337 break;
14338
14339 case MULT_EXPR:
14340 if (FLOAT_TYPE_P (type))
14341 {
14342 /* x * x for floating point x is always non-negative. */
14343 if (operand_equal_p (op0, op1, 0))
14344 return true;
14345 return (tree_expr_nonnegative_warnv_p (op0,
14346 strict_overflow_p)
14347 && tree_expr_nonnegative_warnv_p (op1,
14348 strict_overflow_p));
14349 }
14350
14351 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14352 both unsigned and their combined width is less than the result's. */
14353 if (TREE_CODE (type) == INTEGER_TYPE
14354 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14355 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14356 {
14357 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14358 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14359 : TREE_TYPE (op0);
14360 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14361 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14362 : TREE_TYPE (op1);
14363
14364 bool unsigned0 = TYPE_UNSIGNED (inner0);
14365 bool unsigned1 = TYPE_UNSIGNED (inner1);
14366
14367 if (TREE_CODE (op0) == INTEGER_CST)
14368 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14369
14370 if (TREE_CODE (op1) == INTEGER_CST)
14371 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14372
14373 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14374 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14375 {
14376 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14377 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14378 : TYPE_PRECISION (inner0);
14379
14380 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14381 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14382 : TYPE_PRECISION (inner1);
14383
14384 return precision0 + precision1 < TYPE_PRECISION (type);
14385 }
14386 }
14387 return false;
14388
14389 case BIT_AND_EXPR:
14390 case MAX_EXPR:
14391 return (tree_expr_nonnegative_warnv_p (op0,
14392 strict_overflow_p)
14393 || tree_expr_nonnegative_warnv_p (op1,
14394 strict_overflow_p));
14395
14396 case BIT_IOR_EXPR:
14397 case BIT_XOR_EXPR:
14398 case MIN_EXPR:
14399 case RDIV_EXPR:
14400 case TRUNC_DIV_EXPR:
14401 case CEIL_DIV_EXPR:
14402 case FLOOR_DIV_EXPR:
14403 case ROUND_DIV_EXPR:
14404 return (tree_expr_nonnegative_warnv_p (op0,
14405 strict_overflow_p)
14406 && tree_expr_nonnegative_warnv_p (op1,
14407 strict_overflow_p));
14408
14409 case TRUNC_MOD_EXPR:
14410 case CEIL_MOD_EXPR:
14411 case FLOOR_MOD_EXPR:
14412 case ROUND_MOD_EXPR:
14413 return tree_expr_nonnegative_warnv_p (op0,
14414 strict_overflow_p);
14415 default:
14416 return tree_simple_nonnegative_warnv_p (code, type);
14417 }
14418
14419 /* We don't know sign of `t', so be conservative and return false. */
14420 return false;
14421 }
14422
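/* Editorial illustration (not from GCC): the precision arguments used
   for PLUS_EXPR and MULT_EXPR above, with 8-bit operands widened to a
   32-bit result:

     unsigned char x = 255, y = 255;
     int sum  = (int) x + (int) y;     at most 510, needs 9 bits
     int prod = (int) x * (int) y;     at most 65025, needs 16 bits
     assert (sum >= 0 && prod >= 0);

   A sum needs MAX (prec0, prec1) + 1 bits and a product needs
   prec0 + prec1 bits; staying strictly below the result's precision
   keeps the sign bit clear.  */
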
14423 /* Return true if T is known to be non-negative. If the return
14424 value is based on the assumption that signed overflow is undefined,
14425 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14426 *STRICT_OVERFLOW_P. */
14427
14428 bool
14429 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14430 {
14431 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14432 return true;
14433
14434 switch (TREE_CODE (t))
14435 {
14436 case INTEGER_CST:
14437 return tree_int_cst_sgn (t) >= 0;
14438
14439 case REAL_CST:
14440 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14441
14442 case FIXED_CST:
14443 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14444
14445 case COND_EXPR:
14446 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14447 strict_overflow_p)
14448 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14449 strict_overflow_p));
14450 default:
14451 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14452 TREE_TYPE (t));
14453 }
14454 /* We don't know sign of `t', so be conservative and return false. */
14455 return false;
14456 }
14457
14458 /* Return true if T is known to be non-negative. If the return
14459 value is based on the assumption that signed overflow is undefined,
14460 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14461 *STRICT_OVERFLOW_P. */
14462
14463 bool
14464 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14465 tree arg0, tree arg1, bool *strict_overflow_p)
14466 {
14467 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14468 switch (DECL_FUNCTION_CODE (fndecl))
14469 {
14470 CASE_FLT_FN (BUILT_IN_ACOS):
14471 CASE_FLT_FN (BUILT_IN_ACOSH):
14472 CASE_FLT_FN (BUILT_IN_CABS):
14473 CASE_FLT_FN (BUILT_IN_COSH):
14474 CASE_FLT_FN (BUILT_IN_ERFC):
14475 CASE_FLT_FN (BUILT_IN_EXP):
14476 CASE_FLT_FN (BUILT_IN_EXP10):
14477 CASE_FLT_FN (BUILT_IN_EXP2):
14478 CASE_FLT_FN (BUILT_IN_FABS):
14479 CASE_FLT_FN (BUILT_IN_FDIM):
14480 CASE_FLT_FN (BUILT_IN_HYPOT):
14481 CASE_FLT_FN (BUILT_IN_POW10):
14482 CASE_INT_FN (BUILT_IN_FFS):
14483 CASE_INT_FN (BUILT_IN_PARITY):
14484 CASE_INT_FN (BUILT_IN_POPCOUNT):
14485 case BUILT_IN_BSWAP32:
14486 case BUILT_IN_BSWAP64:
14487 /* Always true. */
14488 return true;
14489
14490 CASE_FLT_FN (BUILT_IN_SQRT):
14491 /* sqrt(-0.0) is -0.0. */
14492 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14493 return true;
14494 return tree_expr_nonnegative_warnv_p (arg0,
14495 strict_overflow_p);
14496
14497 CASE_FLT_FN (BUILT_IN_ASINH):
14498 CASE_FLT_FN (BUILT_IN_ATAN):
14499 CASE_FLT_FN (BUILT_IN_ATANH):
14500 CASE_FLT_FN (BUILT_IN_CBRT):
14501 CASE_FLT_FN (BUILT_IN_CEIL):
14502 CASE_FLT_FN (BUILT_IN_ERF):
14503 CASE_FLT_FN (BUILT_IN_EXPM1):
14504 CASE_FLT_FN (BUILT_IN_FLOOR):
14505 CASE_FLT_FN (BUILT_IN_FMOD):
14506 CASE_FLT_FN (BUILT_IN_FREXP):
14507 CASE_FLT_FN (BUILT_IN_LCEIL):
14508 CASE_FLT_FN (BUILT_IN_LDEXP):
14509 CASE_FLT_FN (BUILT_IN_LFLOOR):
14510 CASE_FLT_FN (BUILT_IN_LLCEIL):
14511 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14512 CASE_FLT_FN (BUILT_IN_LLRINT):
14513 CASE_FLT_FN (BUILT_IN_LLROUND):
14514 CASE_FLT_FN (BUILT_IN_LRINT):
14515 CASE_FLT_FN (BUILT_IN_LROUND):
14516 CASE_FLT_FN (BUILT_IN_MODF):
14517 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14518 CASE_FLT_FN (BUILT_IN_RINT):
14519 CASE_FLT_FN (BUILT_IN_ROUND):
14520 CASE_FLT_FN (BUILT_IN_SCALB):
14521 CASE_FLT_FN (BUILT_IN_SCALBLN):
14522 CASE_FLT_FN (BUILT_IN_SCALBN):
14523 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14524 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14525 CASE_FLT_FN (BUILT_IN_SINH):
14526 CASE_FLT_FN (BUILT_IN_TANH):
14527 CASE_FLT_FN (BUILT_IN_TRUNC):
14528 /* True if the 1st argument is nonnegative. */
14529 return tree_expr_nonnegative_warnv_p (arg0,
14530 strict_overflow_p);
14531
14532 CASE_FLT_FN (BUILT_IN_FMAX):
14533 /* True if the 1st OR 2nd arguments are nonnegative. */
14534 return (tree_expr_nonnegative_warnv_p (arg0,
14535 strict_overflow_p)
14536 || (tree_expr_nonnegative_warnv_p (arg1,
14537 strict_overflow_p)));
14538
14539 CASE_FLT_FN (BUILT_IN_FMIN):
14540 /* True if the 1st AND 2nd arguments are nonnegative. */
14541 return (tree_expr_nonnegative_warnv_p (arg0,
14542 strict_overflow_p)
14543 && (tree_expr_nonnegative_warnv_p (arg1,
14544 strict_overflow_p)));
14545
14546 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14547 /* True if the 2nd argument is nonnegative. */
14548 return tree_expr_nonnegative_warnv_p (arg1,
14549 strict_overflow_p);
14550
14551 CASE_FLT_FN (BUILT_IN_POWI):
14552 /* True if the 1st argument is nonnegative or the second
14553 argument is an even integer. */
14554 if (TREE_CODE (arg1) == INTEGER_CST
14555 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14556 return true;
14557 return tree_expr_nonnegative_warnv_p (arg0,
14558 strict_overflow_p);
14559
14560 CASE_FLT_FN (BUILT_IN_POW):
14561 /* True if the 1st argument is nonnegative or the second
14562 argument is an even integer valued real. */
14563 if (TREE_CODE (arg1) == REAL_CST)
14564 {
14565 REAL_VALUE_TYPE c;
14566 HOST_WIDE_INT n;
14567
14568 c = TREE_REAL_CST (arg1);
14569 n = real_to_integer (&c);
14570 if ((n & 1) == 0)
14571 {
14572 REAL_VALUE_TYPE cint;
14573 real_from_integer (&cint, VOIDmode, n,
14574 n < 0 ? -1 : 0, 0);
14575 if (real_identical (&c, &cint))
14576 return true;
14577 }
14578 }
14579 return tree_expr_nonnegative_warnv_p (arg0,
14580 strict_overflow_p);
14581
14582 default:
14583 break;
14584 }
14585 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14586 type);
14587 }
14588
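/* Editorial illustration (not from GCC): the POW rule above.  For an
   even integer exponent, pow (x, n) == pow (-x, n), so the result is
   nonnegative for any finite x regardless of the sign of the first
   argument:

     #include <math.h>
     assert (pow (-3.0, 2.0) == 9.0);
     assert (pow (-2.0, 4.0) == 16.0);  */
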
14589 /* Return true if T is known to be non-negative. If the return
14590 value is based on the assumption that signed overflow is undefined,
14591 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14592 *STRICT_OVERFLOW_P. */
14593
14594 bool
14595 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14596 {
14597 enum tree_code code = TREE_CODE (t);
14598 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14599 return true;
14600
14601 switch (code)
14602 {
14603 case TARGET_EXPR:
14604 {
14605 tree temp = TARGET_EXPR_SLOT (t);
14606 t = TARGET_EXPR_INITIAL (t);
14607
14608 /* If the initializer is non-void, then it's a normal expression
14609 that will be assigned to the slot. */
14610 if (!VOID_TYPE_P (t))
14611 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14612
14613 /* Otherwise, the initializer sets the slot in some way. One common
14614 way is an assignment statement at the end of the initializer. */
14615 while (1)
14616 {
14617 if (TREE_CODE (t) == BIND_EXPR)
14618 t = expr_last (BIND_EXPR_BODY (t));
14619 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14620 || TREE_CODE (t) == TRY_CATCH_EXPR)
14621 t = expr_last (TREE_OPERAND (t, 0));
14622 else if (TREE_CODE (t) == STATEMENT_LIST)
14623 t = expr_last (t);
14624 else
14625 break;
14626 }
14627 if (TREE_CODE (t) == MODIFY_EXPR
14628 && TREE_OPERAND (t, 0) == temp)
14629 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14630 strict_overflow_p);
14631
14632 return false;
14633 }
14634
14635 case CALL_EXPR:
14636 {
14637 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14638 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14639
14640 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14641 get_callee_fndecl (t),
14642 arg0,
14643 arg1,
14644 strict_overflow_p);
14645 }
14646 case COMPOUND_EXPR:
14647 case MODIFY_EXPR:
14648 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14649 strict_overflow_p);
14650 case BIND_EXPR:
14651 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14652 strict_overflow_p);
14653 case SAVE_EXPR:
14654 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14655 strict_overflow_p);
14656
14657 default:
14658 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14659 TREE_TYPE (t));
14660 }
14661
14662 /* We don't know sign of `t', so be conservative and return false. */
14663 return false;
14664 }
14665
14666 /* Return true if T is known to be non-negative. If the return
14667 value is based on the assumption that signed overflow is undefined,
14668 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14669 *STRICT_OVERFLOW_P. */
14670
14671 bool
14672 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14673 {
14674 enum tree_code code;
14675 if (t == error_mark_node)
14676 return false;
14677
14678 code = TREE_CODE (t);
14679 switch (TREE_CODE_CLASS (code))
14680 {
14681 case tcc_binary:
14682 case tcc_comparison:
14683 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14684 TREE_TYPE (t),
14685 TREE_OPERAND (t, 0),
14686 TREE_OPERAND (t, 1),
14687 strict_overflow_p);
14688
14689 case tcc_unary:
14690 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14691 TREE_TYPE (t),
14692 TREE_OPERAND (t, 0),
14693 strict_overflow_p);
14694
14695 case tcc_constant:
14696 case tcc_declaration:
14697 case tcc_reference:
14698 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14699
14700 default:
14701 break;
14702 }
14703
14704 switch (code)
14705 {
14706 case TRUTH_AND_EXPR:
14707 case TRUTH_OR_EXPR:
14708 case TRUTH_XOR_EXPR:
14709 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14710 TREE_TYPE (t),
14711 TREE_OPERAND (t, 0),
14712 TREE_OPERAND (t, 1),
14713 strict_overflow_p);
14714 case TRUTH_NOT_EXPR:
14715 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14716 TREE_TYPE (t),
14717 TREE_OPERAND (t, 0),
14718 strict_overflow_p);
14719
14720 case COND_EXPR:
14721 case CONSTRUCTOR:
14722 case OBJ_TYPE_REF:
14723 case ASSERT_EXPR:
14724 case ADDR_EXPR:
14725 case WITH_SIZE_EXPR:
14726 case SSA_NAME:
14727 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14728
14729 default:
14730 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14731 }
14732 }
14733
14734 /* Return true if `t' is known to be non-negative. Handle warnings
14735 about undefined signed overflow. */
14736
14737 bool
14738 tree_expr_nonnegative_p (tree t)
14739 {
14740 bool ret, strict_overflow_p;
14741
14742 strict_overflow_p = false;
14743 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14744 if (strict_overflow_p)
14745 fold_overflow_warning (("assuming signed overflow does not occur when "
14746 "determining that expression is always "
14747 "non-negative"),
14748 WARN_STRICT_OVERFLOW_MISC);
14749 return ret;
14750 }
14751
14752
14753 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14754 For floating point we further ensure that T is not denormal.
14755 Similar logic is present in nonzero_address in rtlanal.c.
14756
14757 If the return value is based on the assumption that signed overflow
14758 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14759 change *STRICT_OVERFLOW_P. */
14760
14761 bool
14762 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14763 bool *strict_overflow_p)
14764 {
14765 switch (code)
14766 {
14767 case ABS_EXPR:
14768 return tree_expr_nonzero_warnv_p (op0,
14769 strict_overflow_p);
14770
14771 case NOP_EXPR:
14772 {
14773 tree inner_type = TREE_TYPE (op0);
14774 tree outer_type = type;
14775
14776 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14777 && tree_expr_nonzero_warnv_p (op0,
14778 strict_overflow_p));
14779 }
14780 break;
14781
14782 case NON_LVALUE_EXPR:
14783 return tree_expr_nonzero_warnv_p (op0,
14784 strict_overflow_p);
14785
14786 default:
14787 break;
14788 }
14789
14790 return false;
14791 }
14792
14793 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14794 For floating point we further ensure that T is not denormal.
14795 Similar logic is present in nonzero_address in rtlanal.c.
14796
14797 If the return value is based on the assumption that signed overflow
14798 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14799 change *STRICT_OVERFLOW_P. */
14800
14801 bool
14802 tree_binary_nonzero_warnv_p (enum tree_code code,
14803 tree type,
14804 tree op0,
14805 tree op1, bool *strict_overflow_p)
14806 {
14807 bool sub_strict_overflow_p;
14808 switch (code)
14809 {
14810 case POINTER_PLUS_EXPR:
14811 case PLUS_EXPR:
14812 if (TYPE_OVERFLOW_UNDEFINED (type))
14813 {
14814 /* In the presence of negative values it is hard
14815 to say anything definite. */
14816 sub_strict_overflow_p = false;
14817 if (!tree_expr_nonnegative_warnv_p (op0,
14818 &sub_strict_overflow_p)
14819 || !tree_expr_nonnegative_warnv_p (op1,
14820 &sub_strict_overflow_p))
14821 return false;
14822 /* One of the operands must be positive and the other non-negative. */
14823 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14824 overflows, on a twos-complement machine the sum of two
14825 nonnegative numbers can never be zero. */
14826 return (tree_expr_nonzero_warnv_p (op0,
14827 strict_overflow_p)
14828 || tree_expr_nonzero_warnv_p (op1,
14829 strict_overflow_p));
14830 }
14831 break;
14832
14833 case MULT_EXPR:
14834 if (TYPE_OVERFLOW_UNDEFINED (type))
14835 {
14836 if (tree_expr_nonzero_warnv_p (op0,
14837 strict_overflow_p)
14838 && tree_expr_nonzero_warnv_p (op1,
14839 strict_overflow_p))
14840 {
14841 *strict_overflow_p = true;
14842 return true;
14843 }
14844 }
14845 break;
14846
14847 case MIN_EXPR:
14848 sub_strict_overflow_p = false;
14849 if (tree_expr_nonzero_warnv_p (op0,
14850 &sub_strict_overflow_p)
14851 && tree_expr_nonzero_warnv_p (op1,
14852 &sub_strict_overflow_p))
14853 {
14854 if (sub_strict_overflow_p)
14855 *strict_overflow_p = true;
14856 }
14857 break;
14858
14859 case MAX_EXPR:
14860 sub_strict_overflow_p = false;
14861 if (tree_expr_nonzero_warnv_p (op0,
14862 &sub_strict_overflow_p))
14863 {
14864 if (sub_strict_overflow_p)
14865 *strict_overflow_p = true;
14866
14867 /* If both operands are nonzero, then MAX must be too. */
14868 if (tree_expr_nonzero_warnv_p (op1,
14869 strict_overflow_p))
14870 return true;
14871
14872 /* MAX where operand 0 is positive is positive. */
14873 return tree_expr_nonnegative_warnv_p (op0,
14874 strict_overflow_p);
14875 }
14876 /* MAX where operand 1 is positive is positive. */
14877 else if (tree_expr_nonzero_warnv_p (op1,
14878 &sub_strict_overflow_p)
14879 && tree_expr_nonnegative_warnv_p (op1,
14880 &sub_strict_overflow_p))
14881 {
14882 if (sub_strict_overflow_p)
14883 *strict_overflow_p = true;
14884 return true;
14885 }
14886 break;
14887
14888 case BIT_IOR_EXPR:
14889 return (tree_expr_nonzero_warnv_p (op1,
14890 strict_overflow_p)
14891 || tree_expr_nonzero_warnv_p (op0,
14892 strict_overflow_p));
14893
14894 default:
14895 break;
14896 }
14897
14898 return false;
14899 }
14900
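/* Editorial illustration (not from GCC): the two's-complement argument
   in the PLUS_EXPR case above.  With both 32-bit operands nonnegative,
   i.e. at most INT_MAX = 0x7fffffff, the mathematical sum is at most
   0xfffffffe, so even a wrapped sum is zero only when both operands
   are:

     unsigned a = 0x7fffffffu, b = 0x7fffffffu;
     assert (a + b == 0xfffffffeu);
     assert (a + b != 0u);  */
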
14901 /* Return true when T is an address and is known to be nonzero.
14902 For floating point we further ensure that T is not denormal.
14903 Similar logic is present in nonzero_address in rtlanal.c.
14904
14905 If the return value is based on the assumption that signed overflow
14906 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14907 change *STRICT_OVERFLOW_P. */
14908
14909 bool
14910 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14911 {
14912 bool sub_strict_overflow_p;
14913 switch (TREE_CODE (t))
14914 {
14915 case INTEGER_CST:
14916 return !integer_zerop (t);
14917
14918 case ADDR_EXPR:
14919 {
14920 tree base = get_base_address (TREE_OPERAND (t, 0));
14921
14922 if (!base)
14923 return false;
14924
14925 /* Weak declarations may link to NULL. Other things may also be NULL,
14926 so protect with -fdelete-null-pointer-checks; variables allocated
14927 on the stack, however, are never NULL. */
14928 if (DECL_P (base)
14929 && (flag_delete_null_pointer_checks
14930 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
14931 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
14932
14933 /* Constants are never weak. */
14934 if (CONSTANT_CLASS_P (base))
14935 return true;
14936
14937 return false;
14938 }
14939
14940 case COND_EXPR:
14941 sub_strict_overflow_p = false;
14942 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14943 &sub_strict_overflow_p)
14944 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14945 &sub_strict_overflow_p))
14946 {
14947 if (sub_strict_overflow_p)
14948 *strict_overflow_p = true;
14949 return true;
14950 }
14951 break;
14952
14953 default:
14954 break;
14955 }
14956 return false;
14957 }
14958
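/* Editorial illustration (not from GCC): why DECL_WEAK defeats the
   ADDR_EXPR case above.  A weak declaration may resolve to no
   definition at all, making its address compare equal to null:

     extern int maybe_missing __attribute__ ((weak));

     int
     have_it (void)
     {
       return &maybe_missing != 0;    NOT foldable to 1
     }
   */
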
14959 /* Return true when T is an address and is known to be nonzero.
14960 For floating point we further ensure that T is not denormal.
14961 Similar logic is present in nonzero_address in rtlanal.c.
14962
14963 If the return value is based on the assumption that signed overflow
14964 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14965 change *STRICT_OVERFLOW_P. */
14966
14967 bool
14968 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14969 {
14970 tree type = TREE_TYPE (t);
14971 enum tree_code code;
14972
14973 /* Doing something useful for floating point would need more work. */
14974 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14975 return false;
14976
14977 code = TREE_CODE (t);
14978 switch (TREE_CODE_CLASS (code))
14979 {
14980 case tcc_unary:
14981 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14982 strict_overflow_p);
14983 case tcc_binary:
14984 case tcc_comparison:
14985 return tree_binary_nonzero_warnv_p (code, type,
14986 TREE_OPERAND (t, 0),
14987 TREE_OPERAND (t, 1),
14988 strict_overflow_p);
14989 case tcc_constant:
14990 case tcc_declaration:
14991 case tcc_reference:
14992 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14993
14994 default:
14995 break;
14996 }
14997
14998 switch (code)
14999 {
15000 case TRUTH_NOT_EXPR:
15001 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15002 strict_overflow_p);
15003
15004 case TRUTH_AND_EXPR:
15005 case TRUTH_OR_EXPR:
15006 case TRUTH_XOR_EXPR:
15007 return tree_binary_nonzero_warnv_p (code, type,
15008 TREE_OPERAND (t, 0),
15009 TREE_OPERAND (t, 1),
15010 strict_overflow_p);
15011
15012 case COND_EXPR:
15013 case CONSTRUCTOR:
15014 case OBJ_TYPE_REF:
15015 case ASSERT_EXPR:
15016 case ADDR_EXPR:
15017 case WITH_SIZE_EXPR:
15018 case SSA_NAME:
15019 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15020
15021 case COMPOUND_EXPR:
15022 case MODIFY_EXPR:
15023 case BIND_EXPR:
15024 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15025 strict_overflow_p);
15026
15027 case SAVE_EXPR:
15028 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15029 strict_overflow_p);
15030
15031 case CALL_EXPR:
15032 return alloca_call_p (t);
15033
15034 default:
15035 break;
15036 }
15037 return false;
15038 }
15039
15040 /* Return true when T is an address and is known to be nonzero.
15041 Handle warnings about undefined signed overflow. */
15042
15043 bool
15044 tree_expr_nonzero_p (tree t)
15045 {
15046 bool ret, strict_overflow_p;
15047
15048 strict_overflow_p = false;
15049 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15050 if (strict_overflow_p)
15051 fold_overflow_warning (("assuming signed overflow does not occur when "
15052 "determining that expression is always "
15053 "non-zero"),
15054 WARN_STRICT_OVERFLOW_MISC);
15055 return ret;
15056 }
15057
15058 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15059 attempt to fold the expression to a constant without modifying TYPE,
15060 OP0 or OP1.
15061
15062 If the expression can be simplified to a constant, return the
15063 constant. If the expression cannot be simplified to a constant,
15064 return NULL_TREE. */
15065
15066 tree
15067 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15068 {
15069 tree tem = fold_binary (code, type, op0, op1);
15070 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15071 }
15072
15073 /* Given the components of a unary expression CODE, TYPE and OP0,
15074 attempt to fold the expression to a constant without modifying
15075 TYPE or OP0.
15076
15077 If the expression can be simplified to a constant, return the
15078 constant. If the expression cannot be simplified to a constant,
15079 return NULL_TREE. */
15080
15081 tree
15082 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15083 {
15084 tree tem = fold_unary (code, type, op0);
15085 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15086 }
15087
15088 /* If EXP represents referencing an element in a constant string
15089 (either via pointer arithmetic or array indexing), return the
15090 tree representing the value accessed, otherwise return NULL. */
15091
15092 tree
15093 fold_read_from_constant_string (tree exp)
15094 {
15095 if ((TREE_CODE (exp) == INDIRECT_REF
15096 || TREE_CODE (exp) == ARRAY_REF)
15097 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15098 {
15099 tree exp1 = TREE_OPERAND (exp, 0);
15100 tree index;
15101 tree string;
15102 location_t loc = EXPR_LOCATION (exp);
15103
15104 if (TREE_CODE (exp) == INDIRECT_REF)
15105 string = string_constant (exp1, &index);
15106 else
15107 {
15108 tree low_bound = array_ref_low_bound (exp);
15109 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15110
15111 /* Optimize the special-case of a zero lower bound.
15112
15113 We convert the low_bound to sizetype to avoid some problems
15114 with constant folding. (E.g. suppose the lower bound is 1,
15115 and its mode is QI. Without the conversion, (ARRAY
15116 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15117 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15118 if (! integer_zerop (low_bound))
15119 index = size_diffop_loc (loc, index,
15120 fold_convert_loc (loc, sizetype, low_bound));
15121
15122 string = exp1;
15123 }
15124
15125 if (string
15126 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15127 && TREE_CODE (string) == STRING_CST
15128 && TREE_CODE (index) == INTEGER_CST
15129 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15130 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15131 == MODE_INT)
15132 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15133 return build_int_cst_type (TREE_TYPE (exp),
15134 (TREE_STRING_POINTER (string)
15135 [TREE_INT_CST_LOW (index)]));
15136 }
15137 return NULL;
15138 }
15139
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

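/* For example, negating the INTEGER_CST 7 of a signed type yields -7.
   Negating the most negative value of a signed type wraps back to
   itself in the two's-complement double-int representation;
   force_fit_type_double then sets TREE_OVERFLOW on the result, which
   is how callers detect the overflow.  */
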
static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

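/* For example, fold_abs_const on the INTEGER_CST -5 yields 5, and on
   a negative REAL_CST it simply clears the sign.  As with negation,
   the absolute value of the most negative value of a signed type is
   not representable, and the result carries TREE_OVERFLOW.  */
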
tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

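/* For example, fold_not_const on the INTEGER_CST 0 yields the
   all-ones constant of TYPE (-1 for a signed type), since both
   halves of the double-int representation are complemented.  */
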
static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}

/* Given CODE, a relational operator, the target type TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile-time
   constant, then return NULL_TREE.  */

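/* For example, LT_EXPR on the INTEGER_CSTs 1 and 2 folds to true.
   With a NaN operand, EQ_EXPR folds to false and UNORDERED_EXPR to
   true, while LT_EXPR is left unfolded (NULL_TREE) when
   -ftrapping-math is in effect, because folding would lose the
   invalid-operand exception.  */
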
static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

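/* For example, a full expression with side effects, such as a call
   that creates temporaries, gets wrapped as
   CLEANUP_POINT_EXPR <CALL_EXPR <...>> so that pending cleanups run
   at the end of the full expression, whereas an expression with no
   side effects is returned unchanged.  */
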
tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to
     wrap it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the MODIFY_EXPR inside the
     return, has side effects.  If either of them does not, we don't
     need to wrap the expression in a cleanup point expression.  Note
     that we don't check the left-hand side of the MODIFY_EXPR because
     it should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

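/* The cases handled below mirror common C idioms.  For example, given

     float f[4];

   the dereference *(float *)&f folds to f[0], and *&f[2] folds
   directly to f[2], avoiding a round trip through a pointer.  */
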
tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	  SET_EXPR_LOCATION (op0, loc);
	  return op0;
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF <vectorfoo, ...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  /* ((foo *)&vectorfoo)[1] => BIT_FIELD_REF <vectorfoo, ...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);

	  /* Only fold when the element index lies strictly within the
	     vector; a non-strict bound would allow a read one element
	     past the end.  */
	  if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
	    return fold_build3_loc (loc,
				    BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
				    part_width, index);
	}
    }

  /* ((foo *)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1_loc (loc, IMAGPART_EXPR, type,
				    TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

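/* For example, if the value of F () + X is ignored, the addition and
   the read of X can be dropped, leaving just the call F ().
   Likewise, a COND_EXPR whose arms are side-effect free reduces to
   its condition.  */
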
tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

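/* For example, rounding the constant 13 up to a multiple of 8 yields
   16.  For a power-of-two DIVISOR the non-constant case is emitted as
   (VALUE + DIVISOR - 1) & -DIVISOR, with no division at all.  */
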
tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case this check is more expensive than
     simply performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

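/* For example, rounding 13 down to a multiple of 8 yields 8; for a
   power-of-two DIVISOR this is just VALUE & -DIVISOR.  */
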
tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case this check is more expensive than
     simply performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Return a pointer to the base of the object addressed by EXP,
   extracting the offset of the access into *PBITPOS (the constant
   bit offset) and *POFFSET (any remaining variable offset).  */

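/* For example, for EXP == &s.f this returns &s with *PBITPOS set to
   the bit offset of the field F; for a pointer that is not an
   ADDR_EXPR it returns EXP itself with a zero offset.  */
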
static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if the addresses E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

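/* For example, for E1 == &a[3] and E2 == &a[1], with A an array of
   4-byte elements, *DIFF is set to 8 and the function returns true.
   If either address involves a variable offset the other lacks, no
   constant difference exists and false is returned.  */
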
bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

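/* For example, when only the magnitude of the result matters,
   -X * Y can be simplified to X * Y, and, since sin is odd,
   sin (-X) to sin (X): the discarded sign would only have flipped
   the sign of the final result.  */
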
tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip the copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}