/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
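
/* Illustrative example (not part of the algorithm itself): with 8-bit
   signed values, a = 100 and b = 100 wrap to sum = -56.  Then ~(a ^ b)
   has the sign bit set (the operands agree in sign) and (a ^ sum) has
   the sign bit set (the sum disagrees), so OVERFLOW_SUM_SIGN (a, b, sum)
   is nonzero, correctly flagging the overflow.  */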
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two; that
     does the right thing for POINTER_PLUS_EXPR, where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
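
/* For example, with CODE == TRUNC_DIV_EXPR, ARG1 == 12 and ARG2 == 4 the
   function above returns the constant 3, while ARG1 == 13 leaves a nonzero
   remainder and therefore yields NULL_TREE.  */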
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to give only one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
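
/* A minimal usage sketch (illustrative; EXPR, STMT and the "result used"
   test are hypothetical stand-ins, not code from this file):

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     fold_undefer_overflow_warnings (result_is_used_p (tem), stmt, 0);

   Passing 0 as CODE keeps whatever level was recorded with the deferred
   warning, so the warning fires only when the folded result is kept.  */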
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
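
/* For example, sin is odd, so -sin (x) can be folded to sin (-x), saving
   the explicit negation.  The rint group is odd only under the default
   round-to-nearest mode; with a directed rounding mode (e.g. toward
   +infinity) rounding does not commute with negation, hence the
   flag_rounding_math check above.  */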

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
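
/* Concretely: for a 32-bit signed type the only constant this rejects is
   INT_MIN (0x80000000), whose two's complement negation is INT_MIN again;
   every other value, including INT_MAX, negates without overflow.  */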

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate (double)float as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate f(x) as f(-x) for odd f.  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
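
/* The RSHIFT_EXPR case deserves a worked example: with 32-bit int,
   (int)x >> 31 evaluates to 0 or -1 depending on the sign bit, so its
   negation is 0 or 1, which is exactly what the logical shift
   (unsigned)x >> 31 produces.  */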

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate f(x) as f(-x) for odd f.  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
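
/* The BIT_NOT_EXPR case above is the two's complement identity
   -X == ~X + 1 read in reverse: substituting X = ~A gives
   - (~A) == ~(~A) + 1 == A + 1, which is why the negation of ~A
   folds to A + 1.  */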

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, in which case we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
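
/* For example, splitting IN = x - 4 with CODE == PLUS_EXPR and
   NEGATE_P == 0 returns the variable part x and stores the subtracted
   literal 4 in *MINUS_LITP, leaving *LITP and *CONP null; the
   re-association helper below can then recombine the pieces.  */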

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res.low, res.high, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
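
/* For example, adding the INTEGER_CST 1 to INT_MAX in a 32-bit signed type
   wraps the double_int result to INT_MIN and sets TREE_OVERFLOW on the
   returned constant, so callers can tell the arithmetic was not exact for
   the type.  */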

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
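
/* A worked instance of the straightforward complex division above
   (illustrative): for (1 + 2i) / (3 + 4i) we get t = 3*3 + 4*4 = 25,
   tr = (1*3 + 2*4) / 25 = 11/25 and ti = (2*3 - 1*4) / 25 = 2/25,
   i.e. 0.44 + 0.08i, matching (1 + 2i) * (3 - 4i) / 25.  */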

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
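
/* For example, with constant sizetype operands ARG0 == 2 and ARG1 == 5,
   the unsigned subtraction 2 - 5 would wrap to a huge value, so the code
   above instead computes 5 - 2 == 3 in sizetype, converts to ssizetype,
   and returns 0 - 3 == -3.  */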
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val.low, val.high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}
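
/* So, for a 32-bit signed target type, converting the REAL_CST 1e30
   saturates to INT_MAX, -1e30 saturates to INT_MIN, and a NaN becomes 0;
   in each case TREE_OVERFLOW is set on the result to record that the
   conversion was out of range.  */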

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
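
/* For instance, converting the signed fixed-point value -2.5: the
   arithmetic right shift by fbit floors it to -3, shifting back gives
   -3.0 != -2.5, so the fractional bits were nonzero and 1 is added,
   producing -2, i.e. the value rounded toward zero.  */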

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent infinities.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1856
1857 /* Convert expression ARG to type TYPE. Used by the middle-end for
1858 simple conversions in preference to calling the front-end's convert. */
1859
1860 tree
1861 fold_convert_loc (location_t loc, tree type, tree arg)
1862 {
1863 tree orig = TREE_TYPE (arg);
1864 tree tem;
1865
1866 if (type == orig)
1867 return arg;
1868
1869 if (TREE_CODE (arg) == ERROR_MARK
1870 || TREE_CODE (type) == ERROR_MARK
1871 || TREE_CODE (orig) == ERROR_MARK)
1872 return error_mark_node;
1873
1874 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1875 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1876
1877 switch (TREE_CODE (type))
1878 {
1879 case POINTER_TYPE:
1880 case REFERENCE_TYPE:
1881 /* Handle conversions between pointers to different address spaces. */
1882 if (POINTER_TYPE_P (orig)
1883 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1884 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1885 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1886 /* fall through */
1887
1888 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1889 case OFFSET_TYPE:
1890 if (TREE_CODE (arg) == INTEGER_CST)
1891 {
1892 tem = fold_convert_const (NOP_EXPR, type, arg);
1893 if (tem != NULL_TREE)
1894 return tem;
1895 }
1896 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1897 || TREE_CODE (orig) == OFFSET_TYPE)
1898 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1899 if (TREE_CODE (orig) == COMPLEX_TYPE)
1900 return fold_convert_loc (loc, type,
1901 fold_build1_loc (loc, REALPART_EXPR,
1902 TREE_TYPE (orig), arg));
1903 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1904 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1905 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1906
1907 case REAL_TYPE:
1908 if (TREE_CODE (arg) == INTEGER_CST)
1909 {
1910 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 return tem;
1913 }
1914 else if (TREE_CODE (arg) == REAL_CST)
1915 {
1916 tem = fold_convert_const (NOP_EXPR, type, arg);
1917 if (tem != NULL_TREE)
1918 return tem;
1919 }
1920 else if (TREE_CODE (arg) == FIXED_CST)
1921 {
1922 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1923 if (tem != NULL_TREE)
1924 return tem;
1925 }
1926
1927 switch (TREE_CODE (orig))
1928 {
1929 case INTEGER_TYPE:
1930 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1931 case POINTER_TYPE: case REFERENCE_TYPE:
1932 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1933
1934 case REAL_TYPE:
1935 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1936
1937 case FIXED_POINT_TYPE:
1938 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1939
1940 case COMPLEX_TYPE:
1941 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1942 return fold_convert_loc (loc, type, tem);
1943
1944 default:
1945 gcc_unreachable ();
1946 }
1947
1948 case FIXED_POINT_TYPE:
1949 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1950 || TREE_CODE (arg) == REAL_CST)
1951 {
1952 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1953 if (tem != NULL_TREE)
1954 goto fold_convert_exit;
1955 }
1956
1957 switch (TREE_CODE (orig))
1958 {
1959 case FIXED_POINT_TYPE:
1960 case INTEGER_TYPE:
1961 case ENUMERAL_TYPE:
1962 case BOOLEAN_TYPE:
1963 case REAL_TYPE:
1964 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1965
1966 case COMPLEX_TYPE:
1967 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1968 return fold_convert_loc (loc, type, tem);
1969
1970 default:
1971 gcc_unreachable ();
1972 }
1973
1974 case COMPLEX_TYPE:
1975 switch (TREE_CODE (orig))
1976 {
1977 case INTEGER_TYPE:
1978 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1979 case POINTER_TYPE: case REFERENCE_TYPE:
1980 case REAL_TYPE:
1981 case FIXED_POINT_TYPE:
1982 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1983 fold_convert_loc (loc, TREE_TYPE (type), arg),
1984 fold_convert_loc (loc, TREE_TYPE (type),
1985 integer_zero_node));
1986 case COMPLEX_TYPE:
1987 {
1988 tree rpart, ipart;
1989
1990 if (TREE_CODE (arg) == COMPLEX_EXPR)
1991 {
1992 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1993 TREE_OPERAND (arg, 0));
1994 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1995 TREE_OPERAND (arg, 1));
1996 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1997 }
1998
1999 arg = save_expr (arg);
2000 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2001 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2002 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2003 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2004 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2005 }
2006
2007 default:
2008 gcc_unreachable ();
2009 }
2010
2011 case VECTOR_TYPE:
2012 if (integer_zerop (arg))
2013 return build_zero_vector (type);
2014 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2015 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2016 || TREE_CODE (orig) == VECTOR_TYPE);
2017 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2018
2019 case VOID_TYPE:
2020 tem = fold_ignored_result (arg);
2021 if (TREE_CODE (tem) == MODIFY_EXPR)
2022 goto fold_convert_exit;
2023 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2024
2025 default:
2026 gcc_unreachable ();
2027 }
2028 fold_convert_exit:
2029 protected_set_expr_location (tem, loc);
2030 return tem;
2031 }
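
/* Illustrative example (a sketch, not compiled): converting an integer
   expression X to complex_double_type_node takes the COMPLEX_TYPE arm
   above and yields

     COMPLEX_EXPR <(double) X, 0.0>

   that is, the same tree as
   fold_build2_loc (loc, COMPLEX_EXPR, complex_double_type_node,
                    fold_convert_loc (loc, double_type_node, x),
                    build_real (double_type_node, dconst0)).  */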
2032 \f
2033 /* Return false if expr can be assumed not to be an lvalue, true
2034 otherwise. */
2035
2036 static bool
2037 maybe_lvalue_p (const_tree x)
2038 {
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2041 {
2042 case VAR_DECL:
2043 case PARM_DECL:
2044 case RESULT_DECL:
2045 case LABEL_DECL:
2046 case FUNCTION_DECL:
2047 case SSA_NAME:
2048
2049 case COMPONENT_REF:
2050 case INDIRECT_REF:
2051 case ALIGN_INDIRECT_REF:
2052 case MISALIGNED_INDIRECT_REF:
2053 case ARRAY_REF:
2054 case ARRAY_RANGE_REF:
2055 case BIT_FIELD_REF:
2056 case OBJ_TYPE_REF:
2057
2058 case REALPART_EXPR:
2059 case IMAGPART_EXPR:
2060 case PREINCREMENT_EXPR:
2061 case PREDECREMENT_EXPR:
2062 case SAVE_EXPR:
2063 case TRY_CATCH_EXPR:
2064 case WITH_CLEANUP_EXPR:
2065 case COMPOUND_EXPR:
2066 case MODIFY_EXPR:
2067 case TARGET_EXPR:
2068 case COND_EXPR:
2069 case BIND_EXPR:
2070 break;
2071
2072 default:
2073 /* Assume the worst for front-end tree codes. */
2074 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2075 break;
2076 return false;
2077 }
2078
2079 return true;
2080 }
2081
2082 /* Return an expr equal to X but certainly not valid as an lvalue. */
2083
2084 tree
2085 non_lvalue_loc (location_t loc, tree x)
2086 {
2087 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2088 us. */
2089 if (in_gimple_form)
2090 return x;
2091
2092 if (! maybe_lvalue_p (x))
2093 return x;
2094 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2095 SET_EXPR_LOCATION (x, loc);
2096 return x;
2097 }
2098
2099 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2100 Zero means allow extended lvalues. */
2101
2102 int pedantic_lvalues;
2103
2104 /* When pedantic, return an expr equal to X but certainly not valid as a
2105 pedantic lvalue. Otherwise, return X. */
2106
2107 static tree
2108 pedantic_non_lvalue_loc (location_t loc, tree x)
2109 {
2110 if (pedantic_lvalues)
2111 return non_lvalue_loc (loc, x);
2112 protected_set_expr_location (x, loc);
2113 return x;
2114 }
2115 \f
2116 /* Given a tree comparison code, return the code that is the logical inverse
2117 of the given code. It is not safe to do this for floating-point
2118 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2119 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2120
2121 enum tree_code
2122 invert_tree_comparison (enum tree_code code, bool honor_nans)
2123 {
2124 if (honor_nans && flag_trapping_math)
2125 return ERROR_MARK;
2126
2127 switch (code)
2128 {
2129 case EQ_EXPR:
2130 return NE_EXPR;
2131 case NE_EXPR:
2132 return EQ_EXPR;
2133 case GT_EXPR:
2134 return honor_nans ? UNLE_EXPR : LE_EXPR;
2135 case GE_EXPR:
2136 return honor_nans ? UNLT_EXPR : LT_EXPR;
2137 case LT_EXPR:
2138 return honor_nans ? UNGE_EXPR : GE_EXPR;
2139 case LE_EXPR:
2140 return honor_nans ? UNGT_EXPR : GT_EXPR;
2141 case LTGT_EXPR:
2142 return UNEQ_EXPR;
2143 case UNEQ_EXPR:
2144 return LTGT_EXPR;
2145 case UNGT_EXPR:
2146 return LE_EXPR;
2147 case UNGE_EXPR:
2148 return LT_EXPR;
2149 case UNLT_EXPR:
2150 return GE_EXPR;
2151 case UNLE_EXPR:
2152 return GT_EXPR;
2153 case ORDERED_EXPR:
2154 return UNORDERED_EXPR;
2155 case UNORDERED_EXPR:
2156 return ORDERED_EXPR;
2157 default:
2158 gcc_unreachable ();
2159 }
2160 }
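
/* Example of the NaN handling above (illustrative): with NaNs honored
   (and -fno-trapping-math, so we do not bail out with ERROR_MARK),
   invert_tree_comparison (LT_EXPR, true) yields UNGE_EXPR, since
   !(x < y) must remain true when either operand is a NaN; with
   HONOR_NANS false it yields the plain GE_EXPR.  */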
2161
2162 /* Similar, but return the comparison that results if the operands are
2163 swapped. This is safe for floating-point. */
2164
2165 enum tree_code
2166 swap_tree_comparison (enum tree_code code)
2167 {
2168 switch (code)
2169 {
2170 case EQ_EXPR:
2171 case NE_EXPR:
2172 case ORDERED_EXPR:
2173 case UNORDERED_EXPR:
2174 case LTGT_EXPR:
2175 case UNEQ_EXPR:
2176 return code;
2177 case GT_EXPR:
2178 return LT_EXPR;
2179 case GE_EXPR:
2180 return LE_EXPR;
2181 case LT_EXPR:
2182 return GT_EXPR;
2183 case LE_EXPR:
2184 return GE_EXPR;
2185 case UNGT_EXPR:
2186 return UNLT_EXPR;
2187 case UNGE_EXPR:
2188 return UNLE_EXPR;
2189 case UNLT_EXPR:
2190 return UNGT_EXPR;
2191 case UNLE_EXPR:
2192 return UNGE_EXPR;
2193 default:
2194 gcc_unreachable ();
2195 }
2196 }
2197
2198
2199 /* Convert a comparison tree code from an enum tree_code representation
2200 into a compcode bit-based encoding. This function is the inverse of
2201 compcode_to_comparison. */
2202
2203 static enum comparison_code
2204 comparison_to_compcode (enum tree_code code)
2205 {
2206 switch (code)
2207 {
2208 case LT_EXPR:
2209 return COMPCODE_LT;
2210 case EQ_EXPR:
2211 return COMPCODE_EQ;
2212 case LE_EXPR:
2213 return COMPCODE_LE;
2214 case GT_EXPR:
2215 return COMPCODE_GT;
2216 case NE_EXPR:
2217 return COMPCODE_NE;
2218 case GE_EXPR:
2219 return COMPCODE_GE;
2220 case ORDERED_EXPR:
2221 return COMPCODE_ORD;
2222 case UNORDERED_EXPR:
2223 return COMPCODE_UNORD;
2224 case UNLT_EXPR:
2225 return COMPCODE_UNLT;
2226 case UNEQ_EXPR:
2227 return COMPCODE_UNEQ;
2228 case UNLE_EXPR:
2229 return COMPCODE_UNLE;
2230 case UNGT_EXPR:
2231 return COMPCODE_UNGT;
2232 case LTGT_EXPR:
2233 return COMPCODE_LTGT;
2234 case UNGE_EXPR:
2235 return COMPCODE_UNGE;
2236 default:
2237 gcc_unreachable ();
2238 }
2239 }
2240
2241 /* Convert a compcode bit-based encoding of a comparison operator back
2242 to GCC's enum tree_code representation. This function is the
2243 inverse of comparison_to_compcode. */
2244
2245 static enum tree_code
2246 compcode_to_comparison (enum comparison_code code)
2247 {
2248 switch (code)
2249 {
2250 case COMPCODE_LT:
2251 return LT_EXPR;
2252 case COMPCODE_EQ:
2253 return EQ_EXPR;
2254 case COMPCODE_LE:
2255 return LE_EXPR;
2256 case COMPCODE_GT:
2257 return GT_EXPR;
2258 case COMPCODE_NE:
2259 return NE_EXPR;
2260 case COMPCODE_GE:
2261 return GE_EXPR;
2262 case COMPCODE_ORD:
2263 return ORDERED_EXPR;
2264 case COMPCODE_UNORD:
2265 return UNORDERED_EXPR;
2266 case COMPCODE_UNLT:
2267 return UNLT_EXPR;
2268 case COMPCODE_UNEQ:
2269 return UNEQ_EXPR;
2270 case COMPCODE_UNLE:
2271 return UNLE_EXPR;
2272 case COMPCODE_UNGT:
2273 return UNGT_EXPR;
2274 case COMPCODE_LTGT:
2275 return LTGT_EXPR;
2276 case COMPCODE_UNGE:
2277 return UNGE_EXPR;
2278 default:
2279 gcc_unreachable ();
2280 }
2281 }
2282
2283 /* Return a tree for the comparison which is the combination of
2284 doing the AND or OR (depending on CODE) of the two operations LCODE
2285 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2286 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2287 if this makes the transformation invalid. */
2288
2289 tree
2290 combine_comparisons (location_t loc,
2291 enum tree_code code, enum tree_code lcode,
2292 enum tree_code rcode, tree truth_type,
2293 tree ll_arg, tree lr_arg)
2294 {
2295 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2296 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2297 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2298 int compcode;
2299
2300 switch (code)
2301 {
2302 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2303 compcode = lcompcode & rcompcode;
2304 break;
2305
2306 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2307 compcode = lcompcode | rcompcode;
2308 break;
2309
2310 default:
2311 return NULL_TREE;
2312 }
2313
2314 if (!honor_nans)
2315 {
2316 /* Eliminate unordered comparisons, as well as LTGT and ORD
2317 which are not used unless the mode has NaNs. */
2318 compcode &= ~COMPCODE_UNORD;
2319 if (compcode == COMPCODE_LTGT)
2320 compcode = COMPCODE_NE;
2321 else if (compcode == COMPCODE_ORD)
2322 compcode = COMPCODE_TRUE;
2323 }
2324 else if (flag_trapping_math)
2325 {
2326 /* Check that the original operation and the optimized ones will trap
2327 under the same condition. */
2328 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2329 && (lcompcode != COMPCODE_EQ)
2330 && (lcompcode != COMPCODE_ORD);
2331 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2332 && (rcompcode != COMPCODE_EQ)
2333 && (rcompcode != COMPCODE_ORD);
2334 bool trap = (compcode & COMPCODE_UNORD) == 0
2335 && (compcode != COMPCODE_EQ)
2336 && (compcode != COMPCODE_ORD);
2337
2338 /* In a short-circuited boolean expression the LHS might be
2339 such that the RHS, if evaluated, will never trap. For
2340 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2341 if neither x nor y is NaN. (This is a mixed blessing: for
2342 example, the expression above will never trap, hence
2343 optimizing it to x < y would be invalid). */
2344 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2345 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2346 rtrap = false;
2347
2348 /* If the comparison was short-circuited, and only the RHS
2349 trapped, we may now generate a spurious trap. */
2350 if (rtrap && !ltrap
2351 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2352 return NULL_TREE;
2353
2354 /* If we changed the conditions that cause a trap, we lose. */
2355 if ((ltrap || rtrap) != trap)
2356 return NULL_TREE;
2357 }
2358
2359 if (compcode == COMPCODE_TRUE)
2360 return constant_boolean_node (true, truth_type);
2361 else if (compcode == COMPCODE_FALSE)
2362 return constant_boolean_node (false, truth_type);
2363 else
2364 {
2365 enum tree_code tcode;
2366
2367 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2368 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2369 }
2370 }
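
/* Worked example (illustrative): for (x < y) && (x == y) the AND of
   the two encodings is COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE,
   so the conjunction folds to constant false; for (x < y) || (x == y)
   the OR is COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and the result
   is the single comparison x <= y, modulo the NaN and trapping-math
   checks above.  */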
2371 \f
2372 /* Return nonzero if two operands (typically of the same tree node)
2373 are necessarily equal. If either argument has side-effects this
2374 function returns zero. FLAGS modifies behavior as follows:
2375
2376 If OEP_ONLY_CONST is set, only return nonzero for constants.
2377 This function tests whether the operands are indistinguishable;
2378 it does not test whether they are equal using C's == operation.
2379 The distinction is important for IEEE floating point, because
2380 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2381 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2382
2383 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2384 even though it may hold multiple values during a function.
2385 This is because a GCC tree node guarantees that nothing else is
2386 executed between the evaluation of its "operands" (which may often
2387 be evaluated in arbitrary order). Hence if the operands themselves
2388 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2389 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2390 unset means assuming isochronic (or instantaneous) tree equivalence.
2391 Unless comparing arbitrary expression trees, such as from different
2392 statements, this flag can usually be left unset.
2393
2394 If OEP_PURE_SAME is set, then pure functions with identical arguments
2395 are considered the same. It is used when the caller has other ways
2396 to ensure that global memory is unchanged in between. */
2397
2398 int
2399 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2400 {
2401 /* If either is ERROR_MARK, they aren't equal. */
2402 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2403 || TREE_TYPE (arg0) == error_mark_node
2404 || TREE_TYPE (arg1) == error_mark_node)
2405 return 0;
2406
2407 /* Similarly, if either does not have a type (like a released SSA name),
2408 they aren't equal. */
2409 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2410 return 0;
2411
2412 /* Check equality of integer constants before bailing out due to
2413 precision differences. */
2414 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2415 return tree_int_cst_equal (arg0, arg1);
2416
2417 /* If the two types don't have the same signedness, then we can't consider
2418 them equal. We must check this before the STRIP_NOPS calls
2419 because they may change the signedness of the arguments. As pointers
2420 strictly don't have a signedness, require either two pointers or
2421 two non-pointers as well. */
2422 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2423 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2424 return 0;
2425
2426 /* We cannot consider pointers to different address spaces equal. */
2427 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2428 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2429 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2430 return 0;
2431
2432 /* If the two types don't have the same precision, then it is not safe
2433 to strip NOPs. */
2434 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2435 return 0;
2436
2437 STRIP_NOPS (arg0);
2438 STRIP_NOPS (arg1);
2439
2440 /* In case both args are comparisons but with different comparison
2441 code, try to swap the comparison operands of one arg to produce
2442 a match and compare that variant. */
2443 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2444 && COMPARISON_CLASS_P (arg0)
2445 && COMPARISON_CLASS_P (arg1))
2446 {
2447 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2448
2449 if (TREE_CODE (arg0) == swap_code)
2450 return operand_equal_p (TREE_OPERAND (arg0, 0),
2451 TREE_OPERAND (arg1, 1), flags)
2452 && operand_equal_p (TREE_OPERAND (arg0, 1),
2453 TREE_OPERAND (arg1, 0), flags);
2454 }
2455
2456 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2457 /* This is needed for conversions and for COMPONENT_REF.
2458 Might as well play it safe and always test this. */
2459 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2460 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2461 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2462 return 0;
2463
2464 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2465 We don't care about side effects in that case because the SAVE_EXPR
2466 takes care of that for us. In all other cases, two expressions are
2467 equal if they have no side effects. If we have two identical
2468 expressions with side effects that should be treated the same due
2469 to the only side effects being identical SAVE_EXPR's, that will
2470 be detected in the recursive calls below. */
2471 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2472 && (TREE_CODE (arg0) == SAVE_EXPR
2473 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2474 return 1;
2475
2476 /* Next handle constant cases, those for which we can return 1 even
2477 if ONLY_CONST is set. */
2478 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2479 switch (TREE_CODE (arg0))
2480 {
2481 case INTEGER_CST:
2482 return tree_int_cst_equal (arg0, arg1);
2483
2484 case FIXED_CST:
2485 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2486 TREE_FIXED_CST (arg1));
2487
2488 case REAL_CST:
2489 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2490 TREE_REAL_CST (arg1)))
2491 return 1;
2492
2493
2494 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2495 {
2496 /* If we do not distinguish between signed and unsigned zero,
2497 consider them equal. */
2498 if (real_zerop (arg0) && real_zerop (arg1))
2499 return 1;
2500 }
2501 return 0;
2502
2503 case VECTOR_CST:
2504 {
2505 tree v1, v2;
2506
2507 v1 = TREE_VECTOR_CST_ELTS (arg0);
2508 v2 = TREE_VECTOR_CST_ELTS (arg1);
2509 while (v1 && v2)
2510 {
2511 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2512 flags))
2513 return 0;
2514 v1 = TREE_CHAIN (v1);
2515 v2 = TREE_CHAIN (v2);
2516 }
2517
2518 return v1 == v2;
2519 }
2520
2521 case COMPLEX_CST:
2522 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2523 flags)
2524 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2525 flags));
2526
2527 case STRING_CST:
2528 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2529 && ! memcmp (TREE_STRING_POINTER (arg0),
2530 TREE_STRING_POINTER (arg1),
2531 TREE_STRING_LENGTH (arg0)));
2532
2533 case ADDR_EXPR:
2534 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2535 0);
2536 default:
2537 break;
2538 }
2539
2540 if (flags & OEP_ONLY_CONST)
2541 return 0;
2542
2543 /* Define macros to test an operand from arg0 and arg1 for equality and a
2544 variant that allows null and views null as being different from any
2545 non-null value. In the latter case, if either is null, then both
2546 must be; otherwise, do the normal comparison. */
2547 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2548 TREE_OPERAND (arg1, N), flags)
2549
2550 #define OP_SAME_WITH_NULL(N) \
2551 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2552 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2553
2554 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2555 {
2556 case tcc_unary:
2557 /* Two conversions are equal only if signedness and modes match. */
2558 switch (TREE_CODE (arg0))
2559 {
2560 CASE_CONVERT:
2561 case FIX_TRUNC_EXPR:
2562 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2563 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2564 return 0;
2565 break;
2566 default:
2567 break;
2568 }
2569
2570 return OP_SAME (0);
2571
2572
2573 case tcc_comparison:
2574 case tcc_binary:
2575 if (OP_SAME (0) && OP_SAME (1))
2576 return 1;
2577
2578 /* For commutative ops, allow the other order. */
2579 return (commutative_tree_code (TREE_CODE (arg0))
2580 && operand_equal_p (TREE_OPERAND (arg0, 0),
2581 TREE_OPERAND (arg1, 1), flags)
2582 && operand_equal_p (TREE_OPERAND (arg0, 1),
2583 TREE_OPERAND (arg1, 0), flags));
2584
2585 case tcc_reference:
2586 /* If either of the pointer (or reference) expressions we are
2587 dereferencing contain a side effect, these cannot be equal. */
2588 if (TREE_SIDE_EFFECTS (arg0)
2589 || TREE_SIDE_EFFECTS (arg1))
2590 return 0;
2591
2592 switch (TREE_CODE (arg0))
2593 {
2594 case INDIRECT_REF:
2595 case ALIGN_INDIRECT_REF:
2596 case MISALIGNED_INDIRECT_REF:
2597 case REALPART_EXPR:
2598 case IMAGPART_EXPR:
2599 return OP_SAME (0);
2600
2601 case ARRAY_REF:
2602 case ARRAY_RANGE_REF:
2603 /* Operands 2 and 3 may be null.
2604 Compare the array index by value first if it is constant, as we
2605 may have different types but the same value here. */
2606 return (OP_SAME (0)
2607 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2608 TREE_OPERAND (arg1, 1))
2609 || OP_SAME (1))
2610 && OP_SAME_WITH_NULL (2)
2611 && OP_SAME_WITH_NULL (3));
2612
2613 case COMPONENT_REF:
2614 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2615 may be NULL when we're called to compare MEM_EXPRs. */
2616 return OP_SAME_WITH_NULL (0)
2617 && OP_SAME (1)
2618 && OP_SAME_WITH_NULL (2);
2619
2620 case BIT_FIELD_REF:
2621 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2622
2623 default:
2624 return 0;
2625 }
2626
2627 case tcc_expression:
2628 switch (TREE_CODE (arg0))
2629 {
2630 case ADDR_EXPR:
2631 case TRUTH_NOT_EXPR:
2632 return OP_SAME (0);
2633
2634 case TRUTH_ANDIF_EXPR:
2635 case TRUTH_ORIF_EXPR:
2636 return OP_SAME (0) && OP_SAME (1);
2637
2638 case TRUTH_AND_EXPR:
2639 case TRUTH_OR_EXPR:
2640 case TRUTH_XOR_EXPR:
2641 if (OP_SAME (0) && OP_SAME (1))
2642 return 1;
2643
2644 /* Otherwise take into account this is a commutative operation. */
2645 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2646 TREE_OPERAND (arg1, 1), flags)
2647 && operand_equal_p (TREE_OPERAND (arg0, 1),
2648 TREE_OPERAND (arg1, 0), flags));
2649
2650 case COND_EXPR:
2651 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2652
2653 default:
2654 return 0;
2655 }
2656
2657 case tcc_vl_exp:
2658 switch (TREE_CODE (arg0))
2659 {
2660 case CALL_EXPR:
2661 /* If the CALL_EXPRs call different functions, then they
2662 clearly cannot be equal. */
2663 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2664 flags))
2665 return 0;
2666
2667 {
2668 unsigned int cef = call_expr_flags (arg0);
2669 if (flags & OEP_PURE_SAME)
2670 cef &= ECF_CONST | ECF_PURE;
2671 else
2672 cef &= ECF_CONST;
2673 if (!cef)
2674 return 0;
2675 }
2676
2677 /* Now see if all the arguments are the same. */
2678 {
2679 const_call_expr_arg_iterator iter0, iter1;
2680 const_tree a0, a1;
2681 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2682 a1 = first_const_call_expr_arg (arg1, &iter1);
2683 a0 && a1;
2684 a0 = next_const_call_expr_arg (&iter0),
2685 a1 = next_const_call_expr_arg (&iter1))
2686 if (! operand_equal_p (a0, a1, flags))
2687 return 0;
2688
2689 /* If we get here and both argument lists are exhausted
2690 then the CALL_EXPRs are equal. */
2691 return ! (a0 || a1);
2692 }
2693 default:
2694 return 0;
2695 }
2696
2697 case tcc_declaration:
2698 /* Consider __builtin_sqrt equal to sqrt. */
2699 return (TREE_CODE (arg0) == FUNCTION_DECL
2700 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2701 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2702 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2703
2704 default:
2705 return 0;
2706 }
2707
2708 #undef OP_SAME
2709 #undef OP_SAME_WITH_NULL
2710 }
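
/* Usage sketch (illustrative): callers typically guard a fold with this
   predicate, e.g. a subtraction x - x may be simplified along the
   lines of

     if (operand_equal_p (arg0, arg1, 0))
       return fold_convert_loc (loc, type, integer_zero_node);

   Note the IEEE caveats above: with default flags on doubles, a -0.0
   constant is not "equal" to 0.0 here even though they compare equal
   at run time, while two bitwise-identical NaN constants are "equal"
   here even though NaN != NaN at run time.  */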
2711 \f
2712 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2713 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2714
2715 When in doubt, return 0. */
2716
2717 static int
2718 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2719 {
2720 int unsignedp1, unsignedpo;
2721 tree primarg0, primarg1, primother;
2722 unsigned int correct_width;
2723
2724 if (operand_equal_p (arg0, arg1, 0))
2725 return 1;
2726
2727 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2728 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2729 return 0;
2730
2731 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2732 and see if the inner values are the same. This removes any
2733 signedness comparison, which doesn't matter here. */
2734 primarg0 = arg0, primarg1 = arg1;
2735 STRIP_NOPS (primarg0);
2736 STRIP_NOPS (primarg1);
2737 if (operand_equal_p (primarg0, primarg1, 0))
2738 return 1;
2739
2740 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2741 actual comparison operand, ARG0.
2742
2743 First throw away any conversions to wider types
2744 already present in the operands. */
2745
2746 primarg1 = get_narrower (arg1, &unsignedp1);
2747 primother = get_narrower (other, &unsignedpo);
2748
2749 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2750 if (unsignedp1 == unsignedpo
2751 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2752 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2753 {
2754 tree type = TREE_TYPE (arg0);
2755
2756 /* Make sure shorter operand is extended the right way
2757 to match the longer operand. */
2758 primarg1 = fold_convert (signed_or_unsigned_type_for
2759 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2760
2761 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2762 return 1;
2763 }
2764
2765 return 0;
2766 }
2767 \f
2768 /* See if ARG is an expression that is either a comparison or is performing
2769 arithmetic on comparisons. The comparisons must only be comparing
2770 two different values, which will be stored in *CVAL1 and *CVAL2; if
2771 they are nonzero it means that some operands have already been found.
2772 No variables may be used anywhere else in the expression except in the
2773 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2774 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2775
2776 If this is true, return 1. Otherwise, return zero. */
2777
2778 static int
2779 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2780 {
2781 enum tree_code code = TREE_CODE (arg);
2782 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2783
2784 /* We can handle some of the tcc_expression cases here. */
2785 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2786 tclass = tcc_unary;
2787 else if (tclass == tcc_expression
2788 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2789 || code == COMPOUND_EXPR))
2790 tclass = tcc_binary;
2791
2792 else if (tclass == tcc_expression && code == SAVE_EXPR
2793 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2794 {
2795 /* If we've already found a CVAL1 or CVAL2, this expression is
2796 too complex to handle. */
2797 if (*cval1 || *cval2)
2798 return 0;
2799
2800 tclass = tcc_unary;
2801 *save_p = 1;
2802 }
2803
2804 switch (tclass)
2805 {
2806 case tcc_unary:
2807 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2808
2809 case tcc_binary:
2810 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2811 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2812 cval1, cval2, save_p));
2813
2814 case tcc_constant:
2815 return 1;
2816
2817 case tcc_expression:
2818 if (code == COND_EXPR)
2819 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2820 cval1, cval2, save_p)
2821 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2822 cval1, cval2, save_p)
2823 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2824 cval1, cval2, save_p));
2825 return 0;
2826
2827 case tcc_comparison:
2828 /* First see if we can handle the first operand, then the second. For
2829 the second operand, we know *CVAL1 can't be zero. It must be that
2830 one side of the comparison is each of the values; test for the
2831 case where this isn't true by failing if the two operands
2832 are the same. */
2833
2834 if (operand_equal_p (TREE_OPERAND (arg, 0),
2835 TREE_OPERAND (arg, 1), 0))
2836 return 0;
2837
2838 if (*cval1 == 0)
2839 *cval1 = TREE_OPERAND (arg, 0);
2840 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2841 ;
2842 else if (*cval2 == 0)
2843 *cval2 = TREE_OPERAND (arg, 0);
2844 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2845 ;
2846 else
2847 return 0;
2848
2849 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2850 ;
2851 else if (*cval2 == 0)
2852 *cval2 = TREE_OPERAND (arg, 1);
2853 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2854 ;
2855 else
2856 return 0;
2857
2858 return 1;
2859
2860 default:
2861 return 0;
2862 }
2863 }
2864 \f
2865 /* ARG is a tree that is known to contain just arithmetic operations and
2866 comparisons. Evaluate the operations in the tree substituting NEW0 for
2867 any occurrence of OLD0 as an operand of a comparison and likewise for
2868 NEW1 and OLD1. */
2869
2870 static tree
2871 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2872 tree old1, tree new1)
2873 {
2874 tree type = TREE_TYPE (arg);
2875 enum tree_code code = TREE_CODE (arg);
2876 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2877
2878 /* We can handle some of the tcc_expression cases here. */
2879 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2880 tclass = tcc_unary;
2881 else if (tclass == tcc_expression
2882 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2883 tclass = tcc_binary;
2884
2885 switch (tclass)
2886 {
2887 case tcc_unary:
2888 return fold_build1_loc (loc, code, type,
2889 eval_subst (loc, TREE_OPERAND (arg, 0),
2890 old0, new0, old1, new1));
2891
2892 case tcc_binary:
2893 return fold_build2_loc (loc, code, type,
2894 eval_subst (loc, TREE_OPERAND (arg, 0),
2895 old0, new0, old1, new1),
2896 eval_subst (loc, TREE_OPERAND (arg, 1),
2897 old0, new0, old1, new1));
2898
2899 case tcc_expression:
2900 switch (code)
2901 {
2902 case SAVE_EXPR:
2903 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2904 old1, new1);
2905
2906 case COMPOUND_EXPR:
2907 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2908 old1, new1);
2909
2910 case COND_EXPR:
2911 return fold_build3_loc (loc, code, type,
2912 eval_subst (loc, TREE_OPERAND (arg, 0),
2913 old0, new0, old1, new1),
2914 eval_subst (loc, TREE_OPERAND (arg, 1),
2915 old0, new0, old1, new1),
2916 eval_subst (loc, TREE_OPERAND (arg, 2),
2917 old0, new0, old1, new1));
2918 default:
2919 break;
2920 }
2921 /* Fall through - ??? */
2922
2923 case tcc_comparison:
2924 {
2925 tree arg0 = TREE_OPERAND (arg, 0);
2926 tree arg1 = TREE_OPERAND (arg, 1);
2927
2928 /* We need to check both for exact equality and tree equality. The
2929 former will be true if the operand has a side-effect. In that
2930 case, we know the operand occurred exactly once. */
2931
2932 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2933 arg0 = new0;
2934 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2935 arg0 = new1;
2936
2937 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2938 arg1 = new0;
2939 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2940 arg1 = new1;
2941
2942 return fold_build2_loc (loc, code, type, arg0, arg1);
2943 }
2944
2945 default:
2946 return arg;
2947 }
2948 }
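
/* Illustrative example: if ARG is (a < b) || (a == b), then
   eval_subst (loc, arg, a, x, b, y) rebuilds it as (x < y) || (x == y):
   each comparison operand matching OLD0 or OLD1 is replaced by NEW0 or
   NEW1 respectively, and the surrounding arithmetic is rebuilt
   unchanged.  */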
2949 \f
2950 /* Return a tree for the case when the result of an expression is RESULT
2951 converted to TYPE and OMITTED was previously an operand of the expression
2952 but is now not needed (e.g., we folded OMITTED * 0).
2953
2954 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2955 the conversion of RESULT to TYPE. */
2956
2957 tree
2958 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2959 {
2960 tree t = fold_convert_loc (loc, type, result);
2961
2962 /* If the resulting operand is an empty statement, just return the omitted
2963 statement cast to void. */
2964 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2965 {
2966 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2967 goto omit_one_operand_exit;
2968 }
2969
2970 if (TREE_SIDE_EFFECTS (omitted))
2971 {
2972 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2973 goto omit_one_operand_exit;
2974 }
2975
2976 return non_lvalue_loc (loc, t);
2977
2978 omit_one_operand_exit:
2979 protected_set_expr_location (t, loc);
2980 return t;
2981 }
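
/* Example (illustrative): when x++ * 0 is folded away, x++ must still
   be evaluated, so omit_one_operand_loc (loc, type, integer_zero_node,
   arg) yields the COMPOUND_EXPR (x++, 0); if OMITTED has no side
   effects the result is simply 0 wrapped as a non-lvalue.  */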
2982
2983 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2984
2985 static tree
2986 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2987 tree omitted)
2988 {
2989 tree t = fold_convert_loc (loc, type, result);
2990
2991 /* If the resulting operand is an empty statement, just return the omitted
2992 statement cast to void. */
2993 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2994 {
2995 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2996 goto pedantic_omit_one_operand_exit;
2997 }
2998
2999 if (TREE_SIDE_EFFECTS (omitted))
3000 {
3001 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3002 goto pedantic_omit_one_operand_exit;
3003 }
3004
3005 return pedantic_non_lvalue_loc (loc, t);
3006
3007 pedantic_omit_one_operand_exit:
3008 protected_set_expr_location (t, loc);
3009 return t;
3010 }
3011
3012 /* Return a tree for the case when the result of an expression is RESULT
3013 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3014 of the expression but are now not needed.
3015
3016 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3017 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3018 evaluated before OMITTED2. Otherwise, if neither has side effects,
3019 just do the conversion of RESULT to TYPE. */
3020
3021 tree
3022 omit_two_operands_loc (location_t loc, tree type, tree result,
3023 tree omitted1, tree omitted2)
3024 {
3025 tree t = fold_convert_loc (loc, type, result);
3026
3027 if (TREE_SIDE_EFFECTS (omitted2))
3028 {
3029 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3030 SET_EXPR_LOCATION (t, loc);
3031 }
3032 if (TREE_SIDE_EFFECTS (omitted1))
3033 {
3034 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3035 SET_EXPR_LOCATION (t, loc);
3036 }
3037
3038 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3039 }
3040
3041 \f
3042 /* Return a simplified tree node for the truth-negation of ARG. This
3043 never alters ARG itself. We assume that ARG is an operation that
3044 returns a truth value (0 or 1).
3045
3046 FIXME: one would think we would fold the result, but it causes
3047 problems with the dominator optimizer. */
3048
3049 tree
3050 fold_truth_not_expr (location_t loc, tree arg)
3051 {
3052 tree t, type = TREE_TYPE (arg);
3053 enum tree_code code = TREE_CODE (arg);
3054 location_t loc1, loc2;
3055
3056 /* If this is a comparison, we can simply invert it, except for
3057 floating-point non-equality comparisons, in which case we just
3058 enclose a TRUTH_NOT_EXPR around what we have. */
3059
3060 if (TREE_CODE_CLASS (code) == tcc_comparison)
3061 {
3062 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3063 if (FLOAT_TYPE_P (op_type)
3064 && flag_trapping_math
3065 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3066 && code != NE_EXPR && code != EQ_EXPR)
3067 return NULL_TREE;
3068
3069 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3070 if (code == ERROR_MARK)
3071 return NULL_TREE;
3072
3073 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3074 SET_EXPR_LOCATION (t, loc);
3075 return t;
3076 }
3077
3078 switch (code)
3079 {
3080 case INTEGER_CST:
3081 return constant_boolean_node (integer_zerop (arg), type);
3082
3083 case TRUTH_AND_EXPR:
3084 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3085 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3086 if (loc1 == UNKNOWN_LOCATION)
3087 loc1 = loc;
3088 if (loc2 == UNKNOWN_LOCATION)
3089 loc2 = loc;
3090 t = build2 (TRUTH_OR_EXPR, type,
3091 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3092 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3093 break;
3094
3095 case TRUTH_OR_EXPR:
3096 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3097 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3098 if (loc1 == UNKNOWN_LOCATION)
3099 loc1 = loc;
3100 if (loc2 == UNKNOWN_LOCATION)
3101 loc2 = loc;
3102 t = build2 (TRUTH_AND_EXPR, type,
3103 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3104 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3105 break;
3106
3107 case TRUTH_XOR_EXPR:
3108 /* Here we can invert either operand. We invert the first operand
3109 unless the second operand is a TRUTH_NOT_EXPR in which case our
3110 result is the XOR of the first operand with the inside of the
3111 negation of the second operand. */
3112
3113 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3114 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3115 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3116 else
3117 t = build2 (TRUTH_XOR_EXPR, type,
3118 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3119 TREE_OPERAND (arg, 1));
3120 break;
3121
3122 case TRUTH_ANDIF_EXPR:
3123 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3124 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3125 if (loc1 == UNKNOWN_LOCATION)
3126 loc1 = loc;
3127 if (loc2 == UNKNOWN_LOCATION)
3128 loc2 = loc;
3129 t = build2 (TRUTH_ORIF_EXPR, type,
3130 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3131 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3132 break;
3133
3134 case TRUTH_ORIF_EXPR:
3135 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3136 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3137 if (loc1 == UNKNOWN_LOCATION)
3138 loc1 = loc;
3139 if (loc2 == UNKNOWN_LOCATION)
3140 loc2 = loc;
3141 t = build2 (TRUTH_ANDIF_EXPR, type,
3142 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3143 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3144 break;
3145
3146 case TRUTH_NOT_EXPR:
3147 return TREE_OPERAND (arg, 0);
3148
3149 case COND_EXPR:
3150 {
3151 tree arg1 = TREE_OPERAND (arg, 1);
3152 tree arg2 = TREE_OPERAND (arg, 2);
3153
3154 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3155 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3156 if (loc1 == UNKNOWN_LOCATION)
3157 loc1 = loc;
3158 if (loc2 == UNKNOWN_LOCATION)
3159 loc2 = loc;
3160
3161 /* A COND_EXPR may have a throw as one operand, which
3162 then has void type. Just leave void operands
3163 as they are. */
3164 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3165 VOID_TYPE_P (TREE_TYPE (arg1))
3166 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3167 VOID_TYPE_P (TREE_TYPE (arg2))
3168 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3169 break;
3170 }
3171
3172 case COMPOUND_EXPR:
3173 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3174 if (loc1 == UNKNOWN_LOCATION)
3175 loc1 = loc;
3176 t = build2 (COMPOUND_EXPR, type,
3177 TREE_OPERAND (arg, 0),
3178 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3179 break;
3180
3181 case NON_LVALUE_EXPR:
3182 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3183 if (loc1 == UNKNOWN_LOCATION)
3184 loc1 = loc;
3185 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3186
3187 CASE_CONVERT:
3188 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3189 {
3190 t = build1 (TRUTH_NOT_EXPR, type, arg);
3191 break;
3192 }
3193
3194 /* ... fall through ... */
3195
3196 case FLOAT_EXPR:
3197 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3198 if (loc1 == UNKNOWN_LOCATION)
3199 loc1 = loc;
3200 t = build1 (TREE_CODE (arg), type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3202 break;
3203
3204 case BIT_AND_EXPR:
3205 if (!integer_onep (TREE_OPERAND (arg, 1)))
3206 return NULL_TREE;
3207 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3208 break;
3209
3210 case SAVE_EXPR:
3211 t = build1 (TRUTH_NOT_EXPR, type, arg);
3212 break;
3213
3214 case CLEANUP_POINT_EXPR:
3215 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3216 if (loc1 == UNKNOWN_LOCATION)
3217 loc1 = loc;
3218 t = build1 (CLEANUP_POINT_EXPR, type,
3219 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3220 break;
3221
3222 default:
3223 t = NULL_TREE;
3224 break;
3225 }
3226
3227 if (t)
3228 SET_EXPR_LOCATION (t, loc);
3229
3230 return t;
3231 }
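
/* Examples of the cases above (illustrative): !(a && b) becomes
   !a || !b, !(a < b) becomes a >= b when NaNs and trapping math do not
   interfere, and !(cond ? a : b) becomes cond ? !a : !b (with void
   operands such as throws left untouched).  */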
3232
3233 /* Return a simplified tree node for the truth-negation of ARG. This
3234 never alters ARG itself. We assume that ARG is an operation that
3235 returns a truth value (0 or 1).
3236
3237 FIXME: one would think we would fold the result, but it causes
3238 problems with the dominator optimizer. */
3239
3240 tree
3241 invert_truthvalue_loc (location_t loc, tree arg)
3242 {
3243 tree tem;
3244
3245 if (TREE_CODE (arg) == ERROR_MARK)
3246 return arg;
3247
3248 tem = fold_truth_not_expr (loc, arg);
3249 if (!tem)
3250 {
3251 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3252 SET_EXPR_LOCATION (tem, loc);
3253 }
3254
3255 return tem;
3256 }
3257
3258 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3259 operands are another bit-wise operation with a common input. If so,
3260 distribute the bit operations to save an operation and possibly two if
3261 constants are involved. For example, convert
3262 (A | B) & (A | C) into A | (B & C)
3263 Further simplification will occur if B and C are constants.
3264
3265 If this optimization cannot be done, 0 will be returned. */
3266
3267 static tree
3268 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3269 tree arg0, tree arg1)
3270 {
3271 tree common;
3272 tree left, right;
3273
3274 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3275 || TREE_CODE (arg0) == code
3276 || (TREE_CODE (arg0) != BIT_AND_EXPR
3277 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3278 return 0;
3279
3280 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3281 {
3282 common = TREE_OPERAND (arg0, 0);
3283 left = TREE_OPERAND (arg0, 1);
3284 right = TREE_OPERAND (arg1, 1);
3285 }
3286 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3287 {
3288 common = TREE_OPERAND (arg0, 0);
3289 left = TREE_OPERAND (arg0, 1);
3290 right = TREE_OPERAND (arg1, 0);
3291 }
3292 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3293 {
3294 common = TREE_OPERAND (arg0, 1);
3295 left = TREE_OPERAND (arg0, 0);
3296 right = TREE_OPERAND (arg1, 1);
3297 }
3298 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3299 {
3300 common = TREE_OPERAND (arg0, 1);
3301 left = TREE_OPERAND (arg0, 0);
3302 right = TREE_OPERAND (arg1, 0);
3303 }
3304 else
3305 return 0;
3306
3307 common = fold_convert_loc (loc, type, common);
3308 left = fold_convert_loc (loc, type, left);
3309 right = fold_convert_loc (loc, type, right);
3310 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3311 fold_build2_loc (loc, code, type, left, right));
3312 }
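
/* Concrete instance (illustrative): for (x | 3) & (x | 5) the common
   operand is x, so the result is x | (3 & 5), and constant folding
   reduces that to x | 1 -- one bitwise operation instead of three.  */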
3313
3314 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3315 simplify a binary operation with code CODE. This optimization is unsafe. */
3316 static tree
3317 distribute_real_division (location_t loc, enum tree_code code, tree type,
3318 tree arg0, tree arg1)
3319 {
3320 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3321 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3322
3323 /* (A / C) +- (B / C) -> (A +- B) / C. */
3324 if (mul0 == mul1
3325 && operand_equal_p (TREE_OPERAND (arg0, 1),
3326 TREE_OPERAND (arg1, 1), 0))
3327 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3328 fold_build2_loc (loc, code, type,
3329 TREE_OPERAND (arg0, 0),
3330 TREE_OPERAND (arg1, 0)),
3331 TREE_OPERAND (arg0, 1));
3332
3333 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3334 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3335 TREE_OPERAND (arg1, 0), 0)
3336 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3337 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3338 {
3339 REAL_VALUE_TYPE r0, r1;
3340 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3341 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3342 if (!mul0)
3343 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3344 if (!mul1)
3345 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3346 real_arithmetic (&r0, code, &r0, &r1);
3347 return fold_build2_loc (loc, MULT_EXPR, type,
3348 TREE_OPERAND (arg0, 0),
3349 build_real (type, r0));
3350 }
3351
3352 return NULL_TREE;
3353 }
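
/* Concrete instances (illustrative): a/2.0 + b/2.0 becomes
   (a + b) / 2.0, and a/2.0 + a/4.0 becomes a * 0.75.  Either rewrite
   can change rounding and overflow behavior, which is why the callers
   only use this under -funsafe-math-optimizations.  */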
3354 \f
3355 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3356 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3357
3358 static tree
3359 make_bit_field_ref (location_t loc, tree inner, tree type,
3360 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3361 {
3362 tree result, bftype;
3363
3364 if (bitpos == 0)
3365 {
3366 tree size = TYPE_SIZE (TREE_TYPE (inner));
3367 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3368 || POINTER_TYPE_P (TREE_TYPE (inner)))
3369 && host_integerp (size, 0)
3370 && tree_low_cst (size, 0) == bitsize)
3371 return fold_convert_loc (loc, type, inner);
3372 }
3373
3374 bftype = type;
3375 if (TYPE_PRECISION (bftype) != bitsize
3376 || TYPE_UNSIGNED (bftype) == !unsignedp)
3377 bftype = build_nonstandard_integer_type (bitsize, 0);
3378
3379 result = build3 (BIT_FIELD_REF, bftype, inner,
3380 size_int (bitsize), bitsize_int (bitpos));
3381 SET_EXPR_LOCATION (result, loc);
3382
3383 if (bftype != type)
3384 result = fold_convert_loc (loc, type, result);
3385
3386 return result;
3387 }
3388
3389 /* Optimize a bit-field compare.
3390
3391 There are two cases: First is a compare against a constant and the
3392 second is a comparison of two items where the fields are at the same
3393 bit position relative to the start of a chunk (byte, halfword, word)
3394 large enough to contain it. In these cases we can avoid the shift
3395 implicit in bitfield extractions.
3396
3397 For constants, we emit a compare of the shifted constant with the
3398 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3399 compared. For two fields at the same position, we do the ANDs with the
3400 similar mask and compare the result of the ANDs.
3401
3402 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3403 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3404 are the left and right operands of the comparison, respectively.
3405
3406 If the optimization described above can be done, we return the resulting
3407 tree. Otherwise we return zero. */
3408
3409 static tree
3410 optimize_bit_field_compare (location_t loc, enum tree_code code,
3411 tree compare_type, tree lhs, tree rhs)
3412 {
3413 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3414 tree type = TREE_TYPE (lhs);
3415 tree signed_type, unsigned_type;
3416 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3417 enum machine_mode lmode, rmode, nmode;
3418 int lunsignedp, runsignedp;
3419 int lvolatilep = 0, rvolatilep = 0;
3420 tree linner, rinner = NULL_TREE;
3421 tree mask;
3422 tree offset;
3423
3424 /* Get all the information about the extractions being done. If the bit size
3425 is the same as the size of the underlying object, we aren't doing an
3426 extraction at all and so can do nothing. We also don't want to
3427 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3428 then will no longer be able to replace it. */
3429 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3430 &lunsignedp, &lvolatilep, false);
3431 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3432 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3433 return 0;
3434
3435 if (!const_p)
3436 {
3437 /* If this is not a constant, we can only do something if bit positions,
3438 sizes, and signedness are the same. */
3439 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3440 &runsignedp, &rvolatilep, false);
3441
3442 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3443 || lunsignedp != runsignedp || offset != 0
3444 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3445 return 0;
3446 }
3447
3448 /* See if we can find a mode to refer to this field. We should be able to,
3449 but fail if we can't. */
3450 if (lvolatilep
3451 && GET_MODE_BITSIZE (lmode) > 0
3452 && flag_strict_volatile_bitfields > 0)
3453 nmode = lmode;
3454 else
3455 nmode = get_best_mode (lbitsize, lbitpos,
3456 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3457 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3458 TYPE_ALIGN (TREE_TYPE (rinner))),
3459 word_mode, lvolatilep || rvolatilep);
3460 if (nmode == VOIDmode)
3461 return 0;
3462
3463 /* Set signed and unsigned types of the precision of this mode for the
3464 shifts below. */
3465 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3466 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3467
3468 /* Compute the bit position and size for the new reference and our offset
3469 within it. If the new reference is the same size as the original, we
3470 won't optimize anything, so return zero. */
3471 nbitsize = GET_MODE_BITSIZE (nmode);
3472 nbitpos = lbitpos & ~ (nbitsize - 1);
3473 lbitpos -= nbitpos;
3474 if (nbitsize == lbitsize)
3475 return 0;
3476
3477 if (BYTES_BIG_ENDIAN)
3478 lbitpos = nbitsize - lbitsize - lbitpos;
3479
3480 /* Make the mask to be used against the extracted field. */
3481 mask = build_int_cst_type (unsigned_type, -1);
3482 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3483 mask = const_binop (RSHIFT_EXPR, mask,
3484 size_int (nbitsize - lbitsize - lbitpos));
3485
3486 if (! const_p)
3487 /* If not comparing with constant, just rework the comparison
3488 and return. */
3489 return fold_build2_loc (loc, code, compare_type,
3490 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3491 make_bit_field_ref (loc, linner,
3492 unsigned_type,
3493 nbitsize, nbitpos,
3494 1),
3495 mask),
3496 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3497 make_bit_field_ref (loc, rinner,
3498 unsigned_type,
3499 nbitsize, nbitpos,
3500 1),
3501 mask));
3502
3503 /* Otherwise, we are handling the constant case. See if the constant is too
3504 big for the field. Warn and return a tree for 0 (false) if so. We do
3505 this not only for its own sake, but to avoid having to test for this
3506 error case below. If we didn't, we might generate wrong code.
3507
3508 For unsigned fields, the constant shifted right by the field length should
3509 be all zero. For signed fields, the high-order bits should agree with
3510 the sign bit. */
3511
3512 if (lunsignedp)
3513 {
3514 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3515 fold_convert_loc (loc,
3516 unsigned_type, rhs),
3517 size_int (lbitsize))))
3518 {
3519 warning (0, "comparison is always %d due to width of bit-field",
3520 code == NE_EXPR);
3521 return constant_boolean_node (code == NE_EXPR, compare_type);
3522 }
3523 }
3524 else
3525 {
3526 tree tem = const_binop (RSHIFT_EXPR,
3527 fold_convert_loc (loc, signed_type, rhs),
3528 size_int (lbitsize - 1));
3529 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3530 {
3531 warning (0, "comparison is always %d due to width of bit-field",
3532 code == NE_EXPR);
3533 return constant_boolean_node (code == NE_EXPR, compare_type);
3534 }
3535 }
3536
3537 /* Single-bit compares should always be against zero. */
3538 if (lbitsize == 1 && ! integer_zerop (rhs))
3539 {
3540 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3541 rhs = build_int_cst (type, 0);
3542 }
3543
3544 /* Make a new bitfield reference, shift the constant over the
3545 appropriate number of bits and mask it with the computed mask
3546 (in case this was a signed field). If we changed it, make a new one. */
3547 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3548 if (lvolatilep)
3549 {
3550 TREE_SIDE_EFFECTS (lhs) = 1;
3551 TREE_THIS_VOLATILE (lhs) = 1;
3552 }
3553
3554 rhs = const_binop (BIT_AND_EXPR,
3555 const_binop (LSHIFT_EXPR,
3556 fold_convert_loc (loc, unsigned_type, rhs),
3557 size_int (lbitpos)),
3558 mask);
3559
3560 lhs = build2 (code, compare_type,
3561 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3562 rhs);
3563 SET_EXPR_LOCATION (lhs, loc);
3564 return lhs;
3565 }
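
/* Worked example (illustrative): given

     struct s { unsigned int a : 3; unsigned int b : 9; } x;

   the test x.b == 5 need not extract the field.  With a 16-bit chunk
   containing both fields it becomes, roughly,

     (chunk & mask) == ((5 << shift) & mask)

   where "chunk" is a mode-sized load covering the field, and "mask"
   and "shift" are hypothetical names for the values computed above;
   the shift implicit in extracting x.b is folded into the constant.  */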
3566 \f
3567 /* Subroutine for fold_truthop: decode a field reference.
3568
3569 If EXP is a comparison reference, we return the innermost reference.
3570
3571 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3572 set to the starting bit number.
3573
3574 If the innermost field can be completely contained in a mode-sized
3575 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3576
3577 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3578 otherwise it is not changed.
3579
3580 *PUNSIGNEDP is set to the signedness of the field.
3581
3582 *PMASK is set to the mask used. This is either contained in a
3583 BIT_AND_EXPR or derived from the width of the field.
3584
3585 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3586
3587 Return 0 if this is not a component reference or is one that we can't
3588 do anything with. */
3589
3590 static tree
3591 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3592 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3593 int *punsignedp, int *pvolatilep,
3594 tree *pmask, tree *pand_mask)
3595 {
3596 tree outer_type = 0;
3597 tree and_mask = 0;
3598 tree mask, inner, offset;
3599 tree unsigned_type;
3600 unsigned int precision;
3601
3602 /* All the optimizations using this function assume integer fields.
3603 There are problems with FP fields since the type_for_size call
3604 below can fail for, e.g., XFmode. */
3605 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3606 return 0;
3607
3608 /* We are interested in the bare arrangement of bits, so strip everything
3609 that doesn't affect the machine mode. However, record the type of the
3610 outermost expression if it may matter below. */
3611 if (CONVERT_EXPR_P (exp)
3612 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3613 outer_type = TREE_TYPE (exp);
3614 STRIP_NOPS (exp);
3615
3616 if (TREE_CODE (exp) == BIT_AND_EXPR)
3617 {
3618 and_mask = TREE_OPERAND (exp, 1);
3619 exp = TREE_OPERAND (exp, 0);
3620 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3621 if (TREE_CODE (and_mask) != INTEGER_CST)
3622 return 0;
3623 }
3624
3625 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3626 punsignedp, pvolatilep, false);
3627 if ((inner == exp && and_mask == 0)
3628 || *pbitsize < 0 || offset != 0
3629 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3630 return 0;
3631
3632 /* If the number of bits in the reference is the same as the bitsize of
3633 the outer type, then the outer type gives the signedness. Otherwise
3634 (in case of a small bitfield) the signedness is unchanged. */
3635 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3636 *punsignedp = TYPE_UNSIGNED (outer_type);
3637
3638 /* Compute the mask to access the bitfield. */
3639 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3640 precision = TYPE_PRECISION (unsigned_type);
3641
3642 mask = build_int_cst_type (unsigned_type, -1);
3643
3644 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3645 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3646
3647 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3648 if (and_mask != 0)
3649 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3650 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3651
3652 *pmask = mask;
3653 *pand_mask = and_mask;
3654 return inner;
3655 }
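/* Illustrative sketch (not part of this file): the LSHIFT/RSHIFT pair
   above builds a mask of *PBITSIZE low-order ones in an unsigned type of
   PRECISION bits.  A plain 32-bit equivalent, assuming 0 < size <= 32:  */

static unsigned int example_low_order_ones (unsigned int size)
{
  unsigned int mask = ~0u;
  mask <<= 32 - size;   /* SIZE ones at the high end, zeros below */
  mask >>= 32 - size;   /* logical shift brings them back down */
  return mask;
}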
3656
3657 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3658 bit positions. */
3659
3660 static int
3661 all_ones_mask_p (const_tree mask, int size)
3662 {
3663 tree type = TREE_TYPE (mask);
3664 unsigned int precision = TYPE_PRECISION (type);
3665 tree tmask;
3666
3667 tmask = build_int_cst_type (signed_type_for (type), -1);
3668
3669 return
3670 tree_int_cst_equal (mask,
3671 const_binop (RSHIFT_EXPR,
3672 const_binop (LSHIFT_EXPR, tmask,
3673 size_int (precision - size)),
3674 size_int (precision - size)));
3675 }
3676
3677 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3678 represents the sign bit of EXP's type. If EXP represents a sign
3679 or zero extension, also test VAL against the unextended type.
3680 The return value is the (sub)expression whose sign bit is VAL,
3681 or NULL_TREE otherwise. */
3682
3683 static tree
3684 sign_bit_p (tree exp, const_tree val)
3685 {
3686 unsigned HOST_WIDE_INT mask_lo, lo;
3687 HOST_WIDE_INT mask_hi, hi;
3688 int width;
3689 tree t;
3690
3691 /* Tree EXP must have an integral type. */
3692 t = TREE_TYPE (exp);
3693 if (! INTEGRAL_TYPE_P (t))
3694 return NULL_TREE;
3695
3696 /* Tree VAL must be an integer constant. */
3697 if (TREE_CODE (val) != INTEGER_CST
3698 || TREE_OVERFLOW (val))
3699 return NULL_TREE;
3700
3701 width = TYPE_PRECISION (t);
3702 if (width > HOST_BITS_PER_WIDE_INT)
3703 {
3704 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3705 lo = 0;
3706
3707 mask_hi = ((unsigned HOST_WIDE_INT) -1
3708 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3709 mask_lo = -1;
3710 }
3711 else
3712 {
3713 hi = 0;
3714 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3715
3716 mask_hi = 0;
3717 mask_lo = ((unsigned HOST_WIDE_INT) -1
3718 >> (HOST_BITS_PER_WIDE_INT - width));
3719 }
3720
3721 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3722 treat VAL as if it were unsigned. */
3723 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3724 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3725 return exp;
3726
3727 /* Handle extension from a narrower type. */
3728 if (TREE_CODE (exp) == NOP_EXPR
3729 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3730 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3731
3732 return NULL_TREE;
3733 }
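/* Illustrative sketch (not part of this file): for a width that fits in
   one HOST_WIDE_INT, the masked comparison above reduces to the check
   below, specialised to 64-bit words (an assumed width) with
   0 < width <= 64:  */

static int example_is_sign_bit (unsigned long long val, int width)
{
  unsigned long long sign = 1ull << (width - 1);     /* the sign bit */
  unsigned long long mask = ~0ull >> (64 - width);   /* bits of the type */
  return (val & mask) == sign;
}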
3734
3735 /* Subroutine for fold_truthop: determine if an operand is simple enough
3736 to be evaluated unconditionally. */
3737
3738 static int
3739 simple_operand_p (const_tree exp)
3740 {
3741 /* Strip any conversions that don't change the machine mode. */
3742 STRIP_NOPS (exp);
3743
3744 return (CONSTANT_CLASS_P (exp)
3745 || TREE_CODE (exp) == SSA_NAME
3746 || (DECL_P (exp)
3747 && ! TREE_ADDRESSABLE (exp)
3748 && ! TREE_THIS_VOLATILE (exp)
3749 && ! DECL_NONLOCAL (exp)
3750 /* Don't regard global variables as simple. They may be
3751 allocated in ways unknown to the compiler (shared memory,
3752 #pragma weak, etc). */
3753 && ! TREE_PUBLIC (exp)
3754 && ! DECL_EXTERNAL (exp)
3755 /* Loading a static variable is unduly expensive, but global
3756 registers aren't expensive. */
3757 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3758 }
3759 \f
3760 /* The following functions are subroutines to fold_range_test and allow it to
3761 try to change a logical combination of comparisons into a range test.
3762
3763 For example, both
3764 X == 2 || X == 3 || X == 4 || X == 5
3765 and
3766 X >= 2 && X <= 5
3767 are converted to
3768 (unsigned) (X - 2) <= 3
3769
3770 We describe each set of comparisons as being either inside or outside
3771 a range, using a variable named like IN_P, and then describe the
3772 range with a lower and upper bound. If one of the bounds is omitted,
3773 it represents either the highest or lowest value of the type.
3774
3775 In the comments below, we represent a range by two numbers in brackets
3776 preceded by a "+" to designate being inside that range, or a "-" to
3777 designate being outside that range, so the condition can be inverted by
3778 flipping the prefix. An omitted bound is represented by a "-". For
3779 example, "- [-, 10]" means being outside the range starting at the lowest
3780 possible value and ending at 10, in other words, being greater than 10.
3781 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3782 always false.
3783
3784 We set up things so that the missing bounds are handled in a consistent
3785 manner so neither a missing bound nor "true" and "false" need to be
3786 handled using a special case. */
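/* Illustrative sketch (not part of this file): the equivalence quoted
   above, written out in plain C.  Subtracting the low bound shifts the
   range [2, 5] down to [0, 3], and the unsigned comparison then folds the
   two bound checks into one (the compiler may assume the signed
   subtraction does not overflow):  */

static int example_range_test (int x)
{
  /* x == 2 || x == 3 || x == 4 || x == 5, and likewise
     x >= 2 && x <= 5, both become:  */
  return (unsigned) (x - 2) <= 3;
}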
3787
3788 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3789 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3790 and UPPER1_P are nonzero if the respective argument is an upper bound
3791 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3792 must be specified for a comparison. ARG1 will be converted to ARG0's
3793 type if both are specified. */
3794
3795 static tree
3796 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3797 tree arg1, int upper1_p)
3798 {
3799 tree tem;
3800 int result;
3801 int sgn0, sgn1;
3802
3803 /* If neither arg represents infinity, do the normal operation.
3804 Else, if not a comparison, return infinity. Else handle the special
3805 comparison rules. Note that most of the cases below won't occur, but
3806 are handled for consistency. */
3807
3808 if (arg0 != 0 && arg1 != 0)
3809 {
3810 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3811 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3812 STRIP_NOPS (tem);
3813 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3814 }
3815
3816 if (TREE_CODE_CLASS (code) != tcc_comparison)
3817 return 0;
3818
3819 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3820 for neither. In real mathematics, we cannot assume open-ended ranges
3821 are the same. But this is computer arithmetic, where numbers are
3822 finite, so we may treat any unbounded range as if it were bounded by
3823 a value Z greater than any representable number. This permits us to
3824 treat unbounded ranges as equal. */
3825 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3826 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3827 switch (code)
3828 {
3829 case EQ_EXPR:
3830 result = sgn0 == sgn1;
3831 break;
3832 case NE_EXPR:
3833 result = sgn0 != sgn1;
3834 break;
3835 case LT_EXPR:
3836 result = sgn0 < sgn1;
3837 break;
3838 case LE_EXPR:
3839 result = sgn0 <= sgn1;
3840 break;
3841 case GT_EXPR:
3842 result = sgn0 > sgn1;
3843 break;
3844 case GE_EXPR:
3845 result = sgn0 >= sgn1;
3846 break;
3847 default:
3848 gcc_unreachable ();
3849 }
3850
3851 return constant_boolean_node (result, type);
3852 }
3853 \f
3854 /* Given EXP, a logical expression, set the range it is testing into
3855 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3856 actually being tested. *PLOW and *PHIGH will be made of the same
3857 type as the returned expression. If EXP is not a comparison, we
3858 will most likely not be returning a useful value and range. Set
3859 *STRICT_OVERFLOW_P to true if the return value is only valid
3860 because signed overflow is undefined; otherwise, do not change
3861 *STRICT_OVERFLOW_P. */
3862
3863 tree
3864 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3865 bool *strict_overflow_p)
3866 {
3867 enum tree_code code;
3868 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3869 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3870 int in_p, n_in_p;
3871 tree low, high, n_low, n_high;
3872 location_t loc = EXPR_LOCATION (exp);
3873
3874 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3875 and see if we can refine the range. Some of the cases below may not
3876 happen, but it doesn't seem worth worrying about this. We "continue"
3877 the outer loop when we've changed something; otherwise we "break"
3878 the switch, which will "break" the while. */
3879
3880 in_p = 0;
3881 low = high = build_int_cst (TREE_TYPE (exp), 0);
3882
3883 while (1)
3884 {
3885 code = TREE_CODE (exp);
3886 exp_type = TREE_TYPE (exp);
3887
3888 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3889 {
3890 if (TREE_OPERAND_LENGTH (exp) > 0)
3891 arg0 = TREE_OPERAND (exp, 0);
3892 if (TREE_CODE_CLASS (code) == tcc_comparison
3893 || TREE_CODE_CLASS (code) == tcc_unary
3894 || TREE_CODE_CLASS (code) == tcc_binary)
3895 arg0_type = TREE_TYPE (arg0);
3896 if (TREE_CODE_CLASS (code) == tcc_binary
3897 || TREE_CODE_CLASS (code) == tcc_comparison
3898 || (TREE_CODE_CLASS (code) == tcc_expression
3899 && TREE_OPERAND_LENGTH (exp) > 1))
3900 arg1 = TREE_OPERAND (exp, 1);
3901 }
3902
3903 switch (code)
3904 {
3905 case TRUTH_NOT_EXPR:
3906 in_p = ! in_p, exp = arg0;
3907 continue;
3908
3909 case EQ_EXPR: case NE_EXPR:
3910 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3911 /* We can only do something if the range is testing for zero
3912 and if the second operand is an integer constant. Note that
3913 saying something is "in" the range we make is done by
3914 complementing IN_P, since IN_P is set in the initial case to
3915 mean not equal to zero; "out" is leaving it alone. */
3916 if (low == 0 || high == 0
3917 || ! integer_zerop (low) || ! integer_zerop (high)
3918 || TREE_CODE (arg1) != INTEGER_CST)
3919 break;
3920
3921 switch (code)
3922 {
3923 case NE_EXPR: /* - [c, c] */
3924 low = high = arg1;
3925 break;
3926 case EQ_EXPR: /* + [c, c] */
3927 in_p = ! in_p, low = high = arg1;
3928 break;
3929 case GT_EXPR: /* - [-, c] */
3930 low = 0, high = arg1;
3931 break;
3932 case GE_EXPR: /* + [c, -] */
3933 in_p = ! in_p, low = arg1, high = 0;
3934 break;
3935 case LT_EXPR: /* - [c, -] */
3936 low = arg1, high = 0;
3937 break;
3938 case LE_EXPR: /* + [-, c] */
3939 in_p = ! in_p, low = 0, high = arg1;
3940 break;
3941 default:
3942 gcc_unreachable ();
3943 }
3944
3945 /* If this is an unsigned comparison, we also know that EXP is
3946 greater than or equal to zero. We base the range tests we make
3947 on that fact, so we record it here so we can parse existing
3948 range tests. We test arg0_type since often the return type
3949 of, e.g. EQ_EXPR, is boolean. */
3950 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3951 {
3952 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3953 in_p, low, high, 1,
3954 build_int_cst (arg0_type, 0),
3955 NULL_TREE))
3956 break;
3957
3958 in_p = n_in_p, low = n_low, high = n_high;
3959
3960 /* If the high bound is missing, but we have a nonzero low
3961 bound, reverse the range so it goes from zero to the low bound
3962 minus 1. */
3963 if (high == 0 && low && ! integer_zerop (low))
3964 {
3965 in_p = ! in_p;
3966 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3967 integer_one_node, 0);
3968 low = build_int_cst (arg0_type, 0);
3969 }
3970 }
3971
3972 exp = arg0;
3973 continue;
3974
3975 case NEGATE_EXPR:
3976 /* (-x) IN [a,b] -> x in [-b, -a] */
3977 n_low = range_binop (MINUS_EXPR, exp_type,
3978 build_int_cst (exp_type, 0),
3979 0, high, 1);
3980 n_high = range_binop (MINUS_EXPR, exp_type,
3981 build_int_cst (exp_type, 0),
3982 0, low, 0);
3983 low = n_low, high = n_high;
3984 exp = arg0;
3985 continue;
3986
3987 case BIT_NOT_EXPR:
3988 /* ~ X -> -X - 1 */
3989 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3990 build_int_cst (exp_type, 1));
3991 SET_EXPR_LOCATION (exp, loc);
3992 continue;
3993
3994 case PLUS_EXPR: case MINUS_EXPR:
3995 if (TREE_CODE (arg1) != INTEGER_CST)
3996 break;
3997
3998 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3999 move a constant to the other side. */
4000 if (!TYPE_UNSIGNED (arg0_type)
4001 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4002 break;
4003
4004 /* If EXP is signed, any overflow in the computation is undefined,
4005 so we don't worry about it so long as our computations on
4006 the bounds don't overflow. For unsigned, overflow is defined
4007 and this is exactly the right thing. */
4008 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4009 arg0_type, low, 0, arg1, 0);
4010 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4011 arg0_type, high, 1, arg1, 0);
4012 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4013 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4014 break;
4015
4016 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4017 *strict_overflow_p = true;
4018
4019 /* Check for an unsigned range which has wrapped around the maximum
4020 value thus making n_high < n_low, and normalize it. */
4021 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4022 {
4023 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4024 integer_one_node, 0);
4025 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4026 integer_one_node, 0);
4027
4028 /* If the range is of the form +/- [ x+1, x ], we won't
4029 be able to normalize it. But then, it represents the
4030 whole range or the empty set, so make it
4031 +/- [ -, - ]. */
4032 if (tree_int_cst_equal (n_low, low)
4033 && tree_int_cst_equal (n_high, high))
4034 low = high = 0;
4035 else
4036 in_p = ! in_p;
4037 }
4038 else
4039 low = n_low, high = n_high;
4040
4041 exp = arg0;
4042 continue;
4043
4044 CASE_CONVERT: case NON_LVALUE_EXPR:
4045 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4046 break;
4047
4048 if (! INTEGRAL_TYPE_P (arg0_type)
4049 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4050 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4051 break;
4052
4053 n_low = low, n_high = high;
4054
4055 if (n_low != 0)
4056 n_low = fold_convert_loc (loc, arg0_type, n_low);
4057
4058 if (n_high != 0)
4059 n_high = fold_convert_loc (loc, arg0_type, n_high);
4060
4061
4062 /* If we're converting arg0 from an unsigned type to exp's
4063 signed type, we will be doing the comparison as unsigned.
4064 The tests above have already verified that LOW and HIGH
4065 are both positive.
4066
4067 So we have to ensure that we will handle large unsigned
4068 values the same way that the current signed bounds treat
4069 negative values. */
4070
4071 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4072 {
4073 tree high_positive;
4074 tree equiv_type;
4075 /* For fixed-point modes, we need to pass the saturating flag
4076 as the 2nd parameter. */
4077 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4078 equiv_type = lang_hooks.types.type_for_mode
4079 (TYPE_MODE (arg0_type),
4080 TYPE_SATURATING (arg0_type));
4081 else
4082 equiv_type = lang_hooks.types.type_for_mode
4083 (TYPE_MODE (arg0_type), 1);
4084
4085 /* A range without an upper bound is, naturally, unbounded.
4086 Since convert would have cropped a very large value, use
4087 the max value for the destination type. */
4088 high_positive
4089 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4090 : TYPE_MAX_VALUE (arg0_type);
4091
4092 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4093 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4094 fold_convert_loc (loc, arg0_type,
4095 high_positive),
4096 build_int_cst (arg0_type, 1));
4097
4098 /* If the low bound is specified, "and" the range with the
4099 range for which the original unsigned value will be
4100 positive. */
4101 if (low != 0)
4102 {
4103 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4104 1, n_low, n_high, 1,
4105 fold_convert_loc (loc, arg0_type,
4106 integer_zero_node),
4107 high_positive))
4108 break;
4109
4110 in_p = (n_in_p == in_p);
4111 }
4112 else
4113 {
4114 /* Otherwise, "or" the range with the range of the input
4115 that will be interpreted as negative. */
4116 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4117 0, n_low, n_high, 1,
4118 fold_convert_loc (loc, arg0_type,
4119 integer_zero_node),
4120 high_positive))
4121 break;
4122
4123 in_p = (in_p != n_in_p);
4124 }
4125 }
4126
4127 exp = arg0;
4128 low = n_low, high = n_high;
4129 continue;
4130
4131 default:
4132 break;
4133 }
4134
4135 break;
4136 }
4137
4138 /* If EXP is a constant, we can evaluate whether this is true or false. */
4139 if (TREE_CODE (exp) == INTEGER_CST)
4140 {
4141 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4142 exp, 0, low, 0))
4143 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4144 exp, 1, high, 1)));
4145 low = high = 0;
4146 exp = 0;
4147 }
4148
4149 *pin_p = in_p, *plow = low, *phigh = high;
4150 return exp;
4151 }
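/* Illustrative sketch (not part of this file): one rewrite make_range
   performs.  For the PLUS_EXPR case, a constant is moved across the
   comparison, which is valid for signed X because overflow is undefined
   (*STRICT_OVERFLOW_P records that assumption):  */

static int example_make_range (int x)
{
  /* "x + 10 < 30" is turned into a range test on X itself:  */
  return x < 20;
}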
4152 \f
4153 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4154 type, TYPE, return an expression to test if EXP is in (or out of, depending
4155 on IN_P) the range. Return 0 if the test couldn't be created. */
4156
4157 tree
4158 build_range_check (location_t loc, tree type, tree exp, int in_p,
4159 tree low, tree high)
4160 {
4161 tree etype = TREE_TYPE (exp), value;
4162
4163 #ifdef HAVE_canonicalize_funcptr_for_compare
4164 /* Disable this optimization for function pointer expressions
4165 on targets that require function pointer canonicalization. */
4166 if (HAVE_canonicalize_funcptr_for_compare
4167 && TREE_CODE (etype) == POINTER_TYPE
4168 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4169 return NULL_TREE;
4170 #endif
4171
4172 if (! in_p)
4173 {
4174 value = build_range_check (loc, type, exp, 1, low, high);
4175 if (value != 0)
4176 return invert_truthvalue_loc (loc, value);
4177
4178 return 0;
4179 }
4180
4181 if (low == 0 && high == 0)
4182 return build_int_cst (type, 1);
4183
4184 if (low == 0)
4185 return fold_build2_loc (loc, LE_EXPR, type, exp,
4186 fold_convert_loc (loc, etype, high));
4187
4188 if (high == 0)
4189 return fold_build2_loc (loc, GE_EXPR, type, exp,
4190 fold_convert_loc (loc, etype, low));
4191
4192 if (operand_equal_p (low, high, 0))
4193 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4194 fold_convert_loc (loc, etype, low));
4195
4196 if (integer_zerop (low))
4197 {
4198 if (! TYPE_UNSIGNED (etype))
4199 {
4200 etype = unsigned_type_for (etype);
4201 high = fold_convert_loc (loc, etype, high);
4202 exp = fold_convert_loc (loc, etype, exp);
4203 }
4204 return build_range_check (loc, type, exp, 1, 0, high);
4205 }
4206
4207 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4208 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4209 {
4210 unsigned HOST_WIDE_INT lo;
4211 HOST_WIDE_INT hi;
4212 int prec;
4213
4214 prec = TYPE_PRECISION (etype);
4215 if (prec <= HOST_BITS_PER_WIDE_INT)
4216 {
4217 hi = 0;
4218 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4219 }
4220 else
4221 {
4222 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4223 lo = (unsigned HOST_WIDE_INT) -1;
4224 }
4225
4226 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4227 {
4228 if (TYPE_UNSIGNED (etype))
4229 {
4230 tree signed_etype = signed_type_for (etype);
4231 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4232 etype
4233 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4234 else
4235 etype = signed_etype;
4236 exp = fold_convert_loc (loc, etype, exp);
4237 }
4238 return fold_build2_loc (loc, GT_EXPR, type, exp,
4239 build_int_cst (etype, 0));
4240 }
4241 }
4242
4243 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4244 This requires wrap-around arithmetic for the type of the expression.
4245 First make sure that arithmetic in this type is valid, then make sure
4246 that it wraps around. */
4247 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4248 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4249 TYPE_UNSIGNED (etype));
4250
4251 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4252 {
4253 tree utype, minv, maxv;
4254
4255 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4256 for the type in question, as we rely on this here. */
4257 utype = unsigned_type_for (etype);
4258 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4259 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4260 integer_one_node, 1);
4261 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4262
4263 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4264 minv, 1, maxv, 1)))
4265 etype = utype;
4266 else
4267 return 0;
4268 }
4269
4270 high = fold_convert_loc (loc, etype, high);
4271 low = fold_convert_loc (loc, etype, low);
4272 exp = fold_convert_loc (loc, etype, exp);
4273
4274 value = const_binop (MINUS_EXPR, high, low);
4275
4276
4277 if (POINTER_TYPE_P (etype))
4278 {
4279 if (value != 0 && !TREE_OVERFLOW (value))
4280 {
4281 low = fold_convert_loc (loc, sizetype, low);
4282 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4283 return build_range_check (loc, type,
4284 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4285 etype, exp, low),
4286 1, build_int_cst (etype, 0), value);
4287 }
4288 return 0;
4289 }
4290
4291 if (value != 0 && !TREE_OVERFLOW (value))
4292 return build_range_check (loc, type,
4293 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4294 1, build_int_cst (etype, 0), value);
4295
4296 return 0;
4297 }
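/* Illustrative sketch (not part of this file): the LOW == 1 special case
   above, for 8-bit quantities.  "c >= 1 && c <= 127" collapses into a
   single signed test, assuming the usual two's complement conversion
   GCC provides:  */

static int example_1_to_127 (unsigned char c)
{
  return (signed char) c > 0;   /* true exactly for 1 .. 127 */
}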
4298 \f
4299 /* Return the predecessor of VAL in its type, handling the infinite case. */
4300
4301 static tree
4302 range_predecessor (tree val)
4303 {
4304 tree type = TREE_TYPE (val);
4305
4306 if (INTEGRAL_TYPE_P (type)
4307 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4308 return 0;
4309 else
4310 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4311 }
4312
4313 /* Return the successor of VAL in its type, handling the infinite case. */
4314
4315 static tree
4316 range_successor (tree val)
4317 {
4318 tree type = TREE_TYPE (val);
4319
4320 if (INTEGRAL_TYPE_P (type)
4321 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4322 return 0;
4323 else
4324 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4325 }
4326
4327 /* Given two ranges, see if we can merge them into one. Return 1 if we
4328 can, 0 if we can't. Set the output range into the specified parameters. */
4329
4330 bool
4331 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4332 tree high0, int in1_p, tree low1, tree high1)
4333 {
4334 int no_overlap;
4335 int subset;
4336 int temp;
4337 tree tem;
4338 int in_p;
4339 tree low, high;
4340 int lowequal = ((low0 == 0 && low1 == 0)
4341 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4342 low0, 0, low1, 0)));
4343 int highequal = ((high0 == 0 && high1 == 0)
4344 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4345 high0, 1, high1, 1)));
4346
4347 /* Make range 0 be the range that starts first, or ends last if they
4348 start at the same value. Swap them if it isn't. */
4349 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4350 low0, 0, low1, 0))
4351 || (lowequal
4352 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4353 high1, 1, high0, 1))))
4354 {
4355 temp = in0_p, in0_p = in1_p, in1_p = temp;
4356 tem = low0, low0 = low1, low1 = tem;
4357 tem = high0, high0 = high1, high1 = tem;
4358 }
4359
4360 /* Now flag two cases, whether the ranges are disjoint or whether the
4361 second range is totally subsumed in the first. Note that the tests
4362 below are simplified by the ones above. */
4363 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4364 high0, 1, low1, 0));
4365 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4366 high1, 1, high0, 1));
4367
4368 /* We now have four cases, depending on whether we are including or
4369 excluding the two ranges. */
4370 if (in0_p && in1_p)
4371 {
4372 /* If they don't overlap, the result is false. If the second range
4373 is a subset it is the result. Otherwise, the range is from the start
4374 of the second to the end of the first. */
4375 if (no_overlap)
4376 in_p = 0, low = high = 0;
4377 else if (subset)
4378 in_p = 1, low = low1, high = high1;
4379 else
4380 in_p = 1, low = low1, high = high0;
4381 }
4382
4383 else if (in0_p && ! in1_p)
4384 {
4385 /* If they don't overlap, the result is the first range. If they are
4386 equal, the result is false. If the second range is a subset of the
4387 first, and the ranges begin at the same place, we go from just after
4388 the end of the second range to the end of the first. If the second
4389 range is not a subset of the first, or if it is a subset and both
4390 ranges end at the same place, the range starts at the start of the
4391 first range and ends just before the second range.
4392 Otherwise, we can't describe this as a single range. */
4393 if (no_overlap)
4394 in_p = 1, low = low0, high = high0;
4395 else if (lowequal && highequal)
4396 in_p = 0, low = high = 0;
4397 else if (subset && lowequal)
4398 {
4399 low = range_successor (high1);
4400 high = high0;
4401 in_p = 1;
4402 if (low == 0)
4403 {
4404 /* We are in the weird situation where high0 > high1 but
4405 high1 has no successor. Punt. */
4406 return 0;
4407 }
4408 }
4409 else if (! subset || highequal)
4410 {
4411 low = low0;
4412 high = range_predecessor (low1);
4413 in_p = 1;
4414 if (high == 0)
4415 {
4416 /* low0 < low1 but low1 has no predecessor. Punt. */
4417 return 0;
4418 }
4419 }
4420 else
4421 return 0;
4422 }
4423
4424 else if (! in0_p && in1_p)
4425 {
4426 /* If they don't overlap, the result is the second range. If the second
4427 is a subset of the first, the result is false. Otherwise,
4428 the range starts just after the first range and ends at the
4429 end of the second. */
4430 if (no_overlap)
4431 in_p = 1, low = low1, high = high1;
4432 else if (subset || highequal)
4433 in_p = 0, low = high = 0;
4434 else
4435 {
4436 low = range_successor (high0);
4437 high = high1;
4438 in_p = 1;
4439 if (low == 0)
4440 {
4441 /* high1 > high0 but high0 has no successor. Punt. */
4442 return 0;
4443 }
4444 }
4445 }
4446
4447 else
4448 {
4449 /* The case where we are excluding both ranges. Here the complex case
4450 is if they don't overlap. In that case, the only time we have a
4451 range is if they are adjacent. If the second is a subset of the
4452 first, the result is the first. Otherwise, the range to exclude
4453 starts at the beginning of the first range and ends at the end of the
4454 second. */
4455 if (no_overlap)
4456 {
4457 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4458 range_successor (high0),
4459 1, low1, 0)))
4460 in_p = 0, low = low0, high = high1;
4461 else
4462 {
4463 /* Canonicalize - [min, x] into - [-, x]. */
4464 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4465 switch (TREE_CODE (TREE_TYPE (low0)))
4466 {
4467 case ENUMERAL_TYPE:
4468 if (TYPE_PRECISION (TREE_TYPE (low0))
4469 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4470 break;
4471 /* FALLTHROUGH */
4472 case INTEGER_TYPE:
4473 if (tree_int_cst_equal (low0,
4474 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4475 low0 = 0;
4476 break;
4477 case POINTER_TYPE:
4478 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4479 && integer_zerop (low0))
4480 low0 = 0;
4481 break;
4482 default:
4483 break;
4484 }
4485
4486 /* Canonicalize - [x, max] into - [x, -]. */
4487 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4488 switch (TREE_CODE (TREE_TYPE (high1)))
4489 {
4490 case ENUMERAL_TYPE:
4491 if (TYPE_PRECISION (TREE_TYPE (high1))
4492 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4493 break;
4494 /* FALLTHROUGH */
4495 case INTEGER_TYPE:
4496 if (tree_int_cst_equal (high1,
4497 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4498 high1 = 0;
4499 break;
4500 case POINTER_TYPE:
4501 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4502 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4503 high1, 1,
4504 integer_one_node, 1)))
4505 high1 = 0;
4506 break;
4507 default:
4508 break;
4509 }
4510
4511 /* The ranges might also be adjacent between the maximum and
4512 minimum values of the given type. For
4513 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4514 return + [x + 1, y - 1]. */
4515 if (low0 == 0 && high1 == 0)
4516 {
4517 low = range_successor (high0);
4518 high = range_predecessor (low1);
4519 if (low == 0 || high == 0)
4520 return 0;
4521
4522 in_p = 1;
4523 }
4524 else
4525 return 0;
4526 }
4527 }
4528 else if (subset)
4529 in_p = 0, low = low0, high = high0;
4530 else
4531 in_p = 0, low = low0, high = high1;
4532 }
4533
4534 *pin_p = in_p, *plow = low, *phigh = high;
4535 return 1;
4536 }
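/* Illustrative sketch (not part of this file): the kind of merge done
   above.  The adjacent "in" ranges [2, 5] and [6, 9] combine into the
   single range [2, 9], which build_range_check then emits as one
   unsigned comparison:  */

static int example_merged_ranges (int x)
{
  /* (x >= 2 && x <= 5) || (x >= 6 && x <= 9) becomes:  */
  return (unsigned) (x - 2) <= 7;
}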
4537 \f
4538
4539 /* Subroutine of fold, looking inside expressions of the form
4540 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4541 of the COND_EXPR. This function is being used also to optimize
4542 A op B ? C : A, by reversing the comparison first.
4543
4544 Return a folded expression whose code is not a COND_EXPR
4545 anymore, or NULL_TREE if no folding opportunity is found. */
4546
4547 static tree
4548 fold_cond_expr_with_comparison (location_t loc, tree type,
4549 tree arg0, tree arg1, tree arg2)
4550 {
4551 enum tree_code comp_code = TREE_CODE (arg0);
4552 tree arg00 = TREE_OPERAND (arg0, 0);
4553 tree arg01 = TREE_OPERAND (arg0, 1);
4554 tree arg1_type = TREE_TYPE (arg1);
4555 tree tem;
4556
4557 STRIP_NOPS (arg1);
4558 STRIP_NOPS (arg2);
4559
4560 /* If we have A op 0 ? A : -A, consider applying the following
4561 transformations:
4562
4563 A == 0? A : -A same as -A
4564 A != 0? A : -A same as A
4565 A >= 0? A : -A same as abs (A)
4566 A > 0? A : -A same as abs (A)
4567 A <= 0? A : -A same as -abs (A)
4568 A < 0? A : -A same as -abs (A)
4569
4570 None of these transformations work for modes with signed
4571 zeros. If A is +/-0, the first two transformations will
4572 change the sign of the result (from +0 to -0, or vice
4573 versa). The last four will fix the sign of the result,
4574 even though the original expressions could be positive or
4575 negative, depending on the sign of A.
4576
4577 Note that all these transformations are correct if A is
4578 NaN, since the two alternatives (A and -A) are also NaNs. */
4579 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4580 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4581 ? real_zerop (arg01)
4582 : integer_zerop (arg01))
4583 && ((TREE_CODE (arg2) == NEGATE_EXPR
4584 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4585 /* In the case that A is of the form X-Y, '-A' (arg2) may
4586 have already been folded to Y-X; check for that. */
4587 || (TREE_CODE (arg1) == MINUS_EXPR
4588 && TREE_CODE (arg2) == MINUS_EXPR
4589 && operand_equal_p (TREE_OPERAND (arg1, 0),
4590 TREE_OPERAND (arg2, 1), 0)
4591 && operand_equal_p (TREE_OPERAND (arg1, 1),
4592 TREE_OPERAND (arg2, 0), 0))))
4593 switch (comp_code)
4594 {
4595 case EQ_EXPR:
4596 case UNEQ_EXPR:
4597 tem = fold_convert_loc (loc, arg1_type, arg1);
4598 return pedantic_non_lvalue_loc (loc,
4599 fold_convert_loc (loc, type,
4600 negate_expr (tem)));
4601 case NE_EXPR:
4602 case LTGT_EXPR:
4603 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4604 case UNGE_EXPR:
4605 case UNGT_EXPR:
4606 if (flag_trapping_math)
4607 break;
4608 /* Fall through. */
4609 case GE_EXPR:
4610 case GT_EXPR:
4611 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4612 arg1 = fold_convert_loc (loc, signed_type_for
4613 (TREE_TYPE (arg1)), arg1);
4614 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4615 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4616 case UNLE_EXPR:
4617 case UNLT_EXPR:
4618 if (flag_trapping_math)
4619 break;
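/* Fall through. */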
4620 case LE_EXPR:
4621 case LT_EXPR:
4622 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4623 arg1 = fold_convert_loc (loc, signed_type_for
4624 (TREE_TYPE (arg1)), arg1);
4625 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4626 return negate_expr (fold_convert_loc (loc, type, tem));
4627 default:
4628 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4629 break;
4630 }
4631
4632 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4633 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4634 both transformations are correct when A is NaN: A != 0
4635 is then true, and A == 0 is false. */
4636
4637 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4638 && integer_zerop (arg01) && integer_zerop (arg2))
4639 {
4640 if (comp_code == NE_EXPR)
4641 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4642 else if (comp_code == EQ_EXPR)
4643 return build_int_cst (type, 0);
4644 }
4645
4646 /* Try some transformations of A op B ? A : B.
4647
4648 A == B? A : B same as B
4649 A != B? A : B same as A
4650 A >= B? A : B same as max (A, B)
4651 A > B? A : B same as max (B, A)
4652 A <= B? A : B same as min (A, B)
4653 A < B? A : B same as min (B, A)
4654
4655 As above, these transformations don't work in the presence
4656 of signed zeros. For example, if A and B are zeros of
4657 opposite sign, the first two transformations will change
4658 the sign of the result. In the last four, the original
4659 expressions give different results for (A=+0, B=-0) and
4660 (A=-0, B=+0), but the transformed expressions do not.
4661
4662 The first two transformations are correct if either A or B
4663 is a NaN. In the first transformation, the condition will
4664 be false, and B will indeed be chosen. In the case of the
4665 second transformation, the condition A != B will be true,
4666 and A will be chosen.
4667
4668 The conversions to max() and min() are not correct if B is
4669 a number and A is not. The conditions in the original
4670 expressions will be false, so all four give B. The min()
4671 and max() versions would give a NaN instead. */
4672 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4673 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4674 /* Avoid these transformations if the COND_EXPR may be used
4675 as an lvalue in the C++ front-end. PR c++/19199. */
4676 && (in_gimple_form
4677 || (strcmp (lang_hooks.name, "GNU C++") != 0
4678 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4679 || ! maybe_lvalue_p (arg1)
4680 || ! maybe_lvalue_p (arg2)))
4681 {
4682 tree comp_op0 = arg00;
4683 tree comp_op1 = arg01;
4684 tree comp_type = TREE_TYPE (comp_op0);
4685
4686 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4687 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4688 {
4689 comp_type = type;
4690 comp_op0 = arg1;
4691 comp_op1 = arg2;
4692 }
4693
4694 switch (comp_code)
4695 {
4696 case EQ_EXPR:
4697 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4698 case NE_EXPR:
4699 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4700 case LE_EXPR:
4701 case LT_EXPR:
4702 case UNLE_EXPR:
4703 case UNLT_EXPR:
4704 /* In C++ a ?: expression can be an lvalue, so put the
4705 operand which will be used if they are equal first
4706 so that we can convert this back to the
4707 corresponding COND_EXPR. */
4708 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4709 {
4710 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4711 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4712 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4713 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4714 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4715 comp_op1, comp_op0);
4716 return pedantic_non_lvalue_loc (loc,
4717 fold_convert_loc (loc, type, tem));
4718 }
4719 break;
4720 case GE_EXPR:
4721 case GT_EXPR:
4722 case UNGE_EXPR:
4723 case UNGT_EXPR:
4724 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4725 {
4726 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4727 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4728 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4729 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4730 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4731 comp_op1, comp_op0);
4732 return pedantic_non_lvalue_loc (loc,
4733 fold_convert_loc (loc, type, tem));
4734 }
4735 break;
4736 case UNEQ_EXPR:
4737 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4738 return pedantic_non_lvalue_loc (loc,
4739 fold_convert_loc (loc, type, arg2));
4740 break;
4741 case LTGT_EXPR:
4742 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4743 return pedantic_non_lvalue_loc (loc,
4744 fold_convert_loc (loc, type, arg1));
4745 break;
4746 default:
4747 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4748 break;
4749 }
4750 }
4751
4752 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4753 we might still be able to simplify this. For example,
4754 if C1 is one less or one more than C2, this might have started
4755 out as a MIN or MAX and been transformed by this function.
4756 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4757
4758 if (INTEGRAL_TYPE_P (type)
4759 && TREE_CODE (arg01) == INTEGER_CST
4760 && TREE_CODE (arg2) == INTEGER_CST)
4761 switch (comp_code)
4762 {
4763 case EQ_EXPR:
4764 if (TREE_CODE (arg1) == INTEGER_CST)
4765 break;
4766 /* We can replace A with C1 in this case. */
4767 arg1 = fold_convert_loc (loc, type, arg01);
4768 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4769
4770 case LT_EXPR:
4771 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4772 MIN_EXPR, to preserve the signedness of the comparison. */
4773 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4774 OEP_ONLY_CONST)
4775 && operand_equal_p (arg01,
4776 const_binop (PLUS_EXPR, arg2,
4777 build_int_cst (type, 1)),
4778 OEP_ONLY_CONST))
4779 {
4780 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4781 fold_convert_loc (loc, TREE_TYPE (arg00),
4782 arg2));
4783 return pedantic_non_lvalue_loc (loc,
4784 fold_convert_loc (loc, type, tem));
4785 }
4786 break;
4787
4788 case LE_EXPR:
4789 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4790 as above. */
4791 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4792 OEP_ONLY_CONST)
4793 && operand_equal_p (arg01,
4794 const_binop (MINUS_EXPR, arg2,
4795 build_int_cst (type, 1)),
4796 OEP_ONLY_CONST))
4797 {
4798 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4799 fold_convert_loc (loc, TREE_TYPE (arg00),
4800 arg2));
4801 return pedantic_non_lvalue_loc (loc,
4802 fold_convert_loc (loc, type, tem));
4803 }
4804 break;
4805
4806 case GT_EXPR:
4807 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4808 MAX_EXPR, to preserve the signedness of the comparison. */
4809 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4810 OEP_ONLY_CONST)
4811 && operand_equal_p (arg01,
4812 const_binop (MINUS_EXPR, arg2,
4813 build_int_cst (type, 1)),
4814 OEP_ONLY_CONST))
4815 {
4816 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4817 fold_convert_loc (loc, TREE_TYPE (arg00),
4818 arg2));
4819 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4820 }
4821 break;
4822
4823 case GE_EXPR:
4824 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4825 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4826 OEP_ONLY_CONST)
4827 && operand_equal_p (arg01,
4828 const_binop (PLUS_EXPR, arg2,
4829 build_int_cst (type, 1)),
4830 OEP_ONLY_CONST))
4831 {
4832 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4833 fold_convert_loc (loc, TREE_TYPE (arg00),
4834 arg2));
4835 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4836 }
4837 break;
4838 case NE_EXPR:
4839 break;
4840 default:
4841 gcc_unreachable ();
4842 }
4843
4844 return NULL_TREE;
4845 }
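/* Illustrative sketch (not part of this file): one row of the
   "A op 0 ? A : -A" table above.  For integer types, where signed zeros
   do not exist, the conditional and ABS_EXPR forms are interchangeable:  */

static int example_abs_via_cond (int a)
{
  return a >= 0 ? a : -a;   /* folded to ABS_EXPR <a> */
}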
4846
4847
4848 \f
4849 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4850 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4851 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4852 false) >= 2)
4853 #endif
4854
4855 /* EXP is some logical combination of boolean tests. See if we can
4856 merge it into some range test. Return the new tree if so. */
4857
4858 static tree
4859 fold_range_test (location_t loc, enum tree_code code, tree type,
4860 tree op0, tree op1)
4861 {
4862 int or_op = (code == TRUTH_ORIF_EXPR
4863 || code == TRUTH_OR_EXPR);
4864 int in0_p, in1_p, in_p;
4865 tree low0, low1, low, high0, high1, high;
4866 bool strict_overflow_p = false;
4867 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4868 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4869 tree tem;
4870 const char * const warnmsg = G_("assuming signed overflow does not occur "
4871 "when simplifying range test");
4872
4873 /* If this is an OR operation, invert both sides; we will invert
4874 again at the end. */
4875 if (or_op)
4876 in0_p = ! in0_p, in1_p = ! in1_p;
4877
4878 /* If both expressions are the same, if we can merge the ranges, and we
4879 can build the range test, return it or it inverted. If one of the
4880 ranges is always true or always false, consider it to be the same
4881 expression as the other. */
4882 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4883 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4884 in1_p, low1, high1)
4885 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4886 lhs != 0 ? lhs
4887 : rhs != 0 ? rhs : integer_zero_node,
4888 in_p, low, high))))
4889 {
4890 if (strict_overflow_p)
4891 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4892 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4893 }
4894
4895 /* On machines where the branch cost is expensive, if this is a
4896 short-circuited branch and the underlying object on both sides
4897 is the same, make a non-short-circuit operation. */
4898 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4899 && lhs != 0 && rhs != 0
4900 && (code == TRUTH_ANDIF_EXPR
4901 || code == TRUTH_ORIF_EXPR)
4902 && operand_equal_p (lhs, rhs, 0))
4903 {
4904 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4905 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4906 which cases we can't do this. */
4907 if (simple_operand_p (lhs))
4908 {
4909 tem = build2 (code == TRUTH_ANDIF_EXPR
4910 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4911 type, op0, op1);
4912 SET_EXPR_LOCATION (tem, loc);
4913 return tem;
4914 }
4915
4916 else if (lang_hooks.decls.global_bindings_p () == 0
4917 && ! CONTAINS_PLACEHOLDER_P (lhs))
4918 {
4919 tree common = save_expr (lhs);
4920
4921 if (0 != (lhs = build_range_check (loc, type, common,
4922 or_op ? ! in0_p : in0_p,
4923 low0, high0))
4924 && (0 != (rhs = build_range_check (loc, type, common,
4925 or_op ? ! in1_p : in1_p,
4926 low1, high1))))
4927 {
4928 if (strict_overflow_p)
4929 fold_overflow_warning (warnmsg,
4930 WARN_STRICT_OVERFLOW_COMPARISON);
4931 tem = build2 (code == TRUTH_ANDIF_EXPR
4932 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4933 type, lhs, rhs);
4934 SET_EXPR_LOCATION (tem, loc);
4935 return tem;
4936 }
4937 }
4938 }
4939
4940 return 0;
4941 }
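/* Illustrative sketch (not part of this file): the non-short-circuit path
   above.  The ranges [2, 2] and [7, 7] do not merge, but X is a simple
   operand, so TRUTH_ORIF_EXPR is rewritten to the unconditional
   TRUTH_OR_EXPR, avoiding a branch:  */

static int example_non_short_circuit (int x)
{
  /* (x == 2 || x == 7) becomes the branch-free:  */
  return (x == 2) | (x == 7);
}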
4942 \f
4943 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4944 bit value. Arrange things so the extra bits will be set to zero if and
4945 only if C is sign-extended to its full width. If MASK is nonzero,
4946 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4947
4948 static tree
4949 unextend (tree c, int p, int unsignedp, tree mask)
4950 {
4951 tree type = TREE_TYPE (c);
4952 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4953 tree temp;
4954
4955 if (p == modesize || unsignedp)
4956 return c;
4957
4958 /* We work by getting just the sign bit into the low-order bit, then
4959 into the high-order bit, then sign-extend. We then XOR that value
4960 with C. */
4961 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4962 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4963
4964 /* We must use a signed type in order to get an arithmetic right shift.
4965 However, we must also avoid introducing accidental overflows, so that
4966 a subsequent call to integer_zerop will work. Hence we must
4967 do the type conversion here. At this point, the constant is either
4968 zero or one, and the conversion to a signed type can never overflow.
4969 We could get an overflow if this conversion is done anywhere else. */
4970 if (TYPE_UNSIGNED (type))
4971 temp = fold_convert (signed_type_for (type), temp);
4972
4973 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4974 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4975 if (mask != 0)
4976 temp = const_binop (BIT_AND_EXPR, temp,
4977 fold_convert (TREE_TYPE (c), mask));
4978 /* If necessary, convert the type back to match the type of C. */
4979 if (TYPE_UNSIGNED (type))
4980 temp = fold_convert (type, temp);
4981
4982 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4983 }
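/* Illustrative sketch (not part of this file): what unextend computes,
   specialised to P == 8 in an assumed 32-bit type.  The XOR zeroes the
   high-order bits exactly when C already holds the sign-extension of its
   low 8 bits:  */

static unsigned int example_unextend_8 (unsigned int c)
{
  unsigned int sign = (c >> 7) & 1u;            /* sign bit of the 8-bit value */
  unsigned int ext = sign ? 0xffffff00u : 0u;   /* its sign-extension pattern */
  return c ^ ext;   /* high bits become zero iff C was sign-extended */
}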
4984 \f
4985 /* For an expression that has the form
4986 (A && B) || ~B
4987 or
4988 (A || B) && ~B,
4989 we can drop one of the inner expressions and simplify to
4990 A || ~B
4991 or
4992 A && ~B
4993 LOC is the location of the resulting expression. OP is the inner
4994 logical operation; the left-hand side in the examples above, while CMPOP
4995 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4996 removing a condition that guards another, as in
4997 (A != NULL && A->...) || A == NULL
4998 which we must not transform. If RHS_ONLY is true, only eliminate the
4999 right-most operand of the inner logical operation. */
5000
5001 static tree
5002 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5003 bool rhs_only)
5004 {
5005 tree type = TREE_TYPE (cmpop);
5006 enum tree_code code = TREE_CODE (cmpop);
5007 enum tree_code truthop_code = TREE_CODE (op);
5008 tree lhs = TREE_OPERAND (op, 0);
5009 tree rhs = TREE_OPERAND (op, 1);
5010 tree orig_lhs = lhs, orig_rhs = rhs;
5011 enum tree_code rhs_code = TREE_CODE (rhs);
5012 enum tree_code lhs_code = TREE_CODE (lhs);
5013 enum tree_code inv_code;
5014
5015 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5016 return NULL_TREE;
5017
5018 if (TREE_CODE_CLASS (code) != tcc_comparison)
5019 return NULL_TREE;
5020
5021 if (rhs_code == truthop_code)
5022 {
5023 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5024 if (newrhs != NULL_TREE)
5025 {
5026 rhs = newrhs;
5027 rhs_code = TREE_CODE (rhs);
5028 }
5029 }
5030 if (lhs_code == truthop_code && !rhs_only)
5031 {
5032 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5033 if (newlhs != NULL_TREE)
5034 {
5035 lhs = newlhs;
5036 lhs_code = TREE_CODE (lhs);
5037 }
5038 }
5039
5040 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5041 if (inv_code == rhs_code
5042 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5043 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5044 return lhs;
5045 if (!rhs_only && inv_code == lhs_code
5046 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5047 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5048 return rhs;
5049 if (rhs != orig_rhs || lhs != orig_lhs)
5050 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5051 lhs, rhs);
5052 return NULL_TREE;
5053 }
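/* Illustrative sketch (not part of this file): the identity used by
   merge_truthop_with_opposite_arm.  When B is true both forms reduce to A;
   when B is false both are true, so the inner B is redundant:  */

static int example_drop_opposite_arm (int a, int b)
{
  /* (a < 10 && b != 0) || b == 0 simplifies to:  */
  return a < 10 || b == 0;
}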
5054
5055 /* Find ways of folding logical expressions of LHS and RHS:
5056 Try to merge two comparisons to the same innermost item.
5057 Look for range tests like "ch >= '0' && ch <= '9'".
5058 Look for combinations of simple terms on machines with expensive branches
5059 and evaluate the RHS unconditionally.
5060
5061 For example, if we have p->a == 2 && p->b == 4 and we can make an
5062 object large enough to span both A and B, we can do this with a comparison
5063 against the object ANDed with a mask.
5064
5065 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5066 operations to do this with one comparison.
5067
5068 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5069 function and the one above.
5070
5071 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5072 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5073
5074 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5075 two operands.
5076
5077 We return the simplified tree or 0 if no optimization is possible. */
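/* Illustrative sketch (not part of this file): one merge the function
   below performs when branches are expensive and the operands are cheap.
   Two equality tests against zero collapse into a single bitwise OR and
   one comparison:  */

static int example_both_zero (int a, int b)
{
  /* a == 0 && b == 0 becomes:  */
  return (a | b) == 0;
}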
5078
5079 static tree
5080 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5081 tree lhs, tree rhs)
5082 {
5083 /* If this is the "or" of two comparisons, we can do something if
5084 the comparisons are NE_EXPR. If this is the "and", we can do something
5085 if the comparisons are EQ_EXPR. I.e.,
5086 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5087
5088 WANTED_CODE is this operation code. For single bit fields, we can
5089 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5090 comparison for one-bit fields. */
5091
5092 enum tree_code wanted_code;
5093 enum tree_code lcode, rcode;
5094 tree ll_arg, lr_arg, rl_arg, rr_arg;
5095 tree ll_inner, lr_inner, rl_inner, rr_inner;
5096 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5097 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5098 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5099 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5100 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5101 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5102 enum machine_mode lnmode, rnmode;
5103 tree ll_mask, lr_mask, rl_mask, rr_mask;
5104 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5105 tree l_const, r_const;
5106 tree lntype, rntype, result;
5107 HOST_WIDE_INT first_bit, end_bit;
5108 int volatilep;
5109 tree orig_lhs = lhs, orig_rhs = rhs;
5110 enum tree_code orig_code = code;
5111
5112 /* Start by getting the comparison codes. Fail if anything is volatile.
5113 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5114 it were surrounded with a NE_EXPR. */
5115
5116 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5117 return 0;
5118
5119 lcode = TREE_CODE (lhs);
5120 rcode = TREE_CODE (rhs);
5121
5122 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5123 {
5124 lhs = build2 (NE_EXPR, truth_type, lhs,
5125 build_int_cst (TREE_TYPE (lhs), 0));
5126 lcode = NE_EXPR;
5127 }
5128
5129 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5130 {
5131 rhs = build2 (NE_EXPR, truth_type, rhs,
5132 build_int_cst (TREE_TYPE (rhs), 0));
5133 rcode = NE_EXPR;
5134 }
5135
5136 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5137 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5138 return 0;
5139
5140 ll_arg = TREE_OPERAND (lhs, 0);
5141 lr_arg = TREE_OPERAND (lhs, 1);
5142 rl_arg = TREE_OPERAND (rhs, 0);
5143 rr_arg = TREE_OPERAND (rhs, 1);
5144
5145 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5146 if (simple_operand_p (ll_arg)
5147 && simple_operand_p (lr_arg))
5148 {
5149 tree result;
5150 if (operand_equal_p (ll_arg, rl_arg, 0)
5151 && operand_equal_p (lr_arg, rr_arg, 0))
5152 {
5153 result = combine_comparisons (loc, code, lcode, rcode,
5154 truth_type, ll_arg, lr_arg);
5155 if (result)
5156 return result;
5157 }
5158 else if (operand_equal_p (ll_arg, rr_arg, 0)
5159 && operand_equal_p (lr_arg, rl_arg, 0))
5160 {
5161 result = combine_comparisons (loc, code, lcode,
5162 swap_tree_comparison (rcode),
5163 truth_type, ll_arg, lr_arg);
5164 if (result)
5165 return result;
5166 }
5167 }
5168
5169 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5170 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5171
5172 /* If the RHS can be evaluated unconditionally and its operands are
5173 simple, it wins to evaluate the RHS unconditionally on machines
5174 with expensive branches. In this case, this isn't a comparison
5175 that can be merged. Avoid doing this if the RHS is a floating-point
5176 comparison since those can trap. */
5177
5178 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5179 false) >= 2
5180 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5181 && simple_operand_p (rl_arg)
5182 && simple_operand_p (rr_arg))
5183 {
5184 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5185 if (code == TRUTH_OR_EXPR
5186 && lcode == NE_EXPR && integer_zerop (lr_arg)
5187 && rcode == NE_EXPR && integer_zerop (rr_arg)
5188 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5189 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5190 {
5191 result = build2 (NE_EXPR, truth_type,
5192 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5193 ll_arg, rl_arg),
5194 build_int_cst (TREE_TYPE (ll_arg), 0));
5195 goto fold_truthop_exit;
5196 }
5197
5198 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5199 if (code == TRUTH_AND_EXPR
5200 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5201 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5202 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5203 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5204 {
5205 result = build2 (EQ_EXPR, truth_type,
5206 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5207 ll_arg, rl_arg),
5208 build_int_cst (TREE_TYPE (ll_arg), 0));
5209 goto fold_truthop_exit;
5210 }
5211
5212 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5213 {
5214 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5215 {
5216 result = build2 (code, truth_type, lhs, rhs);
5217 goto fold_truthop_exit;
5218 }
5219 return NULL_TREE;
5220 }
5221 }
5222
5223 /* See if the comparisons can be merged. Then get all the parameters for
5224 each side. */
5225
5226 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5227 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5228 return 0;
5229
5230 volatilep = 0;
5231 ll_inner = decode_field_reference (loc, ll_arg,
5232 &ll_bitsize, &ll_bitpos, &ll_mode,
5233 &ll_unsignedp, &volatilep, &ll_mask,
5234 &ll_and_mask);
5235 lr_inner = decode_field_reference (loc, lr_arg,
5236 &lr_bitsize, &lr_bitpos, &lr_mode,
5237 &lr_unsignedp, &volatilep, &lr_mask,
5238 &lr_and_mask);
5239 rl_inner = decode_field_reference (loc, rl_arg,
5240 &rl_bitsize, &rl_bitpos, &rl_mode,
5241 &rl_unsignedp, &volatilep, &rl_mask,
5242 &rl_and_mask);
5243 rr_inner = decode_field_reference (loc, rr_arg,
5244 &rr_bitsize, &rr_bitpos, &rr_mode,
5245 &rr_unsignedp, &volatilep, &rr_mask,
5246 &rr_and_mask);
5247
5248 /* The inner operation on the lhs of each comparison must be the
5249 same if we are to be able to do anything. Then see if we have
5250 constants. If not, the same must be true for
5251 the rhs's. */
5252 if (volatilep || ll_inner == 0 || rl_inner == 0
5253 || ! operand_equal_p (ll_inner, rl_inner, 0))
5254 return 0;
5255
5256 if (TREE_CODE (lr_arg) == INTEGER_CST
5257 && TREE_CODE (rr_arg) == INTEGER_CST)
5258 l_const = lr_arg, r_const = rr_arg;
5259 else if (lr_inner == 0 || rr_inner == 0
5260 || ! operand_equal_p (lr_inner, rr_inner, 0))
5261 return 0;
5262 else
5263 l_const = r_const = 0;
5264
5265 /* If either comparison code is not correct for our logical operation,
5266 fail. However, we can convert a one-bit comparison against zero into
5267 the opposite comparison against that bit being set in the field. */
5268
5269 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5270 if (lcode != wanted_code)
5271 {
5272 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5273 {
5274 /* Make the left operand unsigned, since we are only interested
5275 in the value of one bit. Otherwise we are doing the wrong
5276 thing below. */
5277 ll_unsignedp = 1;
5278 l_const = ll_mask;
5279 }
5280 else
5281 return 0;
5282 }
5283
5284 /* This is analogous to the code for l_const above. */
5285 if (rcode != wanted_code)
5286 {
5287 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5288 {
5289 rl_unsignedp = 1;
5290 r_const = rl_mask;
5291 }
5292 else
5293 return 0;
5294 }
5295
5296 /* See if we can find a mode that contains both fields being compared on
5297 the left. If we can't, fail. Otherwise, update all constants and masks
5298 to be relative to a field of that size. */
5299 first_bit = MIN (ll_bitpos, rl_bitpos);
5300 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5301 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5302 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5303 volatilep);
5304 if (lnmode == VOIDmode)
5305 return 0;
5306
5307 lnbitsize = GET_MODE_BITSIZE (lnmode);
5308 lnbitpos = first_bit & ~ (lnbitsize - 1);
5309 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5310 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5311
5312 if (BYTES_BIG_ENDIAN)
5313 {
5314 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5315 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5316 }
5317
5318 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5319 size_int (xll_bitpos));
5320 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5321 size_int (xrl_bitpos));
5322
5323 if (l_const)
5324 {
5325 l_const = fold_convert_loc (loc, lntype, l_const);
5326 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5327 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5328 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5329 fold_build1_loc (loc, BIT_NOT_EXPR,
5330 lntype, ll_mask))))
5331 {
5332 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5333
5334 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5335 }
5336 }
5337 if (r_const)
5338 {
5339 r_const = fold_convert_loc (loc, lntype, r_const);
5340 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5341 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5342 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5343 fold_build1_loc (loc, BIT_NOT_EXPR,
5344 lntype, rl_mask))))
5345 {
5346 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5347
5348 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5349 }
5350 }
5351
5352 /* If the right sides are not constant, do the same for them. Also,
5353 disallow this optimization if a size or signedness mismatch occurs
5354 between the left and right sides. */
5355 if (l_const == 0)
5356 {
5357 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5358 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5359 /* Make sure the two fields on the right
5360 correspond to the left without being swapped. */
5361 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5362 return 0;
5363
5364 first_bit = MIN (lr_bitpos, rr_bitpos);
5365 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5366 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5367 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5368 volatilep);
5369 if (rnmode == VOIDmode)
5370 return 0;
5371
5372 rnbitsize = GET_MODE_BITSIZE (rnmode);
5373 rnbitpos = first_bit & ~ (rnbitsize - 1);
5374 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5375 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5376
5377 if (BYTES_BIG_ENDIAN)
5378 {
5379 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5380 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5381 }
5382
5383 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5384 rntype, lr_mask),
5385 size_int (xlr_bitpos));
5386 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5387 rntype, rr_mask),
5388 size_int (xrr_bitpos));
5389
5390 /* Make a mask that corresponds to both fields being compared.
5391 Do this for both items being compared. If the operands are the
5392 same size and the bits being compared are in the same position
5393 then we can do this by masking both and comparing the masked
5394 results. */
5395 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5396 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5397 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5398 {
5399 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5400 ll_unsignedp || rl_unsignedp);
5401 if (! all_ones_mask_p (ll_mask, lnbitsize))
5402 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5403
5404 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5405 lr_unsignedp || rr_unsignedp);
5406 if (! all_ones_mask_p (lr_mask, rnbitsize))
5407 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5408
5409 result = build2 (wanted_code, truth_type, lhs, rhs);
5410 goto fold_truthop_exit;
5411 }
5412
5413 /* There is still another way we can do something: if both pairs of
5414 fields being compared are adjacent, we may be able to make a wider
5415 field containing them both.
5416
5417 Note that we still must mask the lhs/rhs expressions. Furthermore,
5418 the mask must be shifted to account for the shift done by
5419 make_bit_field_ref. */
5420 if ((ll_bitsize + ll_bitpos == rl_bitpos
5421 && lr_bitsize + lr_bitpos == rr_bitpos)
5422 || (ll_bitpos == rl_bitpos + rl_bitsize
5423 && lr_bitpos == rr_bitpos + rr_bitsize))
5424 {
5425 tree type;
5426
5427 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5428 ll_bitsize + rl_bitsize,
5429 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5430 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5431 lr_bitsize + rr_bitsize,
5432 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5433
5434 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5435 size_int (MIN (xll_bitpos, xrl_bitpos)));
5436 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5437 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5438
5439 /* Convert to the smaller type before masking out unwanted bits. */
5440 type = lntype;
5441 if (lntype != rntype)
5442 {
5443 if (lnbitsize > rnbitsize)
5444 {
5445 lhs = fold_convert_loc (loc, rntype, lhs);
5446 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5447 type = rntype;
5448 }
5449 else if (lnbitsize < rnbitsize)
5450 {
5451 rhs = fold_convert_loc (loc, lntype, rhs);
5452 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5453 type = lntype;
5454 }
5455 }
5456
5457 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5458 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5459
5460 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5461 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5462
5463 result = build2 (wanted_code, truth_type, lhs, rhs);
5464 goto fold_truthop_exit;
5465 }
5466
5467 return 0;
5468 }
5469
5470 /* Handle the case of comparisons with constants. If there is something in
5471 common between the masks, those bits of the constants must be the same.
5472 If not, the condition is always false. Test for this to avoid generating
5473 incorrect code below. */
5474 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5475 if (! integer_zerop (result)
5476 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5477 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5478 {
5479 if (wanted_code == NE_EXPR)
5480 {
5481 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5482 return constant_boolean_node (true, truth_type);
5483 }
5484 else
5485 {
5486 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5487 return constant_boolean_node (false, truth_type);
5488 }
5489 }
5490
5491 /* Construct the expression we will return. First get the component
5492 reference we will make. Unless the mask is all ones for the width of
5493 that field, perform the mask operation. Then compare with the
5494 merged constant. */
5495 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5496 ll_unsignedp || rl_unsignedp);
5497
5498 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5499 if (! all_ones_mask_p (ll_mask, lnbitsize))
5500 {
5501 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5502 SET_EXPR_LOCATION (result, loc);
5503 }
5504
5505 result = build2 (wanted_code, truth_type, result,
5506 const_binop (BIT_IOR_EXPR, l_const, r_const));
5507
5508 fold_truthop_exit:
5509 SET_EXPR_LOCATION (result, loc);
5510 return result;
5511 }
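
#if 0
/* Illustrative sketch only (hypothetical helper, not used anywhere):
   two of the rewrites performed above, checked directly at the
   source level.  */
static int
truthop_or_and_example (int a, int b)
{
  return ((a != 0 || b != 0) == ((a | b) != 0))
	 && ((a == 0 && b == 0) == ((a | b) == 0));
}
#endif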
5512 \f
5513 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5514 constant. */
5515
5516 static tree
5517 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5518 tree op0, tree op1)
5519 {
5520 tree arg0 = op0;
5521 enum tree_code op_code;
5522 tree comp_const;
5523 tree minmax_const;
5524 int consts_equal, consts_lt;
5525 tree inner;
5526
5527 STRIP_SIGN_NOPS (arg0);
5528
5529 op_code = TREE_CODE (arg0);
5530 minmax_const = TREE_OPERAND (arg0, 1);
5531 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5532 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5533 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5534 inner = TREE_OPERAND (arg0, 0);
5535
5536 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
5537 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5538 || TREE_CODE (comp_const) != INTEGER_CST
5539 || TREE_OVERFLOW (comp_const)
5540 || TREE_CODE (minmax_const) != INTEGER_CST
5541 || TREE_OVERFLOW (minmax_const))
5542 return NULL_TREE;
5543
5544 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5545 and GT_EXPR, doing the rest with recursive calls using logical
5546 simplifications. */
5547 switch (code)
5548 {
5549 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5550 {
5551 tree tem
5552 = optimize_minmax_comparison (loc,
5553 invert_tree_comparison (code, false),
5554 type, op0, op1);
5555 if (tem)
5556 return invert_truthvalue_loc (loc, tem);
5557 return NULL_TREE;
5558 }
5559
5560 case GE_EXPR:
5561 return
5562 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5563 optimize_minmax_comparison
5564 (loc, EQ_EXPR, type, arg0, comp_const),
5565 optimize_minmax_comparison
5566 (loc, GT_EXPR, type, arg0, comp_const));
5567
5568 case EQ_EXPR:
5569 if (op_code == MAX_EXPR && consts_equal)
5570 /* MAX (X, 0) == 0 -> X <= 0 */
5571 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5572
5573 else if (op_code == MAX_EXPR && consts_lt)
5574 /* MAX (X, 0) == 5 -> X == 5 */
5575 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5576
5577 else if (op_code == MAX_EXPR)
5578 /* MAX (X, 0) == -1 -> false */
5579 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5580
5581 else if (consts_equal)
5582 /* MIN (X, 0) == 0 -> X >= 0 */
5583 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5584
5585 else if (consts_lt)
5586 /* MIN (X, 0) == 5 -> false */
5587 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5588
5589 else
5590 /* MIN (X, 0) == -1 -> X == -1 */
5591 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5592
5593 case GT_EXPR:
5594 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5595 /* MAX (X, 0) > 0 -> X > 0
5596 MAX (X, 0) > 5 -> X > 5 */
5597 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5598
5599 else if (op_code == MAX_EXPR)
5600 /* MAX (X, 0) > -1 -> true */
5601 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5602
5603 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5604 /* MIN (X, 0) > 0 -> false
5605 MIN (X, 0) > 5 -> false */
5606 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5607
5608 else
5609 /* MIN (X, 0) > -1 -> X > -1 */
5610 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5611
5612 default:
5613 return NULL_TREE;
5614 }
5615 }
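
#if 0
/* Illustrative sketch only (hypothetical helper): two of the GT_EXPR
   rules above, checked directly.  MAX (x, 4) > 3 is always true and
   MIN (x, 4) > 4 is always false, so both comparisons fold away.  */
static int
minmax_compare_example (int x)
{
  int max4 = x > 4 ? x : 4;
  int min4 = x < 4 ? x : 4;
  return (max4 > 3) == 1 && (min4 > 4) == 0;
}
#endif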
5616 \f
5617 /* T is an integer expression that is being multiplied, divided, or taken a
5618 modulus (CODE says which and what kind of divide or modulus) by a
5619 constant C. See if we can eliminate that operation by folding it with
5620 other operations already in T. WIDE_TYPE, if non-null, is a type that
5621 should be used for the computation if wider than our type.
5622
5623 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5624 (X * 2) + (Y * 4). We must, however, be assured that either the original
5625 expression would not overflow or that overflow is undefined for the type
5626 in the language in question.
5627
5628 If we return a non-null expression, it is an equivalent form of the
5629 original computation, but need not be in the original type.
5630
5631 We set *STRICT_OVERFLOW_P to true if the return value depends on
5632 signed overflow being undefined. Otherwise we do not change
5633 *STRICT_OVERFLOW_P. */
5634
5635 static tree
5636 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5637 bool *strict_overflow_p)
5638 {
5639 /* To avoid exponential search depth, refuse to allow recursion past
5640 three levels. Beyond that (1) it's highly unlikely that we'll find
5641 something interesting and (2) we've probably processed it before
5642 when we built the inner expression. */
5643
5644 static int depth;
5645 tree ret;
5646
5647 if (depth > 3)
5648 return NULL;
5649
5650 depth++;
5651 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5652 depth--;
5653
5654 return ret;
5655 }
5656
5657 static tree
5658 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5659 bool *strict_overflow_p)
5660 {
5661 tree type = TREE_TYPE (t);
5662 enum tree_code tcode = TREE_CODE (t);
5663 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5664 > GET_MODE_SIZE (TYPE_MODE (type)))
5665 ? wide_type : type);
5666 tree t1, t2;
5667 int same_p = tcode == code;
5668 tree op0 = NULL_TREE, op1 = NULL_TREE;
5669 bool sub_strict_overflow_p;
5670
5671 /* Don't deal with constants of zero here; they confuse the code below. */
5672 if (integer_zerop (c))
5673 return NULL_TREE;
5674
5675 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5676 op0 = TREE_OPERAND (t, 0);
5677
5678 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5679 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5680
5681 /* Note that we need not handle conditional operations here since fold
5682 already handles those cases. So just do arithmetic here. */
5683 switch (tcode)
5684 {
5685 case INTEGER_CST:
5686 /* For a constant, we can always simplify if we are a multiply
5687 or (for divide and modulus) if it is a multiple of our constant. */
5688 if (code == MULT_EXPR
5689 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5690 return const_binop (code, fold_convert (ctype, t),
5691 fold_convert (ctype, c));
5692 break;
5693
5694 CASE_CONVERT: case NON_LVALUE_EXPR:
5695 /* If op0 is an expression ... */
5696 if ((COMPARISON_CLASS_P (op0)
5697 || UNARY_CLASS_P (op0)
5698 || BINARY_CLASS_P (op0)
5699 || VL_EXP_CLASS_P (op0)
5700 || EXPRESSION_CLASS_P (op0))
5701 /* ... and has wrapping overflow, and its type is smaller
5702 than ctype, then we cannot pass through as widening. */
5703 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5704 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5705 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5706 && (TYPE_PRECISION (ctype)
5707 > TYPE_PRECISION (TREE_TYPE (op0))))
5708 /* ... or this is a truncation (t is narrower than op0),
5709 then we cannot pass through this narrowing. */
5710 || (TYPE_PRECISION (type)
5711 < TYPE_PRECISION (TREE_TYPE (op0)))
5712 /* ... or signedness changes for division or modulus,
5713 then we cannot pass through this conversion. */
5714 || (code != MULT_EXPR
5715 && (TYPE_UNSIGNED (ctype)
5716 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5717 /* ... or has undefined overflow while the type converted
5718 to does not, then we cannot do the operation in the inner type
5719 as that would introduce undefined overflow. */
5720 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5721 && !TYPE_OVERFLOW_UNDEFINED (type))))
5722 break;
5723
5724 /* Pass the constant down and see if we can make a simplification. If
5725 we can, replace this expression with the inner simplification for
5726 possible later conversion to our or some other type. */
5727 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5728 && TREE_CODE (t2) == INTEGER_CST
5729 && !TREE_OVERFLOW (t2)
5730 && (0 != (t1 = extract_muldiv (op0, t2, code,
5731 code == MULT_EXPR
5732 ? ctype : NULL_TREE,
5733 strict_overflow_p))))
5734 return t1;
5735 break;
5736
5737 case ABS_EXPR:
5738 /* If widening the type changes it from signed to unsigned, then we
5739 must avoid building ABS_EXPR itself as unsigned. */
5740 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5741 {
5742 tree cstype = (*signed_type_for) (ctype);
5743 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5744 != 0)
5745 {
5746 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5747 return fold_convert (ctype, t1);
5748 }
5749 break;
5750 }
5751 /* If the constant is negative, we cannot simplify this. */
5752 if (tree_int_cst_sgn (c) == -1)
5753 break;
5754 /* FALLTHROUGH */
5755 case NEGATE_EXPR:
5756 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5757 != 0)
5758 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5759 break;
5760
5761 case MIN_EXPR: case MAX_EXPR:
5762 /* If widening the type changes the signedness, then we can't perform
5763 this optimization as that changes the result. */
5764 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5765 break;
5766
5767 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5768 sub_strict_overflow_p = false;
5769 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5770 &sub_strict_overflow_p)) != 0
5771 && (t2 = extract_muldiv (op1, c, code, wide_type,
5772 &sub_strict_overflow_p)) != 0)
5773 {
5774 if (tree_int_cst_sgn (c) < 0)
5775 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5776 if (sub_strict_overflow_p)
5777 *strict_overflow_p = true;
5778 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5779 fold_convert (ctype, t2));
5780 }
5781 break;
5782
5783 case LSHIFT_EXPR: case RSHIFT_EXPR:
5784 /* If the second operand is constant, this is a multiplication
5785 or floor division by a power of two, so we can treat it that
5786 way unless the multiplier or divisor overflows. Signed
5787 left-shift overflow is implementation-defined rather than
5788 undefined in C90, so do not convert signed left shift into
5789 multiplication. */
5790 if (TREE_CODE (op1) == INTEGER_CST
5791 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5792 /* const_binop may not detect overflow correctly,
5793 so check for it explicitly here. */
5794 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5795 && TREE_INT_CST_HIGH (op1) == 0
5796 && 0 != (t1 = fold_convert (ctype,
5797 const_binop (LSHIFT_EXPR,
5798 size_one_node,
5799 op1)))
5800 && !TREE_OVERFLOW (t1))
5801 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5802 ? MULT_EXPR : FLOOR_DIV_EXPR,
5803 ctype,
5804 fold_convert (ctype, op0),
5805 t1),
5806 c, code, wide_type, strict_overflow_p);
5807 break;
5808
5809 case PLUS_EXPR: case MINUS_EXPR:
5810 /* See if we can eliminate the operation on both sides. If we can, we
5811 can return a new PLUS or MINUS. If we can't, the only remaining
5812 cases where we can do anything are if the second operand is a
5813 constant. */
5814 sub_strict_overflow_p = false;
5815 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5816 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5817 if (t1 != 0 && t2 != 0
5818 && (code == MULT_EXPR
5819 /* If not multiplication, we can only do this if both operands
5820 are divisible by c. */
5821 || (multiple_of_p (ctype, op0, c)
5822 && multiple_of_p (ctype, op1, c))))
5823 {
5824 if (sub_strict_overflow_p)
5825 *strict_overflow_p = true;
5826 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5827 fold_convert (ctype, t2));
5828 }
5829
5830 /* If this was a subtraction, negate OP1 and set it to be an addition.
5831 This simplifies the logic below. */
5832 if (tcode == MINUS_EXPR)
5833 {
5834 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5835 /* If OP1 was not easily negatable, the constant may be OP0. */
5836 if (TREE_CODE (op0) == INTEGER_CST)
5837 {
5838 tree tem = op0;
5839 op0 = op1;
5840 op1 = tem;
5841 tem = t1;
5842 t1 = t2;
5843 t2 = tem;
5844 }
5845 }
5846
5847 if (TREE_CODE (op1) != INTEGER_CST)
5848 break;
5849
5850 /* If either OP1 or C is negative, this optimization is not safe for
5851 some of the division and remainder types while for others we need
5852 to change the code. */
5853 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5854 {
5855 if (code == CEIL_DIV_EXPR)
5856 code = FLOOR_DIV_EXPR;
5857 else if (code == FLOOR_DIV_EXPR)
5858 code = CEIL_DIV_EXPR;
5859 else if (code != MULT_EXPR
5860 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5861 break;
5862 }
5863
5864 /* If it's a multiply or a division/modulus operation of a multiple
5865 of our constant, do the operation and verify it doesn't overflow. */
5866 if (code == MULT_EXPR
5867 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5868 {
5869 op1 = const_binop (code, fold_convert (ctype, op1),
5870 fold_convert (ctype, c));
5871 /* We allow the constant to overflow with wrapping semantics. */
5872 if (op1 == 0
5873 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5874 break;
5875 }
5876 else
5877 break;
5878
5879 /* If we have an unsigned type that is not a sizetype, we cannot widen
5880 the operation since it will change the result if the original
5881 computation overflowed. */
5882 if (TYPE_UNSIGNED (ctype)
5883 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5884 && ctype != type)
5885 break;
5886
5887 /* If we were able to eliminate our operation from the first side,
5888 apply our operation to the second side and reform the PLUS. */
5889 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5890 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5891
5892 /* The last case is if we are a multiply. In that case, we can
5893 apply the distributive law to commute the multiply and addition
5894 if the multiplication of the constants doesn't overflow. */
5895 if (code == MULT_EXPR)
5896 return fold_build2 (tcode, ctype,
5897 fold_build2 (code, ctype,
5898 fold_convert (ctype, op0),
5899 fold_convert (ctype, c)),
5900 op1);
5901
5902 break;
5903
5904 case MULT_EXPR:
5905 /* We have a special case here if we are doing something like
5906 (C * 8) % 4 since we know that's zero. */
5907 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5908 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5909 /* If the multiplication can overflow we cannot optimize this.
5910 ??? Until we can properly mark individual operations as
5911 not overflowing we need to treat sizetype specially here as
5912 stor-layout relies on this optimization to make
5913 DECL_FIELD_BIT_OFFSET always a constant. */
5914 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5915 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5916 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5917 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5918 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5919 {
5920 *strict_overflow_p = true;
5921 return omit_one_operand (type, integer_zero_node, op0);
5922 }
5923
5924 /* ... fall through ... */
5925
5926 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5927 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5928 /* If we can extract our operation from the LHS, do so and return a
5929 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5930 do something only if the second operand is a constant. */
5931 if (same_p
5932 && (t1 = extract_muldiv (op0, c, code, wide_type,
5933 strict_overflow_p)) != 0)
5934 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5935 fold_convert (ctype, op1));
5936 else if (tcode == MULT_EXPR && code == MULT_EXPR
5937 && (t1 = extract_muldiv (op1, c, code, wide_type,
5938 strict_overflow_p)) != 0)
5939 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5940 fold_convert (ctype, t1));
5941 else if (TREE_CODE (op1) != INTEGER_CST)
5942 return 0;
5943
5944 /* If these are the same operation types, we can associate them
5945 assuming no overflow. */
5946 if (tcode == code
5947 && 0 != (t1 = int_const_binop (MULT_EXPR,
5948 fold_convert (ctype, op1),
5949 fold_convert (ctype, c), 1))
5950 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5951 TREE_INT_CST_HIGH (t1),
5952 (TYPE_UNSIGNED (ctype)
5953 && tcode != MULT_EXPR) ? -1 : 1,
5954 TREE_OVERFLOW (t1)))
5955 && !TREE_OVERFLOW (t1))
5956 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5957
5958 /* If these operations "cancel" each other, we have the main
5959 optimizations of this pass, which occur when either constant is a
5960 multiple of the other, in which case we replace this with either an
5961 operation of CODE or TCODE.
5962
5963 If we have an unsigned type that is not a sizetype, we cannot do
5964 this since it will change the result if the original computation
5965 overflowed. */
5966 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5967 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5968 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5969 || (tcode == MULT_EXPR
5970 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5971 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5972 && code != MULT_EXPR)))
5973 {
5974 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5975 {
5976 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5977 *strict_overflow_p = true;
5978 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5979 fold_convert (ctype,
5980 const_binop (TRUNC_DIV_EXPR,
5981 op1, c)));
5982 }
5983 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5984 {
5985 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5986 *strict_overflow_p = true;
5987 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5988 fold_convert (ctype,
5989 const_binop (TRUNC_DIV_EXPR,
5990 c, op1)));
5991 }
5992 }
5993 break;
5994
5995 default:
5996 break;
5997 }
5998
5999 return 0;
6000 }
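
#if 0
/* Illustrative sketch only (hypothetical helper): the rewrite from
   the comment above extract_muldiv, valid as long as the original
   expression does not overflow (assumed here).  */
static long
extract_muldiv_example (long x, long y)
{
  /* (x * 8 + y * 16) / 4 simplifies to x * 2 + y * 4.  */
  return (x * 8 + y * 16) / 4 - (x * 2 + y * 4);	/* always 0 */
}
#endif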
6001 \f
6002 /* Return a node which has the indicated constant VALUE (either 0 or
6003 1), and is of the indicated TYPE. */
6004
6005 tree
6006 constant_boolean_node (int value, tree type)
6007 {
6008 if (type == integer_type_node)
6009 return value ? integer_one_node : integer_zero_node;
6010 else if (type == boolean_type_node)
6011 return value ? boolean_true_node : boolean_false_node;
6012 else
6013 return build_int_cst (type, value);
6014 }
6015
6016
6017 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6018 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6019 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6020 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6021 COND is the first argument to CODE; otherwise (as in the example
6022 given here), it is the second argument. TYPE is the type of the
6023 original expression. Return NULL_TREE if no simplification is
6024 possible. */
6025
6026 static tree
6027 fold_binary_op_with_conditional_arg (location_t loc,
6028 enum tree_code code,
6029 tree type, tree op0, tree op1,
6030 tree cond, tree arg, int cond_first_p)
6031 {
6032 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6033 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6034 tree test, true_value, false_value;
6035 tree lhs = NULL_TREE;
6036 tree rhs = NULL_TREE;
6037
6038 if (TREE_CODE (cond) == COND_EXPR)
6039 {
6040 test = TREE_OPERAND (cond, 0);
6041 true_value = TREE_OPERAND (cond, 1);
6042 false_value = TREE_OPERAND (cond, 2);
6043 /* If this operand throws an exception, then it does not make
6044 sense to try to perform a logical or arithmetic operation
6045 involving it. */
6046 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6047 lhs = true_value;
6048 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6049 rhs = false_value;
6050 }
6051 else
6052 {
6053 tree testtype = TREE_TYPE (cond);
6054 test = cond;
6055 true_value = constant_boolean_node (true, testtype);
6056 false_value = constant_boolean_node (false, testtype);
6057 }
6058
6059 /* This transformation is only worthwhile if we don't have to wrap ARG
6060 in a SAVE_EXPR and the operation can be simplified on at least one
6061 of the branches once it is pushed inside the COND_EXPR. */
6062 if (!TREE_CONSTANT (arg)
6063 && (TREE_SIDE_EFFECTS (arg)
6064 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6065 return NULL_TREE;
6066
6067 arg = fold_convert_loc (loc, arg_type, arg);
6068 if (lhs == 0)
6069 {
6070 true_value = fold_convert_loc (loc, cond_type, true_value);
6071 if (cond_first_p)
6072 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6073 else
6074 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6075 }
6076 if (rhs == 0)
6077 {
6078 false_value = fold_convert_loc (loc, cond_type, false_value);
6079 if (cond_first_p)
6080 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6081 else
6082 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6083 }
6084
6085 /* Check that we have simplified at least one of the branches. */
6086 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6087 return NULL_TREE;
6088
6089 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6090 }
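
#if 0
/* Illustrative sketch only (hypothetical helper): the transform
   above at the source level; the addition is distributed over the
   conditional so each arm can fold further.  */
static int
cond_arg_example (int a, int b)
{
  /* a + (b ? 1 : 0)  ==>  b ? (a + 1) : (a + 0)  ==>  b ? a + 1 : a  */
  return b ? a + 1 : a;
}
#endif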
6091
6092 \f
6093 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6094
6095 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6096 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6097 ADDEND is the same as X.
6098
6099 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6100 and finite. The problematic cases are when X is zero, and its mode
6101 has signed zeros. In the case of rounding towards -infinity,
6102 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6103 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6104
6105 bool
6106 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6107 {
6108 if (!real_zerop (addend))
6109 return false;
6110
6111 /* Don't allow the fold with -fsignaling-nans. */
6112 if (HONOR_SNANS (TYPE_MODE (type)))
6113 return false;
6114
6115 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6116 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6117 return true;
6118
6119 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6120 if (TREE_CODE (addend) == REAL_CST
6121 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6122 negate = !negate;
6123
6124 /* The mode has signed zeros, and we have to honor their sign.
6125 In this situation, there is only one case we can return true for.
6126 X - 0 is the same as X unless rounding towards -infinity has
6127 to be honored. */
6128 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6129 }
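
#if 0
/* Illustrative sketch only (hypothetical helper): why X + 0.0 is not
   folded when signed zeros are honored.  Under round-to-nearest,
   -0.0 + 0.0 yields +0.0, so folding would lose the sign of zero.  */
#include <math.h>
static int
signed_zero_example (void)
{
  volatile double x = -0.0;
  return signbit (x) && !signbit (x + 0.0);	/* evaluates to 1 */
}
#endif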
6130
6131 /* Subroutine of fold() that checks comparisons of built-in math
6132 functions against real constants.
6133
6134 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6135 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6136 is the type of the result and ARG0 and ARG1 are the operands of the
6137 comparison. ARG1 must be a TREE_REAL_CST.
6138
6139 The function returns the constant folded tree if a simplification
6140 can be made, and NULL_TREE otherwise. */
6141
6142 static tree
6143 fold_mathfn_compare (location_t loc,
6144 enum built_in_function fcode, enum tree_code code,
6145 tree type, tree arg0, tree arg1)
6146 {
6147 REAL_VALUE_TYPE c;
6148
6149 if (BUILTIN_SQRT_P (fcode))
6150 {
6151 tree arg = CALL_EXPR_ARG (arg0, 0);
6152 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6153
6154 c = TREE_REAL_CST (arg1);
6155 if (REAL_VALUE_NEGATIVE (c))
6156 {
6157 /* sqrt(x) < y (also == y, <= y) is always false, if y is negative. */
6158 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6159 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6160
6161 /* sqrt(x) > y is always true, if y is negative and we
6162 don't care about NaNs, i.e. negative values of x. */
6163 if (code == NE_EXPR || !HONOR_NANS (mode))
6164 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6165
6166 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6167 return fold_build2_loc (loc, GE_EXPR, type, arg,
6168 build_real (TREE_TYPE (arg), dconst0));
6169 }
6170 else if (code == GT_EXPR || code == GE_EXPR)
6171 {
6172 REAL_VALUE_TYPE c2;
6173
6174 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6175 real_convert (&c2, mode, &c2);
6176
6177 if (REAL_VALUE_ISINF (c2))
6178 {
6179 /* sqrt(x) > y is x == +Inf, when y is very large. */
6180 if (HONOR_INFINITIES (mode))
6181 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6182 build_real (TREE_TYPE (arg), c2));
6183
6184 /* sqrt(x) > y is always false, when y is very large
6185 and we don't care about infinities. */
6186 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6187 }
6188
6189 /* sqrt(x) > c is the same as x > c*c. */
6190 return fold_build2_loc (loc, code, type, arg,
6191 build_real (TREE_TYPE (arg), c2));
6192 }
6193 else if (code == LT_EXPR || code == LE_EXPR)
6194 {
6195 REAL_VALUE_TYPE c2;
6196
6197 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6198 real_convert (&c2, mode, &c2);
6199
6200 if (REAL_VALUE_ISINF (c2))
6201 {
6202 /* sqrt(x) < y is always true, when y is a very large
6203 value and we don't care about NaNs or Infinities. */
6204 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6205 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6206
6207 /* sqrt(x) < y is x != +Inf when y is very large and we
6208 don't care about NaNs. */
6209 if (! HONOR_NANS (mode))
6210 return fold_build2_loc (loc, NE_EXPR, type, arg,
6211 build_real (TREE_TYPE (arg), c2));
6212
6213 /* sqrt(x) < y is x >= 0 when y is very large and we
6214 don't care about Infinities. */
6215 if (! HONOR_INFINITIES (mode))
6216 return fold_build2_loc (loc, GE_EXPR, type, arg,
6217 build_real (TREE_TYPE (arg), dconst0));
6218
6219 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6220 if (lang_hooks.decls.global_bindings_p () != 0
6221 || CONTAINS_PLACEHOLDER_P (arg))
6222 return NULL_TREE;
6223
6224 arg = save_expr (arg);
6225 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6226 fold_build2_loc (loc, GE_EXPR, type, arg,
6227 build_real (TREE_TYPE (arg),
6228 dconst0)),
6229 fold_build2_loc (loc, NE_EXPR, type, arg,
6230 build_real (TREE_TYPE (arg),
6231 c2)));
6232 }
6233
6234 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6235 if (! HONOR_NANS (mode))
6236 return fold_build2_loc (loc, code, type, arg,
6237 build_real (TREE_TYPE (arg), c2));
6238
6239 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6240 if (lang_hooks.decls.global_bindings_p () == 0
6241 && ! CONTAINS_PLACEHOLDER_P (arg))
6242 {
6243 arg = save_expr (arg);
6244 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6245 fold_build2_loc (loc, GE_EXPR, type, arg,
6246 build_real (TREE_TYPE (arg),
6247 dconst0)),
6248 fold_build2_loc (loc, code, type, arg,
6249 build_real (TREE_TYPE (arg),
6250 c2)));
6251 }
6252 }
6253 }
6254
6255 return NULL_TREE;
6256 }
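
#if 0
/* Illustrative sketch only (hypothetical helper): the sqrt rewrite
   above.  It is only done under unsafe math flags, since squaring
   the bound can change the result in the last ulp right at the
   boundary; away from it, sqrt (x) > 2.0 agrees with x > 4.0.  */
#include <math.h>
static int
sqrt_compare_example (double x)
{
  return (sqrt (x) > 2.0) == (x > 4.0);
}
#endif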
6257
6258 /* Subroutine of fold() that optimizes comparisons against Infinities,
6259 either +Inf or -Inf.
6260
6261 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6262 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6263 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6264
6265 The function returns the constant folded tree if a simplification
6266 can be made, and NULL_TREE otherwise. */
6267
6268 static tree
6269 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6270 tree arg0, tree arg1)
6271 {
6272 enum machine_mode mode;
6273 REAL_VALUE_TYPE max;
6274 tree temp;
6275 bool neg;
6276
6277 mode = TYPE_MODE (TREE_TYPE (arg0));
6278
6279 /* For negative infinity swap the sense of the comparison. */
6280 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6281 if (neg)
6282 code = swap_tree_comparison (code);
6283
6284 switch (code)
6285 {
6286 case GT_EXPR:
6287 /* x > +Inf is always false, if we ignore sNaNs. */
6288 if (HONOR_SNANS (mode))
6289 return NULL_TREE;
6290 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6291
6292 case LE_EXPR:
6293 /* x <= +Inf is always true, if we don't care about NaNs. */
6294 if (! HONOR_NANS (mode))
6295 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6296
6297 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6298 if (lang_hooks.decls.global_bindings_p () == 0
6299 && ! CONTAINS_PLACEHOLDER_P (arg0))
6300 {
6301 arg0 = save_expr (arg0);
6302 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6303 }
6304 break;
6305
6306 case EQ_EXPR:
6307 case GE_EXPR:
6308 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6309 real_maxval (&max, neg, mode);
6310 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6311 arg0, build_real (TREE_TYPE (arg0), max));
6312
6313 case LT_EXPR:
6314 /* x < +Inf is always equal to x <= DBL_MAX. */
6315 real_maxval (&max, neg, mode);
6316 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6317 arg0, build_real (TREE_TYPE (arg0), max));
6318
6319 case NE_EXPR:
6320 /* x != +Inf is always equal to !(x > DBL_MAX). */
6321 real_maxval (&max, neg, mode);
6322 if (! HONOR_NANS (mode))
6323 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6324 arg0, build_real (TREE_TYPE (arg0), max));
6325
6326 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6327 arg0, build_real (TREE_TYPE (arg0), max));
6328 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6329
6330 default:
6331 break;
6332 }
6333
6334 return NULL_TREE;
6335 }
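
#if 0
/* Illustrative sketch only (hypothetical helper): the LT_EXPR case
   above.  x < +Inf agrees with x <= DBL_MAX for every double,
   including NaN (both comparisons are then false).  */
#include <float.h>
#include <math.h>
static int
inf_compare_example (double x)
{
  return (x < INFINITY) == (x <= DBL_MAX);	/* always 1 */
}
#endif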
6336
6337 /* Subroutine of fold() that optimizes comparisons of a division by
6338 a nonzero integer constant against an integer constant, i.e.
6339 X/C1 op C2.
6340
6341 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6342 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6343 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6344
6345 The function returns the constant folded tree if a simplification
6346 can be made, and NULL_TREE otherwise. */
6347
6348 static tree
6349 fold_div_compare (location_t loc,
6350 enum tree_code code, tree type, tree arg0, tree arg1)
6351 {
6352 tree prod, tmp, hi, lo;
6353 tree arg00 = TREE_OPERAND (arg0, 0);
6354 tree arg01 = TREE_OPERAND (arg0, 1);
6355 unsigned HOST_WIDE_INT lpart;
6356 HOST_WIDE_INT hpart;
6357 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6358 bool neg_overflow;
6359 int overflow;
6360
6361 /* We have to do this the hard way to detect unsigned overflow.
6362 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6363 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6364 TREE_INT_CST_HIGH (arg01),
6365 TREE_INT_CST_LOW (arg1),
6366 TREE_INT_CST_HIGH (arg1),
6367 &lpart, &hpart, unsigned_p);
6368 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6369 -1, overflow);
6370 neg_overflow = false;
6371
6372 if (unsigned_p)
6373 {
6374 tmp = int_const_binop (MINUS_EXPR, arg01,
6375 build_int_cst (TREE_TYPE (arg01), 1), 0);
6376 lo = prod;
6377
6378 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6379 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6380 TREE_INT_CST_HIGH (prod),
6381 TREE_INT_CST_LOW (tmp),
6382 TREE_INT_CST_HIGH (tmp),
6383 &lpart, &hpart, unsigned_p);
6384 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6385 -1, overflow | TREE_OVERFLOW (prod));
6386 }
6387 else if (tree_int_cst_sgn (arg01) >= 0)
6388 {
6389 tmp = int_const_binop (MINUS_EXPR, arg01,
6390 build_int_cst (TREE_TYPE (arg01), 1), 0);
6391 switch (tree_int_cst_sgn (arg1))
6392 {
6393 case -1:
6394 neg_overflow = true;
6395 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6396 hi = prod;
6397 break;
6398
6399 case 0:
6400 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6401 hi = tmp;
6402 break;
6403
6404 case 1:
6405 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6406 lo = prod;
6407 break;
6408
6409 default:
6410 gcc_unreachable ();
6411 }
6412 }
6413 else
6414 {
6415 /* A negative divisor reverses the relational operators. */
6416 code = swap_tree_comparison (code);
6417
6418 tmp = int_const_binop (PLUS_EXPR, arg01,
6419 build_int_cst (TREE_TYPE (arg01), 1), 0);
6420 switch (tree_int_cst_sgn (arg1))
6421 {
6422 case -1:
6423 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6424 lo = prod;
6425 break;
6426
6427 case 0:
6428 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6429 lo = tmp;
6430 break;
6431
6432 case 1:
6433 neg_overflow = true;
6434 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6435 hi = prod;
6436 break;
6437
6438 default:
6439 gcc_unreachable ();
6440 }
6441 }
6442
6443 switch (code)
6444 {
6445 case EQ_EXPR:
6446 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6447 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6448 if (TREE_OVERFLOW (hi))
6449 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6450 if (TREE_OVERFLOW (lo))
6451 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6452 return build_range_check (loc, type, arg00, 1, lo, hi);
6453
6454 case NE_EXPR:
6455 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6456 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6457 if (TREE_OVERFLOW (hi))
6458 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6459 if (TREE_OVERFLOW (lo))
6460 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6461 return build_range_check (loc, type, arg00, 0, lo, hi);
6462
6463 case LT_EXPR:
6464 if (TREE_OVERFLOW (lo))
6465 {
6466 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6467 return omit_one_operand_loc (loc, type, tmp, arg00);
6468 }
6469 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6470
6471 case LE_EXPR:
6472 if (TREE_OVERFLOW (hi))
6473 {
6474 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6475 return omit_one_operand_loc (loc, type, tmp, arg00);
6476 }
6477 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6478
6479 case GT_EXPR:
6480 if (TREE_OVERFLOW (hi))
6481 {
6482 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6483 return omit_one_operand_loc (loc, type, tmp, arg00);
6484 }
6485 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6486
6487 case GE_EXPR:
6488 if (TREE_OVERFLOW (lo))
6489 {
6490 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6491 return omit_one_operand_loc (loc, type, tmp, arg00);
6492 }
6493 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6494
6495 default:
6496 break;
6497 }
6498
6499 return NULL_TREE;
6500 }
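
#if 0
/* Illustrative sketch only (hypothetical helper): the range check
   built above for unsigned X/C1 == C2.  x / 4 == 2 holds exactly
   for x in [8, 11], which is one subtract-and-compare.  */
static int
div_compare_example (unsigned int x)
{
  return (x / 4 == 2) == (x - 8u <= 3u);	/* always 1 */
}
#endif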
6501
6502
6503 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6504 equality/inequality test, then return a simplified form of the test
6505 using a sign test. Otherwise return NULL. TYPE is the desired
6506 result type. */
6507
6508 static tree
6509 fold_single_bit_test_into_sign_test (location_t loc,
6510 enum tree_code code, tree arg0, tree arg1,
6511 tree result_type)
6512 {
6513 /* If this is testing a single bit, we can optimize the test. */
6514 if ((code == NE_EXPR || code == EQ_EXPR)
6515 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6516 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6517 {
6518 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6519 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6520 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6521
6522 if (arg00 != NULL_TREE
6523 /* This is only a win if casting to a signed type is cheap,
6524 i.e. when arg00's type is not a partial mode. */
6525 && TYPE_PRECISION (TREE_TYPE (arg00))
6526 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6527 {
6528 tree stype = signed_type_for (TREE_TYPE (arg00));
6529 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6530 result_type,
6531 fold_convert_loc (loc, stype, arg00),
6532 build_int_cst (stype, 0));
6533 }
6534 }
6535
6536 return NULL_TREE;
6537 }
6538
6539 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6540 equality/inequality test, then return a simplified form of
6541 the test using shifts and logical operations. Otherwise return
6542 NULL. TYPE is the desired result type. */
6543
6544 tree
6545 fold_single_bit_test (location_t loc, enum tree_code code,
6546 tree arg0, tree arg1, tree result_type)
6547 {
6548 /* If this is testing a single bit, we can optimize the test. */
6549 if ((code == NE_EXPR || code == EQ_EXPR)
6550 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6551 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6552 {
6553 tree inner = TREE_OPERAND (arg0, 0);
6554 tree type = TREE_TYPE (arg0);
6555 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6556 enum machine_mode operand_mode = TYPE_MODE (type);
6557 int ops_unsigned;
6558 tree signed_type, unsigned_type, intermediate_type;
6559 tree tem, one;
6560
6561 /* First, see if we can fold the single bit test into a sign-bit
6562 test. */
6563 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6564 result_type);
6565 if (tem)
6566 return tem;
6567
6568 /* Otherwise we have (A & C) != 0 where C is a single bit,
6569 convert that into ((A >> C2) & 1), where C2 = log2(C).
6570 Similarly for (A & C) == 0. */
6571
6572 /* If INNER is a right shift by a constant and it plus BITNUM does
6573 not overflow, adjust BITNUM and INNER. */
6574 if (TREE_CODE (inner) == RSHIFT_EXPR
6575 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6576 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6577 && bitnum < TYPE_PRECISION (type)
6578 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6579 bitnum - TYPE_PRECISION (type)))
6580 {
6581 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6582 inner = TREE_OPERAND (inner, 0);
6583 }
6584
6585 /* If we are going to be able to omit the AND below, we must do our
6586 operations as unsigned. If we must use the AND, we have a choice.
6587 Normally unsigned is faster, but for some machines signed is. */
6588 #ifdef LOAD_EXTEND_OP
6589 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6590 && !flag_syntax_only) ? 0 : 1;
6591 #else
6592 ops_unsigned = 1;
6593 #endif
6594
6595 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6596 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6597 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6598 inner = fold_convert_loc (loc, intermediate_type, inner);
6599
6600 if (bitnum != 0)
6601 inner = build2 (RSHIFT_EXPR, intermediate_type,
6602 inner, size_int (bitnum));
6603
6604 one = build_int_cst (intermediate_type, 1);
6605
6606 if (code == EQ_EXPR)
6607 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6608
6609 /* Put the AND last so it can combine with more things. */
6610 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6611
6612 /* Make sure to return the proper type. */
6613 inner = fold_convert_loc (loc, result_type, inner);
6614
6615 return inner;
6616 }
6617 return NULL_TREE;
6618 }
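
#if 0
/* Illustrative sketch only (hypothetical helper): the shift form
   produced above.  (a & 8) != 0 becomes (a >> 3) & 1; the EQ_EXPR
   variant XORs in a 1 before the final AND.  */
static int
single_bit_test_example (unsigned int a)
{
  return ((a & 8) != 0) == ((a >> 3) & 1);	/* always 1 */
}
#endif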
6619
6620 /* Check whether we are allowed to reorder operands arg0 and arg1,
6621 such that the evaluation of arg1 occurs before arg0. */
6622
6623 static bool
6624 reorder_operands_p (const_tree arg0, const_tree arg1)
6625 {
6626 if (! flag_evaluation_order)
6627 return true;
6628 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6629 return true;
6630 return ! TREE_SIDE_EFFECTS (arg0)
6631 && ! TREE_SIDE_EFFECTS (arg1);
6632 }
6633
6634 /* Test whether it is preferable to swap two operands, ARG0 and
6635 ARG1, for example because ARG0 is an integer constant and ARG1
6636 isn't. If REORDER is true, only recommend swapping if we can
6637 evaluate the operands in reverse order. */
6638
6639 bool
6640 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6641 {
6642 STRIP_SIGN_NOPS (arg0);
6643 STRIP_SIGN_NOPS (arg1);
6644
6645 if (TREE_CODE (arg1) == INTEGER_CST)
6646 return 0;
6647 if (TREE_CODE (arg0) == INTEGER_CST)
6648 return 1;
6649
6650 if (TREE_CODE (arg1) == REAL_CST)
6651 return 0;
6652 if (TREE_CODE (arg0) == REAL_CST)
6653 return 1;
6654
6655 if (TREE_CODE (arg1) == FIXED_CST)
6656 return 0;
6657 if (TREE_CODE (arg0) == FIXED_CST)
6658 return 1;
6659
6660 if (TREE_CODE (arg1) == COMPLEX_CST)
6661 return 0;
6662 if (TREE_CODE (arg0) == COMPLEX_CST)
6663 return 1;
6664
6665 if (TREE_CONSTANT (arg1))
6666 return 0;
6667 if (TREE_CONSTANT (arg0))
6668 return 1;
6669
6670 if (optimize_function_for_size_p (cfun))
6671 return 0;
6672
6673 if (reorder && flag_evaluation_order
6674 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6675 return 0;
6676
6677 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6678 for commutative and comparison operators. Ensuring a canonical
6679 form allows the optimizers to find additional redundancies without
6680 having to explicitly check for both orderings. */
6681 if (TREE_CODE (arg0) == SSA_NAME
6682 && TREE_CODE (arg1) == SSA_NAME
6683 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6684 return 1;
6685
6686 /* Put SSA_NAMEs last. */
6687 if (TREE_CODE (arg1) == SSA_NAME)
6688 return 0;
6689 if (TREE_CODE (arg0) == SSA_NAME)
6690 return 1;
6691
6692 /* Put variables last. */
6693 if (DECL_P (arg1))
6694 return 0;
6695 if (DECL_P (arg0))
6696 return 1;
6697
6698 return 0;
6699 }
6700
6701 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6702 ARG0 is extended to a wider type. */
6703
6704 static tree
6705 fold_widened_comparison (location_t loc, enum tree_code code,
6706 tree type, tree arg0, tree arg1)
6707 {
6708 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6709 tree arg1_unw;
6710 tree shorter_type, outer_type;
6711 tree min, max;
6712 bool above, below;
6713
6714 if (arg0_unw == arg0)
6715 return NULL_TREE;
6716 shorter_type = TREE_TYPE (arg0_unw);
6717
6718 #ifdef HAVE_canonicalize_funcptr_for_compare
6719 /* Disable this optimization if we're casting a function pointer
6720 type on targets that require function pointer canonicalization. */
6721 if (HAVE_canonicalize_funcptr_for_compare
6722 && TREE_CODE (shorter_type) == POINTER_TYPE
6723 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6724 return NULL_TREE;
6725 #endif
6726
6727 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6728 return NULL_TREE;
6729
6730 arg1_unw = get_unwidened (arg1, NULL_TREE);
6731
6732 /* If possible, express the comparison in the shorter mode. */
6733 if ((code == EQ_EXPR || code == NE_EXPR
6734 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6735 && (TREE_TYPE (arg1_unw) == shorter_type
6736 || ((TYPE_PRECISION (shorter_type)
6737 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6738 && (TYPE_UNSIGNED (shorter_type)
6739 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6740 || (TREE_CODE (arg1_unw) == INTEGER_CST
6741 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6742 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6743 && int_fits_type_p (arg1_unw, shorter_type))))
6744 return fold_build2_loc (loc, code, type, arg0_unw,
6745 fold_convert_loc (loc, shorter_type, arg1_unw));
6746
6747 if (TREE_CODE (arg1_unw) != INTEGER_CST
6748 || TREE_CODE (shorter_type) != INTEGER_TYPE
6749 || !int_fits_type_p (arg1_unw, shorter_type))
6750 return NULL_TREE;
6751
6752 /* If we are comparing with an integer that does not fit into the range
6753 of the shorter type, the result is known. */
6754 outer_type = TREE_TYPE (arg1_unw);
6755 min = lower_bound_in_type (outer_type, shorter_type);
6756 max = upper_bound_in_type (outer_type, shorter_type);
6757
6758 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6759 max, arg1_unw));
6760 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6761 arg1_unw, min));
6762
6763 switch (code)
6764 {
6765 case EQ_EXPR:
6766 if (above || below)
6767 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6768 break;
6769
6770 case NE_EXPR:
6771 if (above || below)
6772 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6773 break;
6774
6775 case LT_EXPR:
6776 case LE_EXPR:
6777 if (above)
6778 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6779 else if (below)
6780 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6781
6782 case GT_EXPR:
6783 case GE_EXPR:
6784 if (above)
6785 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6786 else if (below)
6787 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6788
6789 default:
6790 break;
6791 }
6792
6793 return NULL_TREE;
6794 }
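
#if 0
/* Illustrative sketch only (hypothetical helper): when the constant
   lies outside the range of the unwidened type, the comparison is
   decided outright; for a short S, (int) S == 70000 is always 0.  */
static int
widened_compare_example (short s)
{
  return ((int) s == 70000) == 0;	/* always 1 */
}
#endif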
6795
6796 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6797 ARG0 just the signedness is changed. */
6798
6799 static tree
6800 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6801 tree arg0, tree arg1)
6802 {
6803 tree arg0_inner;
6804 tree inner_type, outer_type;
6805
6806 if (!CONVERT_EXPR_P (arg0))
6807 return NULL_TREE;
6808
6809 outer_type = TREE_TYPE (arg0);
6810 arg0_inner = TREE_OPERAND (arg0, 0);
6811 inner_type = TREE_TYPE (arg0_inner);
6812
6813 #ifdef HAVE_canonicalize_funcptr_for_compare
6814 /* Disable this optimization if we're casting a function pointer
6815 type on targets that require function pointer canonicalization. */
6816 if (HAVE_canonicalize_funcptr_for_compare
6817 && TREE_CODE (inner_type) == POINTER_TYPE
6818 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6819 return NULL_TREE;
6820 #endif
6821
6822 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6823 return NULL_TREE;
6824
6825 if (TREE_CODE (arg1) != INTEGER_CST
6826 && !(CONVERT_EXPR_P (arg1)
6827 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6828 return NULL_TREE;
6829
6830 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6831 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6832 && code != NE_EXPR
6833 && code != EQ_EXPR)
6834 return NULL_TREE;
6835
6836 if (TREE_CODE (arg1) == INTEGER_CST)
6837 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6838 TREE_INT_CST_HIGH (arg1), 0,
6839 TREE_OVERFLOW (arg1));
6840 else
6841 arg1 = fold_convert_loc (loc, inner_type, arg1);
6842
6843 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6844 }
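
#if 0
/* Illustrative sketch only (hypothetical helper): equality is
   insensitive to a sign-changing cast of equal precision, so the
   cast is dropped and the constant retyped instead.  */
static int
sign_changed_example (int x)
{
  return ((unsigned int) x == 5u) == (x == 5);	/* always 1 */
}
#endif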
6845
6846 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6847 the step of the array. Reconstructs s and delta in the case of s *
6848 delta being an integer constant (and thus already folded). ADDR is
6849 the address. OP1 is the multiplicative expression. If the
6850 function succeeds, the new address expression is returned.
6851 Otherwise NULL_TREE is returned. LOC is the location of the
6852 resulting expression. */
6853
6854 static tree
6855 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6856 {
6857 tree s, delta, step;
6858 tree ref = TREE_OPERAND (addr, 0), pref;
6859 tree ret, pos;
6860 tree itype;
6861 bool mdim = false;
6862
6863 /* Strip the nops that might be added when converting op1 to sizetype. */
6864 STRIP_NOPS (op1);
6865
6866 /* Canonicalize op1 into a possibly non-constant delta
6867 and an INTEGER_CST s. */
6868 if (TREE_CODE (op1) == MULT_EXPR)
6869 {
6870 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6871
6872 STRIP_NOPS (arg0);
6873 STRIP_NOPS (arg1);
6874
6875 if (TREE_CODE (arg0) == INTEGER_CST)
6876 {
6877 s = arg0;
6878 delta = arg1;
6879 }
6880 else if (TREE_CODE (arg1) == INTEGER_CST)
6881 {
6882 s = arg1;
6883 delta = arg0;
6884 }
6885 else
6886 return NULL_TREE;
6887 }
6888 else if (TREE_CODE (op1) == INTEGER_CST)
6889 {
6890 delta = op1;
6891 s = NULL_TREE;
6892 }
6893 else
6894 {
6895 /* Treat op1 as delta * 1. */
6896 delta = op1;
6897 s = integer_one_node;
6898 }
6899
6900 for (;; ref = TREE_OPERAND (ref, 0))
6901 {
6902 if (TREE_CODE (ref) == ARRAY_REF)
6903 {
6904 tree domain;
6905
6906 /* Remember if this was a multi-dimensional array. */
6907 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6908 mdim = true;
6909
6910 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6911 if (! domain)
6912 continue;
6913 itype = TREE_TYPE (domain);
6914
6915 step = array_ref_element_size (ref);
6916 if (TREE_CODE (step) != INTEGER_CST)
6917 continue;
6918
6919 if (s)
6920 {
6921 if (! tree_int_cst_equal (step, s))
6922 continue;
6923 }
6924 else
6925 {
6926 /* See if delta is a multiple of step. */
6927 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6928 if (! tmp)
6929 continue;
6930 delta = tmp;
6931 }
6932
6933 /* Only fold here if we can verify we do not overflow one
6934 dimension of a multi-dimensional array. */
6935 if (mdim)
6936 {
6937 tree tmp;
6938
6939 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6940 || !TYPE_MAX_VALUE (domain)
6941 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6942 continue;
6943
6944 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6945 fold_convert_loc (loc, itype,
6946 TREE_OPERAND (ref, 1)),
6947 fold_convert_loc (loc, itype, delta));
6948 if (!tmp
6949 || TREE_CODE (tmp) != INTEGER_CST
6950 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6951 continue;
6952 }
6953
6954 break;
6955 }
6956 else
6957 mdim = false;
6958
6959 if (!handled_component_p (ref))
6960 return NULL_TREE;
6961 }
6962
6963 /* We found a suitable array reference. Copy everything up to it,
6964 and replace the index. */
6965
6966 pref = TREE_OPERAND (addr, 0);
6967 ret = copy_node (pref);
6968 SET_EXPR_LOCATION (ret, loc);
6969 pos = ret;
6970
6971 while (pref != ref)
6972 {
6973 pref = TREE_OPERAND (pref, 0);
6974 TREE_OPERAND (pos, 0) = copy_node (pref);
6975 pos = TREE_OPERAND (pos, 0);
6976 }
6977
6978 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6979 fold_convert_loc (loc, itype,
6980 TREE_OPERAND (pos, 1)),
6981 fold_convert_loc (loc, itype, delta));
6982
6983 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6984 }
6985
6986
6987 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6988 means A >= Y && A != MAX, but in this case we know that
6989 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6990
6991 static tree
6992 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6993 {
6994 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6995
6996 if (TREE_CODE (bound) == LT_EXPR)
6997 a = TREE_OPERAND (bound, 0);
6998 else if (TREE_CODE (bound) == GT_EXPR)
6999 a = TREE_OPERAND (bound, 1);
7000 else
7001 return NULL_TREE;
7002
7003 typea = TREE_TYPE (a);
7004 if (!INTEGRAL_TYPE_P (typea)
7005 && !POINTER_TYPE_P (typea))
7006 return NULL_TREE;
7007
7008 if (TREE_CODE (ineq) == LT_EXPR)
7009 {
7010 a1 = TREE_OPERAND (ineq, 1);
7011 y = TREE_OPERAND (ineq, 0);
7012 }
7013 else if (TREE_CODE (ineq) == GT_EXPR)
7014 {
7015 a1 = TREE_OPERAND (ineq, 0);
7016 y = TREE_OPERAND (ineq, 1);
7017 }
7018 else
7019 return NULL_TREE;
7020
7021 if (TREE_TYPE (a1) != typea)
7022 return NULL_TREE;
7023
7024 if (POINTER_TYPE_P (typea))
7025 {
7026 /* Convert the pointers to a signed integer type before taking the difference. */
7027 tree ta = fold_convert_loc (loc, ssizetype, a);
7028 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7029 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7030 }
7031 else
7032 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7033
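  /* The transformation is valid only if A1 is exactly A + 1,
     i.e. the difference computed above must be one.  */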
7034 if (!diff || !integer_onep (diff))
7035 return NULL_TREE;
7036
7037 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7038 }
7039
7040 /* Fold a sum or difference of at least one multiplication.
7041 Returns the folded tree or NULL if no simplification could be made. */
7042
7043 static tree
7044 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7045 tree arg0, tree arg1)
7046 {
7047 tree arg00, arg01, arg10, arg11;
7048 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7049
7050 /* (A * C) +- (B * C) -> (A+-B) * C.
7051 (A * C) +- A -> A * (C+-1).
7052 We are most concerned about the case where C is a constant,
7053 but other combinations show up during loop reduction. Since
7054 it is not difficult, try all four possibilities. */
7055
7056 if (TREE_CODE (arg0) == MULT_EXPR)
7057 {
7058 arg00 = TREE_OPERAND (arg0, 0);
7059 arg01 = TREE_OPERAND (arg0, 1);
7060 }
7061 else if (TREE_CODE (arg0) == INTEGER_CST)
7062 {
7063 arg00 = build_one_cst (type);
7064 arg01 = arg0;
7065 }
7066 else
7067 {
7068 /* We cannot generate constant 1 for fract. */
7069 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7070 return NULL_TREE;
7071 arg00 = arg0;
7072 arg01 = build_one_cst (type);
7073 }
7074 if (TREE_CODE (arg1) == MULT_EXPR)
7075 {
7076 arg10 = TREE_OPERAND (arg1, 0);
7077 arg11 = TREE_OPERAND (arg1, 1);
7078 }
7079 else if (TREE_CODE (arg1) == INTEGER_CST)
7080 {
7081 arg10 = build_one_cst (type);
7082 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7083 the purpose of this canonicalization. */
7084 if (TREE_INT_CST_HIGH (arg1) == -1
7085 && negate_expr_p (arg1)
7086 && code == PLUS_EXPR)
7087 {
7088 arg11 = negate_expr (arg1);
7089 code = MINUS_EXPR;
7090 }
7091 else
7092 arg11 = arg1;
7093 }
7094 else
7095 {
7096 /* We cannot generate constant 1 for fract. */
7097 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7098 return NULL_TREE;
7099 arg10 = arg1;
7100 arg11 = build_one_cst (type);
7101 }
7102 same = NULL_TREE;
7103
7104 if (operand_equal_p (arg01, arg11, 0))
7105 same = arg01, alt0 = arg00, alt1 = arg10;
7106 else if (operand_equal_p (arg00, arg10, 0))
7107 same = arg00, alt0 = arg01, alt1 = arg11;
7108 else if (operand_equal_p (arg00, arg11, 0))
7109 same = arg00, alt0 = arg01, alt1 = arg10;
7110 else if (operand_equal_p (arg01, arg10, 0))
7111 same = arg01, alt0 = arg00, alt1 = arg11;
7112
7113 /* No identical multiplicands; see if we can find a common
7114 power-of-two factor in non-power-of-two multiplies. This
7115 can help in multi-dimensional array access. */
7116 else if (host_integerp (arg01, 0)
7117 && host_integerp (arg11, 0))
7118 {
7119 HOST_WIDE_INT int01, int11, tmp;
7120 bool swap = false;
7121 tree maybe_same;
7122 int01 = TREE_INT_CST_LOW (arg01);
7123 int11 = TREE_INT_CST_LOW (arg11);
7124
7125 /* Move min of absolute values to int11. */
7126 if ((int01 >= 0 ? int01 : -int01)
7127 < (int11 >= 0 ? int11 : -int11))
7128 {
7129 tmp = int01, int01 = int11, int11 = tmp;
7130 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7131 maybe_same = arg01;
7132 swap = true;
7133 }
7134 else
7135 maybe_same = arg11;
7136
7137 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7138 /* The remainder should not be a constant, otherwise we
7139 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7140 increase the number of multiplications needed. */
7141 && TREE_CODE (arg10) != INTEGER_CST)
7142 {
7143 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7144 build_int_cst (TREE_TYPE (arg00),
7145 int01 / int11));
7146 alt1 = arg10;
7147 same = maybe_same;
7148 if (swap)
7149 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7150 }
7151 }
7152
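  /* If a common factor SAME was found, distribute it out:
     (ALT0 * SAME) +- (ALT1 * SAME) -> (ALT0 +- ALT1) * SAME,
     e.g. i * 12 + j * 4 becomes (i * 3 + j) * 4.  */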
7153 if (same)
7154 return fold_build2_loc (loc, MULT_EXPR, type,
7155 fold_build2_loc (loc, code, type,
7156 fold_convert_loc (loc, type, alt0),
7157 fold_convert_loc (loc, type, alt1)),
7158 fold_convert_loc (loc, type, same));
7159
7160 return NULL_TREE;
7161 }
7162
7163 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7164 specified by EXPR into the buffer PTR of length LEN bytes.
7165 Return the number of bytes placed in the buffer, or zero
7166 upon failure. */
7167
7168 static int
7169 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7170 {
7171 tree type = TREE_TYPE (expr);
7172 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7173 int byte, offset, word, words;
7174 unsigned char value;
7175
7176 if (total_bytes > len)
7177 return 0;
7178 words = total_bytes / UNITS_PER_WORD;
7179
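  /* Emit the value one byte at a time, starting from the least
     significant bit position, placing each byte at its location
     in target memory order.  */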
7180 for (byte = 0; byte < total_bytes; byte++)
7181 {
7182 int bitpos = byte * BITS_PER_UNIT;
7183 if (bitpos < HOST_BITS_PER_WIDE_INT)
7184 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7185 else
7186 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7187 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7188
7189 if (total_bytes > UNITS_PER_WORD)
7190 {
7191 word = byte / UNITS_PER_WORD;
7192 if (WORDS_BIG_ENDIAN)
7193 word = (words - 1) - word;
7194 offset = word * UNITS_PER_WORD;
7195 if (BYTES_BIG_ENDIAN)
7196 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7197 else
7198 offset += byte % UNITS_PER_WORD;
7199 }
7200 else
7201 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7202 ptr[offset] = value;
7203 }
7204 return total_bytes;
7205 }
7206
7207
7208 /* Subroutine of native_encode_expr. Encode the REAL_CST
7209 specified by EXPR into the buffer PTR of length LEN bytes.
7210 Return the number of bytes placed in the buffer, or zero
7211 upon failure. */
7212
7213 static int
7214 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7215 {
7216 tree type = TREE_TYPE (expr);
7217 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7218 int byte, offset, word, words, bitpos;
7219 unsigned char value;
7220
7221 /* There are always 32 bits in each long, no matter the size of
7222 the host's long. We handle floating-point representations with
7223 up to 192 bits. */
7224 long tmp[6];
7225
7226 if (total_bytes > len)
7227 return 0;
7228 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7229
7230 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7231
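  /* real_to_target has filled TMP with the image of the value in
     32-bit chunks; scatter those chunks into PTR, honoring the
     target byte order within each four-byte group.  */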
7232 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7233 bitpos += BITS_PER_UNIT)
7234 {
7235 byte = (bitpos / BITS_PER_UNIT) & 3;
7236 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7237
7238 if (UNITS_PER_WORD < 4)
7239 {
7240 word = byte / UNITS_PER_WORD;
7241 if (WORDS_BIG_ENDIAN)
7242 word = (words - 1) - word;
7243 offset = word * UNITS_PER_WORD;
7244 if (BYTES_BIG_ENDIAN)
7245 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7246 else
7247 offset += byte % UNITS_PER_WORD;
7248 }
7249 else
7250 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7251 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7252 }
7253 return total_bytes;
7254 }
7255
7256 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7257 specified by EXPR into the buffer PTR of length LEN bytes.
7258 Return the number of bytes placed in the buffer, or zero
7259 upon failure. */
7260
7261 static int
7262 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7263 {
7264 int rsize, isize;
7265 tree part;
7266
7267 part = TREE_REALPART (expr);
7268 rsize = native_encode_expr (part, ptr, len);
7269 if (rsize == 0)
7270 return 0;
7271 part = TREE_IMAGPART (expr);
7272 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7273 if (isize != rsize)
7274 return 0;
7275 return rsize + isize;
7276 }
7277
7278
7279 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7282 upon failure. */
7283
7284 static int
7285 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7286 {
7287 int i, size, offset, count;
7288 tree itype, elem, elements;
7289
7290 offset = 0;
7291 elements = TREE_VECTOR_CST_ELTS (expr);
7292 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7293 itype = TREE_TYPE (TREE_TYPE (expr));
7294 size = GET_MODE_SIZE (TYPE_MODE (itype));
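  /* Encode each element in turn; elements missing from the
     constant's element list are encoded as zero.  */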
7295 for (i = 0; i < count; i++)
7296 {
7297 if (elements)
7298 {
7299 elem = TREE_VALUE (elements);
7300 elements = TREE_CHAIN (elements);
7301 }
7302 else
7303 elem = NULL_TREE;
7304
7305 if (elem)
7306 {
7307 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7308 return 0;
7309 }
7310 else
7311 {
7312 if (offset + size > len)
7313 return 0;
7314 memset (ptr+offset, 0, size);
7315 }
7316 offset += size;
7317 }
7318 return offset;
7319 }
7320
7321
7322 /* Subroutine of native_encode_expr. Encode the STRING_CST
7323 specified by EXPR into the buffer PTR of length LEN bytes.
7324 Return the number of bytes placed in the buffer, or zero
7325 upon failure. */
7326
7327 static int
7328 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7329 {
7330 tree type = TREE_TYPE (expr);
7331 HOST_WIDE_INT total_bytes;
7332
7333 if (TREE_CODE (type) != ARRAY_TYPE
7334 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7335 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7336 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7337 return 0;
7338 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7339 if (total_bytes > len)
7340 return 0;
7341 if (TREE_STRING_LENGTH (expr) < total_bytes)
7342 {
7343 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7344 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7345 total_bytes - TREE_STRING_LENGTH (expr));
7346 }
7347 else
7348 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7349 return total_bytes;
7350 }
7351
7352
7353 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7354 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7355 buffer PTR of length LEN bytes. Return the number of bytes
7356 placed in the buffer, or zero upon failure. */
7357
7358 int
7359 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7360 {
7361 switch (TREE_CODE (expr))
7362 {
7363 case INTEGER_CST:
7364 return native_encode_int (expr, ptr, len);
7365
7366 case REAL_CST:
7367 return native_encode_real (expr, ptr, len);
7368
7369 case COMPLEX_CST:
7370 return native_encode_complex (expr, ptr, len);
7371
7372 case VECTOR_CST:
7373 return native_encode_vector (expr, ptr, len);
7374
7375 case STRING_CST:
7376 return native_encode_string (expr, ptr, len);
7377
7378 default:
7379 return 0;
7380 }
7381 }
7382
7383
7384 /* Subroutine of native_interpret_expr. Interpret the contents of
7385 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7386 If the buffer cannot be interpreted, return NULL_TREE. */
7387
7388 static tree
7389 native_interpret_int (tree type, const unsigned char *ptr, int len)
7390 {
7391 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7392 int byte, offset, word, words;
7393 unsigned char value;
7394 double_int result;
7395
7396 if (total_bytes > len)
7397 return NULL_TREE;
7398 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7399 return NULL_TREE;
7400
7401 result = double_int_zero;
7402 words = total_bytes / UNITS_PER_WORD;
7403
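  /* Read the bytes back in target memory order, accumulating them
     into the low and high words of RESULT.  */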
7404 for (byte = 0; byte < total_bytes; byte++)
7405 {
7406 int bitpos = byte * BITS_PER_UNIT;
7407 if (total_bytes > UNITS_PER_WORD)
7408 {
7409 word = byte / UNITS_PER_WORD;
7410 if (WORDS_BIG_ENDIAN)
7411 word = (words - 1) - word;
7412 offset = word * UNITS_PER_WORD;
7413 if (BYTES_BIG_ENDIAN)
7414 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7415 else
7416 offset += byte % UNITS_PER_WORD;
7417 }
7418 else
7419 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7420 value = ptr[offset];
7421
7422 if (bitpos < HOST_BITS_PER_WIDE_INT)
7423 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7424 else
7425 result.high |= (unsigned HOST_WIDE_INT) value
7426 << (bitpos - HOST_BITS_PER_WIDE_INT);
7427 }
7428
7429 return double_int_to_tree (type, result);
7430 }
7431
7432
7433 /* Subroutine of native_interpret_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7435 If the buffer cannot be interpreted, return NULL_TREE. */
7436
7437 static tree
7438 native_interpret_real (tree type, const unsigned char *ptr, int len)
7439 {
7440 enum machine_mode mode = TYPE_MODE (type);
7441 int total_bytes = GET_MODE_SIZE (mode);
7442 int byte, offset, word, words, bitpos;
7443 unsigned char value;
7444 /* There are always 32 bits in each long, no matter the size of
7445 the host's long. We handle floating-point representations with
7446 up to 192 bits. */
7447 REAL_VALUE_TYPE r;
7448 long tmp[6];
7449
7450 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7451 if (total_bytes > len || total_bytes > 24)
7452 return NULL_TREE;
7453 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7454
7455 memset (tmp, 0, sizeof (tmp));
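  /* Gather the bytes in target order into 32-bit chunks and let
     real_from_target reassemble the value from them.  */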
7456 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7457 bitpos += BITS_PER_UNIT)
7458 {
7459 byte = (bitpos / BITS_PER_UNIT) & 3;
7460 if (UNITS_PER_WORD < 4)
7461 {
7462 word = byte / UNITS_PER_WORD;
7463 if (WORDS_BIG_ENDIAN)
7464 word = (words - 1) - word;
7465 offset = word * UNITS_PER_WORD;
7466 if (BYTES_BIG_ENDIAN)
7467 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7468 else
7469 offset += byte % UNITS_PER_WORD;
7470 }
7471 else
7472 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7473 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7474
7475 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7476 }
7477
7478 real_from_target (&r, tmp, mode);
7479 return build_real (type, r);
7480 }
7481
7482
7483 /* Subroutine of native_interpret_expr. Interpret the contents of
7484 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7485 If the buffer cannot be interpreted, return NULL_TREE. */
7486
7487 static tree
7488 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7489 {
7490 tree etype, rpart, ipart;
7491 int size;
7492
7493 etype = TREE_TYPE (type);
7494 size = GET_MODE_SIZE (TYPE_MODE (etype));
7495 if (size * 2 > len)
7496 return NULL_TREE;
7497 rpart = native_interpret_expr (etype, ptr, size);
7498 if (!rpart)
7499 return NULL_TREE;
7500 ipart = native_interpret_expr (etype, ptr+size, size);
7501 if (!ipart)
7502 return NULL_TREE;
7503 return build_complex (type, rpart, ipart);
7504 }
7505
7506
7507 /* Subroutine of native_interpret_expr. Interpret the contents of
7508 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7509 If the buffer cannot be interpreted, return NULL_TREE. */
7510
7511 static tree
7512 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7513 {
7514 tree etype, elem, elements;
7515 int i, size, count;
7516
7517 etype = TREE_TYPE (type);
7518 size = GET_MODE_SIZE (TYPE_MODE (etype));
7519 count = TYPE_VECTOR_SUBPARTS (type);
7520 if (size * count > len)
7521 return NULL_TREE;
7522
7523 elements = NULL_TREE;
7524 for (i = count - 1; i >= 0; i--)
7525 {
7526 elem = native_interpret_expr (etype, ptr+(i*size), size);
7527 if (!elem)
7528 return NULL_TREE;
7529 elements = tree_cons (NULL_TREE, elem, elements);
7530 }
7531 return build_vector (type, elements);
7532 }
7533
7534
7535 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7536 the buffer PTR of length LEN as a constant of type TYPE. For
7537 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7538 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7539 return NULL_TREE. */
7540
7541 tree
7542 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7543 {
7544 switch (TREE_CODE (type))
7545 {
7546 case INTEGER_TYPE:
7547 case ENUMERAL_TYPE:
7548 case BOOLEAN_TYPE:
7549 return native_interpret_int (type, ptr, len);
7550
7551 case REAL_TYPE:
7552 return native_interpret_real (type, ptr, len);
7553
7554 case COMPLEX_TYPE:
7555 return native_interpret_complex (type, ptr, len);
7556
7557 case VECTOR_TYPE:
7558 return native_interpret_vector (type, ptr, len);
7559
7560 default:
7561 return NULL_TREE;
7562 }
7563 }
7564
7565
7566 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7567 TYPE at compile-time. If we're unable to perform the conversion
7568 return NULL_TREE. */
7569
7570 static tree
7571 fold_view_convert_expr (tree type, tree expr)
7572 {
7573 /* We support up to 512-bit values (for V8DFmode). */
7574 unsigned char buffer[64];
7575 int len;
7576
7577 /* Check that the host and target are sane. */
7578 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7579 return NULL_TREE;
7580
7581 len = native_encode_expr (expr, buffer, sizeof (buffer));
7582 if (len == 0)
7583 return NULL_TREE;
7584
7585 return native_interpret_expr (type, buffer, len);
7586 }
7587
7588 /* Build an expression for the address of T. Folds away INDIRECT_REF
7589 to avoid confusing the gimplify process. */
7590
7591 tree
7592 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7593 {
7594 /* The size of the object is not relevant when talking about its address. */
7595 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7596 t = TREE_OPERAND (t, 0);
7597
7598 /* Note: this does not apply to ALIGN_INDIRECT_REF. */
7599 if (TREE_CODE (t) == INDIRECT_REF
7600 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7601 {
7602 t = TREE_OPERAND (t, 0);
7603
7604 if (TREE_TYPE (t) != ptrtype)
7605 {
7606 t = build1 (NOP_EXPR, ptrtype, t);
7607 SET_EXPR_LOCATION (t, loc);
7608 }
7609 }
7610 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7611 {
7612 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7613
7614 if (TREE_TYPE (t) != ptrtype)
7615 t = fold_convert_loc (loc, ptrtype, t);
7616 }
7617 else
7618 {
7619 t = build1 (ADDR_EXPR, ptrtype, t);
7620 SET_EXPR_LOCATION (t, loc);
7621 }
7622
7623 return t;
7624 }
7625
7626 /* Build an expression for the address of T. */
7627
7628 tree
7629 build_fold_addr_expr_loc (location_t loc, tree t)
7630 {
7631 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7632
7633 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7634 }
7635
7636 /* Fold a unary expression of code CODE and type TYPE with operand
7637 OP0. Return the folded expression if folding is successful.
7638 Otherwise, return NULL_TREE. */
7639
7640 tree
7641 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7642 {
7643 tree tem;
7644 tree arg0;
7645 enum tree_code_class kind = TREE_CODE_CLASS (code);
7646
7647 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7648 && TREE_CODE_LENGTH (code) == 1);
7649
7650 arg0 = op0;
7651 if (arg0)
7652 {
7653 if (CONVERT_EXPR_CODE_P (code)
7654 || code == FLOAT_EXPR || code == ABS_EXPR)
7655 {
7656 /* Don't use STRIP_NOPS, because the signedness of the argument
7657 type matters. */
7658 STRIP_SIGN_NOPS (arg0);
7659 }
7660 else
7661 {
7662 /* Strip any conversions that don't change the mode. This
7663 is safe for every expression, except for a comparison
7664 expression because its signedness is derived from its
7665 operands.
7666
7667 Note that this is done as an internal manipulation within
7668 the constant folder, in order to find the simplest
7669 representation of the arguments so that their form can be
7670 studied. In any case, the appropriate type conversions
7671 should be put back in the tree that will get out of the
7672 constant folder. */
7673 STRIP_NOPS (arg0);
7674 }
7675 }
7676
7677 if (TREE_CODE_CLASS (code) == tcc_unary)
7678 {
7679 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7680 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7681 fold_build1_loc (loc, code, type,
7682 fold_convert_loc (loc, TREE_TYPE (op0),
7683 TREE_OPERAND (arg0, 1))));
7684 else if (TREE_CODE (arg0) == COND_EXPR)
7685 {
7686 tree arg01 = TREE_OPERAND (arg0, 1);
7687 tree arg02 = TREE_OPERAND (arg0, 2);
7688 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7689 arg01 = fold_build1_loc (loc, code, type,
7690 fold_convert_loc (loc,
7691 TREE_TYPE (op0), arg01));
7692 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7693 arg02 = fold_build1_loc (loc, code, type,
7694 fold_convert_loc (loc,
7695 TREE_TYPE (op0), arg02));
7696 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7697 arg01, arg02);
7698
7699 /* If this was a conversion, and all we did was to move it
7700 inside the COND_EXPR, bring it back out. But leave it if
7701 it is a conversion from integer to integer and the
7702 result precision is no wider than a word since such a
7703 conversion is cheap and may be optimized away by combine,
7704 while it couldn't if it were outside the COND_EXPR. Then return
7705 so we don't get into an infinite recursion loop taking the
7706 conversion out and then back in. */
7707
7708 if ((CONVERT_EXPR_CODE_P (code)
7709 || code == NON_LVALUE_EXPR)
7710 && TREE_CODE (tem) == COND_EXPR
7711 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7712 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7713 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7714 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7715 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7716 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7717 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7718 && (INTEGRAL_TYPE_P
7719 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7720 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7721 || flag_syntax_only))
7722 {
7723 tem = build1 (code, type,
7724 build3 (COND_EXPR,
7725 TREE_TYPE (TREE_OPERAND
7726 (TREE_OPERAND (tem, 1), 0)),
7727 TREE_OPERAND (tem, 0),
7728 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7729 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7730 SET_EXPR_LOCATION (tem, loc);
7731 }
7732 return tem;
7733 }
7734 else if (COMPARISON_CLASS_P (arg0))
7735 {
7736 if (TREE_CODE (type) == BOOLEAN_TYPE)
7737 {
7738 arg0 = copy_node (arg0);
7739 TREE_TYPE (arg0) = type;
7740 return arg0;
7741 }
7742 else if (TREE_CODE (type) != INTEGER_TYPE)
7743 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7744 fold_build1_loc (loc, code, type,
7745 integer_one_node),
7746 fold_build1_loc (loc, code, type,
7747 integer_zero_node));
7748 }
7749 }
7750
7751 switch (code)
7752 {
7753 case PAREN_EXPR:
7754 /* Re-association barriers around constants and other re-association
7755 barriers can be removed. */
7756 if (CONSTANT_CLASS_P (op0)
7757 || TREE_CODE (op0) == PAREN_EXPR)
7758 return fold_convert_loc (loc, type, op0);
7759 return NULL_TREE;
7760
7761 CASE_CONVERT:
7762 case FLOAT_EXPR:
7763 case FIX_TRUNC_EXPR:
7764 if (TREE_TYPE (op0) == type)
7765 return op0;
7766
7767 /* If we have (type) (a CMP b) and type is an integral type, return
7768 new expression involving the new type. */
7769 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7770 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7771 TREE_OPERAND (op0, 1));
7772
7773 /* Handle cases of two conversions in a row. */
7774 if (CONVERT_EXPR_P (op0))
7775 {
7776 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7777 tree inter_type = TREE_TYPE (op0);
7778 int inside_int = INTEGRAL_TYPE_P (inside_type);
7779 int inside_ptr = POINTER_TYPE_P (inside_type);
7780 int inside_float = FLOAT_TYPE_P (inside_type);
7781 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7782 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7783 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7784 int inter_int = INTEGRAL_TYPE_P (inter_type);
7785 int inter_ptr = POINTER_TYPE_P (inter_type);
7786 int inter_float = FLOAT_TYPE_P (inter_type);
7787 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7788 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7789 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7790 int final_int = INTEGRAL_TYPE_P (type);
7791 int final_ptr = POINTER_TYPE_P (type);
7792 int final_float = FLOAT_TYPE_P (type);
7793 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7794 unsigned int final_prec = TYPE_PRECISION (type);
7795 int final_unsignedp = TYPE_UNSIGNED (type);
7796
7797 /* In addition to the cases of two conversions in a row
7798 handled below, if we are converting something to its own
7799 type via an object of identical or wider precision, neither
7800 conversion is needed. */
7801 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7802 && (((inter_int || inter_ptr) && final_int)
7803 || (inter_float && final_float))
7804 && inter_prec >= final_prec)
7805 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7806
7807 /* Likewise, if the intermediate and initial types are either both
7808 float or both integer, we don't need the middle conversion if the
7809 former is wider than the latter and doesn't change the signedness
7810 (for integers). Avoid this if the final type is a pointer since
7811 then we sometimes need the middle conversion. Likewise if the
7812 final type has a precision not equal to the size of its mode. */
7813 if (((inter_int && inside_int)
7814 || (inter_float && inside_float)
7815 || (inter_vec && inside_vec))
7816 && inter_prec >= inside_prec
7817 && (inter_float || inter_vec
7818 || inter_unsignedp == inside_unsignedp)
7819 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7820 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7821 && ! final_ptr
7822 && (! final_vec || inter_prec == inside_prec))
7823 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7824
7825 /* If we have a sign-extension of a zero-extended value, we can
7826 replace that by a single zero-extension. */
7827 if (inside_int && inter_int && final_int
7828 && inside_prec < inter_prec && inter_prec < final_prec
7829 && inside_unsignedp && !inter_unsignedp)
7830 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7831
7832 /* Two conversions in a row are not needed unless:
7833 - some conversion is floating-point (overstrict for now), or
7834 - some conversion is a vector (overstrict for now), or
7835 - the intermediate type is narrower than both initial and
7836 final, or
7837 - the intermediate type and innermost type differ in signedness,
7838 and the outermost type is wider than the intermediate, or
7839 - the initial type is a pointer type and the precisions of the
7840 intermediate and final types differ, or
7841 - the final type is a pointer type and the precisions of the
7842 initial and intermediate types differ. */
7843 if (! inside_float && ! inter_float && ! final_float
7844 && ! inside_vec && ! inter_vec && ! final_vec
7845 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7846 && ! (inside_int && inter_int
7847 && inter_unsignedp != inside_unsignedp
7848 && inter_prec < final_prec)
7849 && ((inter_unsignedp && inter_prec > inside_prec)
7850 == (final_unsignedp && final_prec > inter_prec))
7851 && ! (inside_ptr && inter_prec != final_prec)
7852 && ! (final_ptr && inside_prec != inter_prec)
7853 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7854 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7855 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7856 }
7857
7858 /* Handle (T *)&A.B.C for A being of type T and B and C
7859 living at offset zero. This occurs frequently in
7860 C++ upcasting and then accessing the base. */
7861 if (TREE_CODE (op0) == ADDR_EXPR
7862 && POINTER_TYPE_P (type)
7863 && handled_component_p (TREE_OPERAND (op0, 0)))
7864 {
7865 HOST_WIDE_INT bitsize, bitpos;
7866 tree offset;
7867 enum machine_mode mode;
7868 int unsignedp, volatilep;
7869 tree base = TREE_OPERAND (op0, 0);
7870 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7871 &mode, &unsignedp, &volatilep, false);
7872 /* If the reference was to a (constant) zero offset, we can use
7873 the address of the base if it has the same base type
7874 as the result type and the pointer type is unqualified. */
7875 if (! offset && bitpos == 0
7876 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7877 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7878 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7879 return fold_convert_loc (loc, type,
7880 build_fold_addr_expr_loc (loc, base));
7881 }
7882
7883 if (TREE_CODE (op0) == MODIFY_EXPR
7884 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7885 /* Detect assigning a bitfield. */
7886 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7887 && DECL_BIT_FIELD
7888 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7889 {
7890 /* Don't leave an assignment inside a conversion
7891 unless assigning a bitfield. */
7892 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7893 /* First do the assignment, then return converted constant. */
7894 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7895 TREE_NO_WARNING (tem) = 1;
7896 TREE_USED (tem) = 1;
7897 SET_EXPR_LOCATION (tem, loc);
7898 return tem;
7899 }
7900
7901 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7902 constants (if x has signed type, the sign bit cannot be set
7903 in c). This folds extension into the BIT_AND_EXPR.
7904 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7905 very likely don't have maximal range for their precision and this
7906 transformation effectively doesn't preserve non-maximal ranges. */
7907 if (TREE_CODE (type) == INTEGER_TYPE
7908 && TREE_CODE (op0) == BIT_AND_EXPR
7909 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7910 {
7911 tree and_expr = op0;
7912 tree and0 = TREE_OPERAND (and_expr, 0);
7913 tree and1 = TREE_OPERAND (and_expr, 1);
7914 int change = 0;
7915
7916 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7917 || (TYPE_PRECISION (type)
7918 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7919 change = 1;
7920 else if (TYPE_PRECISION (TREE_TYPE (and1))
7921 <= HOST_BITS_PER_WIDE_INT
7922 && host_integerp (and1, 1))
7923 {
7924 unsigned HOST_WIDE_INT cst;
7925
7926 cst = tree_low_cst (and1, 1);
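          /* The cast can be folded into the BIT_AND_EXPR only if the
             constant has no bits set at or above the sign-bit position
             of the narrower type.  */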
7927 cst &= (HOST_WIDE_INT) -1
7928 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7929 change = (cst == 0);
7930 #ifdef LOAD_EXTEND_OP
7931 if (change
7932 && !flag_syntax_only
7933 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7934 == ZERO_EXTEND))
7935 {
7936 tree uns = unsigned_type_for (TREE_TYPE (and0));
7937 and0 = fold_convert_loc (loc, uns, and0);
7938 and1 = fold_convert_loc (loc, uns, and1);
7939 }
7940 #endif
7941 }
7942 if (change)
7943 {
7944 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7945 TREE_INT_CST_HIGH (and1), 0,
7946 TREE_OVERFLOW (and1));
7947 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7948 fold_convert_loc (loc, type, and0), tem);
7949 }
7950 }
7951
7952 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7953 when one of the new casts will fold away. Conservatively we assume
7954 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7955 if (POINTER_TYPE_P (type)
7956 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7957 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7958 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7959 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7960 {
7961 tree arg00 = TREE_OPERAND (arg0, 0);
7962 tree arg01 = TREE_OPERAND (arg0, 1);
7963
7964 return fold_build2_loc (loc,
7965 TREE_CODE (arg0), type,
7966 fold_convert_loc (loc, type, arg00),
7967 fold_convert_loc (loc, sizetype, arg01));
7968 }
7969
7970 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7971 of the same precision, and X is an integer type not narrower than
7972 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7973 if (INTEGRAL_TYPE_P (type)
7974 && TREE_CODE (op0) == BIT_NOT_EXPR
7975 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7976 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7977 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7978 {
7979 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7980 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7981 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7982 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7983 fold_convert_loc (loc, type, tem));
7984 }
7985
7986 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7987 type of X and Y (integer types only). */
7988 if (INTEGRAL_TYPE_P (type)
7989 && TREE_CODE (op0) == MULT_EXPR
7990 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7991 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7992 {
7993 /* Be careful not to introduce new overflows. */
7994 tree mult_type;
7995 if (TYPE_OVERFLOW_WRAPS (type))
7996 mult_type = type;
7997 else
7998 mult_type = unsigned_type_for (type);
7999
8000 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8001 {
8002 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8003 fold_convert_loc (loc, mult_type,
8004 TREE_OPERAND (op0, 0)),
8005 fold_convert_loc (loc, mult_type,
8006 TREE_OPERAND (op0, 1)));
8007 return fold_convert_loc (loc, type, tem);
8008 }
8009 }
8010
8011 tem = fold_convert_const (code, type, op0);
8012 return tem ? tem : NULL_TREE;
8013
8014 case ADDR_SPACE_CONVERT_EXPR:
8015 if (integer_zerop (arg0))
8016 return fold_convert_const (code, type, arg0);
8017 return NULL_TREE;
8018
8019 case FIXED_CONVERT_EXPR:
8020 tem = fold_convert_const (code, type, arg0);
8021 return tem ? tem : NULL_TREE;
8022
8023 case VIEW_CONVERT_EXPR:
8024 if (TREE_TYPE (op0) == type)
8025 return op0;
8026 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8027 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8028 type, TREE_OPERAND (op0, 0));
8029
8030 /* For integral conversions with the same precision or pointer
8031 conversions use a NOP_EXPR instead. */
8032 if ((INTEGRAL_TYPE_P (type)
8033 || POINTER_TYPE_P (type))
8034 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8035 || POINTER_TYPE_P (TREE_TYPE (op0)))
8036 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8037 return fold_convert_loc (loc, type, op0);
8038
8039 /* Strip inner integral conversions that do not change the precision. */
8040 if (CONVERT_EXPR_P (op0)
8041 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8042 || POINTER_TYPE_P (TREE_TYPE (op0)))
8043 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8044 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8045 && (TYPE_PRECISION (TREE_TYPE (op0))
8046 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8047 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8048 type, TREE_OPERAND (op0, 0));
8049
8050 return fold_view_convert_expr (type, op0);
8051
8052 case NEGATE_EXPR:
8053 tem = fold_negate_expr (loc, arg0);
8054 if (tem)
8055 return fold_convert_loc (loc, type, tem);
8056 return NULL_TREE;
8057
8058 case ABS_EXPR:
8059 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8060 return fold_abs_const (arg0, type);
8061 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8062 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8063 /* Convert fabs((double)float) into (double)fabsf(float). */
8064 else if (TREE_CODE (arg0) == NOP_EXPR
8065 && TREE_CODE (type) == REAL_TYPE)
8066 {
8067 tree targ0 = strip_float_extensions (arg0);
8068 if (targ0 != arg0)
8069 return fold_convert_loc (loc, type,
8070 fold_build1_loc (loc, ABS_EXPR,
8071 TREE_TYPE (targ0),
8072 targ0));
8073 }
8074 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8075 else if (TREE_CODE (arg0) == ABS_EXPR)
8076 return arg0;
8077 else if (tree_expr_nonnegative_p (arg0))
8078 return arg0;
8079
8080 /* Strip sign ops from argument. */
8081 if (TREE_CODE (type) == REAL_TYPE)
8082 {
8083 tem = fold_strip_sign_ops (arg0);
8084 if (tem)
8085 return fold_build1_loc (loc, ABS_EXPR, type,
8086 fold_convert_loc (loc, type, tem));
8087 }
8088 return NULL_TREE;
8089
8090 case CONJ_EXPR:
8091 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8092 return fold_convert_loc (loc, type, arg0);
8093 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8094 {
8095 tree itype = TREE_TYPE (type);
8096 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8097 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8098 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8099 negate_expr (ipart));
8100 }
8101 if (TREE_CODE (arg0) == COMPLEX_CST)
8102 {
8103 tree itype = TREE_TYPE (type);
8104 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8105 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8106 return build_complex (type, rpart, negate_expr (ipart));
8107 }
8108 if (TREE_CODE (arg0) == CONJ_EXPR)
8109 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8110 return NULL_TREE;
8111
8112 case BIT_NOT_EXPR:
8113 if (TREE_CODE (arg0) == INTEGER_CST)
8114 return fold_not_const (arg0, type);
8115 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8116 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8117 /* Convert ~ (-A) to A - 1. */
8118 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8119 return fold_build2_loc (loc, MINUS_EXPR, type,
8120 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8121 build_int_cst (type, 1));
8122 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8123 else if (INTEGRAL_TYPE_P (type)
8124 && ((TREE_CODE (arg0) == MINUS_EXPR
8125 && integer_onep (TREE_OPERAND (arg0, 1)))
8126 || (TREE_CODE (arg0) == PLUS_EXPR
8127 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8128 return fold_build1_loc (loc, NEGATE_EXPR, type,
8129 fold_convert_loc (loc, type,
8130 TREE_OPERAND (arg0, 0)));
8131 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8132 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8133 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8134 fold_convert_loc (loc, type,
8135 TREE_OPERAND (arg0, 0)))))
8136 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8137 fold_convert_loc (loc, type,
8138 TREE_OPERAND (arg0, 1)));
8139 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8140 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8141 fold_convert_loc (loc, type,
8142 TREE_OPERAND (arg0, 1)))))
8143 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8144 fold_convert_loc (loc, type,
8145 TREE_OPERAND (arg0, 0)), tem);
8146 /* Perform BIT_NOT_EXPR on each element individually. */
8147 else if (TREE_CODE (arg0) == VECTOR_CST)
8148 {
8149 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8150 int count = TYPE_VECTOR_SUBPARTS (type), i;
8151
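      /* Elements beyond the explicit element list are implicitly
         zero, so their complement is all ones.  */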
8152 for (i = 0; i < count; i++)
8153 {
8154 if (elements)
8155 {
8156 elem = TREE_VALUE (elements);
8157 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8158 if (elem == NULL_TREE)
8159 break;
8160 elements = TREE_CHAIN (elements);
8161 }
8162 else
8163 elem = build_int_cst (TREE_TYPE (type), -1);
8164 list = tree_cons (NULL_TREE, elem, list);
8165 }
8166 if (i == count)
8167 return build_vector (type, nreverse (list));
8168 }
8169
8170 return NULL_TREE;
8171
8172 case TRUTH_NOT_EXPR:
8173 /* The argument to invert_truthvalue must have Boolean type. */
8174 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8175 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8176
8177 /* Note that the operand of this must be an int
8178 and its values must be 0 or 1.
8179 ("true" is a fixed value perhaps depending on the language,
8180 but we don't handle values other than 1 correctly yet.) */
8181 tem = fold_truth_not_expr (loc, arg0);
8182 if (!tem)
8183 return NULL_TREE;
8184 return fold_convert_loc (loc, type, tem);
8185
8186 case REALPART_EXPR:
8187 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8188 return fold_convert_loc (loc, type, arg0);
8189 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8190 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8191 TREE_OPERAND (arg0, 1));
8192 if (TREE_CODE (arg0) == COMPLEX_CST)
8193 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8194 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8195 {
8196 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8197 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8198 fold_build1_loc (loc, REALPART_EXPR, itype,
8199 TREE_OPERAND (arg0, 0)),
8200 fold_build1_loc (loc, REALPART_EXPR, itype,
8201 TREE_OPERAND (arg0, 1)));
8202 return fold_convert_loc (loc, type, tem);
8203 }
8204 if (TREE_CODE (arg0) == CONJ_EXPR)
8205 {
8206 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8207 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8208 TREE_OPERAND (arg0, 0));
8209 return fold_convert_loc (loc, type, tem);
8210 }
8211 if (TREE_CODE (arg0) == CALL_EXPR)
8212 {
8213 tree fn = get_callee_fndecl (arg0);
8214 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8215 switch (DECL_FUNCTION_CODE (fn))
8216 {
8217 CASE_FLT_FN (BUILT_IN_CEXPI):
8218 fn = mathfn_built_in (type, BUILT_IN_COS);
8219 if (fn)
8220 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8221 break;
8222
8223 default:
8224 break;
8225 }
8226 }
8227 return NULL_TREE;
8228
8229 case IMAGPART_EXPR:
8230 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8231 return fold_convert_loc (loc, type, integer_zero_node);
8232 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8233 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8234 TREE_OPERAND (arg0, 0));
8235 if (TREE_CODE (arg0) == COMPLEX_CST)
8236 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8237 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8238 {
8239 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8240 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8241 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8242 TREE_OPERAND (arg0, 0)),
8243 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8244 TREE_OPERAND (arg0, 1)));
8245 return fold_convert_loc (loc, type, tem);
8246 }
8247 if (TREE_CODE (arg0) == CONJ_EXPR)
8248 {
8249 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8250 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8251 return fold_convert_loc (loc, type, negate_expr (tem));
8252 }
8253 if (TREE_CODE (arg0) == CALL_EXPR)
8254 {
8255 tree fn = get_callee_fndecl (arg0);
8256 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8257 switch (DECL_FUNCTION_CODE (fn))
8258 {
8259 CASE_FLT_FN (BUILT_IN_CEXPI):
8260 fn = mathfn_built_in (type, BUILT_IN_SIN);
8261 if (fn)
8262 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8263 break;
8264
8265 default:
8266 break;
8267 }
8268 }
8269 return NULL_TREE;
8270
8271 case INDIRECT_REF:
8272 /* Fold *&X to X if X is an lvalue. */
8273 if (TREE_CODE (op0) == ADDR_EXPR)
8274 {
8275 tree op00 = TREE_OPERAND (op0, 0);
8276 if ((TREE_CODE (op00) == VAR_DECL
8277 || TREE_CODE (op00) == PARM_DECL
8278 || TREE_CODE (op00) == RESULT_DECL)
8279 && !TREE_READONLY (op00))
8280 return op00;
8281 }
8282 return NULL_TREE;
8283
8284 default:
8285 return NULL_TREE;
8286 } /* switch (code) */
8287 }
8288
8289
8290 /* If the operation was a conversion, do _not_ mark a resulting constant
8291 with TREE_OVERFLOW if the original constant was not. These conversions
8292 have implementation defined behavior and retaining the TREE_OVERFLOW
8293 flag here would confuse later passes such as VRP. */
8294 tree
8295 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8296 tree type, tree op0)
8297 {
8298 tree res = fold_unary_loc (loc, code, type, op0);
8299 if (res
8300 && TREE_CODE (res) == INTEGER_CST
8301 && TREE_CODE (op0) == INTEGER_CST
8302 && CONVERT_EXPR_CODE_P (code))
8303 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8304
8305 return res;
8306 }
8307
8308 /* Fold a binary expression of code CODE and type TYPE with operands
8309 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8310 Return the folded expression if folding is successful. Otherwise,
8311 return NULL_TREE. */
8312
8313 static tree
8314 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8315 {
8316 enum tree_code compl_code;
8317
8318 if (code == MIN_EXPR)
8319 compl_code = MAX_EXPR;
8320 else if (code == MAX_EXPR)
8321 compl_code = MIN_EXPR;
8322 else
8323 gcc_unreachable ();
8324
8325 /* MIN (MAX (a, b), b) == b. */
8326 if (TREE_CODE (op0) == compl_code
8327 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8328 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8329
8330 /* MIN (MAX (b, a), b) == b. */
8331 if (TREE_CODE (op0) == compl_code
8332 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8333 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8334 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8335
8336 /* MIN (a, MAX (a, b)) == a. */
8337 if (TREE_CODE (op1) == compl_code
8338 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8339 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8340 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8341
8342 /* MIN (a, MAX (b, a)) == a. */
8343 if (TREE_CODE (op1) == compl_code
8344 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8345 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8346 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8347
8348 return NULL_TREE;
8349 }
8350
8351 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8352 by changing CODE to reduce the magnitude of constants involved in
8353 ARG0 of the comparison.
8354 Returns a canonicalized comparison tree if a simplification was
8355 possible, otherwise returns NULL_TREE.
8356 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8357 valid if signed overflow is undefined. */
8358
8359 static tree
8360 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8361 tree arg0, tree arg1,
8362 bool *strict_overflow_p)
8363 {
8364 enum tree_code code0 = TREE_CODE (arg0);
8365 tree t, cst0 = NULL_TREE;
8366 int sgn0;
8367 bool swap = false;
8368
8369 /* Match A +- CST code arg1 and CST code arg1. We can change the
8370 first form only if overflow is undefined. */
8371 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8372 /* In principle pointers also have undefined overflow behavior,
8373 but that causes problems elsewhere. */
8374 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8375 && (code0 == MINUS_EXPR
8376 || code0 == PLUS_EXPR)
8377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8378 || code0 == INTEGER_CST))
8379 return NULL_TREE;
8380
8381 /* Identify the constant in arg0 and its sign. */
8382 if (code0 == INTEGER_CST)
8383 cst0 = arg0;
8384 else
8385 cst0 = TREE_OPERAND (arg0, 1);
8386 sgn0 = tree_int_cst_sgn (cst0);
8387
8388 /* Overflowed constants and zero will cause problems. */
8389 if (integer_zerop (cst0)
8390 || TREE_OVERFLOW (cst0))
8391 return NULL_TREE;
8392
8393 /* See if we can reduce the magnitude of the constant in
8394 arg0 by changing the comparison code. */
8395 if (code0 == INTEGER_CST)
8396 {
8397 /* CST <= arg1 -> CST-1 < arg1. */
8398 if (code == LE_EXPR && sgn0 == 1)
8399 code = LT_EXPR;
8400 /* -CST < arg1 -> -CST-1 <= arg1. */
8401 else if (code == LT_EXPR && sgn0 == -1)
8402 code = LE_EXPR;
8403 /* CST > arg1 -> CST-1 >= arg1. */
8404 else if (code == GT_EXPR && sgn0 == 1)
8405 code = GE_EXPR;
8406 /* -CST >= arg1 -> -CST-1 > arg1. */
8407 else if (code == GE_EXPR && sgn0 == -1)
8408 code = GT_EXPR;
8409 else
8410 return NULL_TREE;
8411 /* arg1 code' CST' might be more canonical. */
8412 swap = true;
8413 }
8414 else
8415 {
8416 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8417 if (code == LT_EXPR
8418 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8419 code = LE_EXPR;
8420 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8421 else if (code == GT_EXPR
8422 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8423 code = GE_EXPR;
8424 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8425 else if (code == LE_EXPR
8426 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8427 code = LT_EXPR;
8428 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8429 else if (code == GE_EXPR
8430 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8431 code = GT_EXPR;
8432 else
8433 return NULL_TREE;
8434 *strict_overflow_p = true;
8435 }
8436
8437 /* Now build the constant reduced in magnitude. But not if that
8438 would produce one outside of its type's range. */
8439 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8440 && ((sgn0 == 1
8441 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8442 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8443 || (sgn0 == -1
8444 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8445 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8446 /* We cannot swap the comparison here as that would cause us to
8447 endlessly recurse. */
8448 return NULL_TREE;
8449
8450 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8451 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8452 if (code0 != INTEGER_CST)
8453 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8454
8455 /* If swapping might yield a more canonical form, do so. */
8456 if (swap)
8457 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8458 else
8459 return fold_build2_loc (loc, code, type, t, arg1);
8460 }
8461
8462 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8463 overflow further. Try to decrease the magnitude of constants involved
8464 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8465 and put sole constants at the second argument position.
8466 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8467
8468 static tree
8469 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8470 tree arg0, tree arg1)
8471 {
8472 tree t;
8473 bool strict_overflow_p;
8474 const char * const warnmsg = G_("assuming signed overflow does not occur "
8475 "when reducing constant in comparison");
8476
8477 /* Try canonicalization by simplifying arg0. */
8478 strict_overflow_p = false;
8479 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8480 &strict_overflow_p);
8481 if (t)
8482 {
8483 if (strict_overflow_p)
8484 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8485 return t;
8486 }
8487
8488 /* Try canonicalization by simplifying arg1 using the swapped
8489 comparison. */
8490 code = swap_tree_comparison (code);
8491 strict_overflow_p = false;
8492 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8493 &strict_overflow_p);
8494 if (t && strict_overflow_p)
8495 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8496 return t;
8497 }
8498
8499 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8500 space. This is used to avoid issuing overflow warnings for
8501 expressions like &p->x which cannot wrap. */
8502
8503 static bool
8504 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8505 {
8506 unsigned HOST_WIDE_INT offset_low, total_low;
8507 HOST_WIDE_INT size, offset_high, total_high;
8508
8509 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8510 return true;
8511
8512 if (bitpos < 0)
8513 return true;
8514
8515 if (offset == NULL_TREE)
8516 {
8517 offset_low = 0;
8518 offset_high = 0;
8519 }
8520 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8521 return true;
8522 else
8523 {
8524 offset_low = TREE_INT_CST_LOW (offset);
8525 offset_high = TREE_INT_CST_HIGH (offset);
8526 }
8527
8528 if (add_double_with_sign (offset_low, offset_high,
8529 bitpos / BITS_PER_UNIT, 0,
8530 &total_low, &total_high,
8531 true))
8532 return true;
8533
8534 if (total_high != 0)
8535 return true;
8536
8537 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8538 if (size <= 0)
8539 return true;
8540
8541 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8542 array. */
8543 if (TREE_CODE (base) == ADDR_EXPR)
8544 {
8545 HOST_WIDE_INT base_size;
8546
8547 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8548 if (base_size > 0 && size < base_size)
8549 size = base_size;
8550 }
8551
8552 return total_low > (unsigned HOST_WIDE_INT) size;
8553 }
8554
8555 /* Subroutine of fold_binary. This routine performs all of the
8556 transformations that are common to the equality/inequality
8557 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8558 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8559 fold_binary itself should call fold_binary. Fold a comparison with
8560 tree code CODE and type TYPE with operands OP0 and OP1. Return
8561 the folded comparison or NULL_TREE. */
8562
8563 static tree
8564 fold_comparison (location_t loc, enum tree_code code, tree type,
8565 tree op0, tree op1)
8566 {
8567 tree arg0, arg1, tem;
8568
8569 arg0 = op0;
8570 arg1 = op1;
8571
8572 STRIP_SIGN_NOPS (arg0);
8573 STRIP_SIGN_NOPS (arg1);
8574
8575 tem = fold_relational_const (code, type, arg0, arg1);
8576 if (tem != NULL_TREE)
8577 return tem;
8578
8579 /* If one arg is a real or integer constant, put it last. */
8580 if (tree_swap_operands_p (arg0, arg1, true))
8581 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8582
8583 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8584 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8585 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8586 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8587 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8588 && (TREE_CODE (arg1) == INTEGER_CST
8589 && !TREE_OVERFLOW (arg1)))
8590 {
8591 tree const1 = TREE_OPERAND (arg0, 1);
8592 tree const2 = arg1;
8593 tree variable = TREE_OPERAND (arg0, 0);
8594 tree lhs;
8595 int lhs_add;
8596 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8597
8598 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8599 TREE_TYPE (arg1), const2, const1);
8600
8601 /* If the constant operation overflowed this can be
8602 simplified as a comparison against INT_MAX/INT_MIN. */
8603 if (TREE_CODE (lhs) == INTEGER_CST
8604 && TREE_OVERFLOW (lhs))
8605 {
8606 int const1_sgn = tree_int_cst_sgn (const1);
8607 enum tree_code code2 = code;
8608
8609 /* Get the sign of the constant on the lhs if the
8610 operation were VARIABLE + CONST1. */
8611 if (TREE_CODE (arg0) == MINUS_EXPR)
8612 const1_sgn = -const1_sgn;
8613
8614 /* The sign of the constant determines if we overflowed
8615 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8616 Canonicalize to the INT_MIN overflow by swapping the comparison
8617 if necessary. */
8618 if (const1_sgn == -1)
8619 code2 = swap_tree_comparison (code);
8620
8621 /* We now can look at the canonicalized case
8622 VARIABLE + 1 CODE2 INT_MIN
8623 and decide on the result. */
8624 if (code2 == LT_EXPR
8625 || code2 == LE_EXPR
8626 || code2 == EQ_EXPR)
8627 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8628 else if (code2 == NE_EXPR
8629 || code2 == GE_EXPR
8630 || code2 == GT_EXPR)
8631 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8632 }
8633
8634 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8635 && (TREE_CODE (lhs) != INTEGER_CST
8636 || !TREE_OVERFLOW (lhs)))
8637 {
8638 fold_overflow_warning ("assuming signed overflow does not occur "
8639 "when changing X +- C1 cmp C2 to "
8640 "X cmp C1 +- C2",
8641 WARN_STRICT_OVERFLOW_COMPARISON);
8642 return fold_build2_loc (loc, code, type, variable, lhs);
8643 }
8644 }
8645
8646 /* For comparisons of pointers we can decompose them into a compile-time
8647 comparison of the base objects and the offsets into the object.
8648 This requires at least one operand being an ADDR_EXPR or a
8649 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8650 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8651 && (TREE_CODE (arg0) == ADDR_EXPR
8652 || TREE_CODE (arg1) == ADDR_EXPR
8653 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8654 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8655 {
8656 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8657 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8658 enum machine_mode mode;
8659 int volatilep, unsignedp;
8660 bool indirect_base0 = false, indirect_base1 = false;
8661
8662 /* Get base and offset for the access. Strip ADDR_EXPR for
8663 get_inner_reference, but put it back by stripping INDIRECT_REF
8664 off the base object if possible. indirect_baseN will be true
8665 if baseN is not an address but refers to the object itself. */
8666 base0 = arg0;
8667 if (TREE_CODE (arg0) == ADDR_EXPR)
8668 {
8669 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8670 &bitsize, &bitpos0, &offset0, &mode,
8671 &unsignedp, &volatilep, false);
8672 if (TREE_CODE (base0) == INDIRECT_REF)
8673 base0 = TREE_OPERAND (base0, 0);
8674 else
8675 indirect_base0 = true;
8676 }
8677 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8678 {
8679 base0 = TREE_OPERAND (arg0, 0);
8680 offset0 = TREE_OPERAND (arg0, 1);
8681 }
8682
8683 base1 = arg1;
8684 if (TREE_CODE (arg1) == ADDR_EXPR)
8685 {
8686 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8687 &bitsize, &bitpos1, &offset1, &mode,
8688 &unsignedp, &volatilep, false);
8689 if (TREE_CODE (base1) == INDIRECT_REF)
8690 base1 = TREE_OPERAND (base1, 0);
8691 else
8692 indirect_base1 = true;
8693 }
8694 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8695 {
8696 base1 = TREE_OPERAND (arg1, 0);
8697 offset1 = TREE_OPERAND (arg1, 1);
8698 }
8699
8700 /* A local variable can never be pointed to by
8701 the default SSA name of an incoming parameter. */
8702 if ((TREE_CODE (arg0) == ADDR_EXPR
8703 && indirect_base0
8704 && TREE_CODE (base0) == VAR_DECL
8705 && auto_var_in_fn_p (base0, current_function_decl)
8706 && !indirect_base1
8707 && TREE_CODE (base1) == SSA_NAME
8708 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8709 && SSA_NAME_IS_DEFAULT_DEF (base1))
8710 || (TREE_CODE (arg1) == ADDR_EXPR
8711 && indirect_base1
8712 && TREE_CODE (base1) == VAR_DECL
8713 && auto_var_in_fn_p (base1, current_function_decl)
8714 && !indirect_base0
8715 && TREE_CODE (base0) == SSA_NAME
8716 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8717 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8718 {
8719 if (code == NE_EXPR)
8720 return constant_boolean_node (1, type);
8721 else if (code == EQ_EXPR)
8722 return constant_boolean_node (0, type);
8723 }
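/* A sketch of the case handled above:
     int f (int *p) { int l; return p == &l; }
   folds the comparison to 0 once p is the parameter's default
   SSA name, since no caller can pass the address of l. */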
8724 /* If we have equivalent bases we might be able to simplify. */
8725 else if (indirect_base0 == indirect_base1
8726 && operand_equal_p (base0, base1, 0))
8727 {
8728 /* We can fold this expression to a constant if the non-constant
8729 offset parts are equal. */
8730 if ((offset0 == offset1
8731 || (offset0 && offset1
8732 && operand_equal_p (offset0, offset1, 0)))
8733 && (code == EQ_EXPR
8734 || code == NE_EXPR
8735 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8737 {
8738 if (code != EQ_EXPR
8739 && code != NE_EXPR
8740 && bitpos0 != bitpos1
8741 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8742 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8743 fold_overflow_warning (("assuming pointer wraparound does not "
8744 "occur when comparing P +- C1 with "
8745 "P +- C2"),
8746 WARN_STRICT_OVERFLOW_CONDITIONAL);
8747
8748 switch (code)
8749 {
8750 case EQ_EXPR:
8751 return constant_boolean_node (bitpos0 == bitpos1, type);
8752 case NE_EXPR:
8753 return constant_boolean_node (bitpos0 != bitpos1, type);
8754 case LT_EXPR:
8755 return constant_boolean_node (bitpos0 < bitpos1, type);
8756 case LE_EXPR:
8757 return constant_boolean_node (bitpos0 <= bitpos1, type);
8758 case GE_EXPR:
8759 return constant_boolean_node (bitpos0 >= bitpos1, type);
8760 case GT_EXPR:
8761 return constant_boolean_node (bitpos0 > bitpos1, type);
8762 default:;
8763 }
8764 }
8765 /* We can simplify the comparison to a comparison of the variable
8766 offset parts if the constant offset parts are equal.
8767 Be careful to use signed size type here because otherwise we
8768 mess with array offsets in the wrong way. This is possible
8769 because pointer arithmetic is restricted to remain within an
8770 object and overflow on pointer differences is undefined as of
8771 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8772 else if (bitpos0 == bitpos1
8773 && ((code == EQ_EXPR || code == NE_EXPR)
8774 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8775 {
8776 /* By converting to signed size type we cover middle-end pointer
8777 arithmetic (which operates on unsigned pointer types of the same
8778 size as size type) and ARRAY_REF offsets (which are properly
8779 sign- or zero-extended from their type in case it is narrower
8780 than size type). */
8781 if (offset0 == NULL_TREE)
8782 offset0 = build_int_cst (ssizetype, 0);
8783 else
8784 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8785 if (offset1 == NULL_TREE)
8786 offset1 = build_int_cst (ssizetype, 0);
8787 else
8788 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8789
8790 if (code != EQ_EXPR
8791 && code != NE_EXPR
8792 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8793 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8794 fold_overflow_warning (("assuming pointer wraparound does not "
8795 "occur when comparing P +- C1 with "
8796 "P +- C2"),
8797 WARN_STRICT_OVERFLOW_COMPARISON);
8798
8799 return fold_build2_loc (loc, code, type, offset0, offset1);
8800 }
8801 }
8802 /* For non-equal bases we can simplify if they are addresses
8803 of local binding decls or constants. */
8804 else if (indirect_base0 && indirect_base1
8805 /* We know that !operand_equal_p (base0, base1, 0)
8806 because the if condition was false. But make
8807 sure two decls are not the same. */
8808 && base0 != base1
8809 && TREE_CODE (arg0) == ADDR_EXPR
8810 && TREE_CODE (arg1) == ADDR_EXPR
8811 && (((TREE_CODE (base0) == VAR_DECL
8812 || TREE_CODE (base0) == PARM_DECL)
8813 && (targetm.binds_local_p (base0)
8814 || CONSTANT_CLASS_P (base1)))
8815 || CONSTANT_CLASS_P (base0))
8816 && (((TREE_CODE (base1) == VAR_DECL
8817 || TREE_CODE (base1) == PARM_DECL)
8818 && (targetm.binds_local_p (base1)
8819 || CONSTANT_CLASS_P (base0)))
8820 || CONSTANT_CLASS_P (base1)))
8821 {
8822 if (code == EQ_EXPR)
8823 return omit_two_operands_loc (loc, type, boolean_false_node,
8824 arg0, arg1);
8825 else if (code == NE_EXPR)
8826 return omit_two_operands_loc (loc, type, boolean_true_node,
8827 arg0, arg1);
8828 }
8829 /* For equal offsets we can simplify to a comparison of the
8830 base addresses. */
8831 else if (bitpos0 == bitpos1
8832 && (indirect_base0
8833 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8834 && (indirect_base1
8835 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8836 && ((offset0 == offset1)
8837 || (offset0 && offset1
8838 && operand_equal_p (offset0, offset1, 0))))
8839 {
8840 if (indirect_base0)
8841 base0 = build_fold_addr_expr_loc (loc, base0);
8842 if (indirect_base1)
8843 base1 = build_fold_addr_expr_loc (loc, base1);
8844 return fold_build2_loc (loc, code, type, base0, base1);
8845 }
8846 }
8847
8848 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8849 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8850 the resulting offset is smaller in absolute value than the
8851 original one. */
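/* For example, "x + 100 < y + 103" folds to "x < y + 3"; the
   combined offset 3 is smaller in absolute value than 103. */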
8852 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8853 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8854 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8855 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8856 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8857 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8858 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8859 {
8860 tree const1 = TREE_OPERAND (arg0, 1);
8861 tree const2 = TREE_OPERAND (arg1, 1);
8862 tree variable1 = TREE_OPERAND (arg0, 0);
8863 tree variable2 = TREE_OPERAND (arg1, 0);
8864 tree cst;
8865 const char * const warnmsg = G_("assuming signed overflow does not "
8866 "occur when combining constants around "
8867 "a comparison");
8868
8869 /* Put the constant on the side where it doesn't overflow and is
8870 of lower absolute value than before. */
8871 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8872 ? MINUS_EXPR : PLUS_EXPR,
8873 const2, const1, 0);
8874 if (!TREE_OVERFLOW (cst)
8875 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8876 {
8877 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8878 return fold_build2_loc (loc, code, type,
8879 variable1,
8880 fold_build2_loc (loc,
8881 TREE_CODE (arg1), TREE_TYPE (arg1),
8882 variable2, cst));
8883 }
8884
8885 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8886 ? MINUS_EXPR : PLUS_EXPR,
8887 const1, const2, 0);
8888 if (!TREE_OVERFLOW (cst)
8889 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8890 {
8891 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8892 return fold_build2_loc (loc, code, type,
8893 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8894 variable1, cst),
8895 variable2);
8896 }
8897 }
8898
8899 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8900 signed arithmetic case. That form is created by the compiler
8901 often enough for folding it to be of value. One example is in
8902 computing loop trip counts after Operator Strength Reduction. */
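/* For example, "i * 4 > 0" folds to "i > 0", and "i * -4 > 0"
   folds to "i < 0" (the sense of the comparison is swapped below
   for a negative constant). */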
8903 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8904 && TREE_CODE (arg0) == MULT_EXPR
8905 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8906 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8907 && integer_zerop (arg1))
8908 {
8909 tree const1 = TREE_OPERAND (arg0, 1);
8910 tree const2 = arg1; /* zero */
8911 tree variable1 = TREE_OPERAND (arg0, 0);
8912 enum tree_code cmp_code = code;
8913
8914 /* Handle unfolded multiplication by zero. */
8915 if (integer_zerop (const1))
8916 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8917
8918 fold_overflow_warning (("assuming signed overflow does not occur when "
8919 "eliminating multiplication in comparison "
8920 "with zero"),
8921 WARN_STRICT_OVERFLOW_COMPARISON);
8922
8923 /* If const1 is negative we swap the sense of the comparison. */
8924 if (tree_int_cst_sgn (const1) < 0)
8925 cmp_code = swap_tree_comparison (cmp_code);
8926
8927 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8928 }
8929
8930 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8931 if (tem)
8932 return tem;
8933
8934 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8935 {
8936 tree targ0 = strip_float_extensions (arg0);
8937 tree targ1 = strip_float_extensions (arg1);
8938 tree newtype = TREE_TYPE (targ0);
8939
8940 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8941 newtype = TREE_TYPE (targ1);
8942
8943 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8944 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8945 return fold_build2_loc (loc, code, type,
8946 fold_convert_loc (loc, newtype, targ0),
8947 fold_convert_loc (loc, newtype, targ1));
8948
8949 /* (-a) CMP (-b) -> b CMP a */
8950 if (TREE_CODE (arg0) == NEGATE_EXPR
8951 && TREE_CODE (arg1) == NEGATE_EXPR)
8952 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8953 TREE_OPERAND (arg0, 0));
8954
8955 if (TREE_CODE (arg1) == REAL_CST)
8956 {
8957 REAL_VALUE_TYPE cst;
8958 cst = TREE_REAL_CST (arg1);
8959
8960 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8961 if (TREE_CODE (arg0) == NEGATE_EXPR)
8962 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8963 TREE_OPERAND (arg0, 0),
8964 build_real (TREE_TYPE (arg1),
8965 real_value_negate (&cst)));
8966
8967 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8968 /* a CMP (-0) -> a CMP 0 */
8969 if (REAL_VALUE_MINUS_ZERO (cst))
8970 return fold_build2_loc (loc, code, type, arg0,
8971 build_real (TREE_TYPE (arg1), dconst0));
8972
8973 /* x != NaN is always true, other ops are always false. */
8974 if (REAL_VALUE_ISNAN (cst)
8975 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8976 {
8977 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8978 return omit_one_operand_loc (loc, type, tem, arg0);
8979 }
8980
8981 /* Fold comparisons against infinity. */
8982 if (REAL_VALUE_ISINF (cst)
8983 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8984 {
8985 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8986 if (tem != NULL_TREE)
8987 return tem;
8988 }
8989 }
8990
8991 /* If this is a comparison of a real constant with a PLUS_EXPR
8992 or a MINUS_EXPR of a real constant, we can convert it into a
8993 comparison with a revised real constant as long as no overflow
8994 occurs when unsafe_math_optimizations are enabled. */
8995 if (flag_unsafe_math_optimizations
8996 && TREE_CODE (arg1) == REAL_CST
8997 && (TREE_CODE (arg0) == PLUS_EXPR
8998 || TREE_CODE (arg0) == MINUS_EXPR)
8999 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9000 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9001 ? MINUS_EXPR : PLUS_EXPR,
9002 arg1, TREE_OPERAND (arg0, 1)))
9003 && !TREE_OVERFLOW (tem))
9004 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9005
9006 /* Likewise, we can simplify a comparison of a real constant with
9007 a MINUS_EXPR whose first operand is also a real constant, i.e.
9008 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9009 floating-point types only if -fassociative-math is set. */
9010 if (flag_associative_math
9011 && TREE_CODE (arg1) == REAL_CST
9012 && TREE_CODE (arg0) == MINUS_EXPR
9013 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9014 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9015 arg1))
9016 && !TREE_OVERFLOW (tem))
9017 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9018 TREE_OPERAND (arg0, 1), tem);
9019
9020 /* Fold comparisons against built-in math functions. */
9021 if (TREE_CODE (arg1) == REAL_CST
9022 && flag_unsafe_math_optimizations
9023 && ! flag_errno_math)
9024 {
9025 enum built_in_function fcode = builtin_mathfn_code (arg0);
9026
9027 if (fcode != END_BUILTINS)
9028 {
9029 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9030 if (tem != NULL_TREE)
9031 return tem;
9032 }
9033 }
9034 }
9035
9036 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9037 && CONVERT_EXPR_P (arg0))
9038 {
9039 /* If we are widening one operand of an integer comparison,
9040 see if the other operand is similarly being widened. Perhaps we
9041 can do the comparison in the narrower type. */
9042 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9043 if (tem)
9044 return tem;
9045
9046 /* Or if we are changing signedness. */
9047 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9048 if (tem)
9049 return tem;
9050 }
9051
9052 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9053 constant, we can simplify it. */
9054 if (TREE_CODE (arg1) == INTEGER_CST
9055 && (TREE_CODE (arg0) == MIN_EXPR
9056 || TREE_CODE (arg0) == MAX_EXPR)
9057 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9058 {
9059 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9060 if (tem)
9061 return tem;
9062 }
9063
9064 /* Simplify comparison of something with itself. (For IEEE
9065 floating-point, we can only do some of these simplifications.) */
9066 if (operand_equal_p (arg0, arg1, 0))
9067 {
9068 switch (code)
9069 {
9070 case EQ_EXPR:
9071 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9072 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9073 return constant_boolean_node (1, type);
9074 break;
9075
9076 case GE_EXPR:
9077 case LE_EXPR:
9078 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9079 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9080 return constant_boolean_node (1, type);
9081 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9082
9083 case NE_EXPR:
9084 /* For NE, we can only do this simplification if the operands are
9085 integral or we don't honor IEEE floating point NaNs. */
9086 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9087 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9088 break;
9089 /* ... fall through ... */
9090 case GT_EXPR:
9091 case LT_EXPR:
9092 return constant_boolean_node (0, type);
9093 default:
9094 gcc_unreachable ();
9095 }
9096 }
9097
9098 /* If we are comparing an expression that just has comparisons
9099 of two integer values, arithmetic expressions of those comparisons,
9100 and constants, we can simplify it. There are only three cases
9101 to check: the two values can either be equal, the first can be
9102 greater, or the second can be greater. Fold the expression for
9103 those three values. Since each value must be 0 or 1, we have
9104 eight possibilities, each of which corresponds to the constant 0
9105 or 1 or one of the six possible comparisons.
9106
9107 This handles common cases like (a > b) == 0 but also handles
9108 expressions like ((x > y) - (y > x)) > 0, which supposedly
9109 occur in macroized code. */
9110
9111 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9112 {
9113 tree cval1 = 0, cval2 = 0;
9114 int save_p = 0;
9115
9116 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9117 /* Don't handle degenerate cases here; they should already
9118 have been handled anyway. */
9119 && cval1 != 0 && cval2 != 0
9120 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9121 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9122 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9123 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9124 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9125 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9126 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9127 {
9128 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9129 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9130
9131 /* We can't just pass T to eval_subst in case cval1 or cval2
9132 was the same as ARG1. */
9133
9134 tree high_result
9135 = fold_build2_loc (loc, code, type,
9136 eval_subst (loc, arg0, cval1, maxval,
9137 cval2, minval),
9138 arg1);
9139 tree equal_result
9140 = fold_build2_loc (loc, code, type,
9141 eval_subst (loc, arg0, cval1, maxval,
9142 cval2, maxval),
9143 arg1);
9144 tree low_result
9145 = fold_build2_loc (loc, code, type,
9146 eval_subst (loc, arg0, cval1, minval,
9147 cval2, maxval),
9148 arg1);
9149
9150 /* All three of these results should be 0 or 1. Confirm they are.
9151 Then use those values to select the proper code to use. */
9152
9153 if (TREE_CODE (high_result) == INTEGER_CST
9154 && TREE_CODE (equal_result) == INTEGER_CST
9155 && TREE_CODE (low_result) == INTEGER_CST)
9156 {
9157 /* Make a 3-bit mask with the high-order bit being the
9158 value for `>', the next for `=', and the low for `<'. */
9159 switch ((integer_onep (high_result) * 4)
9160 + (integer_onep (equal_result) * 2)
9161 + integer_onep (low_result))
9162 {
9163 case 0:
9164 /* Always false. */
9165 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9166 case 1:
9167 code = LT_EXPR;
9168 break;
9169 case 2:
9170 code = EQ_EXPR;
9171 break;
9172 case 3:
9173 code = LE_EXPR;
9174 break;
9175 case 4:
9176 code = GT_EXPR;
9177 break;
9178 case 5:
9179 code = NE_EXPR;
9180 break;
9181 case 6:
9182 code = GE_EXPR;
9183 break;
9184 case 7:
9185 /* Always true. */
9186 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9187 }
9188
9189 if (save_p)
9190 {
9191 tem = save_expr (build2 (code, type, cval1, cval2));
9192 SET_EXPR_LOCATION (tem, loc);
9193 return tem;
9194 }
9195 return fold_build2_loc (loc, code, type, cval1, cval2);
9196 }
9197 }
9198 }
9199
9200 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9201 into a single range test. */
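/* For example, with truncating signed division "x / 3 == 2" holds
   exactly for 6 <= x && x <= 8, so fold_div_compare can express it
   as a range test on x. */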
9202 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9203 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9204 && TREE_CODE (arg1) == INTEGER_CST
9205 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9206 && !integer_zerop (TREE_OPERAND (arg0, 1))
9207 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9208 && !TREE_OVERFLOW (arg1))
9209 {
9210 tem = fold_div_compare (loc, code, type, arg0, arg1);
9211 if (tem != NULL_TREE)
9212 return tem;
9213 }
9214
9215 /* Fold ~X op ~Y as Y op X. */
9216 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9217 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9218 {
9219 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9220 return fold_build2_loc (loc, code, type,
9221 fold_convert_loc (loc, cmp_type,
9222 TREE_OPERAND (arg1, 0)),
9223 TREE_OPERAND (arg0, 0));
9224 }
9225
9226 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9227 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9228 && TREE_CODE (arg1) == INTEGER_CST)
9229 {
9230 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9231 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9232 TREE_OPERAND (arg0, 0),
9233 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9234 fold_convert_loc (loc, cmp_type, arg1)));
9235 }
9236
9237 return NULL_TREE;
9238 }
9239
9240
9241 /* Subroutine of fold_binary. Optimize complex multiplications of the
9242 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9243 argument EXPR represents the expression "z" of type TYPE. */
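/* The identity used below: for z = a + b*i,
     z * conj(z) = (a + b*i)*(a - b*i) = a*a + b*b + 0*i. */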
9244
9245 static tree
9246 fold_mult_zconjz (location_t loc, tree type, tree expr)
9247 {
9248 tree itype = TREE_TYPE (type);
9249 tree rpart, ipart, tem;
9250
9251 if (TREE_CODE (expr) == COMPLEX_EXPR)
9252 {
9253 rpart = TREE_OPERAND (expr, 0);
9254 ipart = TREE_OPERAND (expr, 1);
9255 }
9256 else if (TREE_CODE (expr) == COMPLEX_CST)
9257 {
9258 rpart = TREE_REALPART (expr);
9259 ipart = TREE_IMAGPART (expr);
9260 }
9261 else
9262 {
9263 expr = save_expr (expr);
9264 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9265 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9266 }
9267
9268 rpart = save_expr (rpart);
9269 ipart = save_expr (ipart);
9270 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9271 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9272 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9273 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9274 fold_convert_loc (loc, itype, integer_zero_node));
9275 }
9276
9277
9278 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9279 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9280 guarantees that P and N have the same least significant log2(M) bits.
9281 N is not otherwise constrained. In particular, N is not normalized to
9282 0 <= N < M as is common. In general, the precise value of P is unknown.
9283 M is chosen as large as possible such that constant N can be determined.
9284
9285 Returns M and sets *RESIDUE to N.
9286
9287 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9288 account. This is not always possible due to PR 35705.
9289 */
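/* For example, if ARR is a 16-byte aligned array and EXPR is
   &ARR[0] + 5, the expected result is M == 16 with *RESIDUE == 5,
   i.e. the low four bits of the pointer value are known to be 5. */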
9290
9291 static unsigned HOST_WIDE_INT
9292 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9293 bool allow_func_align)
9294 {
9295 enum tree_code code;
9296
9297 *residue = 0;
9298
9299 code = TREE_CODE (expr);
9300 if (code == ADDR_EXPR)
9301 {
9302 expr = TREE_OPERAND (expr, 0);
9303 if (handled_component_p (expr))
9304 {
9305 HOST_WIDE_INT bitsize, bitpos;
9306 tree offset;
9307 enum machine_mode mode;
9308 int unsignedp, volatilep;
9309
9310 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9311 &mode, &unsignedp, &volatilep, false);
9312 *residue = bitpos / BITS_PER_UNIT;
9313 if (offset)
9314 {
9315 if (TREE_CODE (offset) == INTEGER_CST)
9316 *residue += TREE_INT_CST_LOW (offset);
9317 else
9318 /* We don't handle more complicated offset expressions. */
9319 return 1;
9320 }
9321 }
9322
9323 if (DECL_P (expr)
9324 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9325 return DECL_ALIGN_UNIT (expr);
9326 }
9327 else if (code == POINTER_PLUS_EXPR)
9328 {
9329 tree op0, op1;
9330 unsigned HOST_WIDE_INT modulus;
9331 enum tree_code inner_code;
9332
9333 op0 = TREE_OPERAND (expr, 0);
9334 STRIP_NOPS (op0);
9335 modulus = get_pointer_modulus_and_residue (op0, residue,
9336 allow_func_align);
9337
9338 op1 = TREE_OPERAND (expr, 1);
9339 STRIP_NOPS (op1);
9340 inner_code = TREE_CODE (op1);
9341 if (inner_code == INTEGER_CST)
9342 {
9343 *residue += TREE_INT_CST_LOW (op1);
9344 return modulus;
9345 }
9346 else if (inner_code == MULT_EXPR)
9347 {
9348 op1 = TREE_OPERAND (op1, 1);
9349 if (TREE_CODE (op1) == INTEGER_CST)
9350 {
9351 unsigned HOST_WIDE_INT align;
9352
9353 /* Compute the greatest power-of-2 divisor of op1. */
9354 align = TREE_INT_CST_LOW (op1);
9355 align &= -align;
9356
9357 /* If align is non-zero and less than modulus, replace
9358 modulus with align. If align is 0, then either op1 is 0
9359 or the greatest power-of-2 divisor of op1 doesn't fit in an
9360 unsigned HOST_WIDE_INT. In either case, no additional
9361 constraint is imposed. */
9362 if (align)
9363 modulus = MIN (modulus, align);
9364
9365 return modulus;
9366 }
9367 }
9368 }
9369
9370 /* If we get here, we were unable to determine anything useful about the
9371 expression. */
9372 return 1;
9373 }
9374
9375
9376 /* Fold a binary expression of code CODE and type TYPE with operands
9377 OP0 and OP1. LOC is the location of the resulting expression.
9378 Return the folded expression if folding is successful. Otherwise,
9379 return NULL_TREE. */
9380
9381 tree
9382 fold_binary_loc (location_t loc,
9383 enum tree_code code, tree type, tree op0, tree op1)
9384 {
9385 enum tree_code_class kind = TREE_CODE_CLASS (code);
9386 tree arg0, arg1, tem;
9387 tree t1 = NULL_TREE;
9388 bool strict_overflow_p;
9389
9390 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9391 && TREE_CODE_LENGTH (code) == 2
9392 && op0 != NULL_TREE
9393 && op1 != NULL_TREE);
9394
9395 arg0 = op0;
9396 arg1 = op1;
9397
9398 /* Strip any conversions that don't change the mode. This is
9399 safe for every expression, except for a comparison expression
9400 because its signedness is derived from its operands. So, in
9401 the latter case, only strip conversions that don't change the
9402 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9403 preserved.
9404
9405 Note that this is done as an internal manipulation within the
9406 constant folder, in order to find the simplest representation
9407 of the arguments so that their form can be studied. In any
9408 case, the appropriate type conversions should be put back in
9409 the tree that will get out of the constant folder. */
9410
9411 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9412 {
9413 STRIP_SIGN_NOPS (arg0);
9414 STRIP_SIGN_NOPS (arg1);
9415 }
9416 else
9417 {
9418 STRIP_NOPS (arg0);
9419 STRIP_NOPS (arg1);
9420 }
9421
9422 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9423 constant but we can't do arithmetic on them. */
9424 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9425 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9426 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9427 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9428 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9429 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9430 {
9431 if (kind == tcc_binary)
9432 {
9433 /* Make sure type and arg0 have the same saturating flag. */
9434 gcc_assert (TYPE_SATURATING (type)
9435 == TYPE_SATURATING (TREE_TYPE (arg0)));
9436 tem = const_binop (code, arg0, arg1);
9437 }
9438 else if (kind == tcc_comparison)
9439 tem = fold_relational_const (code, type, arg0, arg1);
9440 else
9441 tem = NULL_TREE;
9442
9443 if (tem != NULL_TREE)
9444 {
9445 if (TREE_TYPE (tem) != type)
9446 tem = fold_convert_loc (loc, type, tem);
9447 return tem;
9448 }
9449 }
9450
9451 /* If this is a commutative operation, and ARG0 is a constant, move it
9452 to ARG1 to reduce the number of tests below. */
9453 if (commutative_tree_code (code)
9454 && tree_swap_operands_p (arg0, arg1, true))
9455 return fold_build2_loc (loc, code, type, op1, op0);
9456
9457 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9458
9459 First check for cases where an arithmetic operation is applied to a
9460 compound, conditional, or comparison operation. Push the arithmetic
9461 operation inside the compound or conditional to see if any folding
9462 can then be done. Convert comparison to conditional for this purpose.
9463 This also optimizes non-constant cases that used to be done in
9464 expand_expr.
9465
9466 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9467 where one of the operands is a truth value and the other is a truth
9468 value or a BIT_AND_EXPR with the constant 1. In that case, the
9469 code below would make the expression more complex. Change it to a
9470 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9471 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9472
9473 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9474 || code == EQ_EXPR || code == NE_EXPR)
9475 && ((truth_value_p (TREE_CODE (arg0))
9476 && (truth_value_p (TREE_CODE (arg1))
9477 || (TREE_CODE (arg1) == BIT_AND_EXPR
9478 && integer_onep (TREE_OPERAND (arg1, 1)))))
9479 || (truth_value_p (TREE_CODE (arg1))
9480 && (truth_value_p (TREE_CODE (arg0))
9481 || (TREE_CODE (arg0) == BIT_AND_EXPR
9482 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9483 {
9484 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9485 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9486 : TRUTH_XOR_EXPR,
9487 boolean_type_node,
9488 fold_convert_loc (loc, boolean_type_node, arg0),
9489 fold_convert_loc (loc, boolean_type_node, arg1));
9490
9491 if (code == EQ_EXPR)
9492 tem = invert_truthvalue_loc (loc, tem);
9493
9494 return fold_convert_loc (loc, type, tem);
9495 }
9496
9497 if (TREE_CODE_CLASS (code) == tcc_binary
9498 || TREE_CODE_CLASS (code) == tcc_comparison)
9499 {
9500 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9501 {
9502 tem = fold_build2_loc (loc, code, type,
9503 fold_convert_loc (loc, TREE_TYPE (op0),
9504 TREE_OPERAND (arg0, 1)), op1);
9505 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9506 goto fold_binary_exit;
9507 }
9508 if (TREE_CODE (arg1) == COMPOUND_EXPR
9509 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9510 {
9511 tem = fold_build2_loc (loc, code, type, op0,
9512 fold_convert_loc (loc, TREE_TYPE (op1),
9513 TREE_OPERAND (arg1, 1)));
9514 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9515 goto fold_binary_exit;
9516 }
9517
9518 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9519 {
9520 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9521 arg0, arg1,
9522 /*cond_first_p=*/1);
9523 if (tem != NULL_TREE)
9524 return tem;
9525 }
9526
9527 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9528 {
9529 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9530 arg1, arg0,
9531 /*cond_first_p=*/0);
9532 if (tem != NULL_TREE)
9533 return tem;
9534 }
9535 }
9536
9537 switch (code)
9538 {
9539 case POINTER_PLUS_EXPR:
9540 /* 0 +p index -> (type)index */
9541 if (integer_zerop (arg0))
9542 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9543
9544 /* PTR +p 0 -> PTR */
9545 if (integer_zerop (arg1))
9546 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9547
9548 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9549 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9550 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9551 return fold_convert_loc (loc, type,
9552 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9553 fold_convert_loc (loc, sizetype,
9554 arg1),
9555 fold_convert_loc (loc, sizetype,
9556 arg0)));
9557
9558 /* index +p PTR -> PTR +p index */
9559 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9560 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9561 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9562 fold_convert_loc (loc, type, arg1),
9563 fold_convert_loc (loc, sizetype, arg0));
9564
9565 /* (PTR +p B) +p A -> PTR +p (B + A) */
9566 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9567 {
9568 tree inner;
9569 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9570 tree arg00 = TREE_OPERAND (arg0, 0);
9571 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9572 arg01, fold_convert_loc (loc, sizetype, arg1));
9573 return fold_convert_loc (loc, type,
9574 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9575 TREE_TYPE (arg00),
9576 arg00, inner));
9577 }
9578
9579 /* PTR_CST +p CST -> CST1 */
9580 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9581 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9582 fold_convert_loc (loc, type, arg1));
9583
9584 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9585 of the array. The loop optimizer sometimes produces this type of
9586 expression. */
9587 if (TREE_CODE (arg0) == ADDR_EXPR)
9588 {
9589 tem = try_move_mult_to_index (loc, arg0,
9590 fold_convert_loc (loc, sizetype, arg1));
9591 if (tem)
9592 return fold_convert_loc (loc, type, tem);
9593 }
9594
9595 return NULL_TREE;
9596
9597 case PLUS_EXPR:
9598 /* A + (-B) -> A - B */
9599 if (TREE_CODE (arg1) == NEGATE_EXPR)
9600 return fold_build2_loc (loc, MINUS_EXPR, type,
9601 fold_convert_loc (loc, type, arg0),
9602 fold_convert_loc (loc, type,
9603 TREE_OPERAND (arg1, 0)));
9604 /* (-A) + B -> B - A */
9605 if (TREE_CODE (arg0) == NEGATE_EXPR
9606 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9607 return fold_build2_loc (loc, MINUS_EXPR, type,
9608 fold_convert_loc (loc, type, arg1),
9609 fold_convert_loc (loc, type,
9610 TREE_OPERAND (arg0, 0)));
9611
9612 if (INTEGRAL_TYPE_P (type))
9613 {
9614 /* Convert ~A + 1 to -A. */
9615 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9616 && integer_onep (arg1))
9617 return fold_build1_loc (loc, NEGATE_EXPR, type,
9618 fold_convert_loc (loc, type,
9619 TREE_OPERAND (arg0, 0)));
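/* This and the X + ~X case below rely on the two's complement
   identity ~a == -a - 1, hence ~a + 1 == -a and a + ~a == -1. */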
9620
9621 /* ~X + X is -1. */
9622 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9623 && !TYPE_OVERFLOW_TRAPS (type))
9624 {
9625 tree tem = TREE_OPERAND (arg0, 0);
9626
9627 STRIP_NOPS (tem);
9628 if (operand_equal_p (tem, arg1, 0))
9629 {
9630 t1 = build_int_cst_type (type, -1);
9631 return omit_one_operand_loc (loc, type, t1, arg1);
9632 }
9633 }
9634
9635 /* X + ~X is -1. */
9636 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9637 && !TYPE_OVERFLOW_TRAPS (type))
9638 {
9639 tree tem = TREE_OPERAND (arg1, 0);
9640
9641 STRIP_NOPS (tem);
9642 if (operand_equal_p (arg0, tem, 0))
9643 {
9644 t1 = build_int_cst_type (type, -1);
9645 return omit_one_operand_loc (loc, type, t1, arg0);
9646 }
9647 }
9648
9649 /* X + (X / CST) * -CST is X % CST. */
9650 if (TREE_CODE (arg1) == MULT_EXPR
9651 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9652 && operand_equal_p (arg0,
9653 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9654 {
9655 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9656 tree cst1 = TREE_OPERAND (arg1, 1);
9657 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9658 cst1, cst0);
9659 if (sum && integer_zerop (sum))
9660 return fold_convert_loc (loc, type,
9661 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9662 TREE_TYPE (arg0), arg0,
9663 cst0));
9664 }
9665 }
9666
9667 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9668 same or one. Make sure type is not saturating.
9669 fold_plusminus_mult_expr will re-associate. */
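/* For example, "x*8 + x*4" becomes "x*12", and "x*8 + x" becomes
   "x*9" (a lone operand counts as multiplication by one). */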
9670 if ((TREE_CODE (arg0) == MULT_EXPR
9671 || TREE_CODE (arg1) == MULT_EXPR)
9672 && !TYPE_SATURATING (type)
9673 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9674 {
9675 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9676 if (tem)
9677 return tem;
9678 }
9679
9680 if (! FLOAT_TYPE_P (type))
9681 {
9682 if (integer_zerop (arg1))
9683 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9684
9685 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9686 with a constant, and the two constants have no bits in common,
9687 we should treat this as a BIT_IOR_EXPR since this may produce more
9688 simplifications. */
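/* For example, in "(x & 0xF0) + (y & 0x0F)" the masks share no
   bits, so the addition is handled as "(x & 0xF0) | (y & 0x0F)". */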
9689 if (TREE_CODE (arg0) == BIT_AND_EXPR
9690 && TREE_CODE (arg1) == BIT_AND_EXPR
9691 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9692 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9693 && integer_zerop (const_binop (BIT_AND_EXPR,
9694 TREE_OPERAND (arg0, 1),
9695 TREE_OPERAND (arg1, 1))))
9696 {
9697 code = BIT_IOR_EXPR;
9698 goto bit_ior;
9699 }
9700
9701 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9702 (plus (plus (mult) (mult)) (foo)) so that we can
9703 take advantage of the factoring cases below. */
9704 if (((TREE_CODE (arg0) == PLUS_EXPR
9705 || TREE_CODE (arg0) == MINUS_EXPR)
9706 && TREE_CODE (arg1) == MULT_EXPR)
9707 || ((TREE_CODE (arg1) == PLUS_EXPR
9708 || TREE_CODE (arg1) == MINUS_EXPR)
9709 && TREE_CODE (arg0) == MULT_EXPR))
9710 {
9711 tree parg0, parg1, parg, marg;
9712 enum tree_code pcode;
9713
9714 if (TREE_CODE (arg1) == MULT_EXPR)
9715 parg = arg0, marg = arg1;
9716 else
9717 parg = arg1, marg = arg0;
9718 pcode = TREE_CODE (parg);
9719 parg0 = TREE_OPERAND (parg, 0);
9720 parg1 = TREE_OPERAND (parg, 1);
9721 STRIP_NOPS (parg0);
9722 STRIP_NOPS (parg1);
9723
9724 if (TREE_CODE (parg0) == MULT_EXPR
9725 && TREE_CODE (parg1) != MULT_EXPR)
9726 return fold_build2_loc (loc, pcode, type,
9727 fold_build2_loc (loc, PLUS_EXPR, type,
9728 fold_convert_loc (loc, type,
9729 parg0),
9730 fold_convert_loc (loc, type,
9731 marg)),
9732 fold_convert_loc (loc, type, parg1));
9733 if (TREE_CODE (parg0) != MULT_EXPR
9734 && TREE_CODE (parg1) == MULT_EXPR)
9735 return
9736 fold_build2_loc (loc, PLUS_EXPR, type,
9737 fold_convert_loc (loc, type, parg0),
9738 fold_build2_loc (loc, pcode, type,
9739 fold_convert_loc (loc, type, marg),
9740 fold_convert_loc (loc, type,
9741 parg1)));
9742 }
9743 }
9744 else
9745 {
9746 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9747 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9748 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9749
9750 /* Likewise if the operands are reversed. */
9751 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9752 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9753
9754 /* Convert X + -C into X - C. */
9755 if (TREE_CODE (arg1) == REAL_CST
9756 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9757 {
9758 tem = fold_negate_const (arg1, type);
9759 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9760 return fold_build2_loc (loc, MINUS_EXPR, type,
9761 fold_convert_loc (loc, type, arg0),
9762 fold_convert_loc (loc, type, tem));
9763 }
9764
9765 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9766 to __complex__ ( x, y ). This is not the same for SNaNs or
9767 if signed zeros are involved. */
9768 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9769 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9770 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9771 {
9772 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9773 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9774 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9775 bool arg0rz = false, arg0iz = false;
9776 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9777 || (arg0i && (arg0iz = real_zerop (arg0i))))
9778 {
9779 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9780 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9781 if (arg0rz && arg1i && real_zerop (arg1i))
9782 {
9783 tree rp = arg1r ? arg1r
9784 : build1 (REALPART_EXPR, rtype, arg1);
9785 tree ip = arg0i ? arg0i
9786 : build1 (IMAGPART_EXPR, rtype, arg0);
9787 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9788 }
9789 else if (arg0iz && arg1r && real_zerop (arg1r))
9790 {
9791 tree rp = arg0r ? arg0r
9792 : build1 (REALPART_EXPR, rtype, arg0);
9793 tree ip = arg1i ? arg1i
9794 : build1 (IMAGPART_EXPR, rtype, arg1);
9795 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9796 }
9797 }
9798 }
9799
9800 if (flag_unsafe_math_optimizations
9801 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9802 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9803 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9804 return tem;
9805
9806 /* Convert x+x into x*2.0. */
9807 if (operand_equal_p (arg0, arg1, 0)
9808 && SCALAR_FLOAT_TYPE_P (type))
9809 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9810 build_real (type, dconst2));
9811
9812 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9813 We associate floats only if the user has specified
9814 -fassociative-math. */
9815 if (flag_associative_math
9816 && TREE_CODE (arg1) == PLUS_EXPR
9817 && TREE_CODE (arg0) != MULT_EXPR)
9818 {
9819 tree tree10 = TREE_OPERAND (arg1, 0);
9820 tree tree11 = TREE_OPERAND (arg1, 1);
9821 if (TREE_CODE (tree11) == MULT_EXPR
9822 && TREE_CODE (tree10) == MULT_EXPR)
9823 {
9824 tree tree0;
9825 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9826 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9827 }
9828 }
9829 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9830 We associate floats only if the user has specified
9831 -fassociative-math. */
9832 if (flag_associative_math
9833 && TREE_CODE (arg0) == PLUS_EXPR
9834 && TREE_CODE (arg1) != MULT_EXPR)
9835 {
9836 tree tree00 = TREE_OPERAND (arg0, 0);
9837 tree tree01 = TREE_OPERAND (arg0, 1);
9838 if (TREE_CODE (tree01) == MULT_EXPR
9839 && TREE_CODE (tree00) == MULT_EXPR)
9840 {
9841 tree tree0;
9842 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9843 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9844 }
9845 }
9846 }
9847
9848 bit_rotate:
9849 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
9850 unsigned and C1+C2 is the size of A. */
9851 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
9852 unsigned and Z is the size of A. */
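/* For example, for a 32-bit unsigned x, "(x << 3) + (x >> 29)"
   folds to a left rotate of x by 3 bits. */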
9853 {
9854 enum tree_code code0, code1;
9855 tree rtype;
9856 code0 = TREE_CODE (arg0);
9857 code1 = TREE_CODE (arg1);
9858 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9859 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9860 && operand_equal_p (TREE_OPERAND (arg0, 0),
9861 TREE_OPERAND (arg1, 0), 0)
9862 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9863 TYPE_UNSIGNED (rtype))
9864 /* Only create rotates in complete modes. Other cases are not
9865 expanded properly. */
9866 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9867 {
9868 tree tree01, tree11;
9869 enum tree_code code01, code11;
9870
9871 tree01 = TREE_OPERAND (arg0, 1);
9872 tree11 = TREE_OPERAND (arg1, 1);
9873 STRIP_NOPS (tree01);
9874 STRIP_NOPS (tree11);
9875 code01 = TREE_CODE (tree01);
9876 code11 = TREE_CODE (tree11);
9877 if (code01 == INTEGER_CST
9878 && code11 == INTEGER_CST
9879 && TREE_INT_CST_HIGH (tree01) == 0
9880 && TREE_INT_CST_HIGH (tree11) == 0
9881 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9882 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9883 {
9884 tem = build2 (LROTATE_EXPR,
9885 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9886 TREE_OPERAND (arg0, 0),
9887 code0 == LSHIFT_EXPR
9888 ? tree01 : tree11);
9889 SET_EXPR_LOCATION (tem, loc);
9890 return fold_convert_loc (loc, type, tem);
9891 }
9892 else if (code11 == MINUS_EXPR)
9893 {
9894 tree tree110, tree111;
9895 tree110 = TREE_OPERAND (tree11, 0);
9896 tree111 = TREE_OPERAND (tree11, 1);
9897 STRIP_NOPS (tree110);
9898 STRIP_NOPS (tree111);
9899 if (TREE_CODE (tree110) == INTEGER_CST
9900 && 0 == compare_tree_int (tree110,
9901 TYPE_PRECISION
9902 (TREE_TYPE (TREE_OPERAND
9903 (arg0, 0))))
9904 && operand_equal_p (tree01, tree111, 0))
9905 return
9906 fold_convert_loc (loc, type,
9907 build2 ((code0 == LSHIFT_EXPR
9908 ? LROTATE_EXPR
9909 : RROTATE_EXPR),
9910 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9911 TREE_OPERAND (arg0, 0), tree01));
9912 }
9913 else if (code01 == MINUS_EXPR)
9914 {
9915 tree tree010, tree011;
9916 tree010 = TREE_OPERAND (tree01, 0);
9917 tree011 = TREE_OPERAND (tree01, 1);
9918 STRIP_NOPS (tree010);
9919 STRIP_NOPS (tree011);
9920 if (TREE_CODE (tree010) == INTEGER_CST
9921 && 0 == compare_tree_int (tree010,
9922 TYPE_PRECISION
9923 (TREE_TYPE (TREE_OPERAND
9924 (arg0, 0))))
9925 && operand_equal_p (tree11, tree011, 0))
9926 return fold_convert_loc
9927 (loc, type,
9928 build2 ((code0 != LSHIFT_EXPR
9929 ? LROTATE_EXPR
9930 : RROTATE_EXPR),
9931 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9932 TREE_OPERAND (arg0, 0), tree11));
9933 }
9934 }
9935 }
9936
9937 associate:
9938 /* In most languages, we can't associate operations on floats through
9939 parentheses. Rather than remember where the parentheses were, we
9940 don't associate floats at all, unless the user has specified
9941 -fassociative-math.
9942 And, we need to make sure type is not saturating. */
9943
9944 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9945 && !TYPE_SATURATING (type))
9946 {
9947 tree var0, con0, lit0, minus_lit0;
9948 tree var1, con1, lit1, minus_lit1;
9949 bool ok = true;
9950
9951 /* Split both trees into variables, constants, and literals. Then
9952 associate each group together, the constants with literals,
9953 then the result with variables. This increases the chances of
9954 literals being recombined later and of generating relocatable
9955 expressions for the sum of a constant and literal. */
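/* For example, "(x + 3) + (y + 5)" splits into variables x, y and
   literals 3, 5, and is rebuilt below as "(x + y) + 8". */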
9956 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9957 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9958 code == MINUS_EXPR);
9959
9960 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9961 if (code == MINUS_EXPR)
9962 code = PLUS_EXPR;
9963
9964 /* With undefined overflow we can only associate constants with one
9965 variable, and constants whose association doesn't overflow. */
9966 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9967 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9968 {
9969 if (var0 && var1)
9970 {
9971 tree tmp0 = var0;
9972 tree tmp1 = var1;
9973
9974 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9975 tmp0 = TREE_OPERAND (tmp0, 0);
9976 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9977 tmp1 = TREE_OPERAND (tmp1, 0);
9978 /* The only case we can still associate with two variables
9979 is if they are the same, modulo negation. */
9980 if (!operand_equal_p (tmp0, tmp1, 0))
9981 ok = false;
9982 }
9983
9984 if (ok && lit0 && lit1)
9985 {
9986 tree tmp0 = fold_convert (type, lit0);
9987 tree tmp1 = fold_convert (type, lit1);
9988
9989 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9990 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9991 ok = false;
9992 }
9993 }
9994
9995 /* Only do something if we found more than two objects. Otherwise,
9996 nothing has changed and we risk infinite recursion. */
9997 if (ok
9998 && (2 < ((var0 != 0) + (var1 != 0)
9999 + (con0 != 0) + (con1 != 0)
10000 + (lit0 != 0) + (lit1 != 0)
10001 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10002 {
10003 var0 = associate_trees (loc, var0, var1, code, type);
10004 con0 = associate_trees (loc, con0, con1, code, type);
10005 lit0 = associate_trees (loc, lit0, lit1, code, type);
10006 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10007
10008 /* Preserve the MINUS_EXPR if the negative part of the literal is
10009 greater than the positive part. Otherwise, the multiplicative
10010 folding code (i.e. extract_muldiv) may be fooled in case
10011 unsigned constants are subtracted, like in the following
10012 example: ((X*2 + 4) - 8U)/2. */
10013 if (minus_lit0 && lit0)
10014 {
10015 if (TREE_CODE (lit0) == INTEGER_CST
10016 && TREE_CODE (minus_lit0) == INTEGER_CST
10017 && tree_int_cst_lt (lit0, minus_lit0))
10018 {
10019 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10020 MINUS_EXPR, type);
10021 lit0 = 0;
10022 }
10023 else
10024 {
10025 lit0 = associate_trees (loc, lit0, minus_lit0,
10026 MINUS_EXPR, type);
10027 minus_lit0 = 0;
10028 }
10029 }
10030 if (minus_lit0)
10031 {
10032 if (con0 == 0)
10033 return
10034 fold_convert_loc (loc, type,
10035 associate_trees (loc, var0, minus_lit0,
10036 MINUS_EXPR, type));
10037 else
10038 {
10039 con0 = associate_trees (loc, con0, minus_lit0,
10040 MINUS_EXPR, type);
10041 return
10042 fold_convert_loc (loc, type,
10043 associate_trees (loc, var0, con0,
10044 PLUS_EXPR, type));
10045 }
10046 }
10047
10048 con0 = associate_trees (loc, con0, lit0, code, type);
10049 return
10050 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10051 code, type));
10052 }
10053 }
10054
10055 return NULL_TREE;
10056
10057 case MINUS_EXPR:
10058 /* Pointer simplifications for subtraction, simple reassociations. */
10059 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10060 {
10061 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10062 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10063 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10064 {
10065 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10066 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10067 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10068 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10069 return fold_build2_loc (loc, PLUS_EXPR, type,
10070 fold_build2_loc (loc, MINUS_EXPR, type,
10071 arg00, arg10),
10072 fold_build2_loc (loc, MINUS_EXPR, type,
10073 arg01, arg11));
10074 }
10075 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10076 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10077 {
10078 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10079 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10080 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10081 fold_convert_loc (loc, type, arg1));
10082 if (tmp)
10083 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10084 }
10085 }
10086 /* A - (-B) -> A + B */
10087 if (TREE_CODE (arg1) == NEGATE_EXPR)
10088 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10089 fold_convert_loc (loc, type,
10090 TREE_OPERAND (arg1, 0)));
10091 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10092 if (TREE_CODE (arg0) == NEGATE_EXPR
10093 && (FLOAT_TYPE_P (type)
10094 || INTEGRAL_TYPE_P (type))
10095 && negate_expr_p (arg1)
10096 && reorder_operands_p (arg0, arg1))
10097 return fold_build2_loc (loc, MINUS_EXPR, type,
10098 fold_convert_loc (loc, type,
10099 negate_expr (arg1)),
10100 fold_convert_loc (loc, type,
10101 TREE_OPERAND (arg0, 0)));
10102 /* Convert -A - 1 to ~A. */
10103 if (INTEGRAL_TYPE_P (type)
10104 && TREE_CODE (arg0) == NEGATE_EXPR
10105 && integer_onep (arg1)
10106 && !TYPE_OVERFLOW_TRAPS (type))
10107 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10108 fold_convert_loc (loc, type,
10109 TREE_OPERAND (arg0, 0)));
10110
10111 /* Convert -1 - A to ~A. */
10112 if (INTEGRAL_TYPE_P (type)
10113 && integer_all_onesp (arg0))
10114 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10115
10117 /* X - (X / CST) * CST is X % CST. */
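/* For example, for integral x, "x - (x / 8) * 8" folds to "x % 8". */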
10118 if (INTEGRAL_TYPE_P (type)
10119 && TREE_CODE (arg1) == MULT_EXPR
10120 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10121 && operand_equal_p (arg0,
10122 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10123 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10124 TREE_OPERAND (arg1, 1), 0))
10125 return
10126 fold_convert_loc (loc, type,
10127 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10128 arg0, TREE_OPERAND (arg1, 1)));
10129
10130 if (! FLOAT_TYPE_P (type))
10131 {
10132 if (integer_zerop (arg0))
10133 return negate_expr (fold_convert_loc (loc, type, arg1));
10134 if (integer_zerop (arg1))
10135 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10136
10137 /* Fold A - (A & B) into ~B & A. */
10138 if (!TREE_SIDE_EFFECTS (arg0)
10139 && TREE_CODE (arg1) == BIT_AND_EXPR)
10140 {
10141 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10142 {
10143 tree arg10 = fold_convert_loc (loc, type,
10144 TREE_OPERAND (arg1, 0));
10145 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10146 fold_build1_loc (loc, BIT_NOT_EXPR,
10147 type, arg10),
10148 fold_convert_loc (loc, type, arg0));
10149 }
10150 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10151 {
10152 tree arg11 = fold_convert_loc (loc,
10153 type, TREE_OPERAND (arg1, 1));
10154 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10155 fold_build1_loc (loc, BIT_NOT_EXPR,
10156 type, arg11),
10157 fold_convert_loc (loc, type, arg0));
10158 }
10159 }
10160
10161 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10162 any power of 2 minus 1. */
10163 if (TREE_CODE (arg0) == BIT_AND_EXPR
10164 && TREE_CODE (arg1) == BIT_AND_EXPR
10165 && operand_equal_p (TREE_OPERAND (arg0, 0),
10166 TREE_OPERAND (arg1, 0), 0))
10167 {
10168 tree mask0 = TREE_OPERAND (arg0, 1);
10169 tree mask1 = TREE_OPERAND (arg1, 1);
10170 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10171
10172 if (operand_equal_p (tem, mask1, 0))
10173 {
10174 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10175 TREE_OPERAND (arg0, 0), mask1);
10176 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10177 }
10178 }
10179 }
10180
10181 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10182 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10183 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10184
10185 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10186 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10187 (-ARG1 + ARG0) reduces to -ARG1. */
10188 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10189 return negate_expr (fold_convert_loc (loc, type, arg1));
10190
10191 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10192 __complex__ ( x, -y ). This is not the same for SNaNs or if
10193 signed zeros are involved. */
10194 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10195 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10196 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10197 {
10198 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10199 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10200 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10201 bool arg0rz = false, arg0iz = false;
10202 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10203 || (arg0i && (arg0iz = real_zerop (arg0i))))
10204 {
10205 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10206 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10207 if (arg0rz && arg1i && real_zerop (arg1i))
10208 {
10209 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10210 arg1r ? arg1r
10211 : build1 (REALPART_EXPR, rtype, arg1));
10212 tree ip = arg0i ? arg0i
10213 : build1 (IMAGPART_EXPR, rtype, arg0);
10214 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10215 }
10216 else if (arg0iz && arg1r && real_zerop (arg1r))
10217 {
10218 tree rp = arg0r ? arg0r
10219 : build1 (REALPART_EXPR, rtype, arg0);
10220 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10221 arg1i ? arg1i
10222 : build1 (IMAGPART_EXPR, rtype, arg1));
10223 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10224 }
10225 }
10226 }
10227
10228 /* Fold &x - &x. This can happen from &x.foo - &x.
10229 This is unsafe for certain floats even in non-IEEE formats.
10230 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10231 Also note that operand_equal_p is always false if an operand
10232 is volatile. */
10233
10234 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10235 && operand_equal_p (arg0, arg1, 0))
10236 return fold_convert_loc (loc, type, integer_zero_node);
10237
10238 /* A - B -> A + (-B) if B is easily negatable. */
10239 if (negate_expr_p (arg1)
10240 && ((FLOAT_TYPE_P (type)
10241 /* Avoid this transformation if B is a positive REAL_CST. */
10242 && (TREE_CODE (arg1) != REAL_CST
10243 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10244 || INTEGRAL_TYPE_P (type)))
10245 return fold_build2_loc (loc, PLUS_EXPR, type,
10246 fold_convert_loc (loc, type, arg0),
10247 fold_convert_loc (loc, type,
10248 negate_expr (arg1)));
10249
10250 /* Try folding difference of addresses. */
10251 {
10252 HOST_WIDE_INT diff;
10253
10254 if ((TREE_CODE (arg0) == ADDR_EXPR
10255 || TREE_CODE (arg1) == ADDR_EXPR)
10256 && ptr_difference_const (arg0, arg1, &diff))
10257 return build_int_cst_type (type, diff);
10258 }
10259
10260 /* Fold &a[i] - &a[j] to (i-j) scaled by the element size. */
10261 if (TREE_CODE (arg0) == ADDR_EXPR
10262 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10263 && TREE_CODE (arg1) == ADDR_EXPR
10264 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10265 {
10266 tree aref0 = TREE_OPERAND (arg0, 0);
10267 tree aref1 = TREE_OPERAND (arg1, 0);
10268 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10269 TREE_OPERAND (aref1, 0), 0))
10270 {
10271 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10272 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10273 tree esz = array_ref_element_size (aref0);
10274 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10275 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10276 fold_convert_loc (loc, type, esz));
10277
10278 }
10279 }
10280
10281 if (FLOAT_TYPE_P (type)
10282 && flag_unsafe_math_optimizations
10283 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10284 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10285 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10286 return tem;
10287
10288 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10289 same or one. Make sure type is not saturating.
10290 fold_plusminus_mult_expr will re-associate. */
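/* As an illustrative example of the pattern: (A * 4) - (A * 2) can
become A * 2, and (A * C) - (B * C) can become (A - B) * C; both
share either the A or the C factor. */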
10291 if ((TREE_CODE (arg0) == MULT_EXPR
10292 || TREE_CODE (arg1) == MULT_EXPR)
10293 && !TYPE_SATURATING (type)
10294 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10295 {
10296 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10297 if (tem)
10298 return tem;
10299 }
10300
10301 goto associate;
10302
10303 case MULT_EXPR:
10304 /* (-A) * (-B) -> A * B */
10305 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10306 return fold_build2_loc (loc, MULT_EXPR, type,
10307 fold_convert_loc (loc, type,
10308 TREE_OPERAND (arg0, 0)),
10309 fold_convert_loc (loc, type,
10310 negate_expr (arg1)));
10311 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10312 return fold_build2_loc (loc, MULT_EXPR, type,
10313 fold_convert_loc (loc, type,
10314 negate_expr (arg0)),
10315 fold_convert_loc (loc, type,
10316 TREE_OPERAND (arg1, 0)));
10317
10318 if (! FLOAT_TYPE_P (type))
10319 {
10320 if (integer_zerop (arg1))
10321 return omit_one_operand_loc (loc, type, arg1, arg0);
10322 if (integer_onep (arg1))
10323 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10324 /* Transform x * -1 into -x. Make sure to do the negation
10325 on the original operand with conversions not stripped
10326 because we can only strip non-sign-changing conversions. */
10327 if (integer_all_onesp (arg1))
10328 return fold_convert_loc (loc, type, negate_expr (op0));
10329 /* Transform x * -C into -x * C if x is easily negatable. */
10330 if (TREE_CODE (arg1) == INTEGER_CST
10331 && tree_int_cst_sgn (arg1) == -1
10332 && negate_expr_p (arg0)
10333 && (tem = negate_expr (arg1)) != arg1
10334 && !TREE_OVERFLOW (tem))
10335 return fold_build2_loc (loc, MULT_EXPR, type,
10336 fold_convert_loc (loc, type,
10337 negate_expr (arg0)),
10338 tem);
10339
10340 /* (a * (1 << b)) is (a << b) */
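/* E.g. a * (1 << 3) is a * 8, which is exactly a << 3; the shift
count from the LSHIFT_EXPR is reused directly. */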
10341 if (TREE_CODE (arg1) == LSHIFT_EXPR
10342 && integer_onep (TREE_OPERAND (arg1, 0)))
10343 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10344 TREE_OPERAND (arg1, 1));
10345 if (TREE_CODE (arg0) == LSHIFT_EXPR
10346 && integer_onep (TREE_OPERAND (arg0, 0)))
10347 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10348 TREE_OPERAND (arg0, 1));
10349
10350 /* (A + A) * C -> A * 2 * C */
10351 if (TREE_CODE (arg0) == PLUS_EXPR
10352 && TREE_CODE (arg1) == INTEGER_CST
10353 && operand_equal_p (TREE_OPERAND (arg0, 0),
10354 TREE_OPERAND (arg0, 1), 0))
10355 return fold_build2_loc (loc, MULT_EXPR, type,
10356 omit_one_operand_loc (loc, type,
10357 TREE_OPERAND (arg0, 0),
10358 TREE_OPERAND (arg0, 1)),
10359 fold_build2_loc (loc, MULT_EXPR, type,
10360 build_int_cst (type, 2), arg1));
10361
10362 strict_overflow_p = false;
10363 if (TREE_CODE (arg1) == INTEGER_CST
10364 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10365 &strict_overflow_p)))
10366 {
10367 if (strict_overflow_p)
10368 fold_overflow_warning (("assuming signed overflow does not "
10369 "occur when simplifying "
10370 "multiplication"),
10371 WARN_STRICT_OVERFLOW_MISC);
10372 return fold_convert_loc (loc, type, tem);
10373 }
10374
10375 /* Optimize z * conj(z) for integer complex numbers. */
10376 if (TREE_CODE (arg0) == CONJ_EXPR
10377 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10378 return fold_mult_zconjz (loc, type, arg1);
10379 if (TREE_CODE (arg1) == CONJ_EXPR
10380 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10381 return fold_mult_zconjz (loc, type, arg0);
10382 }
10383 else
10384 {
10385 /* Maybe fold x * 0 to 0. The expressions aren't the same
10386 when x is NaN, since x * 0 is also NaN. Nor are they the
10387 same in modes with signed zeros, since multiplying a
10388 negative value by 0 gives -0, not +0. */
10389 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10390 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10391 && real_zerop (arg1))
10392 return omit_one_operand_loc (loc, type, arg1, arg0);
10393 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10394 Likewise for complex arithmetic with signed zeros. */
10395 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10396 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10397 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10398 && real_onep (arg1))
10399 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10400
10401 /* Transform x * -1.0 into -x. */
10402 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10403 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10404 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10405 && real_minus_onep (arg1))
10406 return fold_convert_loc (loc, type, negate_expr (arg0));
10407
10408 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10409 the result for floating point types due to rounding, so it is applied
10410 only if -fassociative-math is specified. */
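/* E.g. (6.0/X) * 0.5 can become 3.0/X; the two constants are
combined at compile time and only the division by X remains. */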
10411 if (flag_associative_math
10412 && TREE_CODE (arg0) == RDIV_EXPR
10413 && TREE_CODE (arg1) == REAL_CST
10414 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10415 {
10416 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10417 arg1);
10418 if (tem)
10419 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10420 TREE_OPERAND (arg0, 1));
10421 }
10422
10423 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10424 if (operand_equal_p (arg0, arg1, 0))
10425 {
10426 tree tem = fold_strip_sign_ops (arg0);
10427 if (tem != NULL_TREE)
10428 {
10429 tem = fold_convert_loc (loc, type, tem);
10430 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10431 }
10432 }
10433
10434 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10435 This is not the same for NaNs or if signed zeros are
10436 involved. */
10437 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10438 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10439 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10440 && TREE_CODE (arg1) == COMPLEX_CST
10441 && real_zerop (TREE_REALPART (arg1)))
10442 {
10443 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10444 if (real_onep (TREE_IMAGPART (arg1)))
10445 return
10446 fold_build2_loc (loc, COMPLEX_EXPR, type,
10447 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10448 rtype, arg0)),
10449 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10450 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10451 return
10452 fold_build2_loc (loc, COMPLEX_EXPR, type,
10453 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10454 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10455 rtype, arg0)));
10456 }
10457
10458 /* Optimize z * conj(z) for floating point complex numbers.
10459 Guarded by flag_unsafe_math_optimizations as non-finite
10460 imaginary components don't produce scalar results. */
10461 if (flag_unsafe_math_optimizations
10462 && TREE_CODE (arg0) == CONJ_EXPR
10463 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10464 return fold_mult_zconjz (loc, type, arg1);
10465 if (flag_unsafe_math_optimizations
10466 && TREE_CODE (arg1) == CONJ_EXPR
10467 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10468 return fold_mult_zconjz (loc, type, arg0);
10469
10470 if (flag_unsafe_math_optimizations)
10471 {
10472 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10473 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10474
10475 /* Optimizations of root(...)*root(...). */
10476 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10477 {
10478 tree rootfn, arg;
10479 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10480 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10481
10482 /* Optimize sqrt(x)*sqrt(x) as x. */
10483 if (BUILTIN_SQRT_P (fcode0)
10484 && operand_equal_p (arg00, arg10, 0)
10485 && ! HONOR_SNANS (TYPE_MODE (type)))
10486 return arg00;
10487
10488 /* Optimize root(x)*root(y) as root(x*y). */
10489 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10490 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10491 return build_call_expr_loc (loc, rootfn, 1, arg);
10492 }
10493
10494 /* Optimize expN(x)*expN(y) as expN(x+y). */
10495 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10496 {
10497 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10498 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10499 CALL_EXPR_ARG (arg0, 0),
10500 CALL_EXPR_ARG (arg1, 0));
10501 return build_call_expr_loc (loc, expfn, 1, arg);
10502 }
10503
10504 /* Optimizations of pow(...)*pow(...). */
10505 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10506 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10507 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10508 {
10509 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10510 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10511 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10512 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10513
10514 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10515 if (operand_equal_p (arg01, arg11, 0))
10516 {
10517 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10518 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10519 arg00, arg10);
10520 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10521 }
10522
10523 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10524 if (operand_equal_p (arg00, arg10, 0))
10525 {
10526 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10527 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10528 arg01, arg11);
10529 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10530 }
10531 }
10532
10533 /* Optimize tan(x)*cos(x) as sin(x). */
10534 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10535 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10536 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10537 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10538 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10539 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10540 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10541 CALL_EXPR_ARG (arg1, 0), 0))
10542 {
10543 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10544
10545 if (sinfn != NULL_TREE)
10546 return build_call_expr_loc (loc, sinfn, 1,
10547 CALL_EXPR_ARG (arg0, 0));
10548 }
10549
10550 /* Optimize x*pow(x,c) as pow(x,c+1). */
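/* E.g. x * pow (x, 2.0) becomes pow (x, 3.0). */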
10551 if (fcode1 == BUILT_IN_POW
10552 || fcode1 == BUILT_IN_POWF
10553 || fcode1 == BUILT_IN_POWL)
10554 {
10555 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10556 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10557 if (TREE_CODE (arg11) == REAL_CST
10558 && !TREE_OVERFLOW (arg11)
10559 && operand_equal_p (arg0, arg10, 0))
10560 {
10561 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10562 REAL_VALUE_TYPE c;
10563 tree arg;
10564
10565 c = TREE_REAL_CST (arg11);
10566 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10567 arg = build_real (type, c);
10568 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10569 }
10570 }
10571
10572 /* Optimize pow(x,c)*x as pow(x,c+1). */
10573 if (fcode0 == BUILT_IN_POW
10574 || fcode0 == BUILT_IN_POWF
10575 || fcode0 == BUILT_IN_POWL)
10576 {
10577 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10578 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10579 if (TREE_CODE (arg01) == REAL_CST
10580 && !TREE_OVERFLOW (arg01)
10581 && operand_equal_p (arg1, arg00, 0))
10582 {
10583 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10584 REAL_VALUE_TYPE c;
10585 tree arg;
10586
10587 c = TREE_REAL_CST (arg01);
10588 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10589 arg = build_real (type, c);
10590 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10591 }
10592 }
10593
10594 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10595 if (optimize_function_for_speed_p (cfun)
10596 && operand_equal_p (arg0, arg1, 0))
10597 {
10598 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10599
10600 if (powfn)
10601 {
10602 tree arg = build_real (type, dconst2);
10603 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10604 }
10605 }
10606 }
10607 }
10608 goto associate;
10609
10610 case BIT_IOR_EXPR:
10611 bit_ior:
10612 if (integer_all_onesp (arg1))
10613 return omit_one_operand_loc (loc, type, arg1, arg0);
10614 if (integer_zerop (arg1))
10615 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10616 if (operand_equal_p (arg0, arg1, 0))
10617 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10618
10619 /* ~X | X is -1. */
10620 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10622 {
10623 t1 = fold_convert_loc (loc, type, integer_zero_node);
10624 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10625 return omit_one_operand_loc (loc, type, t1, arg1);
10626 }
10627
10628 /* X | ~X is -1. */
10629 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10630 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10631 {
10632 t1 = fold_convert_loc (loc, type, integer_zero_node);
10633 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10634 return omit_one_operand_loc (loc, type, t1, arg0);
10635 }
10636
10637 /* Canonicalize (X & C1) | C2. */
10638 if (TREE_CODE (arg0) == BIT_AND_EXPR
10639 && TREE_CODE (arg1) == INTEGER_CST
10640 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10641 {
10642 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10643 int width = TYPE_PRECISION (type), w;
10644 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10645 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10646 hi2 = TREE_INT_CST_HIGH (arg1);
10647 lo2 = TREE_INT_CST_LOW (arg1);
10648
10649 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
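/* E.g. (X & 0x0f) | 0xff is 0xff regardless of X (X is still
evaluated for side effects), because every bit C1 can contribute
is already set in C2. */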
10650 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10651 return omit_one_operand_loc (loc, type, arg1,
10652 TREE_OPERAND (arg0, 0));
10653
10654 if (width > HOST_BITS_PER_WIDE_INT)
10655 {
10656 mhi = (unsigned HOST_WIDE_INT) -1
10657 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10658 mlo = -1;
10659 }
10660 else
10661 {
10662 mhi = 0;
10663 mlo = (unsigned HOST_WIDE_INT) -1
10664 >> (HOST_BITS_PER_WIDE_INT - width);
10665 }
10666
10667 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
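/* E.g. for an 8-bit type, (X & 0xf0) | 0x0f becomes X | 0x0f,
since 0xf0 | 0x0f covers all eight bits. */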
10668 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10669 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10670 TREE_OPERAND (arg0, 0), arg1);
10671
10672 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10673 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10674 mode which allows further optimizations. */
10675 hi1 &= mhi;
10676 lo1 &= mlo;
10677 hi2 &= mhi;
10678 lo2 &= mlo;
10679 hi3 = hi1 & ~hi2;
10680 lo3 = lo1 & ~lo2;
10681 for (w = BITS_PER_UNIT;
10682 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10683 w <<= 1)
10684 {
10685 unsigned HOST_WIDE_INT mask
10686 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10687 if (((lo1 | lo2) & mask) == mask
10688 && (lo1 & ~mask) == 0 && hi1 == 0)
10689 {
10690 hi3 = 0;
10691 lo3 = mask;
10692 break;
10693 }
10694 }
10695 if (hi3 != hi1 || lo3 != lo1)
10696 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10697 fold_build2_loc (loc, BIT_AND_EXPR, type,
10698 TREE_OPERAND (arg0, 0),
10699 build_int_cst_wide (type,
10700 lo3, hi3)),
10701 arg1);
10702 }
10703
10704 /* (X & Y) | Y is (X, Y). */
10705 if (TREE_CODE (arg0) == BIT_AND_EXPR
10706 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10707 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10708 /* (X & Y) | X is (Y, X). */
10709 if (TREE_CODE (arg0) == BIT_AND_EXPR
10710 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10711 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10712 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10713 /* X | (X & Y) is (Y, X). */
10714 if (TREE_CODE (arg1) == BIT_AND_EXPR
10715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10716 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10717 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10718 /* X | (Y & X) is (Y, X). */
10719 if (TREE_CODE (arg1) == BIT_AND_EXPR
10720 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10721 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10722 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10723
10724 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10725 if (t1 != NULL_TREE)
10726 return t1;
10727
10728 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10729
10730 This results in more efficient code for machines without a NAND
10731 instruction. Combine will canonicalize to the first form
10732 which will allow use of NAND instructions provided by the
10733 backend if they exist. */
10734 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10735 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10736 {
10737 return
10738 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10739 build2 (BIT_AND_EXPR, type,
10740 fold_convert_loc (loc, type,
10741 TREE_OPERAND (arg0, 0)),
10742 fold_convert_loc (loc, type,
10743 TREE_OPERAND (arg1, 0))));
10744 }
10745
10746 /* See if this can be simplified into a rotate first. If that
10747 is unsuccessful continue in the association code. */
10748 goto bit_rotate;
10749
10750 case BIT_XOR_EXPR:
10751 if (integer_zerop (arg1))
10752 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10753 if (integer_all_onesp (arg1))
10754 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10755 if (operand_equal_p (arg0, arg1, 0))
10756 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10757
10758 /* ~X ^ X is -1. */
10759 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10760 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10761 {
10762 t1 = fold_convert_loc (loc, type, integer_zero_node);
10763 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10764 return omit_one_operand_loc (loc, type, t1, arg1);
10765 }
10766
10767 /* X ^ ~X is -1. */
10768 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10769 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10770 {
10771 t1 = fold_convert_loc (loc, type, integer_zero_node);
10772 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10773 return omit_one_operand_loc (loc, type, t1, arg0);
10774 }
10775
10776 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10777 with a constant, and the two constants have no bits in common,
10778 we should treat this as a BIT_IOR_EXPR since this may produce more
10779 simplifications. */
10780 if (TREE_CODE (arg0) == BIT_AND_EXPR
10781 && TREE_CODE (arg1) == BIT_AND_EXPR
10782 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10783 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10784 && integer_zerop (const_binop (BIT_AND_EXPR,
10785 TREE_OPERAND (arg0, 1),
10786 TREE_OPERAND (arg1, 1))))
10787 {
10788 code = BIT_IOR_EXPR;
10789 goto bit_ior;
10790 }
10791
10792 /* (X | Y) ^ X -> Y & ~X. */
10793 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10794 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10795 {
10796 tree t2 = TREE_OPERAND (arg0, 1);
10797 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10798 arg1);
10799 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10800 fold_convert_loc (loc, type, t2),
10801 fold_convert_loc (loc, type, t1));
10802 return t1;
10803 }
10804
10805 /* (Y | X) ^ X -> Y & ~X. */
10806 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10807 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10808 {
10809 tree t2 = TREE_OPERAND (arg0, 0);
10810 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10811 arg1);
10812 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10813 fold_convert_loc (loc, type, t2),
10814 fold_convert_loc (loc, type, t1));
10815 return t1;
10816 }
10817
10818 /* X ^ (X | Y) -> Y & ~X. */
10819 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10820 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10821 {
10822 tree t2 = TREE_OPERAND (arg1, 1);
10823 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10824 arg0);
10825 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10826 fold_convert_loc (loc, type, t2),
10827 fold_convert_loc (loc, type, t1));
10828 return t1;
10829 }
10830
10831 /* X ^ (Y | X) -> Y & ~X. */
10832 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10833 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10834 {
10835 tree t2 = TREE_OPERAND (arg1, 0);
10836 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10837 arg0);
10838 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10839 fold_convert_loc (loc, type, t2),
10840 fold_convert_loc (loc, type, t1));
10841 return t1;
10842 }
10843
10844 /* Convert ~X ^ ~Y to X ^ Y. */
10845 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10846 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10847 return fold_build2_loc (loc, code, type,
10848 fold_convert_loc (loc, type,
10849 TREE_OPERAND (arg0, 0)),
10850 fold_convert_loc (loc, type,
10851 TREE_OPERAND (arg1, 0)));
10852
10853 /* Convert ~X ^ C to X ^ ~C. */
10854 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10855 && TREE_CODE (arg1) == INTEGER_CST)
10856 return fold_build2_loc (loc, code, type,
10857 fold_convert_loc (loc, type,
10858 TREE_OPERAND (arg0, 0)),
10859 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10860
10861 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10862 if (TREE_CODE (arg0) == BIT_AND_EXPR
10863 && integer_onep (TREE_OPERAND (arg0, 1))
10864 && integer_onep (arg1))
10865 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10866 build_int_cst (TREE_TYPE (arg0), 0));
10867
10868 /* Fold (X & Y) ^ Y as ~X & Y. */
10869 if (TREE_CODE (arg0) == BIT_AND_EXPR
10870 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10871 {
10872 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10873 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10874 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10875 fold_convert_loc (loc, type, arg1));
10876 }
10877 /* Fold (X & Y) ^ X as ~Y & X. */
10878 if (TREE_CODE (arg0) == BIT_AND_EXPR
10879 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10880 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10881 {
10882 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10883 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10884 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10885 fold_convert_loc (loc, type, arg1));
10886 }
10887 /* Fold X ^ (X & Y) as X & ~Y. */
10888 if (TREE_CODE (arg1) == BIT_AND_EXPR
10889 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10890 {
10891 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10892 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10893 fold_convert_loc (loc, type, arg0),
10894 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10895 }
10896 /* Fold X ^ (Y & X) as ~Y & X. */
10897 if (TREE_CODE (arg1) == BIT_AND_EXPR
10898 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10899 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10900 {
10901 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10902 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10903 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10904 fold_convert_loc (loc, type, arg0));
10905 }
10906
10907 /* See if this can be simplified into a rotate first. If that
10908 is unsuccessful continue in the association code. */
10909 goto bit_rotate;
10910
10911 case BIT_AND_EXPR:
10912 if (integer_all_onesp (arg1))
10913 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10914 if (integer_zerop (arg1))
10915 return omit_one_operand_loc (loc, type, arg1, arg0);
10916 if (operand_equal_p (arg0, arg1, 0))
10917 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10918
10919 /* ~X & X is always zero. */
10920 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10921 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10922 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10923
10924 /* X & ~X is always zero. */
10925 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10926 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10927 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10928
10929 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10930 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10931 && TREE_CODE (arg1) == INTEGER_CST
10932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10933 {
10934 tree tmp1 = fold_convert_loc (loc, type, arg1);
10935 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10936 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10937 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10938 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10939 return
10940 fold_convert_loc (loc, type,
10941 fold_build2_loc (loc, BIT_IOR_EXPR,
10942 type, tmp2, tmp3));
10943 }
10944
10945 /* (X | Y) & Y is (X, Y). */
10946 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10947 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10948 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10949 /* (X | Y) & X is (Y, X). */
10950 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10951 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10952 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10953 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10954 /* X & (X | Y) is (Y, X). */
10955 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10956 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10957 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10958 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10959 /* X & (Y | X) is (Y, X). */
10960 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10961 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10962 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10963 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10964
10965 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10966 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10967 && integer_onep (TREE_OPERAND (arg0, 1))
10968 && integer_onep (arg1))
10969 {
10970 tem = TREE_OPERAND (arg0, 0);
10971 return fold_build2_loc (loc, EQ_EXPR, type,
10972 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10973 build_int_cst (TREE_TYPE (tem), 1)),
10974 build_int_cst (TREE_TYPE (tem), 0));
10975 }
10976 /* Fold ~X & 1 as (X & 1) == 0. */
10977 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10978 && integer_onep (arg1))
10979 {
10980 tem = TREE_OPERAND (arg0, 0);
10981 return fold_build2_loc (loc, EQ_EXPR, type,
10982 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10983 build_int_cst (TREE_TYPE (tem), 1)),
10984 build_int_cst (TREE_TYPE (tem), 0));
10985 }
10986
10987 /* Fold (X ^ Y) & Y as ~X & Y. */
10988 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10989 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10990 {
10991 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10992 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10993 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10994 fold_convert_loc (loc, type, arg1));
10995 }
10996 /* Fold (X ^ Y) & X as ~Y & X. */
10997 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10998 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10999 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11000 {
11001 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11002 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11003 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11004 fold_convert_loc (loc, type, arg1));
11005 }
11006 /* Fold X & (X ^ Y) as X & ~Y. */
11007 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11008 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11009 {
11010 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11011 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11012 fold_convert_loc (loc, type, arg0),
11013 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11014 }
11015 /* Fold X & (Y ^ X) as ~Y & X. */
11016 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11018 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11019 {
11020 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11021 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11022 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11023 fold_convert_loc (loc, type, arg0));
11024 }
11025
11026 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11027 if (t1 != NULL_TREE)
11028 return t1;
11029 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11030 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11031 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11032 {
11033 unsigned int prec
11034 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11035
11036 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11037 && (~TREE_INT_CST_LOW (arg1)
11038 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11039 return
11040 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11041 }
11042
11043 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11044
11045 This results in more efficient code for machines without a NOR
11046 instruction. Combine will canonicalize to the first form
11047 which will allow use of NOR instructions provided by the
11048 backend if they exist. */
11049 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11050 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11051 {
11052 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11053 build2 (BIT_IOR_EXPR, type,
11054 fold_convert_loc (loc, type,
11055 TREE_OPERAND (arg0, 0)),
11056 fold_convert_loc (loc, type,
11057 TREE_OPERAND (arg1, 0))));
11058 }
11059
11060 /* If arg0 is derived from the address of an object or function, we may
11061 be able to fold this expression using the object or function's
11062 alignment. */
11063 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11064 {
11065 unsigned HOST_WIDE_INT modulus, residue;
11066 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11067
11068 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11069 integer_onep (arg1));
11070
11071 /* This works because modulus is a power of 2. If this weren't the
11072 case, we'd have to replace it by its greatest power-of-2
11073 divisor: modulus & -modulus. */
11074 if (low < modulus)
11075 return build_int_cst (type, residue & low);
11076 }
11077
11078 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11079 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11080 if the new mask might be further optimized. */
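/* E.g. in a 32-bit type, (X << 8) & 0xffffff00 can drop the mask
entirely: the low eight bits of the shifted value are already
known to be zero, so the new mask becomes all ones. */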
11081 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11082 || TREE_CODE (arg0) == RSHIFT_EXPR)
11083 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11084 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11085 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11086 < TYPE_PRECISION (TREE_TYPE (arg0))
11087 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11088 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11089 {
11090 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11091 unsigned HOST_WIDE_INT mask
11092 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11093 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11094 tree shift_type = TREE_TYPE (arg0);
11095
11096 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11097 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11098 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11099 && TYPE_PRECISION (TREE_TYPE (arg0))
11100 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11101 {
11102 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11103 tree arg00 = TREE_OPERAND (arg0, 0);
11104 /* See if more bits can be proven to be zero because of
11105 zero extension. */
11106 if (TREE_CODE (arg00) == NOP_EXPR
11107 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11108 {
11109 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11110 if (TYPE_PRECISION (inner_type)
11111 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11112 && TYPE_PRECISION (inner_type) < prec)
11113 {
11114 prec = TYPE_PRECISION (inner_type);
11115 /* See if we can shorten the right shift. */
11116 if (shiftc < prec)
11117 shift_type = inner_type;
11118 }
11119 }
11120 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11121 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11122 zerobits <<= prec - shiftc;
11123 /* For an arithmetic shift, if the sign bit could be set, ZEROBITS
11124 can actually contain sign bits, so no transformation is possible
11125 unless MASK masks them all away. In that case the shift needs
11126 to be converted into a logical shift. */
11127 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11128 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11129 {
11130 if ((mask & zerobits) == 0)
11131 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11132 else
11133 zerobits = 0;
11134 }
11135 }
11136
11137 /* ((X << 16) & 0xff00) is (X, 0). */
11138 if ((mask & zerobits) == mask)
11139 return omit_one_operand_loc (loc, type,
11140 build_int_cst (type, 0), arg0);
11141
11142 newmask = mask | zerobits;
11143 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11144 {
11145 unsigned int prec;
11146
11147 /* Only do the transformation if NEWMASK is some integer
11148 mode's mask. */
11149 for (prec = BITS_PER_UNIT;
11150 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11151 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11152 break;
11153 if (prec < HOST_BITS_PER_WIDE_INT
11154 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11155 {
11156 tree newmaskt;
11157
11158 if (shift_type != TREE_TYPE (arg0))
11159 {
11160 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11161 fold_convert_loc (loc, shift_type,
11162 TREE_OPERAND (arg0, 0)),
11163 TREE_OPERAND (arg0, 1));
11164 tem = fold_convert_loc (loc, type, tem);
11165 }
11166 else
11167 tem = op0;
11168 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11169 if (!tree_int_cst_equal (newmaskt, arg1))
11170 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11171 }
11172 }
11173 }
11174
11175 goto associate;
11176
11177 case RDIV_EXPR:
11178 /* Don't touch a floating-point divide by zero unless the mode
11179 of the constant can represent infinity. */
11180 if (TREE_CODE (arg1) == REAL_CST
11181 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11182 && real_zerop (arg1))
11183 return NULL_TREE;
11184
11185 /* Optimize A / A to 1.0 if we don't care about
11186 NaNs or Infinities. Skip the transformation
11187 for non-real operands. */
11188 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11189 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11190 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11191 && operand_equal_p (arg0, arg1, 0))
11192 {
11193 tree r = build_real (TREE_TYPE (arg0), dconst1);
11194
11195 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11196 }
11197
11198 /* The complex version of the above A / A optimization. */
11199 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11200 && operand_equal_p (arg0, arg1, 0))
11201 {
11202 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11203 if (! HONOR_NANS (TYPE_MODE (elem_type))
11204 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11205 {
11206 tree r = build_real (elem_type, dconst1);
11207 /* omit_two_operands will call fold_convert for us. */
11208 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11209 }
11210 }
11211
11212 /* (-A) / (-B) -> A / B */
11213 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11214 return fold_build2_loc (loc, RDIV_EXPR, type,
11215 TREE_OPERAND (arg0, 0),
11216 negate_expr (arg1));
11217 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11218 return fold_build2_loc (loc, RDIV_EXPR, type,
11219 negate_expr (arg0),
11220 TREE_OPERAND (arg1, 0));
11221
11222 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11223 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11224 && real_onep (arg1))
11225 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11226
11227 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11228 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11229 && real_minus_onep (arg1))
11230 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11231 negate_expr (arg0)));
11232
11233 /* If ARG1 is a constant, we can convert this to a multiply by the
11234 reciprocal. This does not have the same rounding properties,
11235 so only do this if -freciprocal-math is enabled. We can actually
11236 always safely do it if ARG1 is a power of two, but it's hard to
11237 tell whether it is in a portable manner. */
11238 if (TREE_CODE (arg1) == REAL_CST)
11239 {
11240 if (flag_reciprocal_math
11241 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11242 arg1)))
11243 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11244 /* Find the reciprocal if optimizing and the result is exact. */
11245 if (optimize)
11246 {
11247 REAL_VALUE_TYPE r;
11248 r = TREE_REAL_CST (arg1);
11249 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11250 {
11251 tem = build_real (type, r);
11252 return fold_build2_loc (loc, MULT_EXPR, type,
11253 fold_convert_loc (loc, type, arg0), tem);
11254 }
11255 }
11256 }
11257 /* Convert A/B/C to A/(B*C). */
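/* E.g. under -freciprocal-math, x/2.0/4.0 becomes x/8.0, replacing
two divisions with one. */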
11258 if (flag_reciprocal_math
11259 && TREE_CODE (arg0) == RDIV_EXPR)
11260 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11261 fold_build2_loc (loc, MULT_EXPR, type,
11262 TREE_OPERAND (arg0, 1), arg1));
11263
11264 /* Convert A/(B/C) to (A/B)*C. */
11265 if (flag_reciprocal_math
11266 && TREE_CODE (arg1) == RDIV_EXPR)
11267 return fold_build2_loc (loc, MULT_EXPR, type,
11268 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11269 TREE_OPERAND (arg1, 0)),
11270 TREE_OPERAND (arg1, 1));
11271
11272 /* Convert C1/(X*C2) into (C1/C2)/X. */
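/* E.g. 6.0/(X*2.0) becomes 3.0/X once C1/C2 folds to a constant. */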
11273 if (flag_reciprocal_math
11274 && TREE_CODE (arg1) == MULT_EXPR
11275 && TREE_CODE (arg0) == REAL_CST
11276 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11277 {
11278 tree tem = const_binop (RDIV_EXPR, arg0,
11279 TREE_OPERAND (arg1, 1));
11280 if (tem)
11281 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11282 TREE_OPERAND (arg1, 0));
11283 }
11284
11285 if (flag_unsafe_math_optimizations)
11286 {
11287 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11288 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11289
11290 /* Optimize sin(x)/cos(x) as tan(x). */
11291 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11292 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11293 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11294 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11295 CALL_EXPR_ARG (arg1, 0), 0))
11296 {
11297 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11298
11299 if (tanfn != NULL_TREE)
11300 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11301 }
11302
11303 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11304 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11305 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11306 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11307 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11308 CALL_EXPR_ARG (arg1, 0), 0))
11309 {
11310 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11311
11312 if (tanfn != NULL_TREE)
11313 {
11314 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11315 CALL_EXPR_ARG (arg0, 0));
11316 return fold_build2_loc (loc, RDIV_EXPR, type,
11317 build_real (type, dconst1), tmp);
11318 }
11319 }
11320
11321 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11322 NaNs or Infinities. */
11323 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11324 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11325 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11326 {
11327 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11328 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11329
11330 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11331 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11332 && operand_equal_p (arg00, arg01, 0))
11333 {
11334 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11335
11336 if (cosfn != NULL_TREE)
11337 return build_call_expr_loc (loc, cosfn, 1, arg00);
11338 }
11339 }
11340
11341 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11342 NaNs or Infinities. */
11343 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11344 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11345 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11346 {
11347 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11348 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11349
11350 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11351 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11352 && operand_equal_p (arg00, arg01, 0))
11353 {
11354 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11355
11356 if (cosfn != NULL_TREE)
11357 {
11358 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11359 return fold_build2_loc (loc, RDIV_EXPR, type,
11360 build_real (type, dconst1),
11361 tmp);
11362 }
11363 }
11364 }
11365
11366 /* Optimize pow(x,c)/x as pow(x,c-1). */
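/* E.g. pow (x, 3.0) / x becomes pow (x, 2.0). */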
11367 if (fcode0 == BUILT_IN_POW
11368 || fcode0 == BUILT_IN_POWF
11369 || fcode0 == BUILT_IN_POWL)
11370 {
11371 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11372 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11373 if (TREE_CODE (arg01) == REAL_CST
11374 && !TREE_OVERFLOW (arg01)
11375 && operand_equal_p (arg1, arg00, 0))
11376 {
11377 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11378 REAL_VALUE_TYPE c;
11379 tree arg;
11380
11381 c = TREE_REAL_CST (arg01);
11382 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11383 arg = build_real (type, c);
11384 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11385 }
11386 }
11387
11388 /* Optimize a/root(b/c) into a*root(c/b). */
11389 if (BUILTIN_ROOT_P (fcode1))
11390 {
11391 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11392
11393 if (TREE_CODE (rootarg) == RDIV_EXPR)
11394 {
11395 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11396 tree b = TREE_OPERAND (rootarg, 0);
11397 tree c = TREE_OPERAND (rootarg, 1);
11398
11399 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11400
11401 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11402 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11403 }
11404 }
11405
11406 /* Optimize x/expN(y) into x*expN(-y). */
11407 if (BUILTIN_EXPONENT_P (fcode1))
11408 {
11409 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11410 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11411 arg1 = build_call_expr_loc (loc,
11412 expfn, 1,
11413 fold_convert_loc (loc, type, arg));
11414 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11415 }
11416
11417 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11418 if (fcode1 == BUILT_IN_POW
11419 || fcode1 == BUILT_IN_POWF
11420 || fcode1 == BUILT_IN_POWL)
11421 {
11422 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11423 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11424 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11425 tree neg11 = fold_convert_loc (loc, type,
11426 negate_expr (arg11));
11427 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11428 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11429 }
11430 }
11431 return NULL_TREE;
11432
11433 case TRUNC_DIV_EXPR:
11434 case FLOOR_DIV_EXPR:
11435 /* Simplify A / (B << N) where A and B are positive and B is
11436 a power of 2, to A >> (N + log2(B)). */
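/* E.g. A / (2 << 3) is A / 16, which for nonnegative A is A >> 4,
i.e. A >> (3 + log2 (2)). */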
11437 strict_overflow_p = false;
11438 if (TREE_CODE (arg1) == LSHIFT_EXPR
11439 && (TYPE_UNSIGNED (type)
11440 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11441 {
11442 tree sval = TREE_OPERAND (arg1, 0);
11443 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11444 {
11445 tree sh_cnt = TREE_OPERAND (arg1, 1);
11446 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11447
11448 if (strict_overflow_p)
11449 fold_overflow_warning (("assuming signed overflow does not "
11450 "occur when simplifying A / (B << N)"),
11451 WARN_STRICT_OVERFLOW_MISC);
11452
11453 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11454 sh_cnt, build_int_cst (NULL_TREE, pow2));
11455 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11456 fold_convert_loc (loc, type, arg0), sh_cnt);
11457 }
11458 }
11459
11460 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11461 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11462 if (INTEGRAL_TYPE_P (type)
11463 && TYPE_UNSIGNED (type)
11464 && code == FLOOR_DIV_EXPR)
11465 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11466
11467 /* Fall thru */
11468
11469 case ROUND_DIV_EXPR:
11470 case CEIL_DIV_EXPR:
11471 case EXACT_DIV_EXPR:
11472 if (integer_onep (arg1))
11473 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11474 if (integer_zerop (arg1))
11475 return NULL_TREE;
11476 /* X / -1 is -X. */
11477 if (!TYPE_UNSIGNED (type)
11478 && TREE_CODE (arg1) == INTEGER_CST
11479 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11480 && TREE_INT_CST_HIGH (arg1) == -1)
11481 return fold_convert_loc (loc, type, negate_expr (arg0));
11482
11483 /* Convert -A / -B to A / B when the type is signed and overflow is
11484 undefined. */
11485 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11486 && TREE_CODE (arg0) == NEGATE_EXPR
11487 && negate_expr_p (arg1))
11488 {
11489 if (INTEGRAL_TYPE_P (type))
11490 fold_overflow_warning (("assuming signed overflow does not occur "
11491 "when distributing negation across "
11492 "division"),
11493 WARN_STRICT_OVERFLOW_MISC);
11494 return fold_build2_loc (loc, code, type,
11495 fold_convert_loc (loc, type,
11496 TREE_OPERAND (arg0, 0)),
11497 fold_convert_loc (loc, type,
11498 negate_expr (arg1)));
11499 }
11500 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11501 && TREE_CODE (arg1) == NEGATE_EXPR
11502 && negate_expr_p (arg0))
11503 {
11504 if (INTEGRAL_TYPE_P (type))
11505 fold_overflow_warning (("assuming signed overflow does not occur "
11506 "when distributing negation across "
11507 "division"),
11508 WARN_STRICT_OVERFLOW_MISC);
11509 return fold_build2_loc (loc, code, type,
11510 fold_convert_loc (loc, type,
11511 negate_expr (arg0)),
11512 fold_convert_loc (loc, type,
11513 TREE_OPERAND (arg1, 0)));
11514 }
11515
11516 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11517 operation, EXACT_DIV_EXPR.
11518
11519 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11520 At one time the others generated faster code, but it's not clear
11521 whether they still do after the last round of changes to the DIV code in expmed.c. */
11522 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11523 && multiple_of_p (type, arg0, arg1))
11524 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11525
11526 strict_overflow_p = false;
11527 if (TREE_CODE (arg1) == INTEGER_CST
11528 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11529 &strict_overflow_p)))
11530 {
11531 if (strict_overflow_p)
11532 fold_overflow_warning (("assuming signed overflow does not occur "
11533 "when simplifying division"),
11534 WARN_STRICT_OVERFLOW_MISC);
11535 return fold_convert_loc (loc, type, tem);
11536 }
11537
11538 return NULL_TREE;
11539
11540 case CEIL_MOD_EXPR:
11541 case FLOOR_MOD_EXPR:
11542 case ROUND_MOD_EXPR:
11543 case TRUNC_MOD_EXPR:
11544 /* X % 1 is always zero, but be sure to preserve any side
11545 effects in X. */
11546 if (integer_onep (arg1))
11547 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11548
11549 /* For X % 0, return the expression unchanged so that we can get the
11550 proper warnings and errors. */
11551 if (integer_zerop (arg1))
11552 return NULL_TREE;
11553
11554 /* 0 % X is always zero, but be sure to preserve any side
11555 effects in X. Place this after checking for X == 0. */
11556 if (integer_zerop (arg0))
11557 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11558
11559 /* X % -1 is zero. */
11560 if (!TYPE_UNSIGNED (type)
11561 && TREE_CODE (arg1) == INTEGER_CST
11562 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11563 && TREE_INT_CST_HIGH (arg1) == -1)
11564 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11565
11566 /* X % -C is the same as X % C. */
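/* With C's truncating division the remainder takes the sign of the
dividend, so e.g. 7 % -3 and 7 % 3 are both 1, while -7 % -3 and
-7 % 3 are both -1. */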
11567 if (code == TRUNC_MOD_EXPR
11568 && !TYPE_UNSIGNED (type)
11569 && TREE_CODE (arg1) == INTEGER_CST
11570 && !TREE_OVERFLOW (arg1)
11571 && TREE_INT_CST_HIGH (arg1) < 0
11572 && !TYPE_OVERFLOW_TRAPS (type)
11573 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11574 && !sign_bit_p (arg1, arg1))
11575 return fold_build2_loc (loc, code, type,
11576 fold_convert_loc (loc, type, arg0),
11577 fold_convert_loc (loc, type,
11578 negate_expr (arg1)));
11579
11580 /* X % -Y is the same as X % Y. */
11581 if (code == TRUNC_MOD_EXPR
11582 && !TYPE_UNSIGNED (type)
11583 && TREE_CODE (arg1) == NEGATE_EXPR
11584 && !TYPE_OVERFLOW_TRAPS (type))
11585 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11586 fold_convert_loc (loc, type,
11587 TREE_OPERAND (arg1, 0)));
11588
11589 strict_overflow_p = false;
11590 if (TREE_CODE (arg1) == INTEGER_CST
11591 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11592 &strict_overflow_p)))
11593 {
11594 if (strict_overflow_p)
11595 fold_overflow_warning (("assuming signed overflow does not occur "
11596 "when simplifying modulus"),
11597 WARN_STRICT_OVERFLOW_MISC);
11598 return fold_convert_loc (loc, type, tem);
11599 }
11600
11601 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11602 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
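/* E.g. X % 8 becomes X & 7 when X is known nonnegative. */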
11603 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11604 && (TYPE_UNSIGNED (type)
11605 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11606 {
11607 tree c = arg1;
11608 /* Also optimize A % (C << N) where C is a power of 2,
11609 to A & ((C << N) - 1). */
11610 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11611 c = TREE_OPERAND (arg1, 0);
11612
11613 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11614 {
11615 tree mask
11616 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11617 build_int_cst (TREE_TYPE (arg1), 1));
11618 if (strict_overflow_p)
11619 fold_overflow_warning (("assuming signed overflow does not "
11620 "occur when simplifying "
11621 "X % (power of two)"),
11622 WARN_STRICT_OVERFLOW_MISC);
11623 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11624 fold_convert_loc (loc, type, arg0),
11625 fold_convert_loc (loc, type, mask));
11626 }
11627 }
11628
11629 return NULL_TREE;
11630
11631 case LROTATE_EXPR:
11632 case RROTATE_EXPR:
11633 if (integer_all_onesp (arg0))
11634 return omit_one_operand_loc (loc, type, arg0, arg1);
11635 goto shift;
11636
11637 case RSHIFT_EXPR:
11638 /* Optimize -1 >> x for arithmetic right shifts. */
11639 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11640 && tree_expr_nonnegative_p (arg1))
11641 return omit_one_operand_loc (loc, type, arg0, arg1);
11642 /* ... fall through ... */
11643
11644 case LSHIFT_EXPR:
11645 shift:
11646 if (integer_zerop (arg1))
11647 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11648 if (integer_zerop (arg0))
11649 return omit_one_operand_loc (loc, type, arg0, arg1);
11650
11651 /* Since a negative shift count is not well-defined, don't
11652 try to compute it in the compiler. */
11653 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11654 return NULL_TREE;
11655
11656 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
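/* E.g. (x << 3) << 5 becomes x << 8, provided the combined count
still fits in the type's precision. */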
11657 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11658 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11659 && host_integerp (TREE_OPERAND (arg0, 1), false)
11660 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11661 {
11662 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11663 + TREE_INT_CST_LOW (arg1));
11664
11665 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11666 being well defined. */
11667 if (low >= TYPE_PRECISION (type))
11668 {
11669 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11670 low = low % TYPE_PRECISION (type);
11671 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11672 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11673 TREE_OPERAND (arg0, 0));
11674 else
11675 low = TYPE_PRECISION (type) - 1;
11676 }
11677
11678 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11679 build_int_cst (type, low));
11680 }
11681
11682 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11683 into x & ((unsigned)-1 >> c) for unsigned types. */
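/* E.g. for unsigned 32-bit x, (x >> 4) << 4 becomes
x & 0xfffffff0, clearing the four low bits with a single AND. */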
11684 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11685 || (TYPE_UNSIGNED (type)
11686 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11687 && host_integerp (arg1, false)
11688 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11689 && host_integerp (TREE_OPERAND (arg0, 1), false)
11690 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11691 {
11692 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11693 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11694 tree lshift;
11695 tree arg00;
11696
11697 if (low0 == low1)
11698 {
11699 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11700
11701 lshift = build_int_cst (type, -1);
11702 lshift = int_const_binop (code, lshift, arg1, 0);
11703
11704 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11705 }
11706 }
11707
11708 /* Rewrite an LROTATE_EXPR by a constant into an
11709 RROTATE_EXPR by a new constant. */
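/* E.g. in a 32-bit type, rotating left by 5 is the same as
rotating right by 32 - 5 = 27. */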
11710 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11711 {
11712 tree tem = build_int_cst (TREE_TYPE (arg1),
11713 TYPE_PRECISION (type));
11714 tem = const_binop (MINUS_EXPR, tem, arg1);
11715 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11716 }
11717
11718 /* If we have a rotate of a bit operation with the rotate count and
11719 the second operand of the bit operation both constant,
11720 permute the two operations. */
11721 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11722 && (TREE_CODE (arg0) == BIT_AND_EXPR
11723 || TREE_CODE (arg0) == BIT_IOR_EXPR
11724 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11725 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11726 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11727 fold_build2_loc (loc, code, type,
11728 TREE_OPERAND (arg0, 0), arg1),
11729 fold_build2_loc (loc, code, type,
11730 TREE_OPERAND (arg0, 1), arg1));
11731
11732 /* Two consecutive rotates adding up to the precision of the
11733 type can be ignored. */
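/* E.g. in a 32-bit type, rotating x right by 10 and then right by
22 rotates by 32 bits in total, leaving x unchanged. */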
11734 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11735 && TREE_CODE (arg0) == RROTATE_EXPR
11736 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11737 && TREE_INT_CST_HIGH (arg1) == 0
11738 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11739 && ((TREE_INT_CST_LOW (arg1)
11740 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11741 == (unsigned int) TYPE_PRECISION (type)))
11742 return TREE_OPERAND (arg0, 0);
11743
11744 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11745 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11746 if the latter can be further optimized. */
11747 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11748 && TREE_CODE (arg0) == BIT_AND_EXPR
11749 && TREE_CODE (arg1) == INTEGER_CST
11750 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11751 {
11752 tree mask = fold_build2_loc (loc, code, type,
11753 fold_convert_loc (loc, type,
11754 TREE_OPERAND (arg0, 1)),
11755 arg1);
11756 tree shift = fold_build2_loc (loc, code, type,
11757 fold_convert_loc (loc, type,
11758 TREE_OPERAND (arg0, 0)),
11759 arg1);
11760 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11761 if (tem)
11762 return tem;
11763 }
11764
11765 return NULL_TREE;
11766
11767 case MIN_EXPR:
11768 if (operand_equal_p (arg0, arg1, 0))
11769 return omit_one_operand_loc (loc, type, arg0, arg1);
11770 if (INTEGRAL_TYPE_P (type)
11771 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11772 return omit_one_operand_loc (loc, type, arg1, arg0);
11773 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11774 if (tem)
11775 return tem;
11776 goto associate;
11777
11778 case MAX_EXPR:
11779 if (operand_equal_p (arg0, arg1, 0))
11780 return omit_one_operand_loc (loc, type, arg0, arg1);
11781 if (INTEGRAL_TYPE_P (type)
11782 && TYPE_MAX_VALUE (type)
11783 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11784 return omit_one_operand_loc (loc, type, arg1, arg0);
11785 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11786 if (tem)
11787 return tem;
11788 goto associate;
11789
11790 case TRUTH_ANDIF_EXPR:
11791 /* Note that the operands of this must be ints
11792 and their values must be 0 or 1.
11793 ("true" is a fixed value perhaps depending on the language.) */
11794 /* If first arg is constant zero, return it. */
11795 if (integer_zerop (arg0))
11796 return fold_convert_loc (loc, type, arg0);
11797 case TRUTH_AND_EXPR:
11798 /* If either arg is constant true, drop it. */
11799 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11800 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11801 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11802 /* Preserve sequence points. */
11803 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11804 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11805 /* If second arg is constant zero, result is zero, but first arg
11806 must be evaluated. */
11807 if (integer_zerop (arg1))
11808 return omit_one_operand_loc (loc, type, arg1, arg0);
11809 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11810 case will be handled here. */
11811 if (integer_zerop (arg0))
11812 return omit_one_operand_loc (loc, type, arg0, arg1);
11813
11814 /* !X && X is always false. */
11815 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11816 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11817 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11818 /* X && !X is always false. */
11819 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11820 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11821 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11822
11823 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11824 means A >= Y && A != MAX, but in this case we know that
11825 A < X <= MAX. */
11826
11827 if (!TREE_SIDE_EFFECTS (arg0)
11828 && !TREE_SIDE_EFFECTS (arg1))
11829 {
11830 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11831 if (tem && !operand_equal_p (tem, arg0, 0))
11832 return fold_build2_loc (loc, code, type, tem, arg1);
11833
11834 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11835 if (tem && !operand_equal_p (tem, arg1, 0))
11836 return fold_build2_loc (loc, code, type, arg0, tem);
11837 }
11838
11839 truth_andor:
11840 /* We only do these simplifications if we are optimizing. */
11841 if (!optimize)
11842 return NULL_TREE;
11843
11844 /* Check for things like (A || B) && (A || C). We can convert this
11845 to A || (B && C). Note that either operator can be any of the four
11846 truth and/or operations and the transformation will still be
11847 valid. Also note that we only care about order for the
11848 ANDIF and ORIF operators. If B contains side effects, this
11849 might change the truth-value of A. */
11850 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11851 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11852 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11853 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11854 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11855 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11856 {
11857 tree a00 = TREE_OPERAND (arg0, 0);
11858 tree a01 = TREE_OPERAND (arg0, 1);
11859 tree a10 = TREE_OPERAND (arg1, 0);
11860 tree a11 = TREE_OPERAND (arg1, 1);
11861 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11862 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11863 && (code == TRUTH_AND_EXPR
11864 || code == TRUTH_OR_EXPR));
11865
11866 if (operand_equal_p (a00, a10, 0))
11867 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11868 fold_build2_loc (loc, code, type, a01, a11));
11869 else if (commutative && operand_equal_p (a00, a11, 0))
11870 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11871 fold_build2_loc (loc, code, type, a01, a10));
11872 else if (commutative && operand_equal_p (a01, a10, 0))
11873 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11874 fold_build2_loc (loc, code, type, a00, a11));
11875
11876 /* This case is tricky because we must either have commutative
11877 operators or else A10 must not have side-effects. */
11878
11879 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11880 && operand_equal_p (a01, a11, 0))
11881 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11882 fold_build2_loc (loc, code, type, a00, a10),
11883 a01);
11884 }
11885
11886 /* See if we can build a range comparison. */
11887 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11888 return tem;
11889
11890 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11891 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11892 {
11893 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11894 if (tem)
11895 return fold_build2_loc (loc, code, type, tem, arg1);
11896 }
11897
11898 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11899 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11900 {
11901 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11902 if (tem)
11903 return fold_build2_loc (loc, code, type, arg0, tem);
11904 }
11905
11906 /* Check for the possibility of merging component references. If our
11907 lhs is another similar operation, try to merge its rhs with our
11908 rhs. Then try to merge our lhs and rhs. */
11909 if (TREE_CODE (arg0) == code
11910 && 0 != (tem = fold_truthop (loc, code, type,
11911 TREE_OPERAND (arg0, 1), arg1)))
11912 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11913
11914 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
11915 return tem;
11916
11917 return NULL_TREE;
11918
11919 case TRUTH_ORIF_EXPR:
11920 /* Note that the operands of this must be ints
11921 and their values must be 0 or 1.
11922 ("true" is a fixed value perhaps depending on the language.) */
11923 /* If first arg is constant true, return it. */
11924 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11925 return fold_convert_loc (loc, type, arg0);
11926 case TRUTH_OR_EXPR:
11927 /* If either arg is constant zero, drop it. */
11928 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11929 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11930 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11931 /* Preserve sequence points. */
11932 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11933 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11934 /* If second arg is constant true, result is true, but we must
11935 evaluate first arg. */
11936 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11937 return omit_one_operand_loc (loc, type, arg1, arg0);
11938 /* Likewise for first arg, but note this only occurs here for
11939 TRUTH_OR_EXPR. */
11940 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11941 return omit_one_operand_loc (loc, type, arg0, arg1);
11942
11943 /* !X || X is always true. */
11944 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11945 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11946 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11947 /* X || !X is always true. */
11948 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11949 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11950 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11951
11952 goto truth_andor;
11953
11954 case TRUTH_XOR_EXPR:
11955 /* If the second arg is constant zero, drop it. */
11956 if (integer_zerop (arg1))
11957 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11958 /* If the second arg is constant true, this is a logical inversion. */
11959 if (integer_onep (arg1))
11960 {
11961 /* Only call invert_truthvalue if operand is a truth value. */
11962 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11963 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11964 else
11965 tem = invert_truthvalue_loc (loc, arg0);
11966 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11967 }
11968 /* Identical arguments cancel to zero. */
11969 if (operand_equal_p (arg0, arg1, 0))
11970 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11971
11972 /* !X ^ X is always true. */
11973 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11974 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11975 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11976
11977 /* X ^ !X is always true. */
11978 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11979 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11980 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11981
11982 return NULL_TREE;
11983
11984 case EQ_EXPR:
11985 case NE_EXPR:
11986 tem = fold_comparison (loc, code, type, op0, op1);
11987 if (tem != NULL_TREE)
11988 return tem;
11989
11990 /* bool_var != 0 becomes bool_var. */
11991 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11992 && code == NE_EXPR)
11993 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11994
11995 /* bool_var == 1 becomes bool_var. */
11996 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11997 && code == EQ_EXPR)
11998 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11999
12000 /* bool_var != 1 becomes !bool_var. */
12001 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12002 && code == NE_EXPR)
12003 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12004 fold_convert_loc (loc, type, arg0));
12005
12006 /* bool_var == 0 becomes !bool_var. */
12007 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12008 && code == EQ_EXPR)
12009 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12010 fold_convert_loc (loc, type, arg0));
12011
12012 /* !exp != 0 becomes !exp. */
12013 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12014 && code == NE_EXPR)
12015 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12016
12017 /* If this is an equality comparison of the address of two non-weak,
12018 unaliased symbols neither of which are extern (since we do not
12019 have access to attributes for externs), then we know the result. */
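/* For example, given "static int a, b;", &a == &b folds to 0 and
   &a != &b folds to 1, since two distinct non-weak, unaliased
   statics cannot share an address.  */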
12020 if (TREE_CODE (arg0) == ADDR_EXPR
12021 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12022 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12023 && ! lookup_attribute ("alias",
12024 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12025 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12026 && TREE_CODE (arg1) == ADDR_EXPR
12027 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12028 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12029 && ! lookup_attribute ("alias",
12030 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12031 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12032 {
12033 /* We know that we're looking at the address of two
12034 non-weak, unaliased, static _DECL nodes.
12035
12036 It is both wasteful and incorrect to call operand_equal_p
12037 to compare the two ADDR_EXPR nodes. It is wasteful in that
12038 all we need to do is test pointer equality for the arguments
12039 to the two ADDR_EXPR nodes. It is incorrect to use
12040 operand_equal_p as that function is NOT equivalent to a
12041 C equality test. It can in fact return false for two
12042 objects which would test as equal using the C equality
12043 operator. */
12044 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12045 return constant_boolean_node (equal
12046 ? code == EQ_EXPR : code != EQ_EXPR,
12047 type);
12048 }
12049
12050 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12051 a MINUS_EXPR of a constant, we can convert it into a comparison with
12052 a revised constant as long as no overflow occurs. */
12053 if (TREE_CODE (arg1) == INTEGER_CST
12054 && (TREE_CODE (arg0) == PLUS_EXPR
12055 || TREE_CODE (arg0) == MINUS_EXPR)
12056 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12057 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12058 ? MINUS_EXPR : PLUS_EXPR,
12059 fold_convert_loc (loc, TREE_TYPE (arg0),
12060 arg1),
12061 TREE_OPERAND (arg0, 1)))
12062 && !TREE_OVERFLOW (tem))
12063 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12064
12065 /* Similarly for a NEGATE_EXPR. */
12066 if (TREE_CODE (arg0) == NEGATE_EXPR
12067 && TREE_CODE (arg1) == INTEGER_CST
12068 && 0 != (tem = negate_expr (arg1))
12069 && TREE_CODE (tem) == INTEGER_CST
12070 && !TREE_OVERFLOW (tem))
12071 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12072
12073 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12074 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12075 && TREE_CODE (arg1) == INTEGER_CST
12076 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12077 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12078 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12079 fold_convert_loc (loc,
12080 TREE_TYPE (arg0),
12081 arg1),
12082 TREE_OPERAND (arg0, 1)));
12083
12084 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12085 if ((TREE_CODE (arg0) == PLUS_EXPR
12086 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12087 || TREE_CODE (arg0) == MINUS_EXPR)
12088 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12089 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12090 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12091 {
12092 tree val = TREE_OPERAND (arg0, 1);
12093 return omit_two_operands_loc (loc, type,
12094 fold_build2_loc (loc, code, type,
12095 val,
12096 build_int_cst (TREE_TYPE (val),
12097 0)),
12098 TREE_OPERAND (arg0, 0), arg1);
12099 }
12100
12101 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
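/* For example, 9 - X == X would require 2*X == 9, which no
   integer satisfies (even modulo 2^precision), so it folds to
   false, and 9 - X != X folds to true.  */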
12102 if (TREE_CODE (arg0) == MINUS_EXPR
12103 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12104 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12105 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12106 {
12107 return omit_two_operands_loc (loc, type,
12108 code == NE_EXPR
12109 ? boolean_true_node : boolean_false_node,
12110 TREE_OPERAND (arg0, 1), arg1);
12111 }
12112
12113 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12114 for !=. Don't do this for ordered comparisons due to overflow. */
12115 if (TREE_CODE (arg0) == MINUS_EXPR
12116 && integer_zerop (arg1))
12117 return fold_build2_loc (loc, code, type,
12118 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12119
12120 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12121 if (TREE_CODE (arg0) == ABS_EXPR
12122 && (integer_zerop (arg1) || real_zerop (arg1)))
12123 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12124
12125 /* If this is an EQ or NE comparison with zero and ARG0 is
12126 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12127 two operations, but the latter can be done in one less insn
12128 on machines that have only two-operand insns or on which a
12129 constant cannot be the first operand. */
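/* For example, ((1 << n) & flags) != 0 becomes
   ((flags >> n) & 1) != 0; both test the same bit of FLAGS.  */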
12130 if (TREE_CODE (arg0) == BIT_AND_EXPR
12131 && integer_zerop (arg1))
12132 {
12133 tree arg00 = TREE_OPERAND (arg0, 0);
12134 tree arg01 = TREE_OPERAND (arg0, 1);
12135 if (TREE_CODE (arg00) == LSHIFT_EXPR
12136 && integer_onep (TREE_OPERAND (arg00, 0)))
12137 {
12138 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12139 arg01, TREE_OPERAND (arg00, 1));
12140 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12141 build_int_cst (TREE_TYPE (arg0), 1));
12142 return fold_build2_loc (loc, code, type,
12143 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12144 arg1);
12145 }
12146 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12147 && integer_onep (TREE_OPERAND (arg01, 0)))
12148 {
12149 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12150 arg00, TREE_OPERAND (arg01, 1));
12151 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12152 build_int_cst (TREE_TYPE (arg0), 1));
12153 return fold_build2_loc (loc, code, type,
12154 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12155 arg1);
12156 }
12157 }
12158
12159 /* If this is an NE or EQ comparison of zero against the result of a
12160 signed MOD operation whose second operand is a power of 2, make
12161 the MOD operation unsigned since it is simpler and equivalent. */
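/* For example, for signed X, X % 16 == 0 becomes
   (unsigned) X % 16 == 0: divisibility by a power of two does not
   depend on the sign, and the unsigned MOD is a simple mask.  */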
12162 if (integer_zerop (arg1)
12163 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12164 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12165 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12166 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12167 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12168 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12169 {
12170 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12171 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12172 fold_convert_loc (loc, newtype,
12173 TREE_OPERAND (arg0, 0)),
12174 fold_convert_loc (loc, newtype,
12175 TREE_OPERAND (arg0, 1)));
12176
12177 return fold_build2_loc (loc, code, type, newmod,
12178 fold_convert_loc (loc, newtype, arg1));
12179 }
12180
12181 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12182 C1 is a valid shift constant, and C2 is a power of two, i.e.
12183 a single bit. */
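/* For example, ((X >> 3) & 4) != 0 tests bit 5 of X, so as long
   as 4 << 3 fits in the type it becomes (X & 32) != 0.  */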
12184 if (TREE_CODE (arg0) == BIT_AND_EXPR
12185 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12186 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12187 == INTEGER_CST
12188 && integer_pow2p (TREE_OPERAND (arg0, 1))
12189 && integer_zerop (arg1))
12190 {
12191 tree itype = TREE_TYPE (arg0);
12192 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12193 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12194
12195 /* Check for a valid shift count. */
12196 if (TREE_INT_CST_HIGH (arg001) == 0
12197 && TREE_INT_CST_LOW (arg001) < prec)
12198 {
12199 tree arg01 = TREE_OPERAND (arg0, 1);
12200 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12201 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12202 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12203 can be rewritten as (X & (C2 << C1)) != 0. */
12204 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12205 {
12206 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12207 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12208 return fold_build2_loc (loc, code, type, tem, arg1);
12209 }
12210 /* Otherwise, for signed (arithmetic) shifts,
12211 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12212 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12213 else if (!TYPE_UNSIGNED (itype))
12214 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12215 arg000, build_int_cst (itype, 0));
12216 /* Otherwise, for unsigned (logical) shifts,
12217 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12218 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12219 else
12220 return omit_one_operand_loc (loc, type,
12221 code == EQ_EXPR ? integer_one_node
12222 : integer_zero_node,
12223 arg000);
12224 }
12225 }
12226
12227 /* If this is an NE comparison of zero with an AND of one, remove the
12228 comparison since the AND will give the correct value. */
12229 if (code == NE_EXPR
12230 && integer_zerop (arg1)
12231 && TREE_CODE (arg0) == BIT_AND_EXPR
12232 && integer_onep (TREE_OPERAND (arg0, 1)))
12233 return fold_convert_loc (loc, type, arg0);
12234
12235 /* If we have (A & C) == C where C is a power of 2, convert this into
12236 (A & C) != 0. Similarly for NE_EXPR. */
12237 if (TREE_CODE (arg0) == BIT_AND_EXPR
12238 && integer_pow2p (TREE_OPERAND (arg0, 1))
12239 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12240 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12241 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12242 integer_zero_node));
12243
12244 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12245 bit, then fold the expression into A < 0 or A >= 0. */
12246 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12247 if (tem)
12248 return tem;
12249
12250 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12251 Similarly for NE_EXPR. */
12252 if (TREE_CODE (arg0) == BIT_AND_EXPR
12253 && TREE_CODE (arg1) == INTEGER_CST
12254 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12255 {
12256 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12257 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12258 TREE_OPERAND (arg0, 1));
12259 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12260 arg1, notc);
12261 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12262 if (integer_nonzerop (dandnotc))
12263 return omit_one_operand_loc (loc, type, rslt, arg0);
12264 }
12265
12266 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12267 Similarly for NE_EXPR. */
12268 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12269 && TREE_CODE (arg1) == INTEGER_CST
12270 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12271 {
12272 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12273 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12274 TREE_OPERAND (arg0, 1), notd);
12275 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12276 if (integer_nonzerop (candnotd))
12277 return omit_one_operand_loc (loc, type, rslt, arg0);
12278 }
12279
12280 /* If this is a comparison of a field, we may be able to simplify it. */
12281 if ((TREE_CODE (arg0) == COMPONENT_REF
12282 || TREE_CODE (arg0) == BIT_FIELD_REF)
12283 /* Handle the constant case even without -O
12284 to make sure the warnings are given. */
12285 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12286 {
12287 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12288 if (t1)
12289 return t1;
12290 }
12291
12292 /* Optimize comparisons of strlen vs zero to a compare of the
12293 first character of the string vs zero. To wit,
12294 strlen(ptr) == 0 => *ptr == 0
12295 strlen(ptr) != 0 => *ptr != 0
12296 Other cases should reduce to one of these two (or a constant)
12297 due to the return value of strlen being unsigned. */
12298 if (TREE_CODE (arg0) == CALL_EXPR
12299 && integer_zerop (arg1))
12300 {
12301 tree fndecl = get_callee_fndecl (arg0);
12302
12303 if (fndecl
12304 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12305 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12306 && call_expr_nargs (arg0) == 1
12307 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12308 {
12309 tree iref = build_fold_indirect_ref_loc (loc,
12310 CALL_EXPR_ARG (arg0, 0));
12311 return fold_build2_loc (loc, code, type, iref,
12312 build_int_cst (TREE_TYPE (iref), 0));
12313 }
12314 }
12315
12316 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12317 of X. Similarly fold (X >> C) == 0 into X >= 0. */
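/* For example, for a 32-bit X, (X >> 31) != 0 becomes X < 0 and
   (X >> 31) == 0 becomes X >= 0, converting X to the signed type
   first if it was unsigned.  */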
12318 if (TREE_CODE (arg0) == RSHIFT_EXPR
12319 && integer_zerop (arg1)
12320 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12321 {
12322 tree arg00 = TREE_OPERAND (arg0, 0);
12323 tree arg01 = TREE_OPERAND (arg0, 1);
12324 tree itype = TREE_TYPE (arg00);
12325 if (TREE_INT_CST_HIGH (arg01) == 0
12326 && TREE_INT_CST_LOW (arg01)
12327 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12328 {
12329 if (TYPE_UNSIGNED (itype))
12330 {
12331 itype = signed_type_for (itype);
12332 arg00 = fold_convert_loc (loc, itype, arg00);
12333 }
12334 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12335 type, arg00, build_int_cst (itype, 0));
12336 }
12337 }
12338
12339 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12340 if (integer_zerop (arg1)
12341 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12342 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12343 TREE_OPERAND (arg0, 1));
12344
12345 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12346 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12347 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12348 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12349 build_int_cst (TREE_TYPE (arg1), 0));
12350 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12351 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12352 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12353 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12354 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12355 build_int_cst (TREE_TYPE (arg1), 0));
12356
12357 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
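/* For example, (X ^ 5) == 12 becomes X == 9, since 5 ^ 12 == 9.  */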
12358 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12359 && TREE_CODE (arg1) == INTEGER_CST
12360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12361 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12362 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12363 TREE_OPERAND (arg0, 1), arg1));
12364
12365 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12366 (X & C) == 0 when C is a single bit. */
12367 if (TREE_CODE (arg0) == BIT_AND_EXPR
12368 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12369 && integer_zerop (arg1)
12370 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12371 {
12372 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12373 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12374 TREE_OPERAND (arg0, 1));
12375 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12376 type, tem, arg1);
12377 }
12378
12379 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12380 constant C is a power of two, i.e. a single bit. */
12381 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12382 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12383 && integer_zerop (arg1)
12384 && integer_pow2p (TREE_OPERAND (arg0, 1))
12385 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12386 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12387 {
12388 tree arg00 = TREE_OPERAND (arg0, 0);
12389 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12390 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12391 }
12392
12393 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12394 when C is a power of two, i.e. a single bit. */
12395 if (TREE_CODE (arg0) == BIT_AND_EXPR
12396 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12397 && integer_zerop (arg1)
12398 && integer_pow2p (TREE_OPERAND (arg0, 1))
12399 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12400 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12401 {
12402 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12403 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12404 arg000, TREE_OPERAND (arg0, 1));
12405 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12406 tem, build_int_cst (TREE_TYPE (tem), 0));
12407 }
12408
12409 if (integer_zerop (arg1)
12410 && tree_expr_nonzero_p (arg0))
12411 {
12412 tree res = constant_boolean_node (code==NE_EXPR, type);
12413 return omit_one_operand_loc (loc, type, res, arg0);
12414 }
12415
12416 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12417 if (TREE_CODE (arg0) == NEGATE_EXPR
12418 && TREE_CODE (arg1) == NEGATE_EXPR)
12419 return fold_build2_loc (loc, code, type,
12420 TREE_OPERAND (arg0, 0),
12421 TREE_OPERAND (arg1, 0));
12422
12423 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
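/* For example, (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0.  */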
12424 if (TREE_CODE (arg0) == BIT_AND_EXPR
12425 && TREE_CODE (arg1) == BIT_AND_EXPR)
12426 {
12427 tree arg00 = TREE_OPERAND (arg0, 0);
12428 tree arg01 = TREE_OPERAND (arg0, 1);
12429 tree arg10 = TREE_OPERAND (arg1, 0);
12430 tree arg11 = TREE_OPERAND (arg1, 1);
12431 tree itype = TREE_TYPE (arg0);
12432
12433 if (operand_equal_p (arg01, arg11, 0))
12434 return fold_build2_loc (loc, code, type,
12435 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12436 fold_build2_loc (loc,
12437 BIT_XOR_EXPR, itype,
12438 arg00, arg10),
12439 arg01),
12440 build_int_cst (itype, 0));
12441
12442 if (operand_equal_p (arg01, arg10, 0))
12443 return fold_build2_loc (loc, code, type,
12444 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12445 fold_build2_loc (loc,
12446 BIT_XOR_EXPR, itype,
12447 arg00, arg11),
12448 arg01),
12449 build_int_cst (itype, 0));
12450
12451 if (operand_equal_p (arg00, arg11, 0))
12452 return fold_build2_loc (loc, code, type,
12453 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12454 fold_build2_loc (loc,
12455 BIT_XOR_EXPR, itype,
12456 arg01, arg10),
12457 arg00),
12458 build_int_cst (itype, 0));
12459
12460 if (operand_equal_p (arg00, arg10, 0))
12461 return fold_build2_loc (loc, code, type,
12462 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12463 fold_build2_loc (loc,
12464 BIT_XOR_EXPR, itype,
12465 arg01, arg11),
12466 arg00),
12467 build_int_cst (itype, 0));
12468 }
12469
12470 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12471 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12472 {
12473 tree arg00 = TREE_OPERAND (arg0, 0);
12474 tree arg01 = TREE_OPERAND (arg0, 1);
12475 tree arg10 = TREE_OPERAND (arg1, 0);
12476 tree arg11 = TREE_OPERAND (arg1, 1);
12477 tree itype = TREE_TYPE (arg0);
12478
12479 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12480 operand_equal_p guarantees no side-effects so we don't need
12481 to use omit_one_operand on Z. */
12482 if (operand_equal_p (arg01, arg11, 0))
12483 return fold_build2_loc (loc, code, type, arg00, arg10);
12484 if (operand_equal_p (arg01, arg10, 0))
12485 return fold_build2_loc (loc, code, type, arg00, arg11);
12486 if (operand_equal_p (arg00, arg11, 0))
12487 return fold_build2_loc (loc, code, type, arg01, arg10);
12488 if (operand_equal_p (arg00, arg10, 0))
12489 return fold_build2_loc (loc, code, type, arg01, arg11);
12490
12491 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
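/* For example, (X ^ 3) == (Y ^ 5) becomes (X ^ 6) == Y, since
   3 ^ 5 == 6.  */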
12492 if (TREE_CODE (arg01) == INTEGER_CST
12493 && TREE_CODE (arg11) == INTEGER_CST)
12494 return fold_build2_loc (loc, code, type,
12495 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12496 fold_build2_loc (loc,
12497 BIT_XOR_EXPR, itype,
12498 arg01, arg11)),
12499 arg10);
12500 }
12501
12502 /* Attempt to simplify equality/inequality comparisons of complex
12503 values. Only lower the comparison if the result is known or
12504 can be simplified to a single scalar comparison. */
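/* For example, if the comparison of the real parts folds to
   false, X == Y is false outright; if it folds to true, only the
   imaginary parts still need to be compared.  */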
12505 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12506 || TREE_CODE (arg0) == COMPLEX_CST)
12507 && (TREE_CODE (arg1) == COMPLEX_EXPR
12508 || TREE_CODE (arg1) == COMPLEX_CST))
12509 {
12510 tree real0, imag0, real1, imag1;
12511 tree rcond, icond;
12512
12513 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12514 {
12515 real0 = TREE_OPERAND (arg0, 0);
12516 imag0 = TREE_OPERAND (arg0, 1);
12517 }
12518 else
12519 {
12520 real0 = TREE_REALPART (arg0);
12521 imag0 = TREE_IMAGPART (arg0);
12522 }
12523
12524 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12525 {
12526 real1 = TREE_OPERAND (arg1, 0);
12527 imag1 = TREE_OPERAND (arg1, 1);
12528 }
12529 else
12530 {
12531 real1 = TREE_REALPART (arg1);
12532 imag1 = TREE_IMAGPART (arg1);
12533 }
12534
12535 rcond = fold_binary_loc (loc, code, type, real0, real1);
12536 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12537 {
12538 if (integer_zerop (rcond))
12539 {
12540 if (code == EQ_EXPR)
12541 return omit_two_operands_loc (loc, type, boolean_false_node,
12542 imag0, imag1);
12543 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12544 }
12545 else
12546 {
12547 if (code == NE_EXPR)
12548 return omit_two_operands_loc (loc, type, boolean_true_node,
12549 imag0, imag1);
12550 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12551 }
12552 }
12553
12554 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12555 if (icond && TREE_CODE (icond) == INTEGER_CST)
12556 {
12557 if (integer_zerop (icond))
12558 {
12559 if (code == EQ_EXPR)
12560 return omit_two_operands_loc (loc, type, boolean_false_node,
12561 real0, real1);
12562 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12563 }
12564 else
12565 {
12566 if (code == NE_EXPR)
12567 return omit_two_operands_loc (loc, type, boolean_true_node,
12568 real0, real1);
12569 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12570 }
12571 }
12572 }
12573
12574 return NULL_TREE;
12575
12576 case LT_EXPR:
12577 case GT_EXPR:
12578 case LE_EXPR:
12579 case GE_EXPR:
12580 tem = fold_comparison (loc, code, type, op0, op1);
12581 if (tem != NULL_TREE)
12582 return tem;
12583
12584 /* Transform comparisons of the form X +- C CMP X. */
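/* For example, when signed overflow is undefined, X - 1 > X folds
   to false and X + 1 > X folds to true; fold_overflow_warning is
   called so -Wstrict-overflow can diagnose the assumption.  */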
12585 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12586 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12587 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12588 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12589 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12590 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12591 {
12592 tree arg01 = TREE_OPERAND (arg0, 1);
12593 enum tree_code code0 = TREE_CODE (arg0);
12594 int is_positive;
12595
12596 if (TREE_CODE (arg01) == REAL_CST)
12597 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12598 else
12599 is_positive = tree_int_cst_sgn (arg01);
12600
12601 /* (X - c) > X becomes false. */
12602 if (code == GT_EXPR
12603 && ((code0 == MINUS_EXPR && is_positive >= 0)
12604 || (code0 == PLUS_EXPR && is_positive <= 0)))
12605 {
12606 if (TREE_CODE (arg01) == INTEGER_CST
12607 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12608 fold_overflow_warning (("assuming signed overflow does not "
12609 "occur when assuming that (X - c) > X "
12610 "is always false"),
12611 WARN_STRICT_OVERFLOW_ALL);
12612 return constant_boolean_node (0, type);
12613 }
12614
12615 /* Likewise (X + c) < X becomes false. */
12616 if (code == LT_EXPR
12617 && ((code0 == PLUS_EXPR && is_positive >= 0)
12618 || (code0 == MINUS_EXPR && is_positive <= 0)))
12619 {
12620 if (TREE_CODE (arg01) == INTEGER_CST
12621 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12622 fold_overflow_warning (("assuming signed overflow does not "
12623 "occur when assuming that "
12624 "(X + c) < X is always false"),
12625 WARN_STRICT_OVERFLOW_ALL);
12626 return constant_boolean_node (0, type);
12627 }
12628
12629 /* Convert (X - c) <= X to true. */
12630 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12631 && code == LE_EXPR
12632 && ((code0 == MINUS_EXPR && is_positive >= 0)
12633 || (code0 == PLUS_EXPR && is_positive <= 0)))
12634 {
12635 if (TREE_CODE (arg01) == INTEGER_CST
12636 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12637 fold_overflow_warning (("assuming signed overflow does not "
12638 "occur when assuming that "
12639 "(X - c) <= X is always true"),
12640 WARN_STRICT_OVERFLOW_ALL);
12641 return constant_boolean_node (1, type);
12642 }
12643
12644 /* Convert (X + c) >= X to true. */
12645 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12646 && code == GE_EXPR
12647 && ((code0 == PLUS_EXPR && is_positive >= 0)
12648 || (code0 == MINUS_EXPR && is_positive <= 0)))
12649 {
12650 if (TREE_CODE (arg01) == INTEGER_CST
12651 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12652 fold_overflow_warning (("assuming signed overflow does not "
12653 "occur when assuming that "
12654 "(X + c) >= X is always true"),
12655 WARN_STRICT_OVERFLOW_ALL);
12656 return constant_boolean_node (1, type);
12657 }
12658
12659 if (TREE_CODE (arg01) == INTEGER_CST)
12660 {
12661 /* Convert X + c > X and X - c < X to true for integers. */
12662 if (code == GT_EXPR
12663 && ((code0 == PLUS_EXPR && is_positive > 0)
12664 || (code0 == MINUS_EXPR && is_positive < 0)))
12665 {
12666 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12667 fold_overflow_warning (("assuming signed overflow does "
12668 "not occur when assuming that "
12669 "(X + c) > X is always true"),
12670 WARN_STRICT_OVERFLOW_ALL);
12671 return constant_boolean_node (1, type);
12672 }
12673
12674 if (code == LT_EXPR
12675 && ((code0 == MINUS_EXPR && is_positive > 0)
12676 || (code0 == PLUS_EXPR && is_positive < 0)))
12677 {
12678 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12679 fold_overflow_warning (("assuming signed overflow does "
12680 "not occur when assuming that "
12681 "(X - c) < X is always true"),
12682 WARN_STRICT_OVERFLOW_ALL);
12683 return constant_boolean_node (1, type);
12684 }
12685
12686 /* Convert X + c <= X and X - c >= X to false for integers. */
12687 if (code == LE_EXPR
12688 && ((code0 == PLUS_EXPR && is_positive > 0)
12689 || (code0 == MINUS_EXPR && is_positive < 0)))
12690 {
12691 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12692 fold_overflow_warning (("assuming signed overflow does "
12693 "not occur when assuming that "
12694 "(X + c) <= X is always false"),
12695 WARN_STRICT_OVERFLOW_ALL);
12696 return constant_boolean_node (0, type);
12697 }
12698
12699 if (code == GE_EXPR
12700 && ((code0 == MINUS_EXPR && is_positive > 0)
12701 || (code0 == PLUS_EXPR && is_positive < 0)))
12702 {
12703 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12704 fold_overflow_warning (("assuming signed overflow does "
12705 "not occur when assuming that "
12706 "(X - c) >= X is always false"),
12707 WARN_STRICT_OVERFLOW_ALL);
12708 return constant_boolean_node (0, type);
12709 }
12710 }
12711 }
12712
12713 /* Comparisons with the highest or lowest possible integer of
12714 the specified precision will have known values. */
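/* For example, for unsigned char X, X > 255 folds to false,
   X <= 255 folds to true, and X > 254 becomes X == 255.  */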
12715 {
12716 tree arg1_type = TREE_TYPE (arg1);
12717 unsigned int width = TYPE_PRECISION (arg1_type);
12718
12719 if (TREE_CODE (arg1) == INTEGER_CST
12720 && width <= 2 * HOST_BITS_PER_WIDE_INT
12721 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12722 {
12723 HOST_WIDE_INT signed_max_hi;
12724 unsigned HOST_WIDE_INT signed_max_lo;
12725 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12726
12727 if (width <= HOST_BITS_PER_WIDE_INT)
12728 {
12729 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12730 - 1;
12731 signed_max_hi = 0;
12732 max_hi = 0;
12733
12734 if (TYPE_UNSIGNED (arg1_type))
12735 {
12736 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12737 min_lo = 0;
12738 min_hi = 0;
12739 }
12740 else
12741 {
12742 max_lo = signed_max_lo;
12743 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12744 min_hi = -1;
12745 }
12746 }
12747 else
12748 {
12749 width -= HOST_BITS_PER_WIDE_INT;
12750 signed_max_lo = -1;
12751 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12752 - 1;
12753 max_lo = -1;
12754 min_lo = 0;
12755
12756 if (TYPE_UNSIGNED (arg1_type))
12757 {
12758 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12759 min_hi = 0;
12760 }
12761 else
12762 {
12763 max_hi = signed_max_hi;
12764 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12765 }
12766 }
12767
12768 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12769 && TREE_INT_CST_LOW (arg1) == max_lo)
12770 switch (code)
12771 {
12772 case GT_EXPR:
12773 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12774
12775 case GE_EXPR:
12776 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12777
12778 case LE_EXPR:
12779 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12780
12781 case LT_EXPR:
12782 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12783
12784 /* The GE_EXPR and LT_EXPR cases above are not normally
12785 reached because of previous transformations. */
12786
12787 default:
12788 break;
12789 }
12790 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12791 == max_hi
12792 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12793 switch (code)
12794 {
12795 case GT_EXPR:
12796 arg1 = const_binop (PLUS_EXPR, arg1,
12797 build_int_cst (TREE_TYPE (arg1), 1));
12798 return fold_build2_loc (loc, EQ_EXPR, type,
12799 fold_convert_loc (loc,
12800 TREE_TYPE (arg1), arg0),
12801 arg1);
12802 case LE_EXPR:
12803 arg1 = const_binop (PLUS_EXPR, arg1,
12804 build_int_cst (TREE_TYPE (arg1), 1));
12805 return fold_build2_loc (loc, NE_EXPR, type,
12806 fold_convert_loc (loc, TREE_TYPE (arg1),
12807 arg0),
12808 arg1);
12809 default:
12810 break;
12811 }
12812 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12813 == min_hi
12814 && TREE_INT_CST_LOW (arg1) == min_lo)
12815 switch (code)
12816 {
12817 case LT_EXPR:
12818 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12819
12820 case LE_EXPR:
12821 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12822
12823 case GE_EXPR:
12824 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12825
12826 case GT_EXPR:
12827 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12828
12829 default:
12830 break;
12831 }
12832 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12833 == min_hi
12834 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12835 switch (code)
12836 {
12837 case GE_EXPR:
12838 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12839 return fold_build2_loc (loc, NE_EXPR, type,
12840 fold_convert_loc (loc,
12841 TREE_TYPE (arg1), arg0),
12842 arg1);
12843 case LT_EXPR:
12844 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12845 return fold_build2_loc (loc, EQ_EXPR, type,
12846 fold_convert_loc (loc, TREE_TYPE (arg1),
12847 arg0),
12848 arg1);
12849 default:
12850 break;
12851 }
12852
12853 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12854 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12855 && TYPE_UNSIGNED (arg1_type)
12856 /* We will flip the signedness of the comparison operator
12857 associated with the mode of arg1, so the sign bit is
12858 specified by this mode. Check that arg1 is the signed
12859 max associated with this sign bit. */
12860 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12861 /* signed_type does not work on pointer types. */
12862 && INTEGRAL_TYPE_P (arg1_type))
12863 {
12864 /* The following case also applies to X < signed_max+1
12865 and X >= signed_max+1 because of previous transformations. */
12866 if (code == LE_EXPR || code == GT_EXPR)
12867 {
12868 tree st;
12869 st = signed_type_for (TREE_TYPE (arg1));
12870 return fold_build2_loc (loc,
12871 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12872 type, fold_convert_loc (loc, st, arg0),
12873 build_int_cst (st, 0));
12874 }
12875 }
12876 }
12877 }
12878
12879 /* If we are comparing an ABS_EXPR with a constant, we can
12880 convert all the cases into explicit comparisons, but they may
12881 well not be faster than doing the ABS and one comparison.
12882 But ABS (X) <= C is a range comparison, which becomes a subtraction
12883 and a comparison, and is probably faster. */
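/* For example, ABS (X) <= 5 becomes X >= -5 && X <= 5.  */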
12884 if (code == LE_EXPR
12885 && TREE_CODE (arg1) == INTEGER_CST
12886 && TREE_CODE (arg0) == ABS_EXPR
12887 && ! TREE_SIDE_EFFECTS (arg0)
12888 && (0 != (tem = negate_expr (arg1)))
12889 && TREE_CODE (tem) == INTEGER_CST
12890 && !TREE_OVERFLOW (tem))
12891 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12892 build2 (GE_EXPR, type,
12893 TREE_OPERAND (arg0, 0), tem),
12894 build2 (LE_EXPR, type,
12895 TREE_OPERAND (arg0, 0), arg1));
12896
12897 /* Convert ABS_EXPR<x> >= 0 to true. */
12898 strict_overflow_p = false;
12899 if (code == GE_EXPR
12900 && (integer_zerop (arg1)
12901 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12902 && real_zerop (arg1)))
12903 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12904 {
12905 if (strict_overflow_p)
12906 fold_overflow_warning (("assuming signed overflow does not occur "
12907 "when simplifying comparison of "
12908 "absolute value and zero"),
12909 WARN_STRICT_OVERFLOW_CONDITIONAL);
12910 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12911 }
12912
12913 /* Convert ABS_EXPR<x> < 0 to false. */
12914 strict_overflow_p = false;
12915 if (code == LT_EXPR
12916 && (integer_zerop (arg1) || real_zerop (arg1))
12917 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12918 {
12919 if (strict_overflow_p)
12920 fold_overflow_warning (("assuming signed overflow does not occur "
12921 "when simplifying comparison of "
12922 "absolute value and zero"),
12923 WARN_STRICT_OVERFLOW_CONDITIONAL);
12924 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12925 }
12926
12927 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12928 and similarly for >= into !=. */
12929 if ((code == LT_EXPR || code == GE_EXPR)
12930 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12931 && TREE_CODE (arg1) == LSHIFT_EXPR
12932 && integer_onep (TREE_OPERAND (arg1, 0)))
12933 {
12934 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12935 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12936 TREE_OPERAND (arg1, 1)),
12937 build_int_cst (TREE_TYPE (arg0), 0));
12938 goto fold_binary_exit;
12939 }
12940
12941 if ((code == LT_EXPR || code == GE_EXPR)
12942 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12943 && CONVERT_EXPR_P (arg1)
12944 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12945 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12946 {
12947 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12948 fold_convert_loc (loc, TREE_TYPE (arg0),
12949 build2 (RSHIFT_EXPR,
12950 TREE_TYPE (arg0), arg0,
12951 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12952 1))),
12953 build_int_cst (TREE_TYPE (arg0), 0));
12954 goto fold_binary_exit;
12955 }
12956
12957 return NULL_TREE;
12958
12959 case UNORDERED_EXPR:
12960 case ORDERED_EXPR:
12961 case UNLT_EXPR:
12962 case UNLE_EXPR:
12963 case UNGT_EXPR:
12964 case UNGE_EXPR:
12965 case UNEQ_EXPR:
12966 case LTGT_EXPR:
12967 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12968 {
12969 t1 = fold_relational_const (code, type, arg0, arg1);
12970 if (t1 != NULL_TREE)
12971 return t1;
12972 }
12973
12974 /* If the first operand is NaN, the result is constant. */
12975 if (TREE_CODE (arg0) == REAL_CST
12976 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12977 && (code != LTGT_EXPR || ! flag_trapping_math))
12978 {
12979 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12980 ? integer_zero_node
12981 : integer_one_node;
12982 return omit_one_operand_loc (loc, type, t1, arg1);
12983 }
12984
12985 /* If the second operand is NaN, the result is constant. */
12986 if (TREE_CODE (arg1) == REAL_CST
12987 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12988 && (code != LTGT_EXPR || ! flag_trapping_math))
12989 {
12990 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12991 ? integer_zero_node
12992 : integer_one_node;
12993 return omit_one_operand_loc (loc, type, t1, arg0);
12994 }
12995
12996 /* Simplify unordered comparison of something with itself. */
12997 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12998 && operand_equal_p (arg0, arg1, 0))
12999 return constant_boolean_node (1, type);
13000
13001 if (code == LTGT_EXPR
13002 && !flag_trapping_math
13003 && operand_equal_p (arg0, arg1, 0))
13004 return constant_boolean_node (0, type);
13005
13006 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13007 {
13008 tree targ0 = strip_float_extensions (arg0);
13009 tree targ1 = strip_float_extensions (arg1);
13010 tree newtype = TREE_TYPE (targ0);
13011
13012 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13013 newtype = TREE_TYPE (targ1);
13014
13015 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13016 return fold_build2_loc (loc, code, type,
13017 fold_convert_loc (loc, newtype, targ0),
13018 fold_convert_loc (loc, newtype, targ1));
13019 }
13020
13021 return NULL_TREE;
13022
13023 case COMPOUND_EXPR:
13024 /* When pedantic, a compound expression can be neither an lvalue
13025 nor an integer constant expression. */
13026 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13027 return NULL_TREE;
13028 /* Don't let (0, 0) be a null pointer constant. */
13029 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13030 : fold_convert_loc (loc, type, arg1);
13031 return pedantic_non_lvalue_loc (loc, tem);
13032
13033 case COMPLEX_EXPR:
13034 if ((TREE_CODE (arg0) == REAL_CST
13035 && TREE_CODE (arg1) == REAL_CST)
13036 || (TREE_CODE (arg0) == INTEGER_CST
13037 && TREE_CODE (arg1) == INTEGER_CST))
13038 return build_complex (type, arg0, arg1);
13039 return NULL_TREE;
13040
13041 case ASSERT_EXPR:
13042 /* An ASSERT_EXPR should never be passed to fold_binary. */
13043 gcc_unreachable ();
13044
13045 default:
13046 return NULL_TREE;
13047 } /* switch (code) */
13048 fold_binary_exit:
13049 protected_set_expr_location (tem, loc);
13050 return tem;
13051 }
13052
13053 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13054 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13055 of GOTO_EXPR. */
13056
13057 static tree
13058 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13059 {
13060 switch (TREE_CODE (*tp))
13061 {
13062 case LABEL_EXPR:
13063 return *tp;
13064
13065 case GOTO_EXPR:
13066 *walk_subtrees = 0;
13067
13068 /* ... fall through ... */
13069
13070 default:
13071 return NULL_TREE;
13072 }
13073 }
13074
13075 /* Return whether the sub-tree ST contains a label which is accessible from
13076 outside the sub-tree. */
13077
13078 static bool
13079 contains_label_p (tree st)
13080 {
13081 return
13082 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13083 }
13084
13085 /* Fold a ternary expression of code CODE and type TYPE with operands
13086 OP0, OP1, and OP2. Return the folded expression if folding is
13087 successful. Otherwise, return NULL_TREE. */
13088
13089 tree
13090 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13091 tree op0, tree op1, tree op2)
13092 {
13093 tree tem;
13094 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13095 enum tree_code_class kind = TREE_CODE_CLASS (code);
13096
13097 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13098 && TREE_CODE_LENGTH (code) == 3);
13099
13100 /* Strip any conversions that don't change the mode. This is safe
13101 for every expression, except for a comparison expression because
13102 its signedness is derived from its operands. So, in the latter
13103 case, only strip conversions that don't change the signedness.
13104
13105 Note that this is done as an internal manipulation within the
13106 constant folder, in order to find the simplest representation of
13107 the arguments so that their form can be studied. In any case,
13108 the appropriate type conversions should be put back in the tree
13109 that will get out of the constant folder. */
13110 if (op0)
13111 {
13112 arg0 = op0;
13113 STRIP_NOPS (arg0);
13114 }
13115
13116 if (op1)
13117 {
13118 arg1 = op1;
13119 STRIP_NOPS (arg1);
13120 }
13121
13122 switch (code)
13123 {
13124 case COMPONENT_REF:
13125 if (TREE_CODE (arg0) == CONSTRUCTOR
13126 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13127 {
13128 unsigned HOST_WIDE_INT idx;
13129 tree field, value;
13130 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13131 if (field == arg1)
13132 return value;
13133 }
13134 return NULL_TREE;
13135
13136 case COND_EXPR:
13137 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13138 so all simple results must be passed through pedantic_non_lvalue. */
13139 if (TREE_CODE (arg0) == INTEGER_CST)
13140 {
13141 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13142 tem = integer_zerop (arg0) ? op2 : op1;
13143 /* Only optimize constant conditions when the selected branch
13144 has the same type as the COND_EXPR. This avoids optimizing
13145 away "c ? x : throw", where the throw has a void type.
13146 Also avoid discarding the unused operand if it contains a label. */
13147 if ((!TREE_SIDE_EFFECTS (unused_op)
13148 || !contains_label_p (unused_op))
13149 && (! VOID_TYPE_P (TREE_TYPE (tem))
13150 || VOID_TYPE_P (type)))
13151 return pedantic_non_lvalue_loc (loc, tem);
13152 return NULL_TREE;
13153 }
13154 if (operand_equal_p (arg1, op2, 0))
13155 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13156
13157 /* If we have A op B ? A : C, we may be able to convert this to a
13158 simpler expression, depending on the operation and the values
13159 of B and C. Signed zeros prevent all of these transformations,
13160 for reasons given above each one.
13161
13162 Also try swapping the arguments and inverting the conditional. */
13163 if (COMPARISON_CLASS_P (arg0)
13164 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13165 arg1, TREE_OPERAND (arg0, 1))
13166 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13167 {
13168 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13169 if (tem)
13170 return tem;
13171 }
13172
13173 if (COMPARISON_CLASS_P (arg0)
13174 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13175 op2,
13176 TREE_OPERAND (arg0, 1))
13177 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13178 {
13179 tem = fold_truth_not_expr (loc, arg0);
13180 if (tem && COMPARISON_CLASS_P (tem))
13181 {
13182 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13183 if (tem)
13184 return tem;
13185 }
13186 }
13187
13188 /* If the second operand is simpler than the third, swap them
13189 since that produces better jump optimization results. */
13190 if (truth_value_p (TREE_CODE (arg0))
13191 && tree_swap_operands_p (op1, op2, false))
13192 {
13193 /* See if this can be inverted. If it can't, possibly because
13194 it was a floating-point inequality comparison, don't do
13195 anything. */
13196 tem = fold_truth_not_expr (loc, arg0);
13197 if (tem)
13198 return fold_build3_loc (loc, code, type, tem, op2, op1);
13199 }
13200
13201 /* Convert A ? 1 : 0 to simply A. */
13202 if (integer_onep (op1)
13203 && integer_zerop (op2)
13204 /* If we try to convert OP0 to our type, the
13205 call to fold will try to move the conversion inside
13206 a COND, which will recurse. In that case, the COND_EXPR
13207 is probably the best choice, so leave it alone. */
13208 && type == TREE_TYPE (arg0))
13209 return pedantic_non_lvalue_loc (loc, arg0);
13210
13211 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13212 over COND_EXPR in cases such as floating point comparisons. */
13213 if (integer_zerop (op1)
13214 && integer_onep (op2)
13215 && truth_value_p (TREE_CODE (arg0)))
13216 return pedantic_non_lvalue_loc (loc,
13217 fold_convert_loc (loc, type,
13218 invert_truthvalue_loc (loc,
13219 arg0)));
13220
13221 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
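/* For example, for a 32-bit int A,
   A < 0 ? 0x80000000 : 0 folds to A & 0x80000000.  */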
13222 if (TREE_CODE (arg0) == LT_EXPR
13223 && integer_zerop (TREE_OPERAND (arg0, 1))
13224 && integer_zerop (op2)
13225 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13226 {
13227 /* sign_bit_p only checks ARG1 bits within A's precision.
13228 If <sign bit of A> has wider type than A, bits outside
13229 of A's precision in <sign bit of A> need to be checked.
13230 If they are all 0, this optimization needs to be done
13231 in unsigned A's type; if they are all 1, in signed A's type;
13232 otherwise this can't be done. */
13233 if (TYPE_PRECISION (TREE_TYPE (tem))
13234 < TYPE_PRECISION (TREE_TYPE (arg1))
13235 && TYPE_PRECISION (TREE_TYPE (tem))
13236 < TYPE_PRECISION (type))
13237 {
13238 unsigned HOST_WIDE_INT mask_lo;
13239 HOST_WIDE_INT mask_hi;
13240 int inner_width, outer_width;
13241 tree tem_type;
13242
13243 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13244 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13245 if (outer_width > TYPE_PRECISION (type))
13246 outer_width = TYPE_PRECISION (type);
13247
13248 if (outer_width > HOST_BITS_PER_WIDE_INT)
13249 {
13250 mask_hi = ((unsigned HOST_WIDE_INT) -1
13251 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13252 mask_lo = -1;
13253 }
13254 else
13255 {
13256 mask_hi = 0;
13257 mask_lo = ((unsigned HOST_WIDE_INT) -1
13258 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13259 }
13260 if (inner_width > HOST_BITS_PER_WIDE_INT)
13261 {
13262 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13263 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13264 mask_lo = 0;
13265 }
13266 else
13267 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13268 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13269
13270 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13271 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13272 {
13273 tem_type = signed_type_for (TREE_TYPE (tem));
13274 tem = fold_convert_loc (loc, tem_type, tem);
13275 }
13276 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13277 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13278 {
13279 tem_type = unsigned_type_for (TREE_TYPE (tem));
13280 tem = fold_convert_loc (loc, tem_type, tem);
13281 }
13282 else
13283 tem = NULL;
13284 }
13285
13286 if (tem)
13287 return
13288 fold_convert_loc (loc, type,
13289 fold_build2_loc (loc, BIT_AND_EXPR,
13290 TREE_TYPE (tem), tem,
13291 fold_convert_loc (loc,
13292 TREE_TYPE (tem),
13293 arg1)));
13294 }
13295
13296 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13297 already handled above. */
13298 if (TREE_CODE (arg0) == BIT_AND_EXPR
13299 && integer_onep (TREE_OPERAND (arg0, 1))
13300 && integer_zerop (op2)
13301 && integer_pow2p (arg1))
13302 {
13303 tree tem = TREE_OPERAND (arg0, 0);
13304 STRIP_NOPS (tem);
13305 if (TREE_CODE (tem) == RSHIFT_EXPR
13306 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13307 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13308 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13309 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13310 TREE_OPERAND (tem, 0), arg1);
13311 }
13312
13313 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13314 is probably obsolete because the first operand should be a
13315 truth value (that's why we have the two cases above), but let's
13316 leave it in until we can confirm this for all front-ends. */
13317 if (integer_zerop (op2)
13318 && TREE_CODE (arg0) == NE_EXPR
13319 && integer_zerop (TREE_OPERAND (arg0, 1))
13320 && integer_pow2p (arg1)
13321 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13322 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13323 arg1, OEP_ONLY_CONST))
13324 return pedantic_non_lvalue_loc (loc,
13325 fold_convert_loc (loc, type,
13326 TREE_OPERAND (arg0, 0)));
13327
13328 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13329 if (integer_zerop (op2)
13330 && truth_value_p (TREE_CODE (arg0))
13331 && truth_value_p (TREE_CODE (arg1)))
13332 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13333 fold_convert_loc (loc, type, arg0),
13334 arg1);
13335
13336 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13337 if (integer_onep (op2)
13338 && truth_value_p (TREE_CODE (arg0))
13339 && truth_value_p (TREE_CODE (arg1)))
13340 {
13341 /* Only perform transformation if ARG0 is easily inverted. */
13342 tem = fold_truth_not_expr (loc, arg0);
13343 if (tem)
13344 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13345 fold_convert_loc (loc, type, tem),
13346 arg1);
13347 }
13348
13349 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13350 if (integer_zerop (arg1)
13351 && truth_value_p (TREE_CODE (arg0))
13352 && truth_value_p (TREE_CODE (op2)))
13353 {
13354 /* Only perform transformation if ARG0 is easily inverted. */
13355 tem = fold_truth_not_expr (loc, arg0);
13356 if (tem)
13357 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13358 fold_convert_loc (loc, type, tem),
13359 op2);
13360 }
13361
13362 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13363 if (integer_onep (arg1)
13364 && truth_value_p (TREE_CODE (arg0))
13365 && truth_value_p (TREE_CODE (op2)))
13366 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13367 fold_convert_loc (loc, type, arg0),
13368 op2);
13369
13370 return NULL_TREE;
13371
13372 case CALL_EXPR:
13373 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13374 of fold_ternary on them. */
13375 gcc_unreachable ();
13376
13377 case BIT_FIELD_REF:
13378 if ((TREE_CODE (arg0) == VECTOR_CST
13379 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13380 && type == TREE_TYPE (TREE_TYPE (arg0)))
13381 {
13382 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13383 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13384
13385 if (width != 0
13386 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13387 && (idx % width) == 0
13388 && (idx = idx / width)
13389 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13390 {
13391 tree elements = NULL_TREE;
13392
13393 if (TREE_CODE (arg0) == VECTOR_CST)
13394 elements = TREE_VECTOR_CST_ELTS (arg0);
13395 else
13396 {
13397 unsigned HOST_WIDE_INT idx;
13398 tree value;
13399
13400 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13401 elements = tree_cons (NULL_TREE, value, elements);
13402 }
13403 while (idx-- > 0 && elements)
13404 elements = TREE_CHAIN (elements);
13405 if (elements)
13406 return TREE_VALUE (elements);
13407 else
13408 return fold_convert_loc (loc, type, integer_zero_node);
13409 }
13410 }
13411
13412 /* A bit-field-ref that references the full argument can be stripped. */
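/* E.g. (illustrative) BIT_FIELD_REF <x, 32, 0> of a 32-bit integral x
   reads all 32 bits starting at bit 0, so it folds to x converted
   to TYPE. */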
13413 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13414 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13415 && integer_zerop (op2))
13416 return fold_convert_loc (loc, type, arg0);
13417
13418 return NULL_TREE;
13419
13420 default:
13421 return NULL_TREE;
13422 } /* switch (code) */
13423 }
13424
13425 /* Perform constant folding and related simplification of EXPR.
13426 The related simplifications include x*1 => x, x*0 => 0, etc.,
13427 and application of the associative law.
13428 NOP_EXPR conversions may be removed freely (as long as we
13429 are careful not to change the type of the overall expression).
13430 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13431 but we can constant-fold them if they have constant operands. */
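
/* A minimal caller sketch (hypothetical, for illustration only; "x"
   stands for some previously built tree):

     tree t = build2 (MULT_EXPR, integer_type_node, x, integer_one_node);
     t = fold (t);

   Here fold returns x itself (x*1 => x); when no simplification
   applies, fold returns its argument unchanged. */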
13432
13433 #ifdef ENABLE_FOLD_CHECKING
13434 # define fold(x) fold_1 (x)
13435 static tree fold_1 (tree);
13436 static
13437 #endif
13438 tree
13439 fold (tree expr)
13440 {
13441 const tree t = expr;
13442 enum tree_code code = TREE_CODE (t);
13443 enum tree_code_class kind = TREE_CODE_CLASS (code);
13444 tree tem;
13445 location_t loc = EXPR_LOCATION (expr);
13446
13447 /* Return right away if a constant. */
13448 if (kind == tcc_constant)
13449 return t;
13450
13451 /* CALL_EXPR-like objects with variable numbers of operands are
13452 treated specially. */
13453 if (kind == tcc_vl_exp)
13454 {
13455 if (code == CALL_EXPR)
13456 {
13457 tem = fold_call_expr (loc, expr, false);
13458 return tem ? tem : expr;
13459 }
13460 return expr;
13461 }
13462
13463 if (IS_EXPR_CODE_CLASS (kind))
13464 {
13465 tree type = TREE_TYPE (t);
13466 tree op0, op1, op2;
13467
13468 switch (TREE_CODE_LENGTH (code))
13469 {
13470 case 1:
13471 op0 = TREE_OPERAND (t, 0);
13472 tem = fold_unary_loc (loc, code, type, op0);
13473 return tem ? tem : expr;
13474 case 2:
13475 op0 = TREE_OPERAND (t, 0);
13476 op1 = TREE_OPERAND (t, 1);
13477 tem = fold_binary_loc (loc, code, type, op0, op1);
13478 return tem ? tem : expr;
13479 case 3:
13480 op0 = TREE_OPERAND (t, 0);
13481 op1 = TREE_OPERAND (t, 1);
13482 op2 = TREE_OPERAND (t, 2);
13483 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13484 return tem ? tem : expr;
13485 default:
13486 break;
13487 }
13488 }
13489
13490 switch (code)
13491 {
13492 case ARRAY_REF:
13493 {
13494 tree op0 = TREE_OPERAND (t, 0);
13495 tree op1 = TREE_OPERAND (t, 1);
13496
13497 if (TREE_CODE (op1) == INTEGER_CST
13498 && TREE_CODE (op0) == CONSTRUCTOR
13499 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13500 {
13501 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13502 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13503 unsigned HOST_WIDE_INT begin = 0;
13504
13505 /* Find a matching index by means of a binary search. */
13506 while (begin != end)
13507 {
13508 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13509 tree index = VEC_index (constructor_elt, elts, middle)->index;
13510
13511 if (TREE_CODE (index) == INTEGER_CST
13512 && tree_int_cst_lt (index, op1))
13513 begin = middle + 1;
13514 else if (TREE_CODE (index) == INTEGER_CST
13515 && tree_int_cst_lt (op1, index))
13516 end = middle;
13517 else if (TREE_CODE (index) == RANGE_EXPR
13518 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13519 begin = middle + 1;
13520 else if (TREE_CODE (index) == RANGE_EXPR
13521 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13522 end = middle;
13523 else
13524 return VEC_index (constructor_elt, elts, middle)->value;
13525 }
13526 }
13527
13528 return t;
13529 }
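
/* The binary search above relies on the CONSTRUCTOR's indexes being
   sorted in ascending order, which is how array constructors are
   built.  E.g. (illustrative) folding "{ 10, 20, 30 }[1]" locates
   index 1 in O(log n) steps and yields 20. */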
13530
13531 case CONST_DECL:
13532 return fold (DECL_INITIAL (t));
13533
13534 default:
13535 return t;
13536 } /* switch (code) */
13537 }
13538
13539 #ifdef ENABLE_FOLD_CHECKING
13540 #undef fold
13541
13542 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13543 static void fold_check_failed (const_tree, const_tree);
13544 void print_fold_checksum (const_tree);
13545
13546 /* When --enable-checking=fold is in effect, compute a digest of EXPR
13547 before and after the actual fold call to verify that fold did not
13548 accidentally change the original EXPR. */
13549
13550 tree
13551 fold (tree expr)
13552 {
13553 tree ret;
13554 struct md5_ctx ctx;
13555 unsigned char checksum_before[16], checksum_after[16];
13556 htab_t ht;
13557
13558 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13559 md5_init_ctx (&ctx);
13560 fold_checksum_tree (expr, &ctx, ht);
13561 md5_finish_ctx (&ctx, checksum_before);
13562 htab_empty (ht);
13563
13564 ret = fold_1 (expr);
13565
13566 md5_init_ctx (&ctx);
13567 fold_checksum_tree (expr, &ctx, ht);
13568 md5_finish_ctx (&ctx, checksum_after);
13569 htab_delete (ht);
13570
13571 if (memcmp (checksum_before, checksum_after, 16))
13572 fold_check_failed (expr, ret);
13573
13574 return ret;
13575 }
13576
13577 void
13578 print_fold_checksum (const_tree expr)
13579 {
13580 struct md5_ctx ctx;
13581 unsigned char checksum[16], cnt;
13582 htab_t ht;
13583
13584 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13585 md5_init_ctx (&ctx);
13586 fold_checksum_tree (expr, &ctx, ht);
13587 md5_finish_ctx (&ctx, checksum);
13588 htab_delete (ht);
13589 for (cnt = 0; cnt < 16; ++cnt)
13590 fprintf (stderr, "%02x", checksum[cnt]);
13591 putc ('\n', stderr);
13592 }
13593
13594 static void
13595 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13596 {
13597 internal_error ("fold check: original tree changed by fold");
13598 }
13599
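/* Checksum the tree rooted at EXPR into CTX.  HT records nodes already
   visited, so shared subtrees are hashed only once and chains cannot
   recurse forever.  Fields that fold may legitimately modify (such as
   DECL_ASSEMBLER_NAME or a type's cached values) are scrubbed from a
   local copy of the node before hashing. */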
13600 static void
13601 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13602 {
13603 const void **slot;
13604 enum tree_code code;
13605 union tree_node buf;
13606 int i, len;
13607
13608 recursive_label:
13609
13610 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13611 <= sizeof (struct tree_function_decl))
13612 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13613 if (expr == NULL)
13614 return;
13615 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13616 if (*slot != NULL)
13617 return;
13618 *slot = expr;
13619 code = TREE_CODE (expr);
13620 if (TREE_CODE_CLASS (code) == tcc_declaration
13621 && DECL_ASSEMBLER_NAME_SET_P (expr))
13622 {
13623 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13624 memcpy ((char *) &buf, expr, tree_size (expr));
13625 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13626 expr = (tree) &buf;
13627 }
13628 else if (TREE_CODE_CLASS (code) == tcc_type
13629 && (TYPE_POINTER_TO (expr)
13630 || TYPE_REFERENCE_TO (expr)
13631 || TYPE_CACHED_VALUES_P (expr)
13632 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13633 || TYPE_NEXT_VARIANT (expr)))
13634 {
13635 /* Allow these fields to be modified. */
13636 tree tmp;
13637 memcpy ((char *) &buf, expr, tree_size (expr));
13638 expr = tmp = (tree) &buf;
13639 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13640 TYPE_POINTER_TO (tmp) = NULL;
13641 TYPE_REFERENCE_TO (tmp) = NULL;
13642 TYPE_NEXT_VARIANT (tmp) = NULL;
13643 if (TYPE_CACHED_VALUES_P (tmp))
13644 {
13645 TYPE_CACHED_VALUES_P (tmp) = 0;
13646 TYPE_CACHED_VALUES (tmp) = NULL;
13647 }
13648 }
13649 md5_process_bytes (expr, tree_size (expr), ctx);
13650 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13651 if (TREE_CODE_CLASS (code) != tcc_type
13652 && TREE_CODE_CLASS (code) != tcc_declaration
13653 && code != TREE_LIST
13654 && code != SSA_NAME)
13655 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13656 switch (TREE_CODE_CLASS (code))
13657 {
13658 case tcc_constant:
13659 switch (code)
13660 {
13661 case STRING_CST:
13662 md5_process_bytes (TREE_STRING_POINTER (expr),
13663 TREE_STRING_LENGTH (expr), ctx);
13664 break;
13665 case COMPLEX_CST:
13666 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13667 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13668 break;
13669 case VECTOR_CST:
13670 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13671 break;
13672 default:
13673 break;
13674 }
13675 break;
13676 case tcc_exceptional:
13677 switch (code)
13678 {
13679 case TREE_LIST:
13680 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13681 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13682 expr = TREE_CHAIN (expr);
13683 goto recursive_label;
13684 break;
13685 case TREE_VEC:
13686 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13687 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13688 break;
13689 default:
13690 break;
13691 }
13692 break;
13693 case tcc_expression:
13694 case tcc_reference:
13695 case tcc_comparison:
13696 case tcc_unary:
13697 case tcc_binary:
13698 case tcc_statement:
13699 case tcc_vl_exp:
13700 len = TREE_OPERAND_LENGTH (expr);
13701 for (i = 0; i < len; ++i)
13702 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13703 break;
13704 case tcc_declaration:
13705 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13706 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13707 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13708 {
13709 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13710 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13711 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13712 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13713 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13714 }
13715 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13716 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13717
13718 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13719 {
13720 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13721 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13722 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13723 }
13724 break;
13725 case tcc_type:
13726 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13727 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13728 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13729 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13730 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13731 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13732 if (INTEGRAL_TYPE_P (expr)
13733 || SCALAR_FLOAT_TYPE_P (expr))
13734 {
13735 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13736 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13737 }
13738 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13739 if (TREE_CODE (expr) == RECORD_TYPE
13740 || TREE_CODE (expr) == UNION_TYPE
13741 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13742 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13743 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13744 break;
13745 default:
13746 break;
13747 }
13748 }
13749
13750 /* Helper function for outputting the checksum of a tree T. When
13751 debugging with gdb, you can "define mynext" to be "next" followed
13752 by "call debug_fold_checksum (op0)", then just trace down till the
13753 outputs differ. */
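/* A sketch of that recipe (assumed gdb session, for illustration):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext

   The first "mynext" after which the printed digits change is the
   statement that modified the tree. */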
13754
13755 DEBUG_FUNCTION void
13756 debug_fold_checksum (const_tree t)
13757 {
13758 int i;
13759 unsigned char checksum[16];
13760 struct md5_ctx ctx;
13761 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13762
13763 md5_init_ctx (&ctx);
13764 fold_checksum_tree (t, &ctx, ht);
13765 md5_finish_ctx (&ctx, checksum);
13766 htab_empty (ht);
13767
13768 for (i = 0; i < 16; i++)
13769 fprintf (stderr, "%d ", checksum[i]);
13770
13771 fprintf (stderr, "\n");
13772 }
13773
13774 #endif
13775
13776 /* Fold a unary tree expression with code CODE of type TYPE with an
13777 operand OP0. LOC is the location of the resulting expression.
13778 Return a folded expression if successful. Otherwise, return a tree
13779 expression with code CODE of type TYPE with an operand OP0. */
13780
13781 tree
13782 fold_build1_stat_loc (location_t loc,
13783 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13784 {
13785 tree tem;
13786 #ifdef ENABLE_FOLD_CHECKING
13787 unsigned char checksum_before[16], checksum_after[16];
13788 struct md5_ctx ctx;
13789 htab_t ht;
13790
13791 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13792 md5_init_ctx (&ctx);
13793 fold_checksum_tree (op0, &ctx, ht);
13794 md5_finish_ctx (&ctx, checksum_before);
13795 htab_empty (ht);
13796 #endif
13797
13798 tem = fold_unary_loc (loc, code, type, op0);
13799 if (!tem)
13800 {
13801 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13802 SET_EXPR_LOCATION (tem, loc);
13803 }
13804
13805 #ifdef ENABLE_FOLD_CHECKING
13806 md5_init_ctx (&ctx);
13807 fold_checksum_tree (op0, &ctx, ht);
13808 md5_finish_ctx (&ctx, checksum_after);
13809 htab_delete (ht);
13810
13811 if (memcmp (checksum_before, checksum_after, 16))
13812 fold_check_failed (op0, tem);
13813 #endif
13814 return tem;
13815 }
13816
13817 /* Fold a binary tree expression with code CODE of type TYPE with
13818 operands OP0 and OP1. LOC is the location of the resulting
13819 expression. Return a folded expression if successful. Otherwise,
13820 return a tree expression with code CODE of type TYPE with operands
13821 OP0 and OP1. */
13822
13823 tree
13824 fold_build2_stat_loc (location_t loc,
13825 enum tree_code code, tree type, tree op0, tree op1
13826 MEM_STAT_DECL)
13827 {
13828 tree tem;
13829 #ifdef ENABLE_FOLD_CHECKING
13830 unsigned char checksum_before_op0[16],
13831 checksum_before_op1[16],
13832 checksum_after_op0[16],
13833 checksum_after_op1[16];
13834 struct md5_ctx ctx;
13835 htab_t ht;
13836
13837 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13838 md5_init_ctx (&ctx);
13839 fold_checksum_tree (op0, &ctx, ht);
13840 md5_finish_ctx (&ctx, checksum_before_op0);
13841 htab_empty (ht);
13842
13843 md5_init_ctx (&ctx);
13844 fold_checksum_tree (op1, &ctx, ht);
13845 md5_finish_ctx (&ctx, checksum_before_op1);
13846 htab_empty (ht);
13847 #endif
13848
13849 tem = fold_binary_loc (loc, code, type, op0, op1);
13850 if (!tem)
13851 {
13852 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13853 SET_EXPR_LOCATION (tem, loc);
13854 }
13855
13856 #ifdef ENABLE_FOLD_CHECKING
13857 md5_init_ctx (&ctx);
13858 fold_checksum_tree (op0, &ctx, ht);
13859 md5_finish_ctx (&ctx, checksum_after_op0);
13860 htab_empty (ht);
13861
13862 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13863 fold_check_failed (op0, tem);
13864
13865 md5_init_ctx (&ctx);
13866 fold_checksum_tree (op1, &ctx, ht);
13867 md5_finish_ctx (&ctx, checksum_after_op1);
13868 htab_delete (ht);
13869
13870 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13871 fold_check_failed (op1, tem);
13872 #endif
13873 return tem;
13874 }
13875
13876 /* Fold a ternary tree expression with code CODE of type TYPE with
13877 operands OP0, OP1, and OP2. Return a folded expression if
13878 successful. Otherwise, return a tree expression with code CODE of
13879 type TYPE with operands OP0, OP1, and OP2. */
13880
13881 tree
13882 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13883 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13884 {
13885 tree tem;
13886 #ifdef ENABLE_FOLD_CHECKING
13887 unsigned char checksum_before_op0[16],
13888 checksum_before_op1[16],
13889 checksum_before_op2[16],
13890 checksum_after_op0[16],
13891 checksum_after_op1[16],
13892 checksum_after_op2[16];
13893 struct md5_ctx ctx;
13894 htab_t ht;
13895
13896 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13897 md5_init_ctx (&ctx);
13898 fold_checksum_tree (op0, &ctx, ht);
13899 md5_finish_ctx (&ctx, checksum_before_op0);
13900 htab_empty (ht);
13901
13902 md5_init_ctx (&ctx);
13903 fold_checksum_tree (op1, &ctx, ht);
13904 md5_finish_ctx (&ctx, checksum_before_op1);
13905 htab_empty (ht);
13906
13907 md5_init_ctx (&ctx);
13908 fold_checksum_tree (op2, &ctx, ht);
13909 md5_finish_ctx (&ctx, checksum_before_op2);
13910 htab_empty (ht);
13911 #endif
13912
13913 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13914 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13915 if (!tem)
13916 {
13917 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13918 SET_EXPR_LOCATION (tem, loc);
13919 }
13920
13921 #ifdef ENABLE_FOLD_CHECKING
13922 md5_init_ctx (&ctx);
13923 fold_checksum_tree (op0, &ctx, ht);
13924 md5_finish_ctx (&ctx, checksum_after_op0);
13925 htab_empty (ht);
13926
13927 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13928 fold_check_failed (op0, tem);
13929
13930 md5_init_ctx (&ctx);
13931 fold_checksum_tree (op1, &ctx, ht);
13932 md5_finish_ctx (&ctx, checksum_after_op1);
13933 htab_empty (ht);
13934
13935 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13936 fold_check_failed (op1, tem);
13937
13938 md5_init_ctx (&ctx);
13939 fold_checksum_tree (op2, &ctx, ht);
13940 md5_finish_ctx (&ctx, checksum_after_op2);
13941 htab_delete (ht);
13942
13943 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13944 fold_check_failed (op2, tem);
13945 #endif
13946 return tem;
13947 }
13948
13949 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13950 arguments in ARGARRAY, and a null static chain.
13951 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13952 of type TYPE from the given operands as constructed by build_call_array. */
13953
13954 tree
13955 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13956 int nargs, tree *argarray)
13957 {
13958 tree tem;
13959 #ifdef ENABLE_FOLD_CHECKING
13960 unsigned char checksum_before_fn[16],
13961 checksum_before_arglist[16],
13962 checksum_after_fn[16],
13963 checksum_after_arglist[16];
13964 struct md5_ctx ctx;
13965 htab_t ht;
13966 int i;
13967
13968 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13969 md5_init_ctx (&ctx);
13970 fold_checksum_tree (fn, &ctx, ht);
13971 md5_finish_ctx (&ctx, checksum_before_fn);
13972 htab_empty (ht);
13973
13974 md5_init_ctx (&ctx);
13975 for (i = 0; i < nargs; i++)
13976 fold_checksum_tree (argarray[i], &ctx, ht);
13977 md5_finish_ctx (&ctx, checksum_before_arglist);
13978 htab_empty (ht);
13979 #endif
13980
13981 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13982
13983 #ifdef ENABLE_FOLD_CHECKING
13984 md5_init_ctx (&ctx);
13985 fold_checksum_tree (fn, &ctx, ht);
13986 md5_finish_ctx (&ctx, checksum_after_fn);
13987 htab_empty (ht);
13988
13989 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13990 fold_check_failed (fn, tem);
13991
13992 md5_init_ctx (&ctx);
13993 for (i = 0; i < nargs; i++)
13994 fold_checksum_tree (argarray[i], &ctx, ht);
13995 md5_finish_ctx (&ctx, checksum_after_arglist);
13996 htab_delete (ht);
13997
13998 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13999 fold_check_failed (NULL_TREE, tem);
14000 #endif
14001 return tem;
14002 }
14003
14004 /* Perform constant folding and related simplification of initializer
14005 expression EXPR. These behave identically to "fold_buildN" but ignore
14006 potential run-time traps and exceptions that fold must preserve. */
14007
14008 #define START_FOLD_INIT \
14009 int saved_signaling_nans = flag_signaling_nans;\
14010 int saved_trapping_math = flag_trapping_math;\
14011 int saved_rounding_math = flag_rounding_math;\
14012 int saved_trapv = flag_trapv;\
14013 int saved_folding_initializer = folding_initializer;\
14014 flag_signaling_nans = 0;\
14015 flag_trapping_math = 0;\
14016 flag_rounding_math = 0;\
14017 flag_trapv = 0;\
14018 folding_initializer = 1;
14019
14020 #define END_FOLD_INIT \
14021 flag_signaling_nans = saved_signaling_nans;\
14022 flag_trapping_math = saved_trapping_math;\
14023 flag_rounding_math = saved_rounding_math;\
14024 flag_trapv = saved_trapv;\
14025 folding_initializer = saved_folding_initializer;
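
/* For instance (illustrative), folding "1.0 / 0.0" to +Inf in a static
   initializer may proceed even under -ftrapping-math: the initializer
   is evaluated once at translation time, so the run-time trap that
   flag_trapping_math normally protects cannot occur.  The macros above
   clear the relevant flags around the fold and then restore them. */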
14026
14027 tree
14028 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14029 tree type, tree op)
14030 {
14031 tree result;
14032 START_FOLD_INIT;
14033
14034 result = fold_build1_loc (loc, code, type, op);
14035
14036 END_FOLD_INIT;
14037 return result;
14038 }
14039
14040 tree
14041 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14042 tree type, tree op0, tree op1)
14043 {
14044 tree result;
14045 START_FOLD_INIT;
14046
14047 result = fold_build2_loc (loc, code, type, op0, op1);
14048
14049 END_FOLD_INIT;
14050 return result;
14051 }
14052
14053 tree
14054 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14055 tree type, tree op0, tree op1, tree op2)
14056 {
14057 tree result;
14058 START_FOLD_INIT;
14059
14060 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14061
14062 END_FOLD_INIT;
14063 return result;
14064 }
14065
14066 tree
14067 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14068 int nargs, tree *argarray)
14069 {
14070 tree result;
14071 START_FOLD_INIT;
14072
14073 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14074
14075 END_FOLD_INIT;
14076 return result;
14077 }
14078
14079 #undef START_FOLD_INIT
14080 #undef END_FOLD_INIT
14081
14082 /* Determine if first argument is a multiple of second argument. Return 0 if
14083 it is not, or we cannot easily determine it to be.
14084
14085 An example of the sort of thing we care about (at this point; this routine
14086 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14087 fold cases do now) is discovering that
14088
14089 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14090
14091 is a multiple of
14092
14093 SAVE_EXPR (J * 8)
14094
14095 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14096
14097 This code also handles discovering that
14098
14099 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14100
14101 is a multiple of 8 so we don't have to worry about dealing with a
14102 possible remainder.
14103
14104 Note that we *look* inside a SAVE_EXPR only to determine how it was
14105 calculated; it is not safe for fold to do much of anything else with the
14106 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14107 at run time. For example, the latter example above *cannot* be implemented
14108 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14109 evaluation time of the original SAVE_EXPR is not necessarily the same at
14110 the time the new expression is evaluated. The only optimization of this
14111 sort that would be valid is changing
14112
14113 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14114
14115 divided by 8 to
14116
14117 SAVE_EXPR (I) * SAVE_EXPR (J)
14118
14119 (where the same SAVE_EXPR (J) is used in the original and the
14120 transformed version). */
14121
14122 int
14123 multiple_of_p (tree type, const_tree top, const_tree bottom)
14124 {
14125 if (operand_equal_p (top, bottom, 0))
14126 return 1;
14127
14128 if (TREE_CODE (type) != INTEGER_TYPE)
14129 return 0;
14130
14131 switch (TREE_CODE (top))
14132 {
14133 case BIT_AND_EXPR:
14134 /* Bitwise and provides a power of two multiple. If the mask is
14135 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14136 if (!integer_pow2p (bottom))
14137 return 0;
14138 /* FALLTHRU */
14139
14140 case MULT_EXPR:
14141 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14142 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14143
14144 case PLUS_EXPR:
14145 case MINUS_EXPR:
14146 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14147 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14148
14149 case LSHIFT_EXPR:
14150 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14151 {
14152 tree op1, t1;
14153
14154 op1 = TREE_OPERAND (top, 1);
14155 /* const_binop may not detect overflow correctly,
14156 so check for it explicitly here. */
14157 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14158 > TREE_INT_CST_LOW (op1)
14159 && TREE_INT_CST_HIGH (op1) == 0
14160 && 0 != (t1 = fold_convert (type,
14161 const_binop (LSHIFT_EXPR,
14162 size_one_node,
14163 op1)))
14164 && !TREE_OVERFLOW (t1))
14165 return multiple_of_p (type, t1, bottom);
14166 }
14167 return 0;
14168
14169 case NOP_EXPR:
14170 /* Can't handle conversions from non-integral or wider integral type. */
14171 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14172 || (TYPE_PRECISION (type)
14173 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14174 return 0;
14175
14176 /* ... fall through ... */
14177
14178 case SAVE_EXPR:
14179 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14180
14181 case COND_EXPR:
14182 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14183 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14184
14185 case INTEGER_CST:
14186 if (TREE_CODE (bottom) != INTEGER_CST
14187 || integer_zerop (bottom)
14188 || (TYPE_UNSIGNED (type)
14189 && (tree_int_cst_sgn (top) < 0
14190 || tree_int_cst_sgn (bottom) < 0)))
14191 return 0;
14192 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14193 top, bottom, 0));
14194
14195 default:
14196 return 0;
14197 }
14198 }
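
/* A minimal usage sketch (hypothetical constants, for illustration):

     tree t24 = build_int_cst (integer_type_node, 24);
     tree t8 = build_int_cst (integer_type_node, 8);
     multiple_of_p (integer_type_node, t24, t8);

   returns 1 through the INTEGER_CST case, since 24 % 8 == 0. */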
14199
14200 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
14201
14202 static bool
14203 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14204 {
14205 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14206 && truth_value_p (code))
14207 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14208 have a signed:1 type (whose values are -1 and 0). */
14209 return true;
14210 return false;
14211 }
14212
14213 /* Return true if (CODE OP0) is known to be non-negative. If the return
14214 value is based on the assumption that signed overflow is undefined,
14215 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14216 *STRICT_OVERFLOW_P. */
14217
14218 bool
14219 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14220 bool *strict_overflow_p)
14221 {
14222 if (TYPE_UNSIGNED (type))
14223 return true;
14224
14225 switch (code)
14226 {
14227 case ABS_EXPR:
14228 /* We can't return 1 if flag_wrapv is set because
14229 ABS_EXPR<INT_MIN> = INT_MIN. */
14230 if (!INTEGRAL_TYPE_P (type))
14231 return true;
14232 if (TYPE_OVERFLOW_UNDEFINED (type))
14233 {
14234 *strict_overflow_p = true;
14235 return true;
14236 }
14237 break;
14238
14239 case NON_LVALUE_EXPR:
14240 case FLOAT_EXPR:
14241 case FIX_TRUNC_EXPR:
14242 return tree_expr_nonnegative_warnv_p (op0,
14243 strict_overflow_p);
14244
14245 case NOP_EXPR:
14246 {
14247 tree inner_type = TREE_TYPE (op0);
14248 tree outer_type = type;
14249
14250 if (TREE_CODE (outer_type) == REAL_TYPE)
14251 {
14252 if (TREE_CODE (inner_type) == REAL_TYPE)
14253 return tree_expr_nonnegative_warnv_p (op0,
14254 strict_overflow_p);
14255 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14256 {
14257 if (TYPE_UNSIGNED (inner_type))
14258 return true;
14259 return tree_expr_nonnegative_warnv_p (op0,
14260 strict_overflow_p);
14261 }
14262 }
14263 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14264 {
14265 if (TREE_CODE (inner_type) == REAL_TYPE)
14266 return tree_expr_nonnegative_warnv_p (op0,
14267 strict_overflow_p);
14268 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14269 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14270 && TYPE_UNSIGNED (inner_type);
14271 }
14272 }
14273 break;
14274
14275 default:
14276 return tree_simple_nonnegative_warnv_p (code, type);
14277 }
14278
14279 /* We don't know sign of `t', so be conservative and return false. */
14280 return false;
14281 }
14282
14283 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14284 value is based on the assumption that signed overflow is undefined,
14285 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14286 *STRICT_OVERFLOW_P. */
14287
14288 bool
14289 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14290 tree op1, bool *strict_overflow_p)
14291 {
14292 if (TYPE_UNSIGNED (type))
14293 return true;
14294
14295 switch (code)
14296 {
14297 case POINTER_PLUS_EXPR:
14298 case PLUS_EXPR:
14299 if (FLOAT_TYPE_P (type))
14300 return (tree_expr_nonnegative_warnv_p (op0,
14301 strict_overflow_p)
14302 && tree_expr_nonnegative_warnv_p (op1,
14303 strict_overflow_p));
14304
14305 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14306 both unsigned and at least 2 bits shorter than the result. */
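      /* (If x < 2**a and y < 2**b, then x + y < 2**(MAX (a, b) + 1);
	 requiring MAX (a, b) + 1 < TYPE_PRECISION (type) below thus
	 guarantees the sign bit of the sum is clear.) */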
14307 if (TREE_CODE (type) == INTEGER_TYPE
14308 && TREE_CODE (op0) == NOP_EXPR
14309 && TREE_CODE (op1) == NOP_EXPR)
14310 {
14311 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14312 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14313 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14314 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14315 {
14316 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14317 TYPE_PRECISION (inner2)) + 1;
14318 return prec < TYPE_PRECISION (type);
14319 }
14320 }
14321 break;
14322
14323 case MULT_EXPR:
14324 if (FLOAT_TYPE_P (type))
14325 {
14326 /* x * x for floating point x is always non-negative. */
14327 if (operand_equal_p (op0, op1, 0))
14328 return true;
14329 return (tree_expr_nonnegative_warnv_p (op0,
14330 strict_overflow_p)
14331 && tree_expr_nonnegative_warnv_p (op1,
14332 strict_overflow_p));
14333 }
14334
14335 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14336 both unsigned and the sum of their precisions is less than that of the result. */
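      /* (If x < 2**p0 and y < 2**p1, then x * y < 2**(p0 + p1), so
	 p0 + p1 < TYPE_PRECISION (type) keeps the sign bit clear.) */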
14337 if (TREE_CODE (type) == INTEGER_TYPE
14338 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14339 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14340 {
14341 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14342 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14343 : TREE_TYPE (op0);
14344 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14345 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14346 : TREE_TYPE (op1);
14347
14348 bool unsigned0 = TYPE_UNSIGNED (inner0);
14349 bool unsigned1 = TYPE_UNSIGNED (inner1);
14350
14351 if (TREE_CODE (op0) == INTEGER_CST)
14352 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14353
14354 if (TREE_CODE (op1) == INTEGER_CST)
14355 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14356
14357 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14358 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14359 {
14360 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14361 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14362 : TYPE_PRECISION (inner0);
14363
14364 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14365 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14366 : TYPE_PRECISION (inner1);
14367
14368 return precision0 + precision1 < TYPE_PRECISION (type);
14369 }
14370 }
14371 return false;
14372
14373 case BIT_AND_EXPR:
14374 case MAX_EXPR:
14375 return (tree_expr_nonnegative_warnv_p (op0,
14376 strict_overflow_p)
14377 || tree_expr_nonnegative_warnv_p (op1,
14378 strict_overflow_p));
14379
14380 case BIT_IOR_EXPR:
14381 case BIT_XOR_EXPR:
14382 case MIN_EXPR:
14383 case RDIV_EXPR:
14384 case TRUNC_DIV_EXPR:
14385 case CEIL_DIV_EXPR:
14386 case FLOOR_DIV_EXPR:
14387 case ROUND_DIV_EXPR:
14388 return (tree_expr_nonnegative_warnv_p (op0,
14389 strict_overflow_p)
14390 && tree_expr_nonnegative_warnv_p (op1,
14391 strict_overflow_p));
14392
14393 case TRUNC_MOD_EXPR:
14394 case CEIL_MOD_EXPR:
14395 case FLOOR_MOD_EXPR:
14396 case ROUND_MOD_EXPR:
14397 return tree_expr_nonnegative_warnv_p (op0,
14398 strict_overflow_p);
14399 default:
14400 return tree_simple_nonnegative_warnv_p (code, type);
14401 }
14402
14403 /* We don't know sign of `t', so be conservative and return false. */
14404 return false;
14405 }
14406
14407 /* Return true if T is known to be non-negative. If the return
14408 value is based on the assumption that signed overflow is undefined,
14409 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14410 *STRICT_OVERFLOW_P. */
14411
14412 bool
14413 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14414 {
14415 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14416 return true;
14417
14418 switch (TREE_CODE (t))
14419 {
14420 case INTEGER_CST:
14421 return tree_int_cst_sgn (t) >= 0;
14422
14423 case REAL_CST:
14424 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14425
14426 case FIXED_CST:
14427 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14428
14429 case COND_EXPR:
14430 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14431 strict_overflow_p)
14432 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14433 strict_overflow_p));
14434 default:
14435 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14436 TREE_TYPE (t));
14437 }
14438 /* We don't know sign of `t', so be conservative and return false. */
14439 return false;
14440 }
14441
14442 /* Return true if T is known to be non-negative. If the return
14443 value is based on the assumption that signed overflow is undefined,
14444 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14445 *STRICT_OVERFLOW_P. */
14446
14447 bool
14448 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14449 tree arg0, tree arg1, bool *strict_overflow_p)
14450 {
14451 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14452 switch (DECL_FUNCTION_CODE (fndecl))
14453 {
14454 CASE_FLT_FN (BUILT_IN_ACOS):
14455 CASE_FLT_FN (BUILT_IN_ACOSH):
14456 CASE_FLT_FN (BUILT_IN_CABS):
14457 CASE_FLT_FN (BUILT_IN_COSH):
14458 CASE_FLT_FN (BUILT_IN_ERFC):
14459 CASE_FLT_FN (BUILT_IN_EXP):
14460 CASE_FLT_FN (BUILT_IN_EXP10):
14461 CASE_FLT_FN (BUILT_IN_EXP2):
14462 CASE_FLT_FN (BUILT_IN_FABS):
14463 CASE_FLT_FN (BUILT_IN_FDIM):
14464 CASE_FLT_FN (BUILT_IN_HYPOT):
14465 CASE_FLT_FN (BUILT_IN_POW10):
14466 CASE_INT_FN (BUILT_IN_FFS):
14467 CASE_INT_FN (BUILT_IN_PARITY):
14468 CASE_INT_FN (BUILT_IN_POPCOUNT):
14469 case BUILT_IN_BSWAP32:
14470 case BUILT_IN_BSWAP64:
14471 /* Always true. */
14472 return true;
14473
14474 CASE_FLT_FN (BUILT_IN_SQRT):
14475 /* sqrt(-0.0) is -0.0. */
14476 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14477 return true;
14478 return tree_expr_nonnegative_warnv_p (arg0,
14479 strict_overflow_p);
14480
14481 CASE_FLT_FN (BUILT_IN_ASINH):
14482 CASE_FLT_FN (BUILT_IN_ATAN):
14483 CASE_FLT_FN (BUILT_IN_ATANH):
14484 CASE_FLT_FN (BUILT_IN_CBRT):
14485 CASE_FLT_FN (BUILT_IN_CEIL):
14486 CASE_FLT_FN (BUILT_IN_ERF):
14487 CASE_FLT_FN (BUILT_IN_EXPM1):
14488 CASE_FLT_FN (BUILT_IN_FLOOR):
14489 CASE_FLT_FN (BUILT_IN_FMOD):
14490 CASE_FLT_FN (BUILT_IN_FREXP):
14491 CASE_FLT_FN (BUILT_IN_LCEIL):
14492 CASE_FLT_FN (BUILT_IN_LDEXP):
14493 CASE_FLT_FN (BUILT_IN_LFLOOR):
14494 CASE_FLT_FN (BUILT_IN_LLCEIL):
14495 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14496 CASE_FLT_FN (BUILT_IN_LLRINT):
14497 CASE_FLT_FN (BUILT_IN_LLROUND):
14498 CASE_FLT_FN (BUILT_IN_LRINT):
14499 CASE_FLT_FN (BUILT_IN_LROUND):
14500 CASE_FLT_FN (BUILT_IN_MODF):
14501 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14502 CASE_FLT_FN (BUILT_IN_RINT):
14503 CASE_FLT_FN (BUILT_IN_ROUND):
14504 CASE_FLT_FN (BUILT_IN_SCALB):
14505 CASE_FLT_FN (BUILT_IN_SCALBLN):
14506 CASE_FLT_FN (BUILT_IN_SCALBN):
14507 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14508 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14509 CASE_FLT_FN (BUILT_IN_SINH):
14510 CASE_FLT_FN (BUILT_IN_TANH):
14511 CASE_FLT_FN (BUILT_IN_TRUNC):
14512 /* True if the 1st argument is nonnegative. */
14513 return tree_expr_nonnegative_warnv_p (arg0,
14514 strict_overflow_p);
14515
14516 CASE_FLT_FN (BUILT_IN_FMAX):
14517 /* True if the 1st OR 2nd arguments are nonnegative. */
14518 return (tree_expr_nonnegative_warnv_p (arg0,
14519 strict_overflow_p)
14520 || (tree_expr_nonnegative_warnv_p (arg1,
14521 strict_overflow_p)));
14522
14523 CASE_FLT_FN (BUILT_IN_FMIN):
14524 /* True if the 1st AND 2nd arguments are nonnegative. */
14525 return (tree_expr_nonnegative_warnv_p (arg0,
14526 strict_overflow_p)
14527 && (tree_expr_nonnegative_warnv_p (arg1,
14528 strict_overflow_p)));
14529
14530 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14531 /* True if the 2nd argument is nonnegative. */
14532 return tree_expr_nonnegative_warnv_p (arg1,
14533 strict_overflow_p);
14534
14535 CASE_FLT_FN (BUILT_IN_POWI):
14536 /* True if the 1st argument is nonnegative or the second
14537 argument is an even integer. */
14538 if (TREE_CODE (arg1) == INTEGER_CST
14539 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14540 return true;
14541 return tree_expr_nonnegative_warnv_p (arg0,
14542 strict_overflow_p);
14543
14544 CASE_FLT_FN (BUILT_IN_POW):
14545 /* True if the 1st argument is nonnegative or the second
14546 argument is an even integer valued real. */
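      /* For example, pow (x, 2.0) is treated as non-negative regardless
	 of x, since 2.0 is an even integer-valued real.  (Illustrative.) */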
14547 if (TREE_CODE (arg1) == REAL_CST)
14548 {
14549 REAL_VALUE_TYPE c;
14550 HOST_WIDE_INT n;
14551
14552 c = TREE_REAL_CST (arg1);
14553 n = real_to_integer (&c);
14554 if ((n & 1) == 0)
14555 {
14556 REAL_VALUE_TYPE cint;
14557 real_from_integer (&cint, VOIDmode, n,
14558 n < 0 ? -1 : 0, 0);
14559 if (real_identical (&c, &cint))
14560 return true;
14561 }
14562 }
14563 return tree_expr_nonnegative_warnv_p (arg0,
14564 strict_overflow_p);
14565
14566 default:
14567 break;
14568 }
14569 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14570 type);
14571 }
14572
14573 /* Return true if T is known to be non-negative. If the return
14574 value is based on the assumption that signed overflow is undefined,
14575 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14576 *STRICT_OVERFLOW_P. */
14577
14578 bool
14579 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14580 {
14581 enum tree_code code = TREE_CODE (t);
14582 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14583 return true;
14584
14585 switch (code)
14586 {
14587 case TARGET_EXPR:
14588 {
14589 tree temp = TARGET_EXPR_SLOT (t);
14590 t = TARGET_EXPR_INITIAL (t);
14591
14592 /* If the initializer is non-void, then it's a normal expression
14593 that will be assigned to the slot. */
14594 if (!VOID_TYPE_P (t))
14595 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14596
14597 /* Otherwise, the initializer sets the slot in some way. One common
14598 way is an assignment statement at the end of the initializer. */
14599 while (1)
14600 {
14601 if (TREE_CODE (t) == BIND_EXPR)
14602 t = expr_last (BIND_EXPR_BODY (t));
14603 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14604 || TREE_CODE (t) == TRY_CATCH_EXPR)
14605 t = expr_last (TREE_OPERAND (t, 0));
14606 else if (TREE_CODE (t) == STATEMENT_LIST)
14607 t = expr_last (t);
14608 else
14609 break;
14610 }
14611 if (TREE_CODE (t) == MODIFY_EXPR
14612 && TREE_OPERAND (t, 0) == temp)
14613 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14614 strict_overflow_p);
14615
14616 return false;
14617 }
14618
14619 case CALL_EXPR:
14620 {
14621 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14622 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14623
14624 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14625 get_callee_fndecl (t),
14626 arg0,
14627 arg1,
14628 strict_overflow_p);
14629 }
14630 case COMPOUND_EXPR:
14631 case MODIFY_EXPR:
14632 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14633 strict_overflow_p);
14634 case BIND_EXPR:
14635 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14636 strict_overflow_p);
14637 case SAVE_EXPR:
14638 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14639 strict_overflow_p);
14640
14641 default:
14642 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14643 TREE_TYPE (t));
14644 }
14645
14646 /* We don't know sign of `t', so be conservative and return false. */
14647 return false;
14648 }
14649
14650 /* Return true if T is known to be non-negative. If the return
14651 value is based on the assumption that signed overflow is undefined,
14652 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14653 *STRICT_OVERFLOW_P. */
14654
14655 bool
14656 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14657 {
14658 enum tree_code code;
14659 if (t == error_mark_node)
14660 return false;
14661
14662 code = TREE_CODE (t);
14663 switch (TREE_CODE_CLASS (code))
14664 {
14665 case tcc_binary:
14666 case tcc_comparison:
14667 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14668 TREE_TYPE (t),
14669 TREE_OPERAND (t, 0),
14670 TREE_OPERAND (t, 1),
14671 strict_overflow_p);
14672
14673 case tcc_unary:
14674 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14675 TREE_TYPE (t),
14676 TREE_OPERAND (t, 0),
14677 strict_overflow_p);
14678
14679 case tcc_constant:
14680 case tcc_declaration:
14681 case tcc_reference:
14682 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14683
14684 default:
14685 break;
14686 }
14687
14688 switch (code)
14689 {
14690 case TRUTH_AND_EXPR:
14691 case TRUTH_OR_EXPR:
14692 case TRUTH_XOR_EXPR:
14693 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14694 TREE_TYPE (t),
14695 TREE_OPERAND (t, 0),
14696 TREE_OPERAND (t, 1),
14697 strict_overflow_p);
14698 case TRUTH_NOT_EXPR:
14699 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14700 TREE_TYPE (t),
14701 TREE_OPERAND (t, 0),
14702 strict_overflow_p);
14703
14704 case COND_EXPR:
14705 case CONSTRUCTOR:
14706 case OBJ_TYPE_REF:
14707 case ASSERT_EXPR:
14708 case ADDR_EXPR:
14709 case WITH_SIZE_EXPR:
14710 case SSA_NAME:
14711 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14712
14713 default:
14714 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14715 }
14716 }
14717
14718 /* Return true if `t' is known to be non-negative. Handle warnings
14719 about undefined signed overflow. */
14720
14721 bool
14722 tree_expr_nonnegative_p (tree t)
14723 {
14724 bool ret, strict_overflow_p;
14725
14726 strict_overflow_p = false;
14727 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14728 if (strict_overflow_p)
14729 fold_overflow_warning (("assuming signed overflow does not occur when "
14730 "determining that expression is always "
14731 "non-negative"),
14732 WARN_STRICT_OVERFLOW_MISC);
14733 return ret;
14734 }
14735
14736
14737 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14738 For floating point we further ensure that T is not denormal.
14739 Similar logic is present in nonzero_address_p in rtlanal.c.
14740
14741 If the return value is based on the assumption that signed overflow
14742 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14743 change *STRICT_OVERFLOW_P. */
14744
14745 bool
14746 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14747 bool *strict_overflow_p)
14748 {
14749 switch (code)
14750 {
14751 case ABS_EXPR:
14752 return tree_expr_nonzero_warnv_p (op0,
14753 strict_overflow_p);
14754
14755 case NOP_EXPR:
14756 {
14757 tree inner_type = TREE_TYPE (op0);
14758 tree outer_type = type;
14759
14760 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14761 && tree_expr_nonzero_warnv_p (op0,
14762 strict_overflow_p));
14763 }
14764 break;
14765
14766 case NON_LVALUE_EXPR:
14767 return tree_expr_nonzero_warnv_p (op0,
14768 strict_overflow_p);
14769
14770 default:
14771 break;
14772 }
14773
14774 return false;
14775 }
14776
14777 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14778 For floating point we further ensure that T is not denormal.
14779 Similar logic is present in nonzero_address_p in rtlanal.c.
14780
14781 If the return value is based on the assumption that signed overflow
14782 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14783 change *STRICT_OVERFLOW_P. */
14784
14785 bool
14786 tree_binary_nonzero_warnv_p (enum tree_code code,
14787 tree type,
14788 tree op0,
14789 tree op1, bool *strict_overflow_p)
14790 {
14791 bool sub_strict_overflow_p;
14792 switch (code)
14793 {
14794 case POINTER_PLUS_EXPR:
14795 case PLUS_EXPR:
14796 if (TYPE_OVERFLOW_UNDEFINED (type))
14797 {
14798 /* In the presence of negative values it is hard
14799 to say anything definite. */
14800 sub_strict_overflow_p = false;
14801 if (!tree_expr_nonnegative_warnv_p (op0,
14802 &sub_strict_overflow_p)
14803 || !tree_expr_nonnegative_warnv_p (op1,
14804 &sub_strict_overflow_p))
14805 return false;
14806 /* One of the operands must be positive and the other non-negative. */
14807 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14808 overflows, on a two's-complement machine the sum of two
14809 nonnegative numbers can never wrap around to zero. */
14810 return (tree_expr_nonzero_warnv_p (op0,
14811 strict_overflow_p)
14812 || tree_expr_nonzero_warnv_p (op1,
14813 strict_overflow_p));
14814 }
14815 break;
14816
14817 case MULT_EXPR:
14818 if (TYPE_OVERFLOW_UNDEFINED (type))
14819 {
14820 if (tree_expr_nonzero_warnv_p (op0,
14821 strict_overflow_p)
14822 && tree_expr_nonzero_warnv_p (op1,
14823 strict_overflow_p))
14824 {
14825 *strict_overflow_p = true;
14826 return true;
14827 }
14828 }
14829 break;
14830
14831 case MIN_EXPR:
14832 sub_strict_overflow_p = false;
14833 if (tree_expr_nonzero_warnv_p (op0,
14834 &sub_strict_overflow_p)
14835 && tree_expr_nonzero_warnv_p (op1,
14836 &sub_strict_overflow_p))
14837 {
14838 if (sub_strict_overflow_p)
14839 *strict_overflow_p = true;
14840 }
14841 break;
14842
14843 case MAX_EXPR:
14844 sub_strict_overflow_p = false;
14845 if (tree_expr_nonzero_warnv_p (op0,
14846 &sub_strict_overflow_p))
14847 {
14848 if (sub_strict_overflow_p)
14849 *strict_overflow_p = true;
14850
14851 /* When both operands are nonzero, then MAX must be too. */
14852 if (tree_expr_nonzero_warnv_p (op1,
14853 strict_overflow_p))
14854 return true;
14855
14856 /* MAX where operand 0 is positive is positive. */
14857 return tree_expr_nonnegative_warnv_p (op0,
14858 strict_overflow_p);
14859 }
14860 /* MAX where operand 1 is positive is positive. */
14861 else if (tree_expr_nonzero_warnv_p (op1,
14862 &sub_strict_overflow_p)
14863 && tree_expr_nonnegative_warnv_p (op1,
14864 &sub_strict_overflow_p))
14865 {
14866 if (sub_strict_overflow_p)
14867 *strict_overflow_p = true;
14868 return true;
14869 }
14870 break;
14871
14872 case BIT_IOR_EXPR:
14873 return (tree_expr_nonzero_warnv_p (op1,
14874 strict_overflow_p)
14875 || tree_expr_nonzero_warnv_p (op0,
14876 strict_overflow_p));
14877
14878 default:
14879 break;
14880 }
14881
14882 return false;
14883 }
14884
14885 /* Return true when T is an address and is known to be nonzero.
14886 For floating point we further ensure that T is not denormal.
14887 Similar logic is present in nonzero_address_p in rtlanal.c.
14888
14889 If the return value is based on the assumption that signed overflow
14890 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14891 change *STRICT_OVERFLOW_P. */
14892
14893 bool
14894 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14895 {
14896 bool sub_strict_overflow_p;
14897 switch (TREE_CODE (t))
14898 {
14899 case INTEGER_CST:
14900 return !integer_zerop (t);
14901
14902 case ADDR_EXPR:
14903 {
14904 tree base = TREE_OPERAND (t, 0);
14905 if (!DECL_P (base))
14906 base = get_base_address (base);
14907
14908 if (!base)
14909 return false;
14910
14911 /* Weak declarations may link to NULL. Other things may also be NULL,
14912 so protect with -fdelete-null-pointer-checks; variables allocated
14913 on the stack, however, can never be NULL. */
14914 if (DECL_P (base)
14915 && (flag_delete_null_pointer_checks
14916 || (DECL_CONTEXT (base)
14917 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14918 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
14919 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
14920
14921 /* Constants are never weak. */
14922 if (CONSTANT_CLASS_P (base))
14923 return true;
14924
14925 return false;
14926 }
14927
14928 case COND_EXPR:
14929 sub_strict_overflow_p = false;
14930 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14931 &sub_strict_overflow_p)
14932 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14933 &sub_strict_overflow_p))
14934 {
14935 if (sub_strict_overflow_p)
14936 *strict_overflow_p = true;
14937 return true;
14938 }
14939 break;
14940
14941 default:
14942 break;
14943 }
14944 return false;
14945 }
14946
14947 /* Return true when T is an address and is known to be nonzero.
14948 For floating point we further ensure that T is not denormal.
14949 Similar logic is present in nonzero_address_p in rtlanal.c.
14950
14951 If the return value is based on the assumption that signed overflow
14952 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14953 change *STRICT_OVERFLOW_P. */
14954
14955 bool
14956 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14957 {
14958 tree type = TREE_TYPE (t);
14959 enum tree_code code;
14960
14961 /* Doing something useful for floating point would need more work. */
14962 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14963 return false;
14964
14965 code = TREE_CODE (t);
14966 switch (TREE_CODE_CLASS (code))
14967 {
14968 case tcc_unary:
14969 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14970 strict_overflow_p);
14971 case tcc_binary:
14972 case tcc_comparison:
14973 return tree_binary_nonzero_warnv_p (code, type,
14974 TREE_OPERAND (t, 0),
14975 TREE_OPERAND (t, 1),
14976 strict_overflow_p);
14977 case tcc_constant:
14978 case tcc_declaration:
14979 case tcc_reference:
14980 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14981
14982 default:
14983 break;
14984 }
14985
14986 switch (code)
14987 {
14988 case TRUTH_NOT_EXPR:
14989 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14990 strict_overflow_p);
14991
14992 case TRUTH_AND_EXPR:
14993 case TRUTH_OR_EXPR:
14994 case TRUTH_XOR_EXPR:
14995 return tree_binary_nonzero_warnv_p (code, type,
14996 TREE_OPERAND (t, 0),
14997 TREE_OPERAND (t, 1),
14998 strict_overflow_p);
14999
15000 case COND_EXPR:
15001 case CONSTRUCTOR:
15002 case OBJ_TYPE_REF:
15003 case ASSERT_EXPR:
15004 case ADDR_EXPR:
15005 case WITH_SIZE_EXPR:
15006 case SSA_NAME:
15007 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15008
15009 case COMPOUND_EXPR:
15010 case MODIFY_EXPR:
15011 case BIND_EXPR:
15012 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15013 strict_overflow_p);
15014
15015 case SAVE_EXPR:
15016 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15017 strict_overflow_p);
15018
15019 case CALL_EXPR:
15020 return alloca_call_p (t);
15021
15022 default:
15023 break;
15024 }
15025 return false;
15026 }
15027
15028 /* Return true when T is an address and is known to be nonzero.
15029 Handle warnings about undefined signed overflow. */
15030
15031 bool
15032 tree_expr_nonzero_p (tree t)
15033 {
15034 bool ret, strict_overflow_p;
15035
15036 strict_overflow_p = false;
15037 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15038 if (strict_overflow_p)
15039 fold_overflow_warning (("assuming signed overflow does not occur when "
15040 "determining that expression is always "
15041 "non-zero"),
15042 WARN_STRICT_OVERFLOW_MISC);
15043 return ret;
15044 }
15045
15046 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15047 attempt to fold the expression to a constant without modifying TYPE,
15048 OP0 or OP1.
15049
15050 If the expression could be simplified to a constant, then return
15051 the constant. If the expression would not be simplified to a
15052 constant, then return NULL_TREE. */
15053
15054 tree
15055 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15056 {
15057 tree tem = fold_binary (code, type, op0, op1);
15058 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15059 }
15060
15061 /* Given the components of a unary expression CODE, TYPE and OP0,
15062 attempt to fold the expression to a constant without modifying
15063 TYPE or OP0.
15064
15065 If the expression could be simplified to a constant, then return
15066 the constant. If the expression would not be simplified to a
15067 constant, then return NULL_TREE. */
15068
15069 tree
15070 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15071 {
15072 tree tem = fold_unary (code, type, op0);
15073 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15074 }
15075
15076 /* If EXP represents referencing an element in a constant string
15077 (either via pointer arithmetic or array indexing), return the
15078 tree representing the value accessed, otherwise return NULL. */
15079
15080 tree
15081 fold_read_from_constant_string (tree exp)
15082 {
15083 if ((TREE_CODE (exp) == INDIRECT_REF
15084 || TREE_CODE (exp) == ARRAY_REF)
15085 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15086 {
15087 tree exp1 = TREE_OPERAND (exp, 0);
15088 tree index;
15089 tree string;
15090 location_t loc = EXPR_LOCATION (exp);
15091
15092 if (TREE_CODE (exp) == INDIRECT_REF)
15093 string = string_constant (exp1, &index);
15094 else
15095 {
15096 tree low_bound = array_ref_low_bound (exp);
15097 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15098
15099 /* Optimize the special case of a zero lower bound.
15100
15101 We convert the low_bound to sizetype to avoid some problems
15102 with constant folding. (E.g. suppose the lower bound is 1,
15103 and its mode is QI. Without the conversion, (ARRAY
15104 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15105 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15106 if (! integer_zerop (low_bound))
15107 index = size_diffop_loc (loc, index,
15108 fold_convert_loc (loc, sizetype, low_bound));
15109
15110 string = exp1;
15111 }
15112
15113 if (string
15114 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15115 && TREE_CODE (string) == STRING_CST
15116 && TREE_CODE (index) == INTEGER_CST
15117 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15118 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15119 == MODE_INT)
15120 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15121 return build_int_cst_type (TREE_TYPE (exp),
15122 (TREE_STRING_POINTER (string)
15123 [TREE_INT_CST_LOW (index)]));
15124 }
15125 return NULL;
15126 }
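
/* For instance, the C expression "abc"[1] reaches this function as an
   ARRAY_REF of a STRING_CST with index 1 and folds to the INTEGER_CST
   98 ('b').  A non-constant index, or one at or beyond
   TREE_STRING_LENGTH, yields NULL.  */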
15127
15128 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15129 an integer, real, or fixed-point constant.
15130
15131 TYPE is the type of the result. */
15132
15133 static tree
15134 fold_negate_const (tree arg0, tree type)
15135 {
15136 tree t = NULL_TREE;
15137
15138 switch (TREE_CODE (arg0))
15139 {
15140 case INTEGER_CST:
15141 {
15142 unsigned HOST_WIDE_INT low;
15143 HOST_WIDE_INT high;
15144 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15145 TREE_INT_CST_HIGH (arg0),
15146 &low, &high);
15147 t = force_fit_type_double (type, low, high, 1,
15148 (overflow | TREE_OVERFLOW (arg0))
15149 && !TYPE_UNSIGNED (type));
15150 break;
15151 }
15152
15153 case REAL_CST:
15154 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15155 break;
15156
15157 case FIXED_CST:
15158 {
15159 FIXED_VALUE_TYPE f;
15160 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15161 &(TREE_FIXED_CST (arg0)), NULL,
15162 TYPE_SATURATING (type));
15163 t = build_fixed (type, f);
15164 /* Propagate overflow flags. */
15165 if (overflow_p | TREE_OVERFLOW (arg0))
15166 TREE_OVERFLOW (t) = 1;
15167 break;
15168 }
15169
15170 default:
15171 gcc_unreachable ();
15172 }
15173
15174 return t;
15175 }
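
/* E.g. negating the INTEGER_CST 5 of type int yields -5, while
   negating INT_MIN wraps around; in the latter case, for a signed
   TYPE, force_fit_type_double marks the result with TREE_OVERFLOW.  */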
15176
15177 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15178 an integer constant or real constant.
15179
15180 TYPE is the type of the result. */
15181
15182 tree
15183 fold_abs_const (tree arg0, tree type)
15184 {
15185 tree t = NULL_TREE;
15186
15187 switch (TREE_CODE (arg0))
15188 {
15189 case INTEGER_CST:
15190 /* If the type is unsigned, then the absolute value is
15191 the same as the ordinary value. */
15192 if (TYPE_UNSIGNED (type))
15193 t = arg0;
15194 /* Similarly, if the value is non-negative. */
15195 else if (INT_CST_LT (integer_minus_one_node, arg0))
15196 t = arg0;
15197 /* If the value is negative, then the absolute value is
15198 its negation. */
15199 else
15200 {
15201 unsigned HOST_WIDE_INT low;
15202 HOST_WIDE_INT high;
15203 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15204 TREE_INT_CST_HIGH (arg0),
15205 &low, &high);
15206 t = force_fit_type_double (type, low, high, -1,
15207 overflow | TREE_OVERFLOW (arg0));
15208 }
15209 break;
15210
15211 case REAL_CST:
15212 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15213 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15214 else
15215 t = arg0;
15216 break;
15217
15218 default:
15219 gcc_unreachable ();
15220 }
15221
15222 return t;
15223 }
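
/* E.g. fold_abs_const of the INTEGER_CST -5 in int yields 5; for
   INT_MIN the negation overflows and the result carries
   TREE_OVERFLOW.  */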
15224
15225 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15226 constant. TYPE is the type of the result. */
15227
15228 static tree
15229 fold_not_const (tree arg0, tree type)
15230 {
15231 tree t = NULL_TREE;
15232
15233 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15234
15235 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15236 ~TREE_INT_CST_HIGH (arg0), 0,
15237 TREE_OVERFLOW (arg0));
15238
15239 return t;
15240 }
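
/* E.g. fold_not_const of the INTEGER_CST 5 in int yields -6: both
   words of the double-word constant are complemented and the result
   is refit to TYPE by force_fit_type_double.  */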
15241
15242 /* Given CODE, a relational operator, the target type TYPE, and two
15243 constant operands OP0 and OP1, return the result of the
15244 relational operation.  If the result is not a compile-time
15245 constant, then return NULL_TREE. */
15246
15247 static tree
15248 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15249 {
15250 int result, invert;
15251
15252 /* From here on, the only cases we handle are when the result is
15253 known to be a constant. */
15254
15255 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15256 {
15257 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15258 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15259
15260 /* Handle the cases where either operand is a NaN. */
15261 if (real_isnan (c0) || real_isnan (c1))
15262 {
15263 switch (code)
15264 {
15265 case EQ_EXPR:
15266 case ORDERED_EXPR:
15267 result = 0;
15268 break;
15269
15270 case NE_EXPR:
15271 case UNORDERED_EXPR:
15272 case UNLT_EXPR:
15273 case UNLE_EXPR:
15274 case UNGT_EXPR:
15275 case UNGE_EXPR:
15276 case UNEQ_EXPR:
15277 result = 1;
15278 break;
15279
15280 case LT_EXPR:
15281 case LE_EXPR:
15282 case GT_EXPR:
15283 case GE_EXPR:
15284 case LTGT_EXPR:
15285 if (flag_trapping_math)
15286 return NULL_TREE;
15287 result = 0;
15288 break;
15289
15290 default:
15291 gcc_unreachable ();
15292 }
15293
15294 return constant_boolean_node (result, type);
15295 }
15296
15297 return constant_boolean_node (real_compare (code, c0, c1), type);
15298 }
15299
15300 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15301 {
15302 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15303 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15304 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15305 }
15306
15307 /* Handle equality/inequality of complex constants. */
15308 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15309 {
15310 tree rcond = fold_relational_const (code, type,
15311 TREE_REALPART (op0),
15312 TREE_REALPART (op1));
15313 tree icond = fold_relational_const (code, type,
15314 TREE_IMAGPART (op0),
15315 TREE_IMAGPART (op1));
15316 if (code == EQ_EXPR)
15317 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15318 else if (code == NE_EXPR)
15319 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15320 else
15321 return NULL_TREE;
15322 }
15323
15324 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15325
15326 To compute GT, swap the arguments and do LT.
15327 To compute GE, do LT and invert the result.
15328 To compute LE, swap the arguments, do LT and invert the result.
15329 To compute NE, do EQ and invert the result.
15330
15331 Therefore, the code below must handle only EQ and LT. */
15332
15333 if (code == LE_EXPR || code == GT_EXPR)
15334 {
15335 tree tem = op0;
15336 op0 = op1;
15337 op1 = tem;
15338 code = swap_tree_comparison (code);
15339 }
15340
15341 /* Note that it is safe to invert for real values here because we
15342 have already handled the one case where it matters. */
15343
15344 invert = 0;
15345 if (code == NE_EXPR || code == GE_EXPR)
15346 {
15347 invert = 1;
15348 code = invert_tree_comparison (code, false);
15349 }
15350
15351 /* Compute a result for LT or EQ if args permit;
15352 otherwise return NULL_TREE. */
15353 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15354 {
15355 if (code == EQ_EXPR)
15356 result = tree_int_cst_equal (op0, op1);
15357 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15358 result = INT_CST_LT_UNSIGNED (op0, op1);
15359 else
15360 result = INT_CST_LT (op0, op1);
15361 }
15362 else
15363 return NULL_TREE;
15364
15365 if (invert)
15366 result ^= 1;
15367 return constant_boolean_node (result, type);
15368 }
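
/* E.g. with REAL_CST operands, NaN < 1.0 folds to false (or to
   NULL_TREE under -ftrapping-math, since LT may trap on unordered
   operands), NaN UNLT 1.0 folds to true, and with INTEGER_CST
   operands 2 < 3 folds to the boolean constant 1.  */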
15369
15370 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15371 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15372 itself. */
15373
15374 tree
15375 fold_build_cleanup_point_expr (tree type, tree expr)
15376 {
15377 /* If the expression does not have side effects then we don't have to wrap
15378 it with a cleanup point expression. */
15379 if (!TREE_SIDE_EFFECTS (expr))
15380 return expr;
15381
15382 /* If the expression is a RETURN_EXPR, check whether the returned
15383 expression, or the right-hand side of the MODIFY_EXPR it contains,
15384 lacks side effects; if either one does, we don't need to wrap the
15385 expression in a cleanup point expression.  Note we don't check the
15386 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
15387 if (TREE_CODE (expr) == RETURN_EXPR)
15388 {
15389 tree op = TREE_OPERAND (expr, 0);
15390 if (!op || !TREE_SIDE_EFFECTS (op))
15391 return expr;
15392 op = TREE_OPERAND (op, 1);
15393 if (!TREE_SIDE_EFFECTS (op))
15394 return expr;
15395 }
15396
15397 return build1 (CLEANUP_POINT_EXPR, type, expr);
15398 }
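
/* E.g. a call statement with side effects gets wrapped,

     fold_build_cleanup_point_expr (void_type_node, call)
       ==> <<cleanup_point call>>

   while a plain variable reference is returned unchanged.  */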
15399
15400 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15401 of an indirection through OP0, or NULL_TREE if no simplification is
15402 possible. */
15403
15404 tree
15405 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15406 {
15407 tree sub = op0;
15408 tree subtype;
15409
15410 STRIP_NOPS (sub);
15411 subtype = TREE_TYPE (sub);
15412 if (!POINTER_TYPE_P (subtype))
15413 return NULL_TREE;
15414
15415 if (TREE_CODE (sub) == ADDR_EXPR)
15416 {
15417 tree op = TREE_OPERAND (sub, 0);
15418 tree optype = TREE_TYPE (op);
15419 /* *&CONST_DECL folds to the value of the const decl. */
15420 if (TREE_CODE (op) == CONST_DECL)
15421 return DECL_INITIAL (op);
15422 /* *&p => p; make sure to handle *&"str"[cst] here. */
15423 if (type == optype)
15424 {
15425 tree fop = fold_read_from_constant_string (op);
15426 if (fop)
15427 return fop;
15428 else
15429 return op;
15430 }
15431 /* *(foo *)&fooarray => fooarray[0] */
15432 else if (TREE_CODE (optype) == ARRAY_TYPE
15433 && type == TREE_TYPE (optype))
15434 {
15435 tree type_domain = TYPE_DOMAIN (optype);
15436 tree min_val = size_zero_node;
15437 if (type_domain && TYPE_MIN_VALUE (type_domain))
15438 min_val = TYPE_MIN_VALUE (type_domain);
15439 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15440 SET_EXPR_LOCATION (op0, loc);
15441 return op0;
15442 }
15443 /* *(foo *)&complexfoo => __real__ complexfoo */
15444 else if (TREE_CODE (optype) == COMPLEX_TYPE
15445 && type == TREE_TYPE (optype))
15446 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15447 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15448 else if (TREE_CODE (optype) == VECTOR_TYPE
15449 && type == TREE_TYPE (optype))
15450 {
15451 tree part_width = TYPE_SIZE (type);
15452 tree index = bitsize_int (0);
15453 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15454 }
15455 }
15456
15457 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15458 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15459 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15460 {
15461 tree op00 = TREE_OPERAND (sub, 0);
15462 tree op01 = TREE_OPERAND (sub, 1);
15463 tree op00type;
15464
15465 STRIP_NOPS (op00);
15466 op00type = TREE_TYPE (op00);
15467 if (TREE_CODE (op00) == ADDR_EXPR
15468 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15469 && type == TREE_TYPE (TREE_TYPE (op00type)))
15470 {
15471 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15472 tree part_width = TYPE_SIZE (type);
15473 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
15474 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15475 tree index = bitsize_int (indexi);
15476
15477 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15478 return fold_build3_loc (loc,
15479 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15480 part_width, index);
15482 }
15483 }
15484
15486 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15487 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15488 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15489 {
15490 tree op00 = TREE_OPERAND (sub, 0);
15491 tree op01 = TREE_OPERAND (sub, 1);
15492 tree op00type;
15493
15494 STRIP_NOPS (op00);
15495 op00type = TREE_TYPE (op00);
15496 if (TREE_CODE (op00) == ADDR_EXPR
15497 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15498 && type == TREE_TYPE (TREE_TYPE (op00type)))
15499 {
15500 tree size = TYPE_SIZE_UNIT (type);
15501 if (tree_int_cst_equal (size, op01))
15502 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15503 TREE_OPERAND (op00, 0));
15504 }
15505 }
15506
15507 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15508 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15509 && type == TREE_TYPE (TREE_TYPE (subtype)))
15510 {
15511 tree type_domain;
15512 tree min_val = size_zero_node;
15513 sub = build_fold_indirect_ref_loc (loc, sub);
15514 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15515 if (type_domain && TYPE_MIN_VALUE (type_domain))
15516 min_val = TYPE_MIN_VALUE (type_domain);
15517 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15518 SET_EXPR_LOCATION (op0, loc);
15519 return op0;
15520 }
15521
15522 return NULL_TREE;
15523 }
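
/* E.g. with V a vector of four floats, *(float *)&V folds to
   BIT_FIELD_REF <V, 32, 0>, ((float *)&V)[1] folds to
   BIT_FIELD_REF <V, 32, 32>, and *(float *)&COMPLEXFOO folds to
   __real__ COMPLEXFOO.  */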
15524
15525 /* Builds an expression for an indirection through T, simplifying some
15526 cases. */
15527
15528 tree
15529 build_fold_indirect_ref_loc (location_t loc, tree t)
15530 {
15531 tree type = TREE_TYPE (TREE_TYPE (t));
15532 tree sub = fold_indirect_ref_1 (loc, type, t);
15533
15534 if (sub)
15535 return sub;
15536
15537 t = build1 (INDIRECT_REF, type, t);
15538 SET_EXPR_LOCATION (t, loc);
15539 return t;
15540 }
15541
15542 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15543
15544 tree
15545 fold_indirect_ref_loc (location_t loc, tree t)
15546 {
15547 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15548
15549 if (sub)
15550 return sub;
15551 else
15552 return t;
15553 }
15554
15555 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15556 whose result is ignored. The type of the returned tree need not be
15557 the same as the original expression. */
15558
15559 tree
15560 fold_ignored_result (tree t)
15561 {
15562 if (!TREE_SIDE_EFFECTS (t))
15563 return integer_zero_node;
15564
15565 for (;;)
15566 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15567 {
15568 case tcc_unary:
15569 t = TREE_OPERAND (t, 0);
15570 break;
15571
15572 case tcc_binary:
15573 case tcc_comparison:
15574 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15575 t = TREE_OPERAND (t, 0);
15576 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15577 t = TREE_OPERAND (t, 1);
15578 else
15579 return t;
15580 break;
15581
15582 case tcc_expression:
15583 switch (TREE_CODE (t))
15584 {
15585 case COMPOUND_EXPR:
15586 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15587 return t;
15588 t = TREE_OPERAND (t, 0);
15589 break;
15590
15591 case COND_EXPR:
15592 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15593 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15594 return t;
15595 t = TREE_OPERAND (t, 0);
15596 break;
15597
15598 default:
15599 return t;
15600 }
15601 break;
15602
15603 default:
15604 return t;
15605 }
15606 }
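
/* E.g. for "x + foo ()" whose value is unused, only the operand with
   side effects matters, so the result is the call to foo; a plain
   "x + y" is discarded entirely in favor of integer_zero_node.  */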
15607
15608 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15609 This can only be applied to objects of a sizetype. */
15610
15611 tree
15612 round_up_loc (location_t loc, tree value, int divisor)
15613 {
15614 tree div = NULL_TREE;
15615
15616 gcc_assert (divisor > 0);
15617 if (divisor == 1)
15618 return value;
15619
15620 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15621 have to do anything.  Only do this check when VALUE is not a
15622 constant, because for a constant the check is more expensive than
15623 just doing the rounding. */
15624 if (TREE_CODE (value) != INTEGER_CST)
15625 {
15626 div = build_int_cst (TREE_TYPE (value), divisor);
15627
15628 if (multiple_of_p (TREE_TYPE (value), value, div))
15629 return value;
15630 }
15631
15632 /* If divisor is a power of two, simplify this to bit manipulation. */
15633 if (divisor == (divisor & -divisor))
15634 {
15635 if (TREE_CODE (value) == INTEGER_CST)
15636 {
15637 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15638 unsigned HOST_WIDE_INT high;
15639 bool overflow_p;
15640
15641 if ((low & (divisor - 1)) == 0)
15642 return value;
15643
15644 overflow_p = TREE_OVERFLOW (value);
15645 high = TREE_INT_CST_HIGH (value);
15646 low &= ~(divisor - 1);
15647 low += divisor;
15648 if (low == 0)
15649 {
15650 high++;
15651 if (high == 0)
15652 overflow_p = true;
15653 }
15654
15655 return force_fit_type_double (TREE_TYPE (value), low, high,
15656 -1, overflow_p);
15657 }
15658 else
15659 {
15660 tree t;
15661
15662 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15663 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15664 t = build_int_cst (TREE_TYPE (value), -divisor);
15665 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15666 }
15667 }
15668 else
15669 {
15670 if (!div)
15671 div = build_int_cst (TREE_TYPE (value), divisor);
15672 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15673 value = size_binop_loc (loc, MULT_EXPR, value, div);
15674 }
15675
15676 return value;
15677 }
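
/* E.g. rounding up to a multiple of 8, a constant 13 folds directly
   to 16, while a non-constant size S becomes (S + 7) & -8.  For a
   non-power-of-two divisor such as 12, the result is built as
   CEIL_DIV_EXPR (S, 12) * 12 instead.  */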
15678
15679 /* Likewise, but round down. */
15680
15681 tree
15682 round_down_loc (location_t loc, tree value, int divisor)
15683 {
15684 tree div = NULL_TREE;
15685
15686 gcc_assert (divisor > 0);
15687 if (divisor == 1)
15688 return value;
15689
15690 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15691 have to do anything.  Only do this check when VALUE is not a
15692 constant, because for a constant the check is more expensive than
15693 just doing the rounding. */
15694 if (TREE_CODE (value) != INTEGER_CST)
15695 {
15696 div = build_int_cst (TREE_TYPE (value), divisor);
15697
15698 if (multiple_of_p (TREE_TYPE (value), value, div))
15699 return value;
15700 }
15701
15702 /* If divisor is a power of two, simplify this to bit manipulation. */
15703 if (divisor == (divisor & -divisor))
15704 {
15705 tree t;
15706
15707 t = build_int_cst (TREE_TYPE (value), -divisor);
15708 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15709 }
15710 else
15711 {
15712 if (!div)
15713 div = build_int_cst (TREE_TYPE (value), divisor);
15714 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15715 value = size_binop_loc (loc, MULT_EXPR, value, div);
15716 }
15717
15718 return value;
15719 }
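
/* E.g. rounding down to a multiple of 8 turns a non-constant size S
   into S & -8 and the constant 13 into 8; a non-power-of-two divisor
   uses FLOOR_DIV_EXPR followed by MULT_EXPR instead.  */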
15720
15721 /* Returns a pointer to the base of the object addressed by EXP and
15722 extracts the constant and variable parts of the access offset,
15723 storing them in *PBITPOS and *POFFSET respectively. */
15724
15725 static tree
15726 split_address_to_core_and_offset (tree exp,
15727 HOST_WIDE_INT *pbitpos, tree *poffset)
15728 {
15729 tree core;
15730 enum machine_mode mode;
15731 int unsignedp, volatilep;
15732 HOST_WIDE_INT bitsize;
15733 location_t loc = EXPR_LOCATION (exp);
15734
15735 if (TREE_CODE (exp) == ADDR_EXPR)
15736 {
15737 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15738 poffset, &mode, &unsignedp, &volatilep,
15739 false);
15740 core = build_fold_addr_expr_loc (loc, core);
15741 }
15742 else
15743 {
15744 core = exp;
15745 *pbitpos = 0;
15746 *poffset = NULL_TREE;
15747 }
15748
15749 return core;
15750 }
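
/* E.g. for EXP == &s.f, where field f sits 4 bytes into s, the return
   value is &s, *PBITPOS is 32 and *POFFSET is NULL_TREE; a variable
   index, as in &a[i], comes back in *POFFSET instead.  */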
15751
15752 /* Returns true if addresses of E1 and E2 differ by a constant, false
15753 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15754
15755 bool
15756 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15757 {
15758 tree core1, core2;
15759 HOST_WIDE_INT bitpos1, bitpos2;
15760 tree toffset1, toffset2, tdiff, type;
15761
15762 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15763 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15764
15765 if (bitpos1 % BITS_PER_UNIT != 0
15766 || bitpos2 % BITS_PER_UNIT != 0
15767 || !operand_equal_p (core1, core2, 0))
15768 return false;
15769
15770 if (toffset1 && toffset2)
15771 {
15772 type = TREE_TYPE (toffset1);
15773 if (type != TREE_TYPE (toffset2))
15774 toffset2 = fold_convert (type, toffset2);
15775
15776 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15777 if (!cst_and_fits_in_hwi (tdiff))
15778 return false;
15779
15780 *diff = int_cst_value (tdiff);
15781 }
15782 else if (toffset1 || toffset2)
15783 {
15784 /* If only one of the offsets is non-constant, the difference cannot
15785 be a constant. */
15786 return false;
15787 }
15788 else
15789 *diff = 0;
15790
15791 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15792 return true;
15793 }
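
/* E.g. given int a[10], the addresses &a[3] and &a[1] share the core
   &a and differ by the constant 8 (with 4-byte int), so *DIFF is set
   to 8 and true is returned; comparing &a[i] with &a[1] fails,
   because only one side has a variable offset.  */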
15794
15795 /* Simplify the floating point expression EXP when the sign of the
15796 result is not significant. Return NULL_TREE if no simplification
15797 is possible. */
15798
15799 tree
15800 fold_strip_sign_ops (tree exp)
15801 {
15802 tree arg0, arg1;
15803 location_t loc = EXPR_LOCATION (exp);
15804
15805 switch (TREE_CODE (exp))
15806 {
15807 case ABS_EXPR:
15808 case NEGATE_EXPR:
15809 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15810 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15811
15812 case MULT_EXPR:
15813 case RDIV_EXPR:
15814 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15815 return NULL_TREE;
15816 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15817 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15818 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15819 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15820 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15821 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15822 break;
15823
15824 case COMPOUND_EXPR:
15825 arg0 = TREE_OPERAND (exp, 0);
15826 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15827 if (arg1)
15828 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15829 break;
15830
15831 case COND_EXPR:
15832 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15833 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15834 if (arg0 || arg1)
15835 return fold_build3_loc (loc,
15836 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15837 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15838 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15839 break;
15840
15841 case CALL_EXPR:
15842 {
15843 const enum built_in_function fcode = builtin_mathfn_code (exp);
15844 switch (fcode)
15845 {
15846 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15847 /* Strip copysign function call, return the 1st argument. */
15848 arg0 = CALL_EXPR_ARG (exp, 0);
15849 arg1 = CALL_EXPR_ARG (exp, 1);
15850 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15851
15852 default:
15853 /* Strip sign ops from the argument of "odd" math functions. */
15854 if (negate_mathfn_p (fcode))
15855 {
15856 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15857 if (arg0)
15858 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15859 }
15860 break;
15861 }
15862 }
15863 break;
15864
15865 default:
15866 break;
15867 }
15868 return NULL_TREE;
15869 }
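
/* E.g. when only the magnitude of the result matters, -x * y strips
   to x * y (provided sign-dependent rounding need not be honored),
   copysign (x, y) strips to x, and sin (-x) strips to sin (x), sin
   being an odd function.  A hypothetical caller folding pow (-x, 2.0)
   to pow (x, 2.0) would do:

     tree stripped = fold_strip_sign_ops (arg0);
     if (stripped)
       arg0 = stripped;
   */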