/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012, 2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
59 #include "hash-table.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
/* Nonzero while fold works on an initializer; lets the folders use the
   more aggressive constant-evaluation rules allowed there.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
   Bit 0 = "less than", bit 1 = "equal", bit 2 = "greater than",
   bit 3 = "unordered"; combinations give the derived predicates.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
91 static bool negate_mathfn_p (enum built_in_function
);
92 static bool negate_expr_p (tree
);
93 static tree
negate_expr (tree
);
94 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
95 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
96 static tree
const_binop (enum tree_code
, tree
, tree
);
97 static enum comparison_code
comparison_to_compcode (enum tree_code
);
98 static enum tree_code
compcode_to_comparison (enum comparison_code
);
99 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
100 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
101 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
102 static tree
pedantic_omit_one_operand_loc (location_t
, tree
, tree
, tree
);
103 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
104 static tree
make_bit_field_ref (location_t
, tree
, tree
,
105 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
106 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
108 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
110 enum machine_mode
*, int *, int *,
112 static int all_ones_mask_p (const_tree
, int);
113 static tree
sign_bit_p (tree
, const_tree
);
114 static int simple_operand_p (const_tree
);
115 static bool simple_operand_p_2 (tree
);
116 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
117 static tree
range_predecessor (tree
);
118 static tree
range_successor (tree
);
119 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
120 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
121 static tree
unextend (tree
, int, int, tree
);
122 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
124 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
125 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
126 static tree
fold_binary_op_with_conditional_arg (location_t
,
127 enum tree_code
, tree
,
130 static tree
fold_mathfn_compare (location_t
,
131 enum built_in_function
, enum tree_code
,
133 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
134 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
135 static bool reorder_operands_p (const_tree
, const_tree
);
136 static tree
fold_negate_const (tree
, tree
);
137 static tree
fold_not_const (const_tree
, tree
);
138 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
139 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
145 expr_location_or (tree t
, location_t loc
)
147 location_t tloc
= EXPR_LOCATION (t
);
148 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
151 /* Similar to protected_set_expr_location, but never modify x in place,
152 if location can and needs to be set, unshare it. */
155 protected_set_expr_location_unshare (tree x
, location_t loc
)
157 if (CAN_HAVE_LOCATION_P (x
)
158 && EXPR_LOCATION (x
) != loc
159 && !(TREE_CODE (x
) == SAVE_EXPR
160 || TREE_CODE (x
) == TARGET_EXPR
161 || TREE_CODE (x
) == BIND_EXPR
))
164 SET_EXPR_LOCATION (x
, loc
);
169 /* If ARG2 divides ARG1 with zero remainder, carries out the division
170 of type CODE and returns the quotient.
171 Otherwise returns NULL_TREE. */
174 div_if_zero_remainder (enum tree_code code
, const_tree arg1
, const_tree arg2
)
179 /* The sign of the division is according to operand two, that
180 does the correct thing for POINTER_PLUS_EXPR where we want
181 a signed division. */
182 uns
= TYPE_UNSIGNED (TREE_TYPE (arg2
));
184 quo
= tree_to_double_int (arg1
).divmod (tree_to_double_int (arg2
),
188 return build_int_cst_wide (TREE_TYPE (arg1
), quo
.low
, quo
.high
);
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used in an expression.  */
/* Depth counter: > 0 while overflow warnings are being deferred.  */
static int fold_deferring_overflow_warnings;
204 /* If a warning about undefined overflow is deferred, this is the
205 warning. Note that this may cause us to turn two warnings into
206 one, but that is fine since it is sufficient to only give one
207 warning per expression. */
/* The pending deferred overflow warning message, or NULL if none.  */
static const char* fold_deferred_overflow_warning;
211 /* If a warning about undefined overflow is deferred, this is the
212 level at which the warning should be emitted. */
214 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
216 /* Start deferring overflow warnings. We could use a stack here to
217 permit nested calls, but at present it is not necessary. */
220 fold_defer_overflow_warnings (void)
222 ++fold_deferring_overflow_warnings
;
225 /* Stop deferring overflow warnings. If there is a pending warning,
226 and ISSUE is true, then issue the warning if appropriate. STMT is
227 the statement with which the warning should be associated (used for
228 location information); STMT may be NULL. CODE is the level of the
229 warning--a warn_strict_overflow_code value. This function will use
230 the smaller of CODE and the deferred code when deciding whether to
231 issue the warning. CODE may be zero to mean to always use the
235 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
240 gcc_assert (fold_deferring_overflow_warnings
> 0);
241 --fold_deferring_overflow_warnings
;
242 if (fold_deferring_overflow_warnings
> 0)
244 if (fold_deferred_overflow_warning
!= NULL
246 && code
< (int) fold_deferred_overflow_code
)
247 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
251 warnmsg
= fold_deferred_overflow_warning
;
252 fold_deferred_overflow_warning
= NULL
;
254 if (!issue
|| warnmsg
== NULL
)
257 if (gimple_no_warning_p (stmt
))
260 /* Use the smallest code level when deciding to issue the
262 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
263 code
= fold_deferred_overflow_code
;
265 if (!issue_strict_overflow_warning (code
))
269 locus
= input_location
;
271 locus
= gimple_location (stmt
);
272 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
275 /* Stop deferring overflow warnings, ignoring any deferred
279 fold_undefer_and_ignore_overflow_warnings (void)
281 fold_undefer_overflow_warnings (false, NULL
, 0);
284 /* Whether we are deferring overflow warnings. */
287 fold_deferring_overflow_warnings_p (void)
289 return fold_deferring_overflow_warnings
> 0;
292 /* This is called when we fold something based on the fact that signed
293 overflow is undefined. */
296 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
298 if (fold_deferring_overflow_warnings
> 0)
300 if (fold_deferred_overflow_warning
== NULL
301 || wc
< fold_deferred_overflow_code
)
303 fold_deferred_overflow_warning
= gmsgid
;
304 fold_deferred_overflow_code
= wc
;
307 else if (issue_strict_overflow_warning (wc
))
308 warning (OPT_Wstrict_overflow
, gmsgid
);
311 /* Return true if the built-in mathematical function specified by CODE
312 is odd, i.e. -f(x) == f(-x). */
315 negate_mathfn_p (enum built_in_function code
)
319 CASE_FLT_FN (BUILT_IN_ASIN
):
320 CASE_FLT_FN (BUILT_IN_ASINH
):
321 CASE_FLT_FN (BUILT_IN_ATAN
):
322 CASE_FLT_FN (BUILT_IN_ATANH
):
323 CASE_FLT_FN (BUILT_IN_CASIN
):
324 CASE_FLT_FN (BUILT_IN_CASINH
):
325 CASE_FLT_FN (BUILT_IN_CATAN
):
326 CASE_FLT_FN (BUILT_IN_CATANH
):
327 CASE_FLT_FN (BUILT_IN_CBRT
):
328 CASE_FLT_FN (BUILT_IN_CPROJ
):
329 CASE_FLT_FN (BUILT_IN_CSIN
):
330 CASE_FLT_FN (BUILT_IN_CSINH
):
331 CASE_FLT_FN (BUILT_IN_CTAN
):
332 CASE_FLT_FN (BUILT_IN_CTANH
):
333 CASE_FLT_FN (BUILT_IN_ERF
):
334 CASE_FLT_FN (BUILT_IN_LLROUND
):
335 CASE_FLT_FN (BUILT_IN_LROUND
):
336 CASE_FLT_FN (BUILT_IN_ROUND
):
337 CASE_FLT_FN (BUILT_IN_SIN
):
338 CASE_FLT_FN (BUILT_IN_SINH
):
339 CASE_FLT_FN (BUILT_IN_TAN
):
340 CASE_FLT_FN (BUILT_IN_TANH
):
341 CASE_FLT_FN (BUILT_IN_TRUNC
):
344 CASE_FLT_FN (BUILT_IN_LLRINT
):
345 CASE_FLT_FN (BUILT_IN_LRINT
):
346 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
347 CASE_FLT_FN (BUILT_IN_RINT
):
348 return !flag_rounding_math
;
356 /* Check whether we may negate an integer constant T without causing
360 may_negate_without_overflow_p (const_tree t
)
362 unsigned HOST_WIDE_INT val
;
366 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
368 type
= TREE_TYPE (t
);
369 if (TYPE_UNSIGNED (type
))
372 prec
= TYPE_PRECISION (type
);
373 if (prec
> HOST_BITS_PER_WIDE_INT
)
375 if (TREE_INT_CST_LOW (t
) != 0)
377 prec
-= HOST_BITS_PER_WIDE_INT
;
378 val
= TREE_INT_CST_HIGH (t
);
381 val
= TREE_INT_CST_LOW (t
);
382 if (prec
< HOST_BITS_PER_WIDE_INT
)
383 val
&= ((unsigned HOST_WIDE_INT
) 1 << prec
) - 1;
384 return val
!= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1));
387 /* Determine whether an expression T can be cheaply negated using
388 the function negate_expr without introducing undefined overflow. */
391 negate_expr_p (tree t
)
398 type
= TREE_TYPE (t
);
401 switch (TREE_CODE (t
))
404 if (TYPE_OVERFLOW_WRAPS (type
))
407 /* Check that -CST will not overflow type. */
408 return may_negate_without_overflow_p (t
);
410 return (INTEGRAL_TYPE_P (type
)
411 && TYPE_OVERFLOW_WRAPS (type
));
418 /* We want to canonicalize to positive real constants. Pretend
419 that only negative ones can be easily negated. */
420 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
423 return negate_expr_p (TREE_REALPART (t
))
424 && negate_expr_p (TREE_IMAGPART (t
));
427 return negate_expr_p (TREE_OPERAND (t
, 0))
428 && negate_expr_p (TREE_OPERAND (t
, 1));
431 return negate_expr_p (TREE_OPERAND (t
, 0));
434 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
435 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t
, 1))
439 && reorder_operands_p (TREE_OPERAND (t
, 0),
440 TREE_OPERAND (t
, 1)))
442 /* -(A + B) -> (-A) - B. */
443 return negate_expr_p (TREE_OPERAND (t
, 0));
446 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
447 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
448 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
449 && reorder_operands_p (TREE_OPERAND (t
, 0),
450 TREE_OPERAND (t
, 1));
453 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
459 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
460 return negate_expr_p (TREE_OPERAND (t
, 1))
461 || negate_expr_p (TREE_OPERAND (t
, 0));
469 /* In general we can't negate A / B, because if A is INT_MIN and
470 B is 1, we may turn this into INT_MIN / -1 which is undefined
471 and actually traps on some architectures. But if overflow is
472 undefined, we can negate, because - (INT_MIN / 1) is an
474 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
475 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
477 return negate_expr_p (TREE_OPERAND (t
, 1))
478 || negate_expr_p (TREE_OPERAND (t
, 0));
481 /* Negate -((double)float) as (double)(-float). */
482 if (TREE_CODE (type
) == REAL_TYPE
)
484 tree tem
= strip_float_extensions (t
);
486 return negate_expr_p (tem
);
491 /* Negate -f(x) as f(-x). */
492 if (negate_mathfn_p (builtin_mathfn_code (t
)))
493 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
497 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
498 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
500 tree op1
= TREE_OPERAND (t
, 1);
501 if (TREE_INT_CST_HIGH (op1
) == 0
502 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
503 == TREE_INT_CST_LOW (op1
))
514 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
515 simplification is possible.
516 If negate_expr_p would return true for T, NULL_TREE will never be
520 fold_negate_expr (location_t loc
, tree t
)
522 tree type
= TREE_TYPE (t
);
525 switch (TREE_CODE (t
))
527 /* Convert - (~A) to A + 1. */
529 if (INTEGRAL_TYPE_P (type
))
530 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
531 build_int_cst (type
, 1));
535 tem
= fold_negate_const (t
, type
);
536 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
537 || !TYPE_OVERFLOW_TRAPS (type
))
542 tem
= fold_negate_const (t
, type
);
543 /* Two's complement FP formats, such as c4x, may overflow. */
544 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
549 tem
= fold_negate_const (t
, type
);
554 tree rpart
= negate_expr (TREE_REALPART (t
));
555 tree ipart
= negate_expr (TREE_IMAGPART (t
));
557 if ((TREE_CODE (rpart
) == REAL_CST
558 && TREE_CODE (ipart
) == REAL_CST
)
559 || (TREE_CODE (rpart
) == INTEGER_CST
560 && TREE_CODE (ipart
) == INTEGER_CST
))
561 return build_complex (type
, rpart
, ipart
);
566 if (negate_expr_p (t
))
567 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
568 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
569 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
573 if (negate_expr_p (t
))
574 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
575 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
579 return TREE_OPERAND (t
, 0);
582 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
583 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
585 /* -(A + B) -> (-B) - A. */
586 if (negate_expr_p (TREE_OPERAND (t
, 1))
587 && reorder_operands_p (TREE_OPERAND (t
, 0),
588 TREE_OPERAND (t
, 1)))
590 tem
= negate_expr (TREE_OPERAND (t
, 1));
591 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
592 tem
, TREE_OPERAND (t
, 0));
595 /* -(A + B) -> (-A) - B. */
596 if (negate_expr_p (TREE_OPERAND (t
, 0)))
598 tem
= negate_expr (TREE_OPERAND (t
, 0));
599 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
600 tem
, TREE_OPERAND (t
, 1));
606 /* - (A - B) -> B - A */
607 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
608 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
609 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
610 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
611 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
615 if (TYPE_UNSIGNED (type
))
621 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
623 tem
= TREE_OPERAND (t
, 1);
624 if (negate_expr_p (tem
))
625 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
626 TREE_OPERAND (t
, 0), negate_expr (tem
));
627 tem
= TREE_OPERAND (t
, 0);
628 if (negate_expr_p (tem
))
629 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
630 negate_expr (tem
), TREE_OPERAND (t
, 1));
639 /* In general we can't negate A / B, because if A is INT_MIN and
640 B is 1, we may turn this into INT_MIN / -1 which is undefined
641 and actually traps on some architectures. But if overflow is
642 undefined, we can negate, because - (INT_MIN / 1) is an
644 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
646 const char * const warnmsg
= G_("assuming signed overflow does not "
647 "occur when negating a division");
648 tem
= TREE_OPERAND (t
, 1);
649 if (negate_expr_p (tem
))
651 if (INTEGRAL_TYPE_P (type
)
652 && (TREE_CODE (tem
) != INTEGER_CST
653 || integer_onep (tem
)))
654 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
655 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
656 TREE_OPERAND (t
, 0), negate_expr (tem
));
658 tem
= TREE_OPERAND (t
, 0);
659 if (negate_expr_p (tem
))
661 if (INTEGRAL_TYPE_P (type
)
662 && (TREE_CODE (tem
) != INTEGER_CST
663 || tree_int_cst_equal (tem
, TYPE_MIN_VALUE (type
))))
664 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
665 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
666 negate_expr (tem
), TREE_OPERAND (t
, 1));
672 /* Convert -((double)float) into (double)(-float). */
673 if (TREE_CODE (type
) == REAL_TYPE
)
675 tem
= strip_float_extensions (t
);
676 if (tem
!= t
&& negate_expr_p (tem
))
677 return fold_convert_loc (loc
, type
, negate_expr (tem
));
682 /* Negate -f(x) as f(-x). */
683 if (negate_mathfn_p (builtin_mathfn_code (t
))
684 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
688 fndecl
= get_callee_fndecl (t
);
689 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
690 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
695 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
696 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
698 tree op1
= TREE_OPERAND (t
, 1);
699 if (TREE_INT_CST_HIGH (op1
) == 0
700 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
701 == TREE_INT_CST_LOW (op1
))
703 tree ntype
= TYPE_UNSIGNED (type
)
704 ? signed_type_for (type
)
705 : unsigned_type_for (type
);
706 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
707 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
708 return fold_convert_loc (loc
, type
, temp
);
720 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
721 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
733 loc
= EXPR_LOCATION (t
);
734 type
= TREE_TYPE (t
);
737 tem
= fold_negate_expr (loc
, t
);
739 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
740 return fold_convert_loc (loc
, type
, tem
);
743 /* Split a tree IN into a constant, literal and variable parts that could be
744 combined with CODE to make IN. "constant" means an expression with
745 TREE_CONSTANT but that isn't an actual constant. CODE must be a
746 commutative arithmetic operation. Store the constant part into *CONP,
747 the literal in *LITP and return the variable part. If a part isn't
748 present, set it to null. If the tree does not decompose in this way,
749 return the entire tree as the variable part and the other parts as null.
751 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
752 case, we negate an operand that was subtracted. Except if it is a
753 literal for which we use *MINUS_LITP instead.
755 If NEGATE_P is true, we are negating all of IN, again except a literal
756 for which we use *MINUS_LITP instead.
758 If IN is itself a literal or constant, return it as appropriate.
760 Note that we do not guarantee that any of the three values will be the
761 same type as IN, but they will have the same signedness and mode. */
764 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
765 tree
*minus_litp
, int negate_p
)
773 /* Strip any conversions that don't change the machine mode or signedness. */
774 STRIP_SIGN_NOPS (in
);
776 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
777 || TREE_CODE (in
) == FIXED_CST
)
779 else if (TREE_CODE (in
) == code
780 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
781 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
782 /* We can associate addition and subtraction together (even
783 though the C standard doesn't say so) for integers because
784 the value is not affected. For reals, the value might be
785 affected, so we can't. */
786 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
787 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
789 tree op0
= TREE_OPERAND (in
, 0);
790 tree op1
= TREE_OPERAND (in
, 1);
791 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
792 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
794 /* First see if either of the operands is a literal, then a constant. */
795 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
796 || TREE_CODE (op0
) == FIXED_CST
)
797 *litp
= op0
, op0
= 0;
798 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
799 || TREE_CODE (op1
) == FIXED_CST
)
800 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
802 if (op0
!= 0 && TREE_CONSTANT (op0
))
803 *conp
= op0
, op0
= 0;
804 else if (op1
!= 0 && TREE_CONSTANT (op1
))
805 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
807 /* If we haven't dealt with either operand, this is not a case we can
808 decompose. Otherwise, VAR is either of the ones remaining, if any. */
809 if (op0
!= 0 && op1
!= 0)
814 var
= op1
, neg_var_p
= neg1_p
;
816 /* Now do any needed negations. */
818 *minus_litp
= *litp
, *litp
= 0;
820 *conp
= negate_expr (*conp
);
822 var
= negate_expr (var
);
824 else if (TREE_CODE (in
) == BIT_NOT_EXPR
825 && code
== PLUS_EXPR
)
827 /* -X - 1 is folded to ~X, undo that here. */
828 *minus_litp
= build_one_cst (TREE_TYPE (in
));
829 var
= negate_expr (TREE_OPERAND (in
, 0));
831 else if (TREE_CONSTANT (in
))
839 *minus_litp
= *litp
, *litp
= 0;
840 else if (*minus_litp
)
841 *litp
= *minus_litp
, *minus_litp
= 0;
842 *conp
= negate_expr (*conp
);
843 var
= negate_expr (var
);
849 /* Re-associate trees split by the above function. T1 and T2 are
850 either expressions to associate or null. Return the new
851 expression, if any. LOC is the location of the new expression. If
852 we build an operation, do it in TYPE and with CODE. */
855 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
862 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
863 try to fold this since we will have infinite recursion. But do
864 deal with any NEGATE_EXPRs. */
865 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
866 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
868 if (code
== PLUS_EXPR
)
870 if (TREE_CODE (t1
) == NEGATE_EXPR
)
871 return build2_loc (loc
, MINUS_EXPR
, type
,
872 fold_convert_loc (loc
, type
, t2
),
873 fold_convert_loc (loc
, type
,
874 TREE_OPERAND (t1
, 0)));
875 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
876 return build2_loc (loc
, MINUS_EXPR
, type
,
877 fold_convert_loc (loc
, type
, t1
),
878 fold_convert_loc (loc
, type
,
879 TREE_OPERAND (t2
, 0)));
880 else if (integer_zerop (t2
))
881 return fold_convert_loc (loc
, type
, t1
);
883 else if (code
== MINUS_EXPR
)
885 if (integer_zerop (t2
))
886 return fold_convert_loc (loc
, type
, t1
);
889 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
890 fold_convert_loc (loc
, type
, t2
));
893 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
894 fold_convert_loc (loc
, type
, t2
));
897 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
898 for use in int_const_binop, size_binop and size_diffop. */
901 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
903 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
905 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
920 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
921 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
922 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
926 /* Combine two integer constants ARG1 and ARG2 under operation CODE
927 to produce a new constant. Return NULL_TREE if we don't know how
928 to evaluate CODE at compile-time. */
931 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree arg2
,
934 double_int op1
, op2
, res
, tmp
;
936 tree type
= TREE_TYPE (arg1
);
937 bool uns
= TYPE_UNSIGNED (type
);
938 bool overflow
= false;
940 op1
= tree_to_double_int (arg1
);
941 op2
= tree_to_double_int (arg2
);
958 res
= op1
.rshift (op2
.to_shwi (), TYPE_PRECISION (type
), !uns
);
962 /* It's unclear from the C standard whether shifts can overflow.
963 The following code ignores overflow; perhaps a C standard
964 interpretation ruling is needed. */
965 res
= op1
.lshift (op2
.to_shwi (), TYPE_PRECISION (type
), !uns
);
969 res
= op1
.rrotate (op2
.to_shwi (), TYPE_PRECISION (type
));
973 res
= op1
.lrotate (op2
.to_shwi (), TYPE_PRECISION (type
));
977 res
= op1
.add_with_sign (op2
, false, &overflow
);
981 res
= op1
.sub_with_overflow (op2
, &overflow
);
985 res
= op1
.mul_with_sign (op2
, false, &overflow
);
988 case MULT_HIGHPART_EXPR
:
989 /* ??? Need quad precision, or an additional shift operand
990 to the multiply primitive, to handle very large highparts. */
991 if (TYPE_PRECISION (type
) > HOST_BITS_PER_WIDE_INT
)
994 res
= tmp
.rshift (TYPE_PRECISION (type
), TYPE_PRECISION (type
), !uns
);
998 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1000 /* This is a shortcut for a common special case. */
1001 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1002 && !TREE_OVERFLOW (arg1
)
1003 && !TREE_OVERFLOW (arg2
)
1004 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1006 if (code
== CEIL_DIV_EXPR
)
1007 op1
.low
+= op2
.low
- 1;
1009 res
.low
= op1
.low
/ op2
.low
, res
.high
= 0;
1013 /* ... fall through ... */
1015 case ROUND_DIV_EXPR
:
1023 if (op1
== op2
&& !op1
.is_zero ())
1025 res
= double_int_one
;
1028 res
= op1
.divmod_with_overflow (op2
, uns
, code
, &tmp
, &overflow
);
1031 case TRUNC_MOD_EXPR
:
1032 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1033 /* This is a shortcut for a common special case. */
1034 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1035 && !TREE_OVERFLOW (arg1
)
1036 && !TREE_OVERFLOW (arg2
)
1037 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1039 if (code
== CEIL_MOD_EXPR
)
1040 op1
.low
+= op2
.low
- 1;
1041 res
.low
= op1
.low
% op2
.low
, res
.high
= 0;
1045 /* ... fall through ... */
1047 case ROUND_MOD_EXPR
:
1050 tmp
= op1
.divmod_with_overflow (op2
, uns
, code
, &res
, &overflow
);
1054 res
= op1
.min (op2
, uns
);
1058 res
= op1
.max (op2
, uns
);
1065 t
= force_fit_type_double (TREE_TYPE (arg1
), res
, overflowable
,
1067 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1073 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1075 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1078 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1079 constant. We assume ARG1 and ARG2 have the same data type, or at least
1080 are the same kind of constant and the same machine mode. Return zero if
1081 combining the constants is not allowed in the current operating mode. */
1084 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1086 /* Sanity check for the recursive cases. */
1093 if (TREE_CODE (arg1
) == INTEGER_CST
)
1094 return int_const_binop (code
, arg1
, arg2
);
1096 if (TREE_CODE (arg1
) == REAL_CST
)
1098 enum machine_mode mode
;
1101 REAL_VALUE_TYPE value
;
1102 REAL_VALUE_TYPE result
;
1106 /* The following codes are handled by real_arithmetic. */
1121 d1
= TREE_REAL_CST (arg1
);
1122 d2
= TREE_REAL_CST (arg2
);
1124 type
= TREE_TYPE (arg1
);
1125 mode
= TYPE_MODE (type
);
1127 /* Don't perform operation if we honor signaling NaNs and
1128 either operand is a NaN. */
1129 if (HONOR_SNANS (mode
)
1130 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1133 /* Don't perform operation if it would raise a division
1134 by zero exception. */
1135 if (code
== RDIV_EXPR
1136 && REAL_VALUES_EQUAL (d2
, dconst0
)
1137 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1140 /* If either operand is a NaN, just return it. Otherwise, set up
1141 for floating-point trap; we return an overflow. */
1142 if (REAL_VALUE_ISNAN (d1
))
1144 else if (REAL_VALUE_ISNAN (d2
))
1147 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1148 real_convert (&result
, mode
, &value
);
1150 /* Don't constant fold this floating point operation if
1151 the result has overflowed and flag_trapping_math. */
1152 if (flag_trapping_math
1153 && MODE_HAS_INFINITIES (mode
)
1154 && REAL_VALUE_ISINF (result
)
1155 && !REAL_VALUE_ISINF (d1
)
1156 && !REAL_VALUE_ISINF (d2
))
1159 /* Don't constant fold this floating point operation if the
1160 result may dependent upon the run-time rounding mode and
1161 flag_rounding_math is set, or if GCC's software emulation
1162 is unable to accurately represent the result. */
1163 if ((flag_rounding_math
1164 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1165 && (inexact
|| !real_identical (&result
, &value
)))
1168 t
= build_real (type
, result
);
1170 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1174 if (TREE_CODE (arg1
) == FIXED_CST
)
1176 FIXED_VALUE_TYPE f1
;
1177 FIXED_VALUE_TYPE f2
;
1178 FIXED_VALUE_TYPE result
;
1183 /* The following codes are handled by fixed_arithmetic. */
1189 case TRUNC_DIV_EXPR
:
1190 f2
= TREE_FIXED_CST (arg2
);
1195 f2
.data
.high
= TREE_INT_CST_HIGH (arg2
);
1196 f2
.data
.low
= TREE_INT_CST_LOW (arg2
);
1204 f1
= TREE_FIXED_CST (arg1
);
1205 type
= TREE_TYPE (arg1
);
1206 sat_p
= TYPE_SATURATING (type
);
1207 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1208 t
= build_fixed (type
, result
);
1209 /* Propagate overflow flags. */
1210 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1211 TREE_OVERFLOW (t
) = 1;
1215 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1217 tree type
= TREE_TYPE (arg1
);
1218 tree r1
= TREE_REALPART (arg1
);
1219 tree i1
= TREE_IMAGPART (arg1
);
1220 tree r2
= TREE_REALPART (arg2
);
1221 tree i2
= TREE_IMAGPART (arg2
);
1228 real
= const_binop (code
, r1
, r2
);
1229 imag
= const_binop (code
, i1
, i2
);
1233 if (COMPLEX_FLOAT_TYPE_P (type
))
1234 return do_mpc_arg2 (arg1
, arg2
, type
,
1235 /* do_nonfinite= */ folding_initializer
,
1238 real
= const_binop (MINUS_EXPR
,
1239 const_binop (MULT_EXPR
, r1
, r2
),
1240 const_binop (MULT_EXPR
, i1
, i2
));
1241 imag
= const_binop (PLUS_EXPR
,
1242 const_binop (MULT_EXPR
, r1
, i2
),
1243 const_binop (MULT_EXPR
, i1
, r2
));
1247 if (COMPLEX_FLOAT_TYPE_P (type
))
1248 return do_mpc_arg2 (arg1
, arg2
, type
,
1249 /* do_nonfinite= */ folding_initializer
,
1252 case TRUNC_DIV_EXPR
:
1254 case FLOOR_DIV_EXPR
:
1255 case ROUND_DIV_EXPR
:
1256 if (flag_complex_method
== 0)
1258 /* Keep this algorithm in sync with
1259 tree-complex.c:expand_complex_div_straight().
1261 Expand complex division to scalars, straightforward algorithm.
1262 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1266 = const_binop (PLUS_EXPR
,
1267 const_binop (MULT_EXPR
, r2
, r2
),
1268 const_binop (MULT_EXPR
, i2
, i2
));
1270 = const_binop (PLUS_EXPR
,
1271 const_binop (MULT_EXPR
, r1
, r2
),
1272 const_binop (MULT_EXPR
, i1
, i2
));
1274 = const_binop (MINUS_EXPR
,
1275 const_binop (MULT_EXPR
, i1
, r2
),
1276 const_binop (MULT_EXPR
, r1
, i2
));
1278 real
= const_binop (code
, t1
, magsquared
);
1279 imag
= const_binop (code
, t2
, magsquared
);
1283 /* Keep this algorithm in sync with
1284 tree-complex.c:expand_complex_div_wide().
1286 Expand complex division to scalars, modified algorithm to minimize
1287 overflow with wide input ranges. */
1288 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1289 fold_abs_const (r2
, TREE_TYPE (type
)),
1290 fold_abs_const (i2
, TREE_TYPE (type
)));
1292 if (integer_nonzerop (compare
))
1294 /* In the TRUE branch, we compute
1296 div = (br * ratio) + bi;
1297 tr = (ar * ratio) + ai;
1298 ti = (ai * ratio) - ar;
1301 tree ratio
= const_binop (code
, r2
, i2
);
1302 tree div
= const_binop (PLUS_EXPR
, i2
,
1303 const_binop (MULT_EXPR
, r2
, ratio
));
1304 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1305 real
= const_binop (PLUS_EXPR
, real
, i1
);
1306 real
= const_binop (code
, real
, div
);
1308 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1309 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1310 imag
= const_binop (code
, imag
, div
);
1314 /* In the FALSE branch, we compute
1316 divisor = (d * ratio) + c;
1317 tr = (b * ratio) + a;
1318 ti = b - (a * ratio);
1321 tree ratio
= const_binop (code
, i2
, r2
);
1322 tree div
= const_binop (PLUS_EXPR
, r2
,
1323 const_binop (MULT_EXPR
, i2
, ratio
));
1325 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1326 real
= const_binop (PLUS_EXPR
, real
, r1
);
1327 real
= const_binop (code
, real
, div
);
1329 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1330 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1331 imag
= const_binop (code
, imag
, div
);
1341 return build_complex (type
, real
, imag
);
1344 if (TREE_CODE (arg1
) == VECTOR_CST
1345 && TREE_CODE (arg2
) == VECTOR_CST
)
1347 tree type
= TREE_TYPE(arg1
);
1348 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1349 tree
*elts
= XALLOCAVEC (tree
, count
);
1351 for (i
= 0; i
< count
; i
++)
1353 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1354 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1356 elts
[i
] = const_binop (code
, elem1
, elem2
);
1358 /* It is possible that const_binop cannot handle the given
1359 code and return NULL_TREE */
1360 if(elts
[i
] == NULL_TREE
)
1364 return build_vector (type
, elts
);
1369 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1370 indicates which particular sizetype to create. */
1373 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1375 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1378 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1379 is a tree code. The type of the result is taken from the operands.
1380 Both must be equivalent integer types, ala int_binop_types_match_p.
1381 If the operands are constant, so is the result. */
1384 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1386 tree type
= TREE_TYPE (arg0
);
1388 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1389 return error_mark_node
;
1391 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1394 /* Handle the special case of two integer constants faster. */
1395 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1397 /* And some specific cases even faster than that. */
1398 if (code
== PLUS_EXPR
)
1400 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1402 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1405 else if (code
== MINUS_EXPR
)
1407 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1410 else if (code
== MULT_EXPR
)
1412 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1416 /* Handle general case of two integer constants. For sizetype
1417 constant calculations we always want to know about overflow,
1418 even in the unsigned case. */
1419 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1422 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1425 /* Given two values, either both of sizetype or both of bitsizetype,
1426 compute the difference between the two values. Return the value
1427 in signed type corresponding to the type of the operands. */
1430 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1432 tree type
= TREE_TYPE (arg0
);
1435 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1438 /* If the type is already signed, just do the simple thing. */
1439 if (!TYPE_UNSIGNED (type
))
1440 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1442 if (type
== sizetype
)
1444 else if (type
== bitsizetype
)
1445 ctype
= sbitsizetype
;
1447 ctype
= signed_type_for (type
);
1449 /* If either operand is not a constant, do the conversions to the signed
1450 type and subtract. The hardware will do the right thing with any
1451 overflow in the subtraction. */
1452 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1453 return size_binop_loc (loc
, MINUS_EXPR
,
1454 fold_convert_loc (loc
, ctype
, arg0
),
1455 fold_convert_loc (loc
, ctype
, arg1
));
1457 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1458 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1459 overflow) and negate (which can't either). Special-case a result
1460 of zero while we're here. */
1461 if (tree_int_cst_equal (arg0
, arg1
))
1462 return build_int_cst (ctype
, 0);
1463 else if (tree_int_cst_lt (arg1
, arg0
))
1464 return fold_convert_loc (loc
, ctype
,
1465 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1467 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1468 fold_convert_loc (loc
, ctype
,
1469 size_binop_loc (loc
,
1474 /* A subroutine of fold_convert_const handling conversions of an
1475 INTEGER_CST to another integer type. */
1478 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1482 /* Given an integer constant, make new constant with new type,
1483 appropriately sign-extended or truncated. */
1484 t
= force_fit_type_double (type
, tree_to_double_int (arg1
),
1485 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1486 (TREE_INT_CST_HIGH (arg1
) < 0
1487 && (TYPE_UNSIGNED (type
)
1488 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1489 | TREE_OVERFLOW (arg1
));
1494 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1495 to an integer type. */
1498 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1503 /* The following code implements the floating point to integer
1504 conversion rules required by the Java Language Specification,
1505 that IEEE NaNs are mapped to zero and values that overflow
1506 the target precision saturate, i.e. values greater than
1507 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1508 are mapped to INT_MIN. These semantics are allowed by the
1509 C and C++ standards that simply state that the behavior of
1510 FP-to-integer conversion is unspecified upon overflow. */
1514 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1518 case FIX_TRUNC_EXPR
:
1519 real_trunc (&r
, VOIDmode
, &x
);
1526 /* If R is NaN, return zero and show we have an overflow. */
1527 if (REAL_VALUE_ISNAN (r
))
1530 val
= double_int_zero
;
1533 /* See if R is less than the lower bound or greater than the
1538 tree lt
= TYPE_MIN_VALUE (type
);
1539 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1540 if (REAL_VALUES_LESS (r
, l
))
1543 val
= tree_to_double_int (lt
);
1549 tree ut
= TYPE_MAX_VALUE (type
);
1552 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1553 if (REAL_VALUES_LESS (u
, r
))
1556 val
= tree_to_double_int (ut
);
1562 real_to_integer2 ((HOST_WIDE_INT
*) &val
.low
, &val
.high
, &r
);
1564 t
= force_fit_type_double (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1568 /* A subroutine of fold_convert_const handling conversions of a
1569 FIXED_CST to an integer type. */
1572 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1575 double_int temp
, temp_trunc
;
1578 /* Right shift FIXED_CST to temp by fbit. */
1579 temp
= TREE_FIXED_CST (arg1
).data
;
1580 mode
= TREE_FIXED_CST (arg1
).mode
;
1581 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1583 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1584 HOST_BITS_PER_DOUBLE_INT
,
1585 SIGNED_FIXED_POINT_MODE_P (mode
));
1587 /* Left shift temp to temp_trunc by fbit. */
1588 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1589 HOST_BITS_PER_DOUBLE_INT
,
1590 SIGNED_FIXED_POINT_MODE_P (mode
));
1594 temp
= double_int_zero
;
1595 temp_trunc
= double_int_zero
;
1598 /* If FIXED_CST is negative, we need to round the value toward 0.
1599 By checking if the fractional bits are not zero to add 1 to temp. */
1600 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1601 && temp_trunc
.is_negative ()
1602 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1603 temp
+= double_int_one
;
1605 /* Given a fixed-point constant, make new constant with new type,
1606 appropriately sign-extended or truncated. */
1607 t
= force_fit_type_double (type
, temp
, -1,
1608 (temp
.is_negative ()
1609 && (TYPE_UNSIGNED (type
)
1610 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1611 | TREE_OVERFLOW (arg1
));
1616 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1617 to another floating point type. */
1620 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1622 REAL_VALUE_TYPE value
;
1625 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1626 t
= build_real (type
, value
);
1628 /* If converting an infinity or NAN to a representation that doesn't
1629 have one, set the overflow bit so that we can produce some kind of
1630 error message at the appropriate point if necessary. It's not the
1631 most user-friendly message, but it's better than nothing. */
1632 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1633 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1634 TREE_OVERFLOW (t
) = 1;
1635 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1636 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1637 TREE_OVERFLOW (t
) = 1;
1638 /* Regular overflow, conversion produced an infinity in a mode that
1639 can't represent them. */
1640 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1641 && REAL_VALUE_ISINF (value
)
1642 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1643 TREE_OVERFLOW (t
) = 1;
1645 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1649 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1650 to a floating point type. */
1653 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1655 REAL_VALUE_TYPE value
;
1658 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
1659 t
= build_real (type
, value
);
1661 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1665 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1666 to another fixed-point type. */
1669 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
1671 FIXED_VALUE_TYPE value
;
1675 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
1676 TYPE_SATURATING (type
));
1677 t
= build_fixed (type
, value
);
1679 /* Propagate overflow flags. */
1680 if (overflow_p
| TREE_OVERFLOW (arg1
))
1681 TREE_OVERFLOW (t
) = 1;
1685 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1686 to a fixed-point type. */
1689 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
1691 FIXED_VALUE_TYPE value
;
1695 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
),
1696 TREE_INT_CST (arg1
),
1697 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
1698 TYPE_SATURATING (type
));
1699 t
= build_fixed (type
, value
);
1701 /* Propagate overflow flags. */
1702 if (overflow_p
| TREE_OVERFLOW (arg1
))
1703 TREE_OVERFLOW (t
) = 1;
1707 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1708 to a fixed-point type. */
1711 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
1713 FIXED_VALUE_TYPE value
;
1717 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
1718 &TREE_REAL_CST (arg1
),
1719 TYPE_SATURATING (type
));
1720 t
= build_fixed (type
, value
);
1722 /* Propagate overflow flags. */
1723 if (overflow_p
| TREE_OVERFLOW (arg1
))
1724 TREE_OVERFLOW (t
) = 1;
1728 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1729 type TYPE. If no simplification can be done return NULL_TREE. */
1732 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
1734 if (TREE_TYPE (arg1
) == type
)
1737 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
1738 || TREE_CODE (type
) == OFFSET_TYPE
)
1740 if (TREE_CODE (arg1
) == INTEGER_CST
)
1741 return fold_convert_const_int_from_int (type
, arg1
);
1742 else if (TREE_CODE (arg1
) == REAL_CST
)
1743 return fold_convert_const_int_from_real (code
, type
, arg1
);
1744 else if (TREE_CODE (arg1
) == FIXED_CST
)
1745 return fold_convert_const_int_from_fixed (type
, arg1
);
1747 else if (TREE_CODE (type
) == REAL_TYPE
)
1749 if (TREE_CODE (arg1
) == INTEGER_CST
)
1750 return build_real_from_int_cst (type
, arg1
);
1751 else if (TREE_CODE (arg1
) == REAL_CST
)
1752 return fold_convert_const_real_from_real (type
, arg1
);
1753 else if (TREE_CODE (arg1
) == FIXED_CST
)
1754 return fold_convert_const_real_from_fixed (type
, arg1
);
1756 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
1758 if (TREE_CODE (arg1
) == FIXED_CST
)
1759 return fold_convert_const_fixed_from_fixed (type
, arg1
);
1760 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1761 return fold_convert_const_fixed_from_int (type
, arg1
);
1762 else if (TREE_CODE (arg1
) == REAL_CST
)
1763 return fold_convert_const_fixed_from_real (type
, arg1
);
1768 /* Construct a vector of zero elements of vector type TYPE. */
1771 build_zero_vector (tree type
)
1775 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1776 return build_vector_from_val (type
, t
);
1779 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1782 fold_convertible_p (const_tree type
, const_tree arg
)
1784 tree orig
= TREE_TYPE (arg
);
1789 if (TREE_CODE (arg
) == ERROR_MARK
1790 || TREE_CODE (type
) == ERROR_MARK
1791 || TREE_CODE (orig
) == ERROR_MARK
)
1794 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
1797 switch (TREE_CODE (type
))
1799 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1800 case POINTER_TYPE
: case REFERENCE_TYPE
:
1802 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1803 || TREE_CODE (orig
) == OFFSET_TYPE
)
1805 return (TREE_CODE (orig
) == VECTOR_TYPE
1806 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1809 case FIXED_POINT_TYPE
:
1813 return TREE_CODE (type
) == TREE_CODE (orig
);
1820 /* Convert expression ARG to type TYPE. Used by the middle-end for
1821 simple conversions in preference to calling the front-end's convert. */
1824 fold_convert_loc (location_t loc
, tree type
, tree arg
)
1826 tree orig
= TREE_TYPE (arg
);
1832 if (TREE_CODE (arg
) == ERROR_MARK
1833 || TREE_CODE (type
) == ERROR_MARK
1834 || TREE_CODE (orig
) == ERROR_MARK
)
1835 return error_mark_node
;
1837 switch (TREE_CODE (type
))
1840 case REFERENCE_TYPE
:
1841 /* Handle conversions between pointers to different address spaces. */
1842 if (POINTER_TYPE_P (orig
)
1843 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
1844 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
1845 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
1848 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1850 if (TREE_CODE (arg
) == INTEGER_CST
)
1852 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1853 if (tem
!= NULL_TREE
)
1856 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1857 || TREE_CODE (orig
) == OFFSET_TYPE
)
1858 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1859 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
1860 return fold_convert_loc (loc
, type
,
1861 fold_build1_loc (loc
, REALPART_EXPR
,
1862 TREE_TYPE (orig
), arg
));
1863 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
1864 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1865 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1868 if (TREE_CODE (arg
) == INTEGER_CST
)
1870 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
1871 if (tem
!= NULL_TREE
)
1874 else if (TREE_CODE (arg
) == REAL_CST
)
1876 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1877 if (tem
!= NULL_TREE
)
1880 else if (TREE_CODE (arg
) == FIXED_CST
)
1882 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1883 if (tem
!= NULL_TREE
)
1887 switch (TREE_CODE (orig
))
1890 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1891 case POINTER_TYPE
: case REFERENCE_TYPE
:
1892 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
1895 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1897 case FIXED_POINT_TYPE
:
1898 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1901 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1902 return fold_convert_loc (loc
, type
, tem
);
1908 case FIXED_POINT_TYPE
:
1909 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
1910 || TREE_CODE (arg
) == REAL_CST
)
1912 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1913 if (tem
!= NULL_TREE
)
1914 goto fold_convert_exit
;
1917 switch (TREE_CODE (orig
))
1919 case FIXED_POINT_TYPE
:
1924 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1927 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1928 return fold_convert_loc (loc
, type
, tem
);
1935 switch (TREE_CODE (orig
))
1938 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1939 case POINTER_TYPE
: case REFERENCE_TYPE
:
1941 case FIXED_POINT_TYPE
:
1942 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
1943 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
1944 fold_convert_loc (loc
, TREE_TYPE (type
),
1945 integer_zero_node
));
1950 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
1952 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
1953 TREE_OPERAND (arg
, 0));
1954 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
1955 TREE_OPERAND (arg
, 1));
1956 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
1959 arg
= save_expr (arg
);
1960 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1961 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
1962 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
1963 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
1964 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
1972 if (integer_zerop (arg
))
1973 return build_zero_vector (type
);
1974 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1975 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1976 || TREE_CODE (orig
) == VECTOR_TYPE
);
1977 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
1980 tem
= fold_ignored_result (arg
);
1981 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
1984 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
1985 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1989 protected_set_expr_location_unshare (tem
, loc
);
1993 /* Return false if expr can be assumed not to be an lvalue, true
1997 maybe_lvalue_p (const_tree x
)
1999 /* We only need to wrap lvalue tree codes. */
2000 switch (TREE_CODE (x
))
2013 case ARRAY_RANGE_REF
:
2019 case PREINCREMENT_EXPR
:
2020 case PREDECREMENT_EXPR
:
2022 case TRY_CATCH_EXPR
:
2023 case WITH_CLEANUP_EXPR
:
2032 /* Assume the worst for front-end tree codes. */
2033 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2041 /* Return an expr equal to X but certainly not valid as an lvalue. */
2044 non_lvalue_loc (location_t loc
, tree x
)
2046 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2051 if (! maybe_lvalue_p (x
))
2053 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2056 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2057 Zero means allow extended lvalues. */
2059 int pedantic_lvalues
;
2061 /* When pedantic, return an expr equal to X but certainly not valid as a
2062 pedantic lvalue. Otherwise, return X. */
2065 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2067 if (pedantic_lvalues
)
2068 return non_lvalue_loc (loc
, x
);
2070 return protected_set_expr_location_unshare (x
, loc
);
2073 /* Given a tree comparison code, return the code that is the logical inverse.
2074 It is generally not safe to do this for floating-point comparisons, except
2075 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2076 ERROR_MARK in this case. */
2079 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2081 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2082 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2092 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2094 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2096 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2098 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2112 return UNORDERED_EXPR
;
2113 case UNORDERED_EXPR
:
2114 return ORDERED_EXPR
;
2120 /* Similar, but return the comparison that results if the operands are
2121 swapped. This is safe for floating-point. */
2124 swap_tree_comparison (enum tree_code code
)
2131 case UNORDERED_EXPR
:
2157 /* Convert a comparison tree code from an enum tree_code representation
2158 into a compcode bit-based encoding. This function is the inverse of
2159 compcode_to_comparison. */
2161 static enum comparison_code
2162 comparison_to_compcode (enum tree_code code
)
2179 return COMPCODE_ORD
;
2180 case UNORDERED_EXPR
:
2181 return COMPCODE_UNORD
;
2183 return COMPCODE_UNLT
;
2185 return COMPCODE_UNEQ
;
2187 return COMPCODE_UNLE
;
2189 return COMPCODE_UNGT
;
2191 return COMPCODE_LTGT
;
2193 return COMPCODE_UNGE
;
2199 /* Convert a compcode bit-based encoding of a comparison operator back
2200 to GCC's enum tree_code representation. This function is the
2201 inverse of comparison_to_compcode. */
2203 static enum tree_code
2204 compcode_to_comparison (enum comparison_code code
)
2221 return ORDERED_EXPR
;
2222 case COMPCODE_UNORD
:
2223 return UNORDERED_EXPR
;
2241 /* Return a tree for the comparison which is the combination of
2242 doing the AND or OR (depending on CODE) of the two operations LCODE
2243 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2244 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2245 if this makes the transformation invalid. */
2248 combine_comparisons (location_t loc
,
2249 enum tree_code code
, enum tree_code lcode
,
2250 enum tree_code rcode
, tree truth_type
,
2251 tree ll_arg
, tree lr_arg
)
2253 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2254 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2255 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2260 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2261 compcode
= lcompcode
& rcompcode
;
2264 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2265 compcode
= lcompcode
| rcompcode
;
2274 /* Eliminate unordered comparisons, as well as LTGT and ORD
2275 which are not used unless the mode has NaNs. */
2276 compcode
&= ~COMPCODE_UNORD
;
2277 if (compcode
== COMPCODE_LTGT
)
2278 compcode
= COMPCODE_NE
;
2279 else if (compcode
== COMPCODE_ORD
)
2280 compcode
= COMPCODE_TRUE
;
2282 else if (flag_trapping_math
)
2284 /* Check that the original operation and the optimized ones will trap
2285 under the same condition. */
2286 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2287 && (lcompcode
!= COMPCODE_EQ
)
2288 && (lcompcode
!= COMPCODE_ORD
);
2289 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2290 && (rcompcode
!= COMPCODE_EQ
)
2291 && (rcompcode
!= COMPCODE_ORD
);
2292 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2293 && (compcode
!= COMPCODE_EQ
)
2294 && (compcode
!= COMPCODE_ORD
);
2296 /* In a short-circuited boolean expression the LHS might be
2297 such that the RHS, if evaluated, will never trap. For
2298 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2299 if neither x nor y is NaN. (This is a mixed blessing: for
2300 example, the expression above will never trap, hence
2301 optimizing it to x < y would be invalid). */
2302 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2303 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2306 /* If the comparison was short-circuited, and only the RHS
2307 trapped, we may now generate a spurious trap. */
2309 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2312 /* If we changed the conditions that cause a trap, we lose. */
2313 if ((ltrap
|| rtrap
) != trap
)
2317 if (compcode
== COMPCODE_TRUE
)
2318 return constant_boolean_node (true, truth_type
);
2319 else if (compcode
== COMPCODE_FALSE
)
2320 return constant_boolean_node (false, truth_type
);
2323 enum tree_code tcode
;
2325 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2326 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2330 /* Return nonzero if two operands (typically of the same tree node)
2331 are necessarily equal. If either argument has side-effects this
2332 function returns zero. FLAGS modifies behavior as follows:
2334 If OEP_ONLY_CONST is set, only return nonzero for constants.
2335 This function tests whether the operands are indistinguishable;
2336 it does not test whether they are equal using C's == operation.
2337 The distinction is important for IEEE floating point, because
2338 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2339 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2341 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2342 even though it may hold multiple values during a function.
2343 This is because a GCC tree node guarantees that nothing else is
2344 executed between the evaluation of its "operands" (which may often
2345 be evaluated in arbitrary order). Hence if the operands themselves
2346 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2347 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2348 unset means assuming isochronic (or instantaneous) tree equivalence.
2349 Unless comparing arbitrary expression trees, such as from different
2350 statements, this flag can usually be left unset.
2352 If OEP_PURE_SAME is set, then pure functions with identical arguments
2353 are considered the same. It is used when the caller has other ways
2354 to ensure that global memory is unchanged in between. */
2357 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2359 /* If either is ERROR_MARK, they aren't equal. */
2360 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2361 || TREE_TYPE (arg0
) == error_mark_node
2362 || TREE_TYPE (arg1
) == error_mark_node
)
2365 /* Similar, if either does not have a type (like a released SSA name),
2366 they aren't equal. */
2367 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2370 /* Check equality of integer constants before bailing out due to
2371 precision differences. */
2372 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2373 return tree_int_cst_equal (arg0
, arg1
);
2375 /* If both types don't have the same signedness, then we can't consider
2376 them equal. We must check this before the STRIP_NOPS calls
2377 because they may change the signedness of the arguments. As pointers
2378 strictly don't have a signedness, require either two pointers or
2379 two non-pointers as well. */
2380 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2381 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2384 /* We cannot consider pointers to different address space equal. */
2385 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2386 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2387 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2390 /* If both types don't have the same precision, then it is not safe
2392 if (TYPE_PRECISION (TREE_TYPE (arg0
)) != TYPE_PRECISION (TREE_TYPE (arg1
)))
2398 /* In case both args are comparisons but with different comparison
2399 code, try to swap the comparison operands of one arg to produce
2400 a match and compare that variant. */
2401 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2402 && COMPARISON_CLASS_P (arg0
)
2403 && COMPARISON_CLASS_P (arg1
))
2405 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2407 if (TREE_CODE (arg0
) == swap_code
)
2408 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2409 TREE_OPERAND (arg1
, 1), flags
)
2410 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2411 TREE_OPERAND (arg1
, 0), flags
);
2414 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2415 /* This is needed for conversions and for COMPONENT_REF.
2416 Might as well play it safe and always test this. */
2417 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2418 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2419 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2422 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2423 We don't care about side effects in that case because the SAVE_EXPR
2424 takes care of that for us. In all other cases, two expressions are
2425 equal if they have no side effects. If we have two identical
2426 expressions with side effects that should be treated the same due
2427 to the only side effects being identical SAVE_EXPR's, that will
2428 be detected in the recursive calls below.
2429 If we are taking an invariant address of two identical objects
2430 they are necessarily equal as well. */
2431 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2432 && (TREE_CODE (arg0
) == SAVE_EXPR
2433 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2434 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2437 /* Next handle constant cases, those for which we can return 1 even
2438 if ONLY_CONST is set. */
2439 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2440 switch (TREE_CODE (arg0
))
2443 return tree_int_cst_equal (arg0
, arg1
);
2446 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2447 TREE_FIXED_CST (arg1
));
2450 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2451 TREE_REAL_CST (arg1
)))
2455 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2457 /* If we do not distinguish between signed and unsigned zero,
2458 consider them equal. */
2459 if (real_zerop (arg0
) && real_zerop (arg1
))
2468 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2471 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2473 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2474 VECTOR_CST_ELT (arg1
, i
), flags
))
2481 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2483 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2487 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2488 && ! memcmp (TREE_STRING_POINTER (arg0
),
2489 TREE_STRING_POINTER (arg1
),
2490 TREE_STRING_LENGTH (arg0
)));
2493 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2494 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2495 ? OEP_CONSTANT_ADDRESS_OF
: 0);
2500 if (flags
& OEP_ONLY_CONST
)
2503 /* Define macros to test an operand from arg0 and arg1 for equality and a
2504 variant that allows null and views null as being different from any
2505 non-null value. In the latter case, if either is null, the both
2506 must be; otherwise, do the normal comparison. */
2507 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2508 TREE_OPERAND (arg1, N), flags)
2510 #define OP_SAME_WITH_NULL(N) \
2511 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2512 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2514 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2517 /* Two conversions are equal only if signedness and modes match. */
2518 switch (TREE_CODE (arg0
))
2521 case FIX_TRUNC_EXPR
:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2533 case tcc_comparison
:
2535 if (OP_SAME (0) && OP_SAME (1))
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0
))
2540 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2541 TREE_OPERAND (arg1
, 1), flags
)
2542 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2543 TREE_OPERAND (arg1
, 0), flags
));
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0
)
2549 || TREE_SIDE_EFFECTS (arg1
))
2552 switch (TREE_CODE (arg0
))
2559 case TARGET_MEM_REF
:
2560 /* Require equal extra operands and then fall through to MEM_REF
2561 handling of the two common operands. */
2562 if (!OP_SAME_WITH_NULL (2)
2563 || !OP_SAME_WITH_NULL (3)
2564 || !OP_SAME_WITH_NULL (4))
2568 /* Require equal access sizes, and similar pointer types.
2569 We can have incomplete types for array references of
2570 variable-sized arrays from the Fortran frontent
2572 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2573 || (TYPE_SIZE (TREE_TYPE (arg0
))
2574 && TYPE_SIZE (TREE_TYPE (arg1
))
2575 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2576 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2577 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
2578 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
2579 && OP_SAME (0) && OP_SAME (1));
2582 case ARRAY_RANGE_REF
:
2583 /* Operands 2 and 3 may be null.
2584 Compare the array index by value if it is constant first as we
2585 may have different types but same value here. */
2587 && (tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2588 TREE_OPERAND (arg1
, 1))
2590 && OP_SAME_WITH_NULL (2)
2591 && OP_SAME_WITH_NULL (3));
2594 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2595 may be NULL when we're called to compare MEM_EXPRs. */
2596 return OP_SAME_WITH_NULL (0)
2598 && OP_SAME_WITH_NULL (2);
2601 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2607 case tcc_expression
:
2608 switch (TREE_CODE (arg0
))
2611 case TRUTH_NOT_EXPR
:
2614 case TRUTH_ANDIF_EXPR
:
2615 case TRUTH_ORIF_EXPR
:
2616 return OP_SAME (0) && OP_SAME (1);
2619 case WIDEN_MULT_PLUS_EXPR
:
2620 case WIDEN_MULT_MINUS_EXPR
:
2623 /* The multiplcation operands are commutative. */
2626 case TRUTH_AND_EXPR
:
2628 case TRUTH_XOR_EXPR
:
2629 if (OP_SAME (0) && OP_SAME (1))
2632 /* Otherwise take into account this is a commutative operation. */
2633 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2634 TREE_OPERAND (arg1
, 1), flags
)
2635 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2636 TREE_OPERAND (arg1
, 0), flags
));
2641 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2648 switch (TREE_CODE (arg0
))
2651 /* If the CALL_EXPRs call different functions, then they
2652 clearly can not be equal. */
2653 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
2658 unsigned int cef
= call_expr_flags (arg0
);
2659 if (flags
& OEP_PURE_SAME
)
2660 cef
&= ECF_CONST
| ECF_PURE
;
2667 /* Now see if all the arguments are the same. */
2669 const_call_expr_arg_iterator iter0
, iter1
;
2671 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
2672 a1
= first_const_call_expr_arg (arg1
, &iter1
);
2674 a0
= next_const_call_expr_arg (&iter0
),
2675 a1
= next_const_call_expr_arg (&iter1
))
2676 if (! operand_equal_p (a0
, a1
, flags
))
2679 /* If we get here and both argument lists are exhausted
2680 then the CALL_EXPRs are equal. */
2681 return ! (a0
|| a1
);
2687 case tcc_declaration
:
2688 /* Consider __builtin_sqrt equal to sqrt. */
2689 return (TREE_CODE (arg0
) == FUNCTION_DECL
2690 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2691 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2692 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2699 #undef OP_SAME_WITH_NULL
2702 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2703 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2705 When in doubt, return 0. */
2708 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2710 int unsignedp1
, unsignedpo
;
2711 tree primarg0
, primarg1
, primother
;
2712 unsigned int correct_width
;
2714 if (operand_equal_p (arg0
, arg1
, 0))
2717 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2718 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2721 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2722 and see if the inner values are the same. This removes any
2723 signedness comparison, which doesn't matter here. */
2724 primarg0
= arg0
, primarg1
= arg1
;
2725 STRIP_NOPS (primarg0
);
2726 STRIP_NOPS (primarg1
);
2727 if (operand_equal_p (primarg0
, primarg1
, 0))
2730 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2731 actual comparison operand, ARG0.
2733 First throw away any conversions to wider types
2734 already present in the operands. */
2736 primarg1
= get_narrower (arg1
, &unsignedp1
);
2737 primother
= get_narrower (other
, &unsignedpo
);
2739 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2740 if (unsignedp1
== unsignedpo
2741 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2742 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2744 tree type
= TREE_TYPE (arg0
);
2746 /* Make sure shorter operand is extended the right way
2747 to match the longer operand. */
2748 primarg1
= fold_convert (signed_or_unsigned_type_for
2749 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2751 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2758 /* See if ARG is an expression that is either a comparison or is performing
2759 arithmetic on comparisons. The comparisons must only be comparing
2760 two different values, which will be stored in *CVAL1 and *CVAL2; if
2761 they are nonzero it means that some operands have already been found.
2762 No variables may be used anywhere else in the expression except in the
2763 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2764 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2766 If this is true, return 1. Otherwise, return zero. */
2769 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2771 enum tree_code code
= TREE_CODE (arg
);
2772 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2774 /* We can handle some of the tcc_expression cases here. */
2775 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2777 else if (tclass
== tcc_expression
2778 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2779 || code
== COMPOUND_EXPR
))
2780 tclass
= tcc_binary
;
2782 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
2783 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2785 /* If we've already found a CVAL1 or CVAL2, this expression is
2786 two complex to handle. */
2787 if (*cval1
|| *cval2
)
2797 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2800 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2801 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2802 cval1
, cval2
, save_p
));
2807 case tcc_expression
:
2808 if (code
== COND_EXPR
)
2809 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2810 cval1
, cval2
, save_p
)
2811 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2812 cval1
, cval2
, save_p
)
2813 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2814 cval1
, cval2
, save_p
));
2817 case tcc_comparison
:
2818 /* First see if we can handle the first operand, then the second. For
2819 the second operand, we know *CVAL1 can't be zero. It must be that
2820 one side of the comparison is each of the values; test for the
2821 case where this isn't true by failing if the two operands
2824 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2825 TREE_OPERAND (arg
, 1), 0))
2829 *cval1
= TREE_OPERAND (arg
, 0);
2830 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2832 else if (*cval2
== 0)
2833 *cval2
= TREE_OPERAND (arg
, 0);
2834 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2839 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2841 else if (*cval2
== 0)
2842 *cval2
= TREE_OPERAND (arg
, 1);
2843 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2855 /* ARG is a tree that is known to contain just arithmetic operations and
2856 comparisons. Evaluate the operations in the tree substituting NEW0 for
2857 any occurrence of OLD0 as an operand of a comparison and likewise for
2861 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
2862 tree old1
, tree new1
)
2864 tree type
= TREE_TYPE (arg
);
2865 enum tree_code code
= TREE_CODE (arg
);
2866 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2868 /* We can handle some of the tcc_expression cases here. */
2869 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2871 else if (tclass
== tcc_expression
2872 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2873 tclass
= tcc_binary
;
2878 return fold_build1_loc (loc
, code
, type
,
2879 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2880 old0
, new0
, old1
, new1
));
2883 return fold_build2_loc (loc
, code
, type
,
2884 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2885 old0
, new0
, old1
, new1
),
2886 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2887 old0
, new0
, old1
, new1
));
2889 case tcc_expression
:
2893 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
2897 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
2901 return fold_build3_loc (loc
, code
, type
,
2902 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2903 old0
, new0
, old1
, new1
),
2904 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2905 old0
, new0
, old1
, new1
),
2906 eval_subst (loc
, TREE_OPERAND (arg
, 2),
2907 old0
, new0
, old1
, new1
));
2911 /* Fall through - ??? */
2913 case tcc_comparison
:
2915 tree arg0
= TREE_OPERAND (arg
, 0);
2916 tree arg1
= TREE_OPERAND (arg
, 1);
2918 /* We need to check both for exact equality and tree equality. The
2919 former will be true if the operand has a side-effect. In that
2920 case, we know the operand occurred exactly once. */
2922 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2924 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2927 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2929 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2932 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
2940 /* Return a tree for the case when the result of an expression is RESULT
2941 converted to TYPE and OMITTED was previously an operand of the expression
2942 but is now not needed (e.g., we folded OMITTED * 0).
2944 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2945 the conversion of RESULT to TYPE. */
2948 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
2950 tree t
= fold_convert_loc (loc
, type
, result
);
2952 /* If the resulting operand is an empty statement, just return the omitted
2953 statement casted to void. */
2954 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
2955 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
2956 fold_ignored_result (omitted
));
2958 if (TREE_SIDE_EFFECTS (omitted
))
2959 return build2_loc (loc
, COMPOUND_EXPR
, type
,
2960 fold_ignored_result (omitted
), t
);
2962 return non_lvalue_loc (loc
, t
);
2965 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2968 pedantic_omit_one_operand_loc (location_t loc
, tree type
, tree result
,
2971 tree t
= fold_convert_loc (loc
, type
, result
);
2973 /* If the resulting operand is an empty statement, just return the omitted
2974 statement casted to void. */
2975 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
2976 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
2977 fold_ignored_result (omitted
));
2979 if (TREE_SIDE_EFFECTS (omitted
))
2980 return build2_loc (loc
, COMPOUND_EXPR
, type
,
2981 fold_ignored_result (omitted
), t
);
2983 return pedantic_non_lvalue_loc (loc
, t
);
2986 /* Return a tree for the case when the result of an expression is RESULT
2987 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2988 of the expression but are now not needed.
2990 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2991 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2992 evaluated before OMITTED2. Otherwise, if neither has side effects,
2993 just do the conversion of RESULT to TYPE. */
2996 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
2997 tree omitted1
, tree omitted2
)
2999 tree t
= fold_convert_loc (loc
, type
, result
);
3001 if (TREE_SIDE_EFFECTS (omitted2
))
3002 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3003 if (TREE_SIDE_EFFECTS (omitted1
))
3004 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3006 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3010 /* Return a simplified tree node for the truth-negation of ARG. This
3011 never alters ARG itself. We assume that ARG is an operation that
3012 returns a truth value (0 or 1).
3014 FIXME: one would think we would fold the result, but it causes
3015 problems with the dominator optimizer. */
3018 fold_truth_not_expr (location_t loc
, tree arg
)
3020 tree type
= TREE_TYPE (arg
);
3021 enum tree_code code
= TREE_CODE (arg
);
3022 location_t loc1
, loc2
;
3024 /* If this is a comparison, we can simply invert it, except for
3025 floating-point non-equality comparisons, in which case we just
3026 enclose a TRUTH_NOT_EXPR around what we have. */
3028 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3030 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3031 if (FLOAT_TYPE_P (op_type
)
3032 && flag_trapping_math
3033 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3034 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3037 code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (op_type
)));
3038 if (code
== ERROR_MARK
)
3041 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3042 TREE_OPERAND (arg
, 1));
3048 return constant_boolean_node (integer_zerop (arg
), type
);
3050 case TRUTH_AND_EXPR
:
3051 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3052 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3053 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3054 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3055 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3058 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3059 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3060 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3061 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3062 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3064 case TRUTH_XOR_EXPR
:
3065 /* Here we can invert either operand. We invert the first operand
3066 unless the second operand is a TRUTH_NOT_EXPR in which case our
3067 result is the XOR of the first operand with the inside of the
3068 negation of the second operand. */
3070 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3071 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3072 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3074 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3075 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3076 TREE_OPERAND (arg
, 1));
3078 case TRUTH_ANDIF_EXPR
:
3079 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3080 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3081 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3082 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3083 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3085 case TRUTH_ORIF_EXPR
:
3086 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3087 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3088 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3089 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3090 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3092 case TRUTH_NOT_EXPR
:
3093 return TREE_OPERAND (arg
, 0);
3097 tree arg1
= TREE_OPERAND (arg
, 1);
3098 tree arg2
= TREE_OPERAND (arg
, 2);
3100 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3101 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3103 /* A COND_EXPR may have a throw as one operand, which
3104 then has void type. Just leave void operands
3106 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3107 VOID_TYPE_P (TREE_TYPE (arg1
))
3108 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3109 VOID_TYPE_P (TREE_TYPE (arg2
))
3110 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3114 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3115 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3116 TREE_OPERAND (arg
, 0),
3117 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3119 case NON_LVALUE_EXPR
:
3120 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3121 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3124 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3125 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3127 /* ... fall through ... */
3130 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3131 return build1_loc (loc
, TREE_CODE (arg
), type
,
3132 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3135 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3137 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3140 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3142 case CLEANUP_POINT_EXPR
:
3143 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3144 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3145 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3152 /* Return a simplified tree node for the truth-negation of ARG. This
3153 never alters ARG itself. We assume that ARG is an operation that
3154 returns a truth value (0 or 1).
3156 FIXME: one would think we would fold the result, but it causes
3157 problems with the dominator optimizer. */
3160 invert_truthvalue_loc (location_t loc
, tree arg
)
3164 if (TREE_CODE (arg
) == ERROR_MARK
)
3167 tem
= fold_truth_not_expr (loc
, arg
);
3169 tem
= build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg
), arg
);
3174 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3175 operands are another bit-wise operation with a common input. If so,
3176 distribute the bit operations to save an operation and possibly two if
3177 constants are involved. For example, convert
3178 (A | B) & (A | C) into A | (B & C)
3179 Further simplification will occur if B and C are constants.
3181 If this optimization cannot be done, 0 will be returned. */
3184 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3185 tree arg0
, tree arg1
)
3190 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3191 || TREE_CODE (arg0
) == code
3192 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3193 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3196 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3198 common
= TREE_OPERAND (arg0
, 0);
3199 left
= TREE_OPERAND (arg0
, 1);
3200 right
= TREE_OPERAND (arg1
, 1);
3202 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3204 common
= TREE_OPERAND (arg0
, 0);
3205 left
= TREE_OPERAND (arg0
, 1);
3206 right
= TREE_OPERAND (arg1
, 0);
3208 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3210 common
= TREE_OPERAND (arg0
, 1);
3211 left
= TREE_OPERAND (arg0
, 0);
3212 right
= TREE_OPERAND (arg1
, 1);
3214 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3216 common
= TREE_OPERAND (arg0
, 1);
3217 left
= TREE_OPERAND (arg0
, 0);
3218 right
= TREE_OPERAND (arg1
, 0);
3223 common
= fold_convert_loc (loc
, type
, common
);
3224 left
= fold_convert_loc (loc
, type
, left
);
3225 right
= fold_convert_loc (loc
, type
, right
);
3226 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3227 fold_build2_loc (loc
, code
, type
, left
, right
));
3230 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3231 with code CODE. This optimization is unsafe. */
3233 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3234 tree arg0
, tree arg1
)
3236 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3237 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3239 /* (A / C) +- (B / C) -> (A +- B) / C. */
3241 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3242 TREE_OPERAND (arg1
, 1), 0))
3243 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3244 fold_build2_loc (loc
, code
, type
,
3245 TREE_OPERAND (arg0
, 0),
3246 TREE_OPERAND (arg1
, 0)),
3247 TREE_OPERAND (arg0
, 1));
3249 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3250 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3251 TREE_OPERAND (arg1
, 0), 0)
3252 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3253 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3255 REAL_VALUE_TYPE r0
, r1
;
3256 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3257 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3259 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3261 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3262 real_arithmetic (&r0
, code
, &r0
, &r1
);
3263 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3264 TREE_OPERAND (arg0
, 0),
3265 build_real (type
, r0
));
3271 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3272 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3275 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3276 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3278 tree result
, bftype
;
3282 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3283 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3284 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3285 && host_integerp (size
, 0)
3286 && tree_low_cst (size
, 0) == bitsize
)
3287 return fold_convert_loc (loc
, type
, inner
);
3291 if (TYPE_PRECISION (bftype
) != bitsize
3292 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3293 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3295 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3296 size_int (bitsize
), bitsize_int (bitpos
));
3299 result
= fold_convert_loc (loc
, type
, result
);
3304 /* Optimize a bit-field compare.
3306 There are two cases: First is a compare against a constant and the
3307 second is a comparison of two items where the fields are at the same
3308 bit position relative to the start of a chunk (byte, halfword, word)
3309 large enough to contain it. In these cases we can avoid the shift
3310 implicit in bitfield extractions.
3312 For constants, we emit a compare of the shifted constant with the
3313 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3314 compared. For two fields at the same position, we do the ANDs with the
3315 similar mask and compare the result of the ANDs.
3317 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3318 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3319 are the left and right operands of the comparison, respectively.
3321 If the optimization described above can be done, we return the resulting
3322 tree. Otherwise we return zero. */
3325 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3326 tree compare_type
, tree lhs
, tree rhs
)
3328 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3329 tree type
= TREE_TYPE (lhs
);
3330 tree signed_type
, unsigned_type
;
3331 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3332 enum machine_mode lmode
, rmode
, nmode
;
3333 int lunsignedp
, runsignedp
;
3334 int lvolatilep
= 0, rvolatilep
= 0;
3335 tree linner
, rinner
= NULL_TREE
;
3339 /* In the strict volatile bitfields case, doing code changes here may prevent
3340 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3341 if (flag_strict_volatile_bitfields
> 0)
3344 /* Get all the information about the extractions being done. If the bit size
3345 if the same as the size of the underlying object, we aren't doing an
3346 extraction at all and so can do nothing. We also don't want to
3347 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3348 then will no longer be able to replace it. */
3349 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3350 &lunsignedp
, &lvolatilep
, false);
3351 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3352 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3357 /* If this is not a constant, we can only do something if bit positions,
3358 sizes, and signedness are the same. */
3359 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3360 &runsignedp
, &rvolatilep
, false);
3362 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3363 || lunsignedp
!= runsignedp
|| offset
!= 0
3364 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3368 /* See if we can find a mode to refer to this field. We should be able to,
3369 but fail if we can't. */
3371 && GET_MODE_BITSIZE (lmode
) > 0
3372 && flag_strict_volatile_bitfields
> 0)
3375 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3376 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3377 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3378 TYPE_ALIGN (TREE_TYPE (rinner
))),
3379 word_mode
, lvolatilep
|| rvolatilep
);
3380 if (nmode
== VOIDmode
)
3383 /* Set signed and unsigned types of the precision of this mode for the
3385 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3386 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3388 /* Compute the bit position and size for the new reference and our offset
3389 within it. If the new reference is the same size as the original, we
3390 won't optimize anything, so return zero. */
3391 nbitsize
= GET_MODE_BITSIZE (nmode
);
3392 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3394 if (nbitsize
== lbitsize
)
3397 if (BYTES_BIG_ENDIAN
)
3398 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3400 /* Make the mask to be used against the extracted field. */
3401 mask
= build_int_cst_type (unsigned_type
, -1);
3402 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3403 mask
= const_binop (RSHIFT_EXPR
, mask
,
3404 size_int (nbitsize
- lbitsize
- lbitpos
));
3407 /* If not comparing with constant, just rework the comparison
3409 return fold_build2_loc (loc
, code
, compare_type
,
3410 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3411 make_bit_field_ref (loc
, linner
,
3416 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3417 make_bit_field_ref (loc
, rinner
,
3423 /* Otherwise, we are handling the constant case. See if the constant is too
3424 big for the field. Warn and return a tree of for 0 (false) if so. We do
3425 this not only for its own sake, but to avoid having to test for this
3426 error case below. If we didn't, we might generate wrong code.
3428 For unsigned fields, the constant shifted right by the field length should
3429 be all zero. For signed fields, the high-order bits should agree with
3434 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3435 fold_convert_loc (loc
,
3436 unsigned_type
, rhs
),
3437 size_int (lbitsize
))))
3439 warning (0, "comparison is always %d due to width of bit-field",
3441 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3446 tree tem
= const_binop (RSHIFT_EXPR
,
3447 fold_convert_loc (loc
, signed_type
, rhs
),
3448 size_int (lbitsize
- 1));
3449 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3451 warning (0, "comparison is always %d due to width of bit-field",
3453 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3457 /* Single-bit compares should always be against zero. */
3458 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3460 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3461 rhs
= build_int_cst (type
, 0);
3464 /* Make a new bitfield reference, shift the constant over the
3465 appropriate number of bits and mask it with the computed mask
3466 (in case this was a signed field). If we changed it, make a new one. */
3467 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3470 TREE_SIDE_EFFECTS (lhs
) = 1;
3471 TREE_THIS_VOLATILE (lhs
) = 1;
3474 rhs
= const_binop (BIT_AND_EXPR
,
3475 const_binop (LSHIFT_EXPR
,
3476 fold_convert_loc (loc
, unsigned_type
, rhs
),
3477 size_int (lbitpos
)),
3480 lhs
= build2_loc (loc
, code
, compare_type
,
3481 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3485 /* Subroutine for fold_truth_andor_1: decode a field reference.
3487 If EXP is a comparison reference, we return the innermost reference.
3489 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3490 set to the starting bit number.
3492 If the innermost field can be completely contained in a mode-sized
3493 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3495 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3496 otherwise it is not changed.
3498 *PUNSIGNEDP is set to the signedness of the field.
3500 *PMASK is set to the mask used. This is either contained in a
3501 BIT_AND_EXPR or derived from the width of the field.
3503 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3505 Return 0 if this is not a component reference or is one that we can't
3506 do anything with. */
3509 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3510 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3511 int *punsignedp
, int *pvolatilep
,
3512 tree
*pmask
, tree
*pand_mask
)
3514 tree outer_type
= 0;
3516 tree mask
, inner
, offset
;
3518 unsigned int precision
;
3520 /* All the optimizations using this function assume integer fields.
3521 There are problems with FP fields since the type_for_size call
3522 below can fail for, e.g., XFmode. */
3523 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3526 /* We are interested in the bare arrangement of bits, so strip everything
3527 that doesn't affect the machine mode. However, record the type of the
3528 outermost expression if it may matter below. */
3529 if (CONVERT_EXPR_P (exp
)
3530 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3531 outer_type
= TREE_TYPE (exp
);
3534 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3536 and_mask
= TREE_OPERAND (exp
, 1);
3537 exp
= TREE_OPERAND (exp
, 0);
3538 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3539 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3543 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3544 punsignedp
, pvolatilep
, false);
3545 if ((inner
== exp
&& and_mask
== 0)
3546 || *pbitsize
< 0 || offset
!= 0
3547 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3550 /* If the number of bits in the reference is the same as the bitsize of
3551 the outer type, then the outer type gives the signedness. Otherwise
3552 (in case of a small bitfield) the signedness is unchanged. */
3553 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3554 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3556 /* Compute the mask to access the bitfield. */
3557 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3558 precision
= TYPE_PRECISION (unsigned_type
);
3560 mask
= build_int_cst_type (unsigned_type
, -1);
3562 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3563 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3565 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3567 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3568 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3571 *pand_mask
= and_mask
;
3575 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3579 all_ones_mask_p (const_tree mask
, int size
)
3581 tree type
= TREE_TYPE (mask
);
3582 unsigned int precision
= TYPE_PRECISION (type
);
3585 tmask
= build_int_cst_type (signed_type_for (type
), -1);
3588 tree_int_cst_equal (mask
,
3589 const_binop (RSHIFT_EXPR
,
3590 const_binop (LSHIFT_EXPR
, tmask
,
3591 size_int (precision
- size
)),
3592 size_int (precision
- size
)));
3595 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3596 represents the sign bit of EXP's type. If EXP represents a sign
3597 or zero extension, also test VAL against the unextended type.
3598 The return value is the (sub)expression whose sign bit is VAL,
3599 or NULL_TREE otherwise. */
3602 sign_bit_p (tree exp
, const_tree val
)
3604 unsigned HOST_WIDE_INT mask_lo
, lo
;
3605 HOST_WIDE_INT mask_hi
, hi
;
3609 /* Tree EXP must have an integral type. */
3610 t
= TREE_TYPE (exp
);
3611 if (! INTEGRAL_TYPE_P (t
))
3614 /* Tree VAL must be an integer constant. */
3615 if (TREE_CODE (val
) != INTEGER_CST
3616 || TREE_OVERFLOW (val
))
3619 width
= TYPE_PRECISION (t
);
3620 if (width
> HOST_BITS_PER_WIDE_INT
)
3622 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3625 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3626 >> (HOST_BITS_PER_DOUBLE_INT
- width
));
3632 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3635 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3636 >> (HOST_BITS_PER_WIDE_INT
- width
));
3639 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3640 treat VAL as if it were unsigned. */
3641 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3642 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3645 /* Handle extension from a narrower type. */
3646 if (TREE_CODE (exp
) == NOP_EXPR
3647 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3648 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3653 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3654 to be evaluated unconditionally. */
3657 simple_operand_p (const_tree exp
)
3659 /* Strip any conversions that don't change the machine mode. */
3662 return (CONSTANT_CLASS_P (exp
)
3663 || TREE_CODE (exp
) == SSA_NAME
3665 && ! TREE_ADDRESSABLE (exp
)
3666 && ! TREE_THIS_VOLATILE (exp
)
3667 && ! DECL_NONLOCAL (exp
)
3668 /* Don't regard global variables as simple. They may be
3669 allocated in ways unknown to the compiler (shared memory,
3670 #pragma weak, etc). */
3671 && ! TREE_PUBLIC (exp
)
3672 && ! DECL_EXTERNAL (exp
)
3673 /* Loading a static variable is unduly expensive, but global
3674 registers aren't expensive. */
3675 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3678 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3679 to be evaluated unconditionally.
3680 I addition to simple_operand_p, we assume that comparisons, conversions,
3681 and logic-not operations are simple, if their operands are simple, too. */
3684 simple_operand_p_2 (tree exp
)
3686 enum tree_code code
;
3688 if (TREE_SIDE_EFFECTS (exp
)
3689 || tree_could_trap_p (exp
))
3692 while (CONVERT_EXPR_P (exp
))
3693 exp
= TREE_OPERAND (exp
, 0);
3695 code
= TREE_CODE (exp
);
3697 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3698 return (simple_operand_p (TREE_OPERAND (exp
, 0))
3699 && simple_operand_p (TREE_OPERAND (exp
, 1)));
3701 if (code
== TRUTH_NOT_EXPR
)
3702 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
3704 return simple_operand_p (exp
);
3708 /* The following functions are subroutines to fold_range_test and allow it to
3709 try to change a logical combination of comparisons into a range test.
3712 X == 2 || X == 3 || X == 4 || X == 5
3716 (unsigned) (X - 2) <= 3
3718 We describe each set of comparisons as being either inside or outside
3719 a range, using a variable named like IN_P, and then describe the
3720 range with a lower and upper bound. If one of the bounds is omitted,
3721 it represents either the highest or lowest value of the type.
3723 In the comments below, we represent a range by two numbers in brackets
3724 preceded by a "+" to designate being inside that range, or a "-" to
3725 designate being outside that range, so the condition can be inverted by
3726 flipping the prefix. An omitted bound is represented by a "-". For
3727 example, "- [-, 10]" means being outside the range starting at the lowest
3728 possible value and ending at 10, in other words, being greater than 10.
3729 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3732 We set up things so that the missing bounds are handled in a consistent
3733 manner so neither a missing bound nor "true" and "false" need to be
3734 handled using a special case. */
3736 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3737 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3738 and UPPER1_P are nonzero if the respective argument is an upper bound
3739 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3740 must be specified for a comparison. ARG1 will be converted to ARG0's
3741 type if both are specified. */
3744 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3745 tree arg1
, int upper1_p
)
3751 /* If neither arg represents infinity, do the normal operation.
3752 Else, if not a comparison, return infinity. Else handle the special
3753 comparison rules. Note that most of the cases below won't occur, but
3754 are handled for consistency. */
3756 if (arg0
!= 0 && arg1
!= 0)
3758 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3759 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3761 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3764 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3767 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3768 for neither. In real maths, we cannot assume open ended ranges are
3769 the same. But, this is computer arithmetic, where numbers are finite.
3770 We can therefore make the transformation of any unbounded range with
3771 the value Z, Z being greater than any representable number. This permits
3772 us to treat unbounded ranges as equal. */
3773 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3774 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3778 result
= sgn0
== sgn1
;
3781 result
= sgn0
!= sgn1
;
3784 result
= sgn0
< sgn1
;
3787 result
= sgn0
<= sgn1
;
3790 result
= sgn0
> sgn1
;
3793 result
= sgn0
>= sgn1
;
3799 return constant_boolean_node (result
, type
);
3802 /* Helper routine for make_range. Perform one step for it, return
3803 new expression if the loop should continue or NULL_TREE if it should
3807 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
3808 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
3809 bool *strict_overflow_p
)
3811 tree arg0_type
= TREE_TYPE (arg0
);
3812 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
3813 int in_p
= *p_in_p
, n_in_p
;
3817 case TRUTH_NOT_EXPR
:
3821 case EQ_EXPR
: case NE_EXPR
:
3822 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3823 /* We can only do something if the range is testing for zero
3824 and if the second operand is an integer constant. Note that
3825 saying something is "in" the range we make is done by
3826 complementing IN_P since it will set in the initial case of
3827 being not equal to zero; "out" is leaving it alone. */
3828 if (low
== NULL_TREE
|| high
== NULL_TREE
3829 || ! integer_zerop (low
) || ! integer_zerop (high
)
3830 || TREE_CODE (arg1
) != INTEGER_CST
)
3835 case NE_EXPR
: /* - [c, c] */
3838 case EQ_EXPR
: /* + [c, c] */
3839 in_p
= ! in_p
, low
= high
= arg1
;
3841 case GT_EXPR
: /* - [-, c] */
3842 low
= 0, high
= arg1
;
3844 case GE_EXPR
: /* + [c, -] */
3845 in_p
= ! in_p
, low
= arg1
, high
= 0;
3847 case LT_EXPR
: /* - [c, -] */
3848 low
= arg1
, high
= 0;
3850 case LE_EXPR
: /* + [-, c] */
3851 in_p
= ! in_p
, low
= 0, high
= arg1
;
3857 /* If this is an unsigned comparison, we also know that EXP is
3858 greater than or equal to zero. We base the range tests we make
3859 on that fact, so we record it here so we can parse existing
3860 range tests. We test arg0_type since often the return type
3861 of, e.g. EQ_EXPR, is boolean. */
3862 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3864 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3866 build_int_cst (arg0_type
, 0),
3870 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3872 /* If the high bound is missing, but we have a nonzero low
3873 bound, reverse the range so it goes from zero to the low bound
3875 if (high
== 0 && low
&& ! integer_zerop (low
))
3878 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3879 integer_one_node
, 0);
3880 low
= build_int_cst (arg0_type
, 0);
3890 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3891 low and high are non-NULL, then normalize will DTRT. */
3892 if (!TYPE_UNSIGNED (arg0_type
)
3893 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3895 if (low
== NULL_TREE
)
3896 low
= TYPE_MIN_VALUE (arg0_type
);
3897 if (high
== NULL_TREE
)
3898 high
= TYPE_MAX_VALUE (arg0_type
);
3901 /* (-x) IN [a,b] -> x in [-b, -a] */
3902 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3903 build_int_cst (exp_type
, 0),
3905 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3906 build_int_cst (exp_type
, 0),
3908 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
3914 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3915 build_int_cst (exp_type
, 1));
3919 if (TREE_CODE (arg1
) != INTEGER_CST
)
3922 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3923 move a constant to the other side. */
3924 if (!TYPE_UNSIGNED (arg0_type
)
3925 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3928 /* If EXP is signed, any overflow in the computation is undefined,
3929 so we don't worry about it so long as our computations on
3930 the bounds don't overflow. For unsigned, overflow is defined
3931 and this is exactly the right thing. */
3932 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3933 arg0_type
, low
, 0, arg1
, 0);
3934 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3935 arg0_type
, high
, 1, arg1
, 0);
3936 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3937 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3940 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3941 *strict_overflow_p
= true;
3944 /* Check for an unsigned range which has wrapped around the maximum
3945 value thus making n_high < n_low, and normalize it. */
3946 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3948 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3949 integer_one_node
, 0);
3950 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3951 integer_one_node
, 0);
3953 /* If the range is of the form +/- [ x+1, x ], we won't
3954 be able to normalize it. But then, it represents the
3955 whole range or the empty set, so make it
3957 if (tree_int_cst_equal (n_low
, low
)
3958 && tree_int_cst_equal (n_high
, high
))
3964 low
= n_low
, high
= n_high
;
3972 case NON_LVALUE_EXPR
:
3973 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
3976 if (! INTEGRAL_TYPE_P (arg0_type
)
3977 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
3978 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
3981 n_low
= low
, n_high
= high
;
3984 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
3987 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
3989 /* If we're converting arg0 from an unsigned type, to exp,
3990 a signed type, we will be doing the comparison as unsigned.
3991 The tests above have already verified that LOW and HIGH
3994 So we have to ensure that we will handle large unsigned
3995 values the same way that the current signed bounds treat
3998 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4002 /* For fixed-point modes, we need to pass the saturating flag
4003 as the 2nd parameter. */
4004 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4006 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4007 TYPE_SATURATING (arg0_type
));
4010 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4012 /* A range without an upper bound is, naturally, unbounded.
4013 Since convert would have cropped a very large value, use
4014 the max value for the destination type. */
4016 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4017 : TYPE_MAX_VALUE (arg0_type
);
4019 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4020 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4021 fold_convert_loc (loc
, arg0_type
,
4023 build_int_cst (arg0_type
, 1));
4025 /* If the low bound is specified, "and" the range with the
4026 range for which the original unsigned value will be
4030 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4031 1, fold_convert_loc (loc
, arg0_type
,
4036 in_p
= (n_in_p
== in_p
);
4040 /* Otherwise, "or" the range with the range of the input
4041 that will be interpreted as negative. */
4042 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4043 1, fold_convert_loc (loc
, arg0_type
,
4048 in_p
= (in_p
!= n_in_p
);
4062 /* Given EXP, a logical expression, set the range it is testing into
4063 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4064 actually being tested. *PLOW and *PHIGH will be made of the same
4065 type as the returned expression. If EXP is not a comparison, we
4066 will most likely not be returning a useful value and range. Set
4067 *STRICT_OVERFLOW_P to true if the return value is only valid
4068 because signed overflow is undefined; otherwise, do not change
4069 *STRICT_OVERFLOW_P. */
4072 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4073 bool *strict_overflow_p
)
4075 enum tree_code code
;
4076 tree arg0
, arg1
= NULL_TREE
;
4077 tree exp_type
, nexp
;
4080 location_t loc
= EXPR_LOCATION (exp
);
4082 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4083 and see if we can refine the range. Some of the cases below may not
4084 happen, but it doesn't seem worth worrying about this. We "continue"
4085 the outer loop when we've changed something; otherwise we "break"
4086 the switch, which will "break" the while. */
4089 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4093 code
= TREE_CODE (exp
);
4094 exp_type
= TREE_TYPE (exp
);
4097 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4099 if (TREE_OPERAND_LENGTH (exp
) > 0)
4100 arg0
= TREE_OPERAND (exp
, 0);
4101 if (TREE_CODE_CLASS (code
) == tcc_binary
4102 || TREE_CODE_CLASS (code
) == tcc_comparison
4103 || (TREE_CODE_CLASS (code
) == tcc_expression
4104 && TREE_OPERAND_LENGTH (exp
) > 1))
4105 arg1
= TREE_OPERAND (exp
, 1);
4107 if (arg0
== NULL_TREE
)
4110 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4111 &high
, &in_p
, strict_overflow_p
);
4112 if (nexp
== NULL_TREE
)
4117 /* If EXP is a constant, we can evaluate whether this is true or false. */
4118 if (TREE_CODE (exp
) == INTEGER_CST
)
4120 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4122 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4128 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4132 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4133 type, TYPE, return an expression to test if EXP is in (or out of, depending
4134 on IN_P) the range. Return 0 if the test couldn't be created. */
4137 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4138 tree low
, tree high
)
4140 tree etype
= TREE_TYPE (exp
), value
;
4142 #ifdef HAVE_canonicalize_funcptr_for_compare
4143 /* Disable this optimization for function pointer expressions
4144 on targets that require function pointer canonicalization. */
4145 if (HAVE_canonicalize_funcptr_for_compare
4146 && TREE_CODE (etype
) == POINTER_TYPE
4147 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4153 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4155 return invert_truthvalue_loc (loc
, value
);
4160 if (low
== 0 && high
== 0)
4161 return build_int_cst (type
, 1);
4164 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4165 fold_convert_loc (loc
, etype
, high
));
4168 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4169 fold_convert_loc (loc
, etype
, low
));
4171 if (operand_equal_p (low
, high
, 0))
4172 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4173 fold_convert_loc (loc
, etype
, low
));
4175 if (integer_zerop (low
))
4177 if (! TYPE_UNSIGNED (etype
))
4179 etype
= unsigned_type_for (etype
);
4180 high
= fold_convert_loc (loc
, etype
, high
);
4181 exp
= fold_convert_loc (loc
, etype
, exp
);
4183 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4186 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4187 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4189 unsigned HOST_WIDE_INT lo
;
4193 prec
= TYPE_PRECISION (etype
);
4194 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4197 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4201 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4202 lo
= (unsigned HOST_WIDE_INT
) -1;
4205 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4207 if (TYPE_UNSIGNED (etype
))
4209 tree signed_etype
= signed_type_for (etype
);
4210 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4212 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4214 etype
= signed_etype
;
4215 exp
= fold_convert_loc (loc
, etype
, exp
);
4217 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4218 build_int_cst (etype
, 0));
4222 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4223 This requires wrap-around arithmetics for the type of the expression.
4224 First make sure that arithmetics in this type is valid, then make sure
4225 that it wraps around. */
4226 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4227 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4228 TYPE_UNSIGNED (etype
));
4230 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4232 tree utype
, minv
, maxv
;
4234 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4235 for the type in question, as we rely on this here. */
4236 utype
= unsigned_type_for (etype
);
4237 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4238 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4239 integer_one_node
, 1);
4240 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4242 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4249 high
= fold_convert_loc (loc
, etype
, high
);
4250 low
= fold_convert_loc (loc
, etype
, low
);
4251 exp
= fold_convert_loc (loc
, etype
, exp
);
4253 value
= const_binop (MINUS_EXPR
, high
, low
);
4256 if (POINTER_TYPE_P (etype
))
4258 if (value
!= 0 && !TREE_OVERFLOW (value
))
4260 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4261 return build_range_check (loc
, type
,
4262 fold_build_pointer_plus_loc (loc
, exp
, low
),
4263 1, build_int_cst (etype
, 0), value
);
4268 if (value
!= 0 && !TREE_OVERFLOW (value
))
4269 return build_range_check (loc
, type
,
4270 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4271 1, build_int_cst (etype
, 0), value
);
4276 /* Return the predecessor of VAL in its type, handling the infinite case. */
4279 range_predecessor (tree val
)
4281 tree type
= TREE_TYPE (val
);
4283 if (INTEGRAL_TYPE_P (type
)
4284 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4287 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4290 /* Return the successor of VAL in its type, handling the infinite case. */
4293 range_successor (tree val
)
4295 tree type
= TREE_TYPE (val
);
4297 if (INTEGRAL_TYPE_P (type
)
4298 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4301 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4304 /* Given two ranges, see if we can merge them into one. Return 1 if we
4305 can, 0 if we can't. Set the output range into the specified parameters. */
4308 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4309 tree high0
, int in1_p
, tree low1
, tree high1
)
4317 int lowequal
= ((low0
== 0 && low1
== 0)
4318 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4319 low0
, 0, low1
, 0)));
4320 int highequal
= ((high0
== 0 && high1
== 0)
4321 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4322 high0
, 1, high1
, 1)));
4324 /* Make range 0 be the range that starts first, or ends last if they
4325 start at the same value. Swap them if it isn't. */
4326 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4329 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4330 high1
, 1, high0
, 1))))
4332 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4333 tem
= low0
, low0
= low1
, low1
= tem
;
4334 tem
= high0
, high0
= high1
, high1
= tem
;
4337 /* Now flag two cases, whether the ranges are disjoint or whether the
4338 second range is totally subsumed in the first. Note that the tests
4339 below are simplified by the ones above. */
4340 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4341 high0
, 1, low1
, 0));
4342 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4343 high1
, 1, high0
, 1));
4345 /* We now have four cases, depending on whether we are including or
4346 excluding the two ranges. */
4349 /* If they don't overlap, the result is false. If the second range
4350 is a subset it is the result. Otherwise, the range is from the start
4351 of the second to the end of the first. */
4353 in_p
= 0, low
= high
= 0;
4355 in_p
= 1, low
= low1
, high
= high1
;
4357 in_p
= 1, low
= low1
, high
= high0
;
4360 else if (in0_p
&& ! in1_p
)
4362 /* If they don't overlap, the result is the first range. If they are
4363 equal, the result is false. If the second range is a subset of the
4364 first, and the ranges begin at the same place, we go from just after
4365 the end of the second range to the end of the first. If the second
4366 range is not a subset of the first, or if it is a subset and both
4367 ranges end at the same place, the range starts at the start of the
4368 first range and ends just before the second range.
4369 Otherwise, we can't describe this as a single range. */
4371 in_p
= 1, low
= low0
, high
= high0
;
4372 else if (lowequal
&& highequal
)
4373 in_p
= 0, low
= high
= 0;
4374 else if (subset
&& lowequal
)
4376 low
= range_successor (high1
);
4381 /* We are in the weird situation where high0 > high1 but
4382 high1 has no successor. Punt. */
4386 else if (! subset
|| highequal
)
4389 high
= range_predecessor (low1
);
4393 /* low0 < low1 but low1 has no predecessor. Punt. */
4401 else if (! in0_p
&& in1_p
)
4403 /* If they don't overlap, the result is the second range. If the second
4404 is a subset of the first, the result is false. Otherwise,
4405 the range starts just after the first range and ends at the
4406 end of the second. */
4408 in_p
= 1, low
= low1
, high
= high1
;
4409 else if (subset
|| highequal
)
4410 in_p
= 0, low
= high
= 0;
4413 low
= range_successor (high0
);
4418 /* high1 > high0 but high0 has no successor. Punt. */
4426 /* The case where we are excluding both ranges. Here the complex case
4427 is if they don't overlap. In that case, the only time we have a
4428 range is if they are adjacent. If the second is a subset of the
4429 first, the result is the first. Otherwise, the range to exclude
4430 starts at the beginning of the first range and ends at the end of the
4434 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4435 range_successor (high0
),
4437 in_p
= 0, low
= low0
, high
= high1
;
4440 /* Canonicalize - [min, x] into - [-, x]. */
4441 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4442 switch (TREE_CODE (TREE_TYPE (low0
)))
4445 if (TYPE_PRECISION (TREE_TYPE (low0
))
4446 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4450 if (tree_int_cst_equal (low0
,
4451 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4455 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4456 && integer_zerop (low0
))
4463 /* Canonicalize - [x, max] into - [x, -]. */
4464 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4465 switch (TREE_CODE (TREE_TYPE (high1
)))
4468 if (TYPE_PRECISION (TREE_TYPE (high1
))
4469 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4473 if (tree_int_cst_equal (high1
,
4474 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4478 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4479 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4481 integer_one_node
, 1)))
4488 /* The ranges might be also adjacent between the maximum and
4489 minimum values of the given type. For
4490 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4491 return + [x + 1, y - 1]. */
4492 if (low0
== 0 && high1
== 0)
4494 low
= range_successor (high0
);
4495 high
= range_predecessor (low1
);
4496 if (low
== 0 || high
== 0)
4506 in_p
= 0, low
= low0
, high
= high0
;
4508 in_p
= 0, low
= low0
, high
= high1
;
4511 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4516 /* Subroutine of fold, looking inside expressions of the form
4517 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4518 of the COND_EXPR. This function is being used also to optimize
4519 A op B ? C : A, by reversing the comparison first.
4521 Return a folded expression whose code is not a COND_EXPR
4522 anymore, or NULL_TREE if no folding opportunity is found. */
4525 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4526 tree arg0
, tree arg1
, tree arg2
)
4528 enum tree_code comp_code
= TREE_CODE (arg0
);
4529 tree arg00
= TREE_OPERAND (arg0
, 0);
4530 tree arg01
= TREE_OPERAND (arg0
, 1);
4531 tree arg1_type
= TREE_TYPE (arg1
);
4537 /* If we have A op 0 ? A : -A, consider applying the following
4540 A == 0? A : -A same as -A
4541 A != 0? A : -A same as A
4542 A >= 0? A : -A same as abs (A)
4543 A > 0? A : -A same as abs (A)
4544 A <= 0? A : -A same as -abs (A)
4545 A < 0? A : -A same as -abs (A)
4547 None of these transformations work for modes with signed
4548 zeros. If A is +/-0, the first two transformations will
4549 change the sign of the result (from +0 to -0, or vice
4550 versa). The last four will fix the sign of the result,
4551 even though the original expressions could be positive or
4552 negative, depending on the sign of A.
4554 Note that all these transformations are correct if A is
4555 NaN, since the two alternatives (A and -A) are also NaNs. */
4556 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4557 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4558 ? real_zerop (arg01
)
4559 : integer_zerop (arg01
))
4560 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4561 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4562 /* In the case that A is of the form X-Y, '-A' (arg2) may
4563 have already been folded to Y-X, check for that. */
4564 || (TREE_CODE (arg1
) == MINUS_EXPR
4565 && TREE_CODE (arg2
) == MINUS_EXPR
4566 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4567 TREE_OPERAND (arg2
, 1), 0)
4568 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4569 TREE_OPERAND (arg2
, 0), 0))))
4574 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4575 return pedantic_non_lvalue_loc (loc
,
4576 fold_convert_loc (loc
, type
,
4577 negate_expr (tem
)));
4580 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4583 if (flag_trapping_math
)
4588 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4589 arg1
= fold_convert_loc (loc
, signed_type_for
4590 (TREE_TYPE (arg1
)), arg1
);
4591 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4592 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4595 if (flag_trapping_math
)
4599 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4600 arg1
= fold_convert_loc (loc
, signed_type_for
4601 (TREE_TYPE (arg1
)), arg1
);
4602 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4603 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4605 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4609 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4610 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4611 both transformations are correct when A is NaN: A != 0
4612 is then true, and A == 0 is false. */
4614 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4615 && integer_zerop (arg01
) && integer_zerop (arg2
))
4617 if (comp_code
== NE_EXPR
)
4618 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4619 else if (comp_code
== EQ_EXPR
)
4620 return build_int_cst (type
, 0);
4623 /* Try some transformations of A op B ? A : B.
4625 A == B? A : B same as B
4626 A != B? A : B same as A
4627 A >= B? A : B same as max (A, B)
4628 A > B? A : B same as max (B, A)
4629 A <= B? A : B same as min (A, B)
4630 A < B? A : B same as min (B, A)
4632 As above, these transformations don't work in the presence
4633 of signed zeros. For example, if A and B are zeros of
4634 opposite sign, the first two transformations will change
4635 the sign of the result. In the last four, the original
4636 expressions give different results for (A=+0, B=-0) and
4637 (A=-0, B=+0), but the transformed expressions do not.
4639 The first two transformations are correct if either A or B
4640 is a NaN. In the first transformation, the condition will
4641 be false, and B will indeed be chosen. In the case of the
4642 second transformation, the condition A != B will be true,
4643 and A will be chosen.
4645 The conversions to max() and min() are not correct if B is
4646 a number and A is not. The conditions in the original
4647 expressions will be false, so all four give B. The min()
4648 and max() versions would give a NaN instead. */
4649 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4650 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4651 /* Avoid these transformations if the COND_EXPR may be used
4652 as an lvalue in the C++ front-end. PR c++/19199. */
4654 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4655 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4656 || ! maybe_lvalue_p (arg1
)
4657 || ! maybe_lvalue_p (arg2
)))
4659 tree comp_op0
= arg00
;
4660 tree comp_op1
= arg01
;
4661 tree comp_type
= TREE_TYPE (comp_op0
);
4663 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4664 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4674 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
4676 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4681 /* In C++ a ?: expression can be an lvalue, so put the
4682 operand which will be used if they are equal first
4683 so that we can convert this back to the
4684 corresponding COND_EXPR. */
4685 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4687 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4688 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4689 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4690 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4691 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
4692 comp_op1
, comp_op0
);
4693 return pedantic_non_lvalue_loc (loc
,
4694 fold_convert_loc (loc
, type
, tem
));
4701 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4703 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4704 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4705 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4706 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4707 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
4708 comp_op1
, comp_op0
);
4709 return pedantic_non_lvalue_loc (loc
,
4710 fold_convert_loc (loc
, type
, tem
));
4714 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4715 return pedantic_non_lvalue_loc (loc
,
4716 fold_convert_loc (loc
, type
, arg2
));
4719 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4720 return pedantic_non_lvalue_loc (loc
,
4721 fold_convert_loc (loc
, type
, arg1
));
4724 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4729 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4730 we might still be able to simplify this. For example,
4731 if C1 is one less or one more than C2, this might have started
4732 out as a MIN or MAX and been transformed by this function.
4733 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4735 if (INTEGRAL_TYPE_P (type
)
4736 && TREE_CODE (arg01
) == INTEGER_CST
4737 && TREE_CODE (arg2
) == INTEGER_CST
)
4741 if (TREE_CODE (arg1
) == INTEGER_CST
)
4743 /* We can replace A with C1 in this case. */
4744 arg1
= fold_convert_loc (loc
, type
, arg01
);
4745 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
4748 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4749 MIN_EXPR, to preserve the signedness of the comparison. */
4750 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4752 && operand_equal_p (arg01
,
4753 const_binop (PLUS_EXPR
, arg2
,
4754 build_int_cst (type
, 1)),
4757 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4758 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4760 return pedantic_non_lvalue_loc (loc
,
4761 fold_convert_loc (loc
, type
, tem
));
4766 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4768 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4770 && operand_equal_p (arg01
,
4771 const_binop (MINUS_EXPR
, arg2
,
4772 build_int_cst (type
, 1)),
4775 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4776 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4778 return pedantic_non_lvalue_loc (loc
,
4779 fold_convert_loc (loc
, type
, tem
));
4784 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4785 MAX_EXPR, to preserve the signedness of the comparison. */
4786 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4788 && operand_equal_p (arg01
,
4789 const_binop (MINUS_EXPR
, arg2
,
4790 build_int_cst (type
, 1)),
4793 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4794 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4796 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4801 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4802 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4804 && operand_equal_p (arg01
,
4805 const_binop (PLUS_EXPR
, arg2
,
4806 build_int_cst (type
, 1)),
4809 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4810 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4812 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
/* Targets may override this to say when it is profitable to replace a
   short-circuit TRUTH_ANDIF/ORIF with a non-short-circuit AND/OR.
   Default: only when the predicted branch cost is at least 2.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
4832 /* EXP is some logical combination of boolean tests. See if we can
4833 merge it into some range test. Return the new tree if so. */
4836 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
4839 int or_op
= (code
== TRUTH_ORIF_EXPR
4840 || code
== TRUTH_OR_EXPR
);
4841 int in0_p
, in1_p
, in_p
;
4842 tree low0
, low1
, low
, high0
, high1
, high
;
4843 bool strict_overflow_p
= false;
4844 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
4845 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
4847 const char * const warnmsg
= G_("assuming signed overflow does not occur "
4848 "when simplifying range test");
4850 /* If this is an OR operation, invert both sides; we will invert
4851 again at the end. */
4853 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4855 /* If both expressions are the same, if we can merge the ranges, and we
4856 can build the range test, return it or it inverted. If one of the
4857 ranges is always true or always false, consider it to be the same
4858 expression as the other. */
4859 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4860 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4862 && 0 != (tem
= (build_range_check (loc
, type
,
4864 : rhs
!= 0 ? rhs
: integer_zero_node
,
4867 if (strict_overflow_p
)
4868 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
4869 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
4872 /* On machines where the branch cost is expensive, if this is a
4873 short-circuited branch and the underlying object on both sides
4874 is the same, make a non-short-circuit operation. */
4875 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4876 && lhs
!= 0 && rhs
!= 0
4877 && (code
== TRUTH_ANDIF_EXPR
4878 || code
== TRUTH_ORIF_EXPR
)
4879 && operand_equal_p (lhs
, rhs
, 0))
4881 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4882 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4883 which cases we can't do this. */
4884 if (simple_operand_p (lhs
))
4885 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4886 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4889 else if (!lang_hooks
.decls
.global_bindings_p ()
4890 && !CONTAINS_PLACEHOLDER_P (lhs
))
4892 tree common
= save_expr (lhs
);
4894 if (0 != (lhs
= build_range_check (loc
, type
, common
,
4895 or_op
? ! in0_p
: in0_p
,
4897 && (0 != (rhs
= build_range_check (loc
, type
, common
,
4898 or_op
? ! in1_p
: in1_p
,
4901 if (strict_overflow_p
)
4902 fold_overflow_warning (warnmsg
,
4903 WARN_STRICT_OVERFLOW_COMPARISON
);
4904 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4905 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4914 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4915 bit value. Arrange things so the extra bits will be set to zero if and
4916 only if C is signed-extended to its full width. If MASK is nonzero,
4917 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4920 unextend (tree c
, int p
, int unsignedp
, tree mask
)
4922 tree type
= TREE_TYPE (c
);
4923 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
4926 if (p
== modesize
|| unsignedp
)
4929 /* We work by getting just the sign bit into the low-order bit, then
4930 into the high-order bit, then sign-extend. We then XOR that value
4932 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1));
4933 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1));
4935 /* We must use a signed type in order to get an arithmetic right shift.
4936 However, we must also avoid introducing accidental overflows, so that
4937 a subsequent call to integer_zerop will work. Hence we must
4938 do the type conversion here. At this point, the constant is either
4939 zero or one, and the conversion to a signed type can never overflow.
4940 We could get an overflow if this conversion is done anywhere else. */
4941 if (TYPE_UNSIGNED (type
))
4942 temp
= fold_convert (signed_type_for (type
), temp
);
4944 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
4945 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
4947 temp
= const_binop (BIT_AND_EXPR
, temp
,
4948 fold_convert (TREE_TYPE (c
), mask
));
4949 /* If necessary, convert the type back to match the type of C. */
4950 if (TYPE_UNSIGNED (type
))
4951 temp
= fold_convert (type
, temp
);
4953 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
4956 /* For an expression that has the form
4960 we can drop one of the inner expressions and simplify to
4964 LOC is the location of the resulting expression. OP is the inner
4965 logical operation; the left-hand side in the examples above, while CMPOP
4966 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4967 removing a condition that guards another, as in
4968 (A != NULL && A->...) || A == NULL
4969 which we must not transform. If RHS_ONLY is true, only eliminate the
4970 right-most operand of the inner logical operation. */
4973 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
4976 tree type
= TREE_TYPE (cmpop
);
4977 enum tree_code code
= TREE_CODE (cmpop
);
4978 enum tree_code truthop_code
= TREE_CODE (op
);
4979 tree lhs
= TREE_OPERAND (op
, 0);
4980 tree rhs
= TREE_OPERAND (op
, 1);
4981 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
4982 enum tree_code rhs_code
= TREE_CODE (rhs
);
4983 enum tree_code lhs_code
= TREE_CODE (lhs
);
4984 enum tree_code inv_code
;
4986 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
4989 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4992 if (rhs_code
== truthop_code
)
4994 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
4995 if (newrhs
!= NULL_TREE
)
4998 rhs_code
= TREE_CODE (rhs
);
5001 if (lhs_code
== truthop_code
&& !rhs_only
)
5003 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5004 if (newlhs
!= NULL_TREE
)
5007 lhs_code
= TREE_CODE (lhs
);
5011 inv_code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (type
)));
5012 if (inv_code
== rhs_code
5013 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5014 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5016 if (!rhs_only
&& inv_code
== lhs_code
5017 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5018 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5020 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5021 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5026 /* Find ways of folding logical expressions of LHS and RHS:
5027 Try to merge two comparisons to the same innermost item.
5028 Look for range tests like "ch >= '0' && ch <= '9'".
5029 Look for combinations of simple terms on machines with expensive branches
5030 and evaluate the RHS unconditionally.
5032 For example, if we have p->a == 2 && p->b == 4 and we can make an
5033 object large enough to span both A and B, we can do this with a comparison
5034 against the object ANDed with the a mask.
5036 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5037 operations to do this with one comparison.
5039 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5040 function and the one above.
5042 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5043 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5045 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5048 We return the simplified tree or 0 if no optimization is possible. */
5051 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5054 /* If this is the "or" of two comparisons, we can do something if
5055 the comparisons are NE_EXPR. If this is the "and", we can do something
5056 if the comparisons are EQ_EXPR. I.e.,
5057 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5059 WANTED_CODE is this operation code. For single bit fields, we can
5060 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5061 comparison for one-bit fields. */
5063 enum tree_code wanted_code
;
5064 enum tree_code lcode
, rcode
;
5065 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5066 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5067 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5068 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5069 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5070 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5071 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5072 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5073 enum machine_mode lnmode
, rnmode
;
5074 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5075 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5076 tree l_const
, r_const
;
5077 tree lntype
, rntype
, result
;
5078 HOST_WIDE_INT first_bit
, end_bit
;
5081 /* Start by getting the comparison codes. Fail if anything is volatile.
5082 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5083 it were surrounded with a NE_EXPR. */
5085 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5088 lcode
= TREE_CODE (lhs
);
5089 rcode
= TREE_CODE (rhs
);
5091 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5093 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5094 build_int_cst (TREE_TYPE (lhs
), 0));
5098 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5100 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5101 build_int_cst (TREE_TYPE (rhs
), 0));
5105 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5106 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5109 ll_arg
= TREE_OPERAND (lhs
, 0);
5110 lr_arg
= TREE_OPERAND (lhs
, 1);
5111 rl_arg
= TREE_OPERAND (rhs
, 0);
5112 rr_arg
= TREE_OPERAND (rhs
, 1);
5114 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5115 if (simple_operand_p (ll_arg
)
5116 && simple_operand_p (lr_arg
))
5118 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5119 && operand_equal_p (lr_arg
, rr_arg
, 0))
5121 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5122 truth_type
, ll_arg
, lr_arg
);
5126 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5127 && operand_equal_p (lr_arg
, rl_arg
, 0))
5129 result
= combine_comparisons (loc
, code
, lcode
,
5130 swap_tree_comparison (rcode
),
5131 truth_type
, ll_arg
, lr_arg
);
5137 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5138 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5140 /* If the RHS can be evaluated unconditionally and its operands are
5141 simple, it wins to evaluate the RHS unconditionally on machines
5142 with expensive branches. In this case, this isn't a comparison
5143 that can be merged. */
5145 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5147 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5148 && simple_operand_p (rl_arg
)
5149 && simple_operand_p (rr_arg
))
5151 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5152 if (code
== TRUTH_OR_EXPR
5153 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5154 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5155 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5156 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5157 return build2_loc (loc
, NE_EXPR
, truth_type
,
5158 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5160 build_int_cst (TREE_TYPE (ll_arg
), 0));
5162 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5163 if (code
== TRUTH_AND_EXPR
5164 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5165 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5166 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5167 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5168 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5169 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5171 build_int_cst (TREE_TYPE (ll_arg
), 0));
5174 /* See if the comparisons can be merged. Then get all the parameters for
5177 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5178 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5182 ll_inner
= decode_field_reference (loc
, ll_arg
,
5183 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5184 &ll_unsignedp
, &volatilep
, &ll_mask
,
5186 lr_inner
= decode_field_reference (loc
, lr_arg
,
5187 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5188 &lr_unsignedp
, &volatilep
, &lr_mask
,
5190 rl_inner
= decode_field_reference (loc
, rl_arg
,
5191 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5192 &rl_unsignedp
, &volatilep
, &rl_mask
,
5194 rr_inner
= decode_field_reference (loc
, rr_arg
,
5195 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5196 &rr_unsignedp
, &volatilep
, &rr_mask
,
5199 /* It must be true that the inner operation on the lhs of each
5200 comparison must be the same if we are to be able to do anything.
5201 Then see if we have constants. If not, the same must be true for
5203 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5204 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5207 if (TREE_CODE (lr_arg
) == INTEGER_CST
5208 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5209 l_const
= lr_arg
, r_const
= rr_arg
;
5210 else if (lr_inner
== 0 || rr_inner
== 0
5211 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5214 l_const
= r_const
= 0;
5216 /* If either comparison code is not correct for our logical operation,
5217 fail. However, we can convert a one-bit comparison against zero into
5218 the opposite comparison against that bit being set in the field. */
5220 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5221 if (lcode
!= wanted_code
)
5223 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5225 /* Make the left operand unsigned, since we are only interested
5226 in the value of one bit. Otherwise we are doing the wrong
5235 /* This is analogous to the code for l_const above. */
5236 if (rcode
!= wanted_code
)
5238 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5247 /* See if we can find a mode that contains both fields being compared on
5248 the left. If we can't, fail. Otherwise, update all constants and masks
5249 to be relative to a field of that size. */
5250 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5251 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5252 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5253 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5255 if (lnmode
== VOIDmode
)
5258 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5259 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5260 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5261 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5263 if (BYTES_BIG_ENDIAN
)
5265 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5266 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5269 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5270 size_int (xll_bitpos
));
5271 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5272 size_int (xrl_bitpos
));
5276 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5277 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5278 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5279 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5280 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5283 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5285 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5290 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5291 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5292 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5293 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5294 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5297 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5299 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5303 /* If the right sides are not constant, do the same for it. Also,
5304 disallow this optimization if a size or signedness mismatch occurs
5305 between the left and right sides. */
5308 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5309 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5310 /* Make sure the two fields on the right
5311 correspond to the left without being swapped. */
5312 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5315 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5316 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5317 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5318 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5320 if (rnmode
== VOIDmode
)
5323 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5324 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5325 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5326 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5328 if (BYTES_BIG_ENDIAN
)
5330 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5331 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5334 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5336 size_int (xlr_bitpos
));
5337 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5339 size_int (xrr_bitpos
));
5341 /* Make a mask that corresponds to both fields being compared.
5342 Do this for both items being compared. If the operands are the
5343 same size and the bits being compared are in the same position
5344 then we can do this by masking both and comparing the masked
5346 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5347 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5348 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5350 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5351 ll_unsignedp
|| rl_unsignedp
);
5352 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5353 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5355 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5356 lr_unsignedp
|| rr_unsignedp
);
5357 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5358 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5360 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5363 /* There is still another way we can do something: If both pairs of
5364 fields being compared are adjacent, we may be able to make a wider
5365 field containing them both.
5367 Note that we still must mask the lhs/rhs expressions. Furthermore,
5368 the mask must be shifted to account for the shift done by
5369 make_bit_field_ref. */
5370 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5371 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5372 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5373 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5377 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5378 ll_bitsize
+ rl_bitsize
,
5379 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5380 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5381 lr_bitsize
+ rr_bitsize
,
5382 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5384 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5385 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5386 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5387 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5389 /* Convert to the smaller type before masking out unwanted bits. */
5391 if (lntype
!= rntype
)
5393 if (lnbitsize
> rnbitsize
)
5395 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5396 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5399 else if (lnbitsize
< rnbitsize
)
5401 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5402 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5407 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5408 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5410 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5411 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5413 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5419 /* Handle the case of comparisons with constants. If there is something in
5420 common between the masks, those bits of the constants must be the same.
5421 If not, the condition is always false. Test for this to avoid generating
5422 incorrect code below. */
5423 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5424 if (! integer_zerop (result
)
5425 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5426 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5428 if (wanted_code
== NE_EXPR
)
5430 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5431 return constant_boolean_node (true, truth_type
);
5435 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5436 return constant_boolean_node (false, truth_type
);
5440 /* Construct the expression we will return. First get the component
5441 reference we will make. Unless the mask is all ones the width of
5442 that field, perform the mask operation. Then compare with the
5444 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5445 ll_unsignedp
|| rl_unsignedp
);
5447 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5448 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5449 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5451 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5452 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5455 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5459 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5463 enum tree_code op_code
;
5466 int consts_equal
, consts_lt
;
5469 STRIP_SIGN_NOPS (arg0
);
5471 op_code
= TREE_CODE (arg0
);
5472 minmax_const
= TREE_OPERAND (arg0
, 1);
5473 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5474 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5475 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5476 inner
= TREE_OPERAND (arg0
, 0);
5478 /* If something does not permit us to optimize, return the original tree. */
5479 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5480 || TREE_CODE (comp_const
) != INTEGER_CST
5481 || TREE_OVERFLOW (comp_const
)
5482 || TREE_CODE (minmax_const
) != INTEGER_CST
5483 || TREE_OVERFLOW (minmax_const
))
5486 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5487 and GT_EXPR, doing the rest with recursive calls using logical
5491 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5494 = optimize_minmax_comparison (loc
,
5495 invert_tree_comparison (code
, false),
5498 return invert_truthvalue_loc (loc
, tem
);
5504 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5505 optimize_minmax_comparison
5506 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5507 optimize_minmax_comparison
5508 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5511 if (op_code
== MAX_EXPR
&& consts_equal
)
5512 /* MAX (X, 0) == 0 -> X <= 0 */
5513 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5515 else if (op_code
== MAX_EXPR
&& consts_lt
)
5516 /* MAX (X, 0) == 5 -> X == 5 */
5517 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5519 else if (op_code
== MAX_EXPR
)
5520 /* MAX (X, 0) == -1 -> false */
5521 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5523 else if (consts_equal
)
5524 /* MIN (X, 0) == 0 -> X >= 0 */
5525 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5528 /* MIN (X, 0) == 5 -> false */
5529 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5532 /* MIN (X, 0) == -1 -> X == -1 */
5533 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5536 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5537 /* MAX (X, 0) > 0 -> X > 0
5538 MAX (X, 0) > 5 -> X > 5 */
5539 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5541 else if (op_code
== MAX_EXPR
)
5542 /* MAX (X, 0) > -1 -> true */
5543 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5545 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5546 /* MIN (X, 0) > 0 -> false
5547 MIN (X, 0) > 5 -> false */
5548 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5551 /* MIN (X, 0) > -1 -> X > -1 */
5552 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5559 /* T is an integer expression that is being multiplied, divided, or taken a
5560 modulus (CODE says which and what kind of divide or modulus) by a
5561 constant C. See if we can eliminate that operation by folding it with
5562 other operations already in T. WIDE_TYPE, if non-null, is a type that
5563 should be used for the computation if wider than our type.
5565 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5566 (X * 2) + (Y * 4). We must, however, be assured that either the original
5567 expression would not overflow or that overflow is undefined for the type
5568 in the language in question.
5570 If we return a non-null expression, it is an equivalent form of the
5571 original computation, but need not be in the original type.
5573 We set *STRICT_OVERFLOW_P to true if the return values depends on
5574 signed overflow being undefined. Otherwise we do not change
5575 *STRICT_OVERFLOW_P. */
5578 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5579 bool *strict_overflow_p
)
5581 /* To avoid exponential search depth, refuse to allow recursion past
5582 three levels. Beyond that (1) it's highly unlikely that we'll find
5583 something interesting and (2) we've probably processed it before
5584 when we built the inner expression. */
5593 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5600 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5601 bool *strict_overflow_p
)
5603 tree type
= TREE_TYPE (t
);
5604 enum tree_code tcode
= TREE_CODE (t
);
5605 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5606 > GET_MODE_SIZE (TYPE_MODE (type
)))
5607 ? wide_type
: type
);
5609 int same_p
= tcode
== code
;
5610 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5611 bool sub_strict_overflow_p
;
5613 /* Don't deal with constants of zero here; they confuse the code below. */
5614 if (integer_zerop (c
))
5617 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5618 op0
= TREE_OPERAND (t
, 0);
5620 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5621 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5623 /* Note that we need not handle conditional operations here since fold
5624 already handles those cases. So just do arithmetic here. */
5628 /* For a constant, we can always simplify if we are a multiply
5629 or (for divide and modulus) if it is a multiple of our constant. */
5630 if (code
== MULT_EXPR
5631 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
)))
5632 return const_binop (code
, fold_convert (ctype
, t
),
5633 fold_convert (ctype
, c
));
5636 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5637 /* If op0 is an expression ... */
5638 if ((COMPARISON_CLASS_P (op0
)
5639 || UNARY_CLASS_P (op0
)
5640 || BINARY_CLASS_P (op0
)
5641 || VL_EXP_CLASS_P (op0
)
5642 || EXPRESSION_CLASS_P (op0
))
5643 /* ... and has wrapping overflow, and its type is smaller
5644 than ctype, then we cannot pass through as widening. */
5645 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5646 && (TYPE_PRECISION (ctype
)
5647 > TYPE_PRECISION (TREE_TYPE (op0
))))
5648 /* ... or this is a truncation (t is narrower than op0),
5649 then we cannot pass through this narrowing. */
5650 || (TYPE_PRECISION (type
)
5651 < TYPE_PRECISION (TREE_TYPE (op0
)))
5652 /* ... or signedness changes for division or modulus,
5653 then we cannot pass through this conversion. */
5654 || (code
!= MULT_EXPR
5655 && (TYPE_UNSIGNED (ctype
)
5656 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5657 /* ... or has undefined overflow while the converted to
5658 type has not, we cannot do the operation in the inner type
5659 as that would introduce undefined overflow. */
5660 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5661 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5664 /* Pass the constant down and see if we can make a simplification. If
5665 we can, replace this expression with the inner simplification for
5666 possible later conversion to our or some other type. */
5667 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5668 && TREE_CODE (t2
) == INTEGER_CST
5669 && !TREE_OVERFLOW (t2
)
5670 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5672 ? ctype
: NULL_TREE
,
5673 strict_overflow_p
))))
5678 /* If widening the type changes it from signed to unsigned, then we
5679 must avoid building ABS_EXPR itself as unsigned. */
5680 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5682 tree cstype
= (*signed_type_for
) (ctype
);
5683 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5686 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5687 return fold_convert (ctype
, t1
);
5691 /* If the constant is negative, we cannot simplify this. */
5692 if (tree_int_cst_sgn (c
) == -1)
5696 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5698 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5701 case MIN_EXPR
: case MAX_EXPR
:
5702 /* If widening the type changes the signedness, then we can't perform
5703 this optimization as that changes the result. */
5704 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5707 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5708 sub_strict_overflow_p
= false;
5709 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5710 &sub_strict_overflow_p
)) != 0
5711 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5712 &sub_strict_overflow_p
)) != 0)
5714 if (tree_int_cst_sgn (c
) < 0)
5715 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5716 if (sub_strict_overflow_p
)
5717 *strict_overflow_p
= true;
5718 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5719 fold_convert (ctype
, t2
));
5723 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5724 /* If the second operand is constant, this is a multiplication
5725 or floor division, by a power of two, so we can treat it that
5726 way unless the multiplier or divisor overflows. Signed
5727 left-shift overflow is implementation-defined rather than
5728 undefined in C90, so do not convert signed left shift into
5730 if (TREE_CODE (op1
) == INTEGER_CST
5731 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5732 /* const_binop may not detect overflow correctly,
5733 so check for it explicitly here. */
5734 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5735 && TREE_INT_CST_HIGH (op1
) == 0
5736 && 0 != (t1
= fold_convert (ctype
,
5737 const_binop (LSHIFT_EXPR
,
5740 && !TREE_OVERFLOW (t1
))
5741 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5742 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5744 fold_convert (ctype
, op0
),
5746 c
, code
, wide_type
, strict_overflow_p
);
5749 case PLUS_EXPR
: case MINUS_EXPR
:
5750 /* See if we can eliminate the operation on both sides. If we can, we
5751 can return a new PLUS or MINUS. If we can't, the only remaining
5752 cases where we can do anything are if the second operand is a
5754 sub_strict_overflow_p
= false;
5755 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5756 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5757 if (t1
!= 0 && t2
!= 0
5758 && (code
== MULT_EXPR
5759 /* If not multiplication, we can only do this if both operands
5760 are divisible by c. */
5761 || (multiple_of_p (ctype
, op0
, c
)
5762 && multiple_of_p (ctype
, op1
, c
))))
5764 if (sub_strict_overflow_p
)
5765 *strict_overflow_p
= true;
5766 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5767 fold_convert (ctype
, t2
));
5770 /* If this was a subtraction, negate OP1 and set it to be an addition.
5771 This simplifies the logic below. */
5772 if (tcode
== MINUS_EXPR
)
5774 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5775 /* If OP1 was not easily negatable, the constant may be OP0. */
5776 if (TREE_CODE (op0
) == INTEGER_CST
)
5787 if (TREE_CODE (op1
) != INTEGER_CST
)
5790 /* If either OP1 or C are negative, this optimization is not safe for
5791 some of the division and remainder types while for others we need
5792 to change the code. */
5793 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5795 if (code
== CEIL_DIV_EXPR
)
5796 code
= FLOOR_DIV_EXPR
;
5797 else if (code
== FLOOR_DIV_EXPR
)
5798 code
= CEIL_DIV_EXPR
;
5799 else if (code
!= MULT_EXPR
5800 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5804 /* If it's a multiply or a division/modulus operation of a multiple
5805 of our constant, do the operation and verify it doesn't overflow. */
5806 if (code
== MULT_EXPR
5807 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5809 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5810 fold_convert (ctype
, c
));
5811 /* We allow the constant to overflow with wrapping semantics. */
5813 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
5819 /* If we have an unsigned type, we cannot widen the operation since it
5820 will change the result if the original computation overflowed. */
5821 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
5824 /* If we were able to eliminate our operation from the first side,
5825 apply our operation to the second side and reform the PLUS. */
5826 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5827 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5829 /* The last case is if we are a multiply. In that case, we can
5830 apply the distributive law to commute the multiply and addition
5831 if the multiplication of the constants doesn't overflow. */
5832 if (code
== MULT_EXPR
)
5833 return fold_build2 (tcode
, ctype
,
5834 fold_build2 (code
, ctype
,
5835 fold_convert (ctype
, op0
),
5836 fold_convert (ctype
, c
)),
5842 /* We have a special case here if we are doing something like
5843 (C * 8) % 4 since we know that's zero. */
5844 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5845 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5846 /* If the multiplication can overflow we cannot optimize this. */
5847 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
5848 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5849 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5851 *strict_overflow_p
= true;
5852 return omit_one_operand (type
, integer_zero_node
, op0
);
5855 /* ... fall through ... */
5857 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5858 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5859 /* If we can extract our operation from the LHS, do so and return a
5860 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5861 do something only if the second operand is a constant. */
5863 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5864 strict_overflow_p
)) != 0)
5865 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5866 fold_convert (ctype
, op1
));
5867 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5868 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
5869 strict_overflow_p
)) != 0)
5870 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5871 fold_convert (ctype
, t1
));
5872 else if (TREE_CODE (op1
) != INTEGER_CST
)
5875 /* If these are the same operation types, we can associate them
5876 assuming no overflow. */
5881 unsigned prec
= TYPE_PRECISION (ctype
);
5882 bool uns
= TYPE_UNSIGNED (ctype
);
5883 double_int diop1
= tree_to_double_int (op1
).ext (prec
, uns
);
5884 double_int dic
= tree_to_double_int (c
).ext (prec
, uns
);
5885 mul
= diop1
.mul_with_sign (dic
, false, &overflow_p
);
5886 overflow_p
= ((!uns
&& overflow_p
)
5887 | TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
));
5888 if (!double_int_fits_to_tree_p (ctype
, mul
)
5889 && ((uns
&& tcode
!= MULT_EXPR
) || !uns
))
5892 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5893 double_int_to_tree (ctype
, mul
));
5896 /* If these operations "cancel" each other, we have the main
5897 optimizations of this pass, which occur when either constant is a
5898 multiple of the other, in which case we replace this with either an
5899 operation or CODE or TCODE.
5901 If we have an unsigned type, we cannot do this since it will change
5902 the result if the original computation overflowed. */
5903 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
5904 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5905 || (tcode
== MULT_EXPR
5906 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5907 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
5908 && code
!= MULT_EXPR
)))
5910 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5912 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5913 *strict_overflow_p
= true;
5914 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5915 fold_convert (ctype
,
5916 const_binop (TRUNC_DIV_EXPR
,
5919 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
)))
5921 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5922 *strict_overflow_p
= true;
5923 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5924 fold_convert (ctype
,
5925 const_binop (TRUNC_DIV_EXPR
,
5938 /* Return a node which has the indicated constant VALUE (either 0 or
5939 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5940 and is of the indicated TYPE. */
5943 constant_boolean_node (bool value
, tree type
)
5945 if (type
== integer_type_node
)
5946 return value
? integer_one_node
: integer_zero_node
;
5947 else if (type
== boolean_type_node
)
5948 return value
? boolean_true_node
: boolean_false_node
;
5949 else if (TREE_CODE (type
) == VECTOR_TYPE
)
5950 return build_vector_from_val (type
,
5951 build_int_cst (TREE_TYPE (type
),
5954 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
5958 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5959 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5960 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5961 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5962 COND is the first argument to CODE; otherwise (as in the example
5963 given here), it is the second argument. TYPE is the type of the
5964 original expression. Return NULL_TREE if no simplification is
5968 fold_binary_op_with_conditional_arg (location_t loc
,
5969 enum tree_code code
,
5970 tree type
, tree op0
, tree op1
,
5971 tree cond
, tree arg
, int cond_first_p
)
5973 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5974 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5975 tree test
, true_value
, false_value
;
5976 tree lhs
= NULL_TREE
;
5977 tree rhs
= NULL_TREE
;
5978 enum tree_code cond_code
= COND_EXPR
;
5980 if (TREE_CODE (cond
) == COND_EXPR
5981 || TREE_CODE (cond
) == VEC_COND_EXPR
)
5983 test
= TREE_OPERAND (cond
, 0);
5984 true_value
= TREE_OPERAND (cond
, 1);
5985 false_value
= TREE_OPERAND (cond
, 2);
5986 /* If this operand throws an expression, then it does not make
5987 sense to try to perform a logical or arithmetic operation
5989 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
5991 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
5996 tree testtype
= TREE_TYPE (cond
);
5998 true_value
= constant_boolean_node (true, testtype
);
5999 false_value
= constant_boolean_node (false, testtype
);
6002 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6003 cond_code
= VEC_COND_EXPR
;
6005 /* This transformation is only worthwhile if we don't have to wrap ARG
6006 in a SAVE_EXPR and the operation can be simplified without recursing
6007 on at least one of the branches once its pushed inside the COND_EXPR. */
6008 if (!TREE_CONSTANT (arg
)
6009 && (TREE_SIDE_EFFECTS (arg
)
6010 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6011 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6014 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6017 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6019 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6021 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6025 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6027 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6029 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6032 /* Check that we have simplified at least one of the branches. */
6033 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6036 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6040 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6042 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6043 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6044 ADDEND is the same as X.
6046 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6047 and finite. The problematic cases are when X is zero, and its mode
6048 has signed zeros. In the case of rounding towards -infinity,
6049 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6050 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6053 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6055 if (!real_zerop (addend
))
6058 /* Don't allow the fold with -fsignaling-nans. */
6059 if (HONOR_SNANS (TYPE_MODE (type
)))
6062 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6063 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
6066 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6067 if (TREE_CODE (addend
) == REAL_CST
6068 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6071 /* The mode has signed zeros, and we have to honor their sign.
6072 In this situation, there is only one case we can return true for.
6073 X - 0 is the same as X unless rounding towards -infinity is
6075 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
6078 /* Subroutine of fold() that checks comparisons of built-in math
6079 functions against real constants.
6081 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6082 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6083 is the type of the result and ARG0 and ARG1 are the operands of the
6084 comparison. ARG1 must be a TREE_REAL_CST.
6086 The function returns the constant folded tree if a simplification
6087 can be made, and NULL_TREE otherwise. */
6090 fold_mathfn_compare (location_t loc
,
6091 enum built_in_function fcode
, enum tree_code code
,
6092 tree type
, tree arg0
, tree arg1
)
6096 if (BUILTIN_SQRT_P (fcode
))
6098 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6099 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6101 c
= TREE_REAL_CST (arg1
);
6102 if (REAL_VALUE_NEGATIVE (c
))
6104 /* sqrt(x) < y is always false, if y is negative. */
6105 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6106 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6108 /* sqrt(x) > y is always true, if y is negative and we
6109 don't care about NaNs, i.e. negative values of x. */
6110 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6111 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6113 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6114 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6115 build_real (TREE_TYPE (arg
), dconst0
));
6117 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6121 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6122 real_convert (&c2
, mode
, &c2
);
6124 if (REAL_VALUE_ISINF (c2
))
6126 /* sqrt(x) > y is x == +Inf, when y is very large. */
6127 if (HONOR_INFINITIES (mode
))
6128 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6129 build_real (TREE_TYPE (arg
), c2
));
6131 /* sqrt(x) > y is always false, when y is very large
6132 and we don't care about infinities. */
6133 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6136 /* sqrt(x) > c is the same as x > c*c. */
6137 return fold_build2_loc (loc
, code
, type
, arg
,
6138 build_real (TREE_TYPE (arg
), c2
));
6140 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6144 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6145 real_convert (&c2
, mode
, &c2
);
6147 if (REAL_VALUE_ISINF (c2
))
6149 /* sqrt(x) < y is always true, when y is a very large
6150 value and we don't care about NaNs or Infinities. */
6151 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6152 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6154 /* sqrt(x) < y is x != +Inf when y is very large and we
6155 don't care about NaNs. */
6156 if (! HONOR_NANS (mode
))
6157 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6158 build_real (TREE_TYPE (arg
), c2
));
6160 /* sqrt(x) < y is x >= 0 when y is very large and we
6161 don't care about Infinities. */
6162 if (! HONOR_INFINITIES (mode
))
6163 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6164 build_real (TREE_TYPE (arg
), dconst0
));
6166 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6167 arg
= save_expr (arg
);
6168 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6169 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6170 build_real (TREE_TYPE (arg
),
6172 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6173 build_real (TREE_TYPE (arg
),
6177 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6178 if (! HONOR_NANS (mode
))
6179 return fold_build2_loc (loc
, code
, type
, arg
,
6180 build_real (TREE_TYPE (arg
), c2
));
6182 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6183 arg
= save_expr (arg
);
6184 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6185 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6186 build_real (TREE_TYPE (arg
),
6188 fold_build2_loc (loc
, code
, type
, arg
,
6189 build_real (TREE_TYPE (arg
),
6197 /* Subroutine of fold() that optimizes comparisons against Infinities,
6198 either +Inf or -Inf.
6200 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6201 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6202 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6204 The function returns the constant folded tree if a simplification
6205 can be made, and NULL_TREE otherwise. */
6208 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6209 tree arg0
, tree arg1
)
6211 enum machine_mode mode
;
6212 REAL_VALUE_TYPE max
;
6216 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6218 /* For negative infinity swap the sense of the comparison. */
6219 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6221 code
= swap_tree_comparison (code
);
6226 /* x > +Inf is always false, if with ignore sNANs. */
6227 if (HONOR_SNANS (mode
))
6229 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6232 /* x <= +Inf is always true, if we don't case about NaNs. */
6233 if (! HONOR_NANS (mode
))
6234 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6236 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6237 arg0
= save_expr (arg0
);
6238 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6242 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6243 real_maxval (&max
, neg
, mode
);
6244 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6245 arg0
, build_real (TREE_TYPE (arg0
), max
));
6248 /* x < +Inf is always equal to x <= DBL_MAX. */
6249 real_maxval (&max
, neg
, mode
);
6250 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6251 arg0
, build_real (TREE_TYPE (arg0
), max
));
6254 /* x != +Inf is always equal to !(x > DBL_MAX). */
6255 real_maxval (&max
, neg
, mode
);
6256 if (! HONOR_NANS (mode
))
6257 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6258 arg0
, build_real (TREE_TYPE (arg0
), max
));
6260 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6261 arg0
, build_real (TREE_TYPE (arg0
), max
));
6262 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6271 /* Subroutine of fold() that optimizes comparisons of a division by
6272 a nonzero integer constant against an integer constant, i.e.
6275 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6276 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6277 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6279 The function returns the constant folded tree if a simplification
6280 can be made, and NULL_TREE otherwise. */
6283 fold_div_compare (location_t loc
,
6284 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6286 tree prod
, tmp
, hi
, lo
;
6287 tree arg00
= TREE_OPERAND (arg0
, 0);
6288 tree arg01
= TREE_OPERAND (arg0
, 1);
6290 bool unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (arg0
));
6294 /* We have to do this the hard way to detect unsigned overflow.
6295 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6296 val
= TREE_INT_CST (arg01
)
6297 .mul_with_sign (TREE_INT_CST (arg1
), unsigned_p
, &overflow
);
6298 prod
= force_fit_type_double (TREE_TYPE (arg00
), val
, -1, overflow
);
6299 neg_overflow
= false;
6303 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6304 build_int_cst (TREE_TYPE (arg01
), 1));
6307 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6308 val
= TREE_INT_CST (prod
)
6309 .add_with_sign (TREE_INT_CST (tmp
), unsigned_p
, &overflow
);
6310 hi
= force_fit_type_double (TREE_TYPE (arg00
), val
,
6311 -1, overflow
| TREE_OVERFLOW (prod
));
6313 else if (tree_int_cst_sgn (arg01
) >= 0)
6315 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6316 build_int_cst (TREE_TYPE (arg01
), 1));
6317 switch (tree_int_cst_sgn (arg1
))
6320 neg_overflow
= true;
6321 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6326 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6331 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6341 /* A negative divisor reverses the relational operators. */
6342 code
= swap_tree_comparison (code
);
6344 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6345 build_int_cst (TREE_TYPE (arg01
), 1));
6346 switch (tree_int_cst_sgn (arg1
))
6349 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6354 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6359 neg_overflow
= true;
6360 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6372 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6373 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6374 if (TREE_OVERFLOW (hi
))
6375 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6376 if (TREE_OVERFLOW (lo
))
6377 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6378 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6381 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6382 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6383 if (TREE_OVERFLOW (hi
))
6384 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6385 if (TREE_OVERFLOW (lo
))
6386 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6387 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6390 if (TREE_OVERFLOW (lo
))
6392 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6393 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6395 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6398 if (TREE_OVERFLOW (hi
))
6400 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6401 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6403 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6406 if (TREE_OVERFLOW (hi
))
6408 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6409 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6411 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6414 if (TREE_OVERFLOW (lo
))
6416 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6417 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6419 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6429 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6430 equality/inequality test, then return a simplified form of the test
6431 using a sign testing. Otherwise return NULL. TYPE is the desired
6435 fold_single_bit_test_into_sign_test (location_t loc
,
6436 enum tree_code code
, tree arg0
, tree arg1
,
6439 /* If this is testing a single bit, we can optimize the test. */
6440 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6441 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6442 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6444 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6445 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6446 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6448 if (arg00
!= NULL_TREE
6449 /* This is only a win if casting to a signed type is cheap,
6450 i.e. when arg00's type is not a partial mode. */
6451 && TYPE_PRECISION (TREE_TYPE (arg00
))
6452 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00
))))
6454 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6455 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6457 fold_convert_loc (loc
, stype
, arg00
),
6458 build_int_cst (stype
, 0));
6465 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6466 equality/inequality test, then return a simplified form of
6467 the test using shifts and logical operations. Otherwise return
6468 NULL. TYPE is the desired result type. */
6471 fold_single_bit_test (location_t loc
, enum tree_code code
,
6472 tree arg0
, tree arg1
, tree result_type
)
6474 /* If this is testing a single bit, we can optimize the test. */
6475 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6476 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6477 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6479 tree inner
= TREE_OPERAND (arg0
, 0);
6480 tree type
= TREE_TYPE (arg0
);
6481 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6482 enum machine_mode operand_mode
= TYPE_MODE (type
);
6484 tree signed_type
, unsigned_type
, intermediate_type
;
6487 /* First, see if we can fold the single bit test into a sign-bit
6489 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6494 /* Otherwise we have (A & C) != 0 where C is a single bit,
6495 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6496 Similarly for (A & C) == 0. */
6498 /* If INNER is a right shift of a constant and it plus BITNUM does
6499 not overflow, adjust BITNUM and INNER. */
6500 if (TREE_CODE (inner
) == RSHIFT_EXPR
6501 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6502 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
6503 && bitnum
< TYPE_PRECISION (type
)
6504 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
6505 bitnum
- TYPE_PRECISION (type
)))
6507 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
6508 inner
= TREE_OPERAND (inner
, 0);
6511 /* If we are going to be able to omit the AND below, we must do our
6512 operations as unsigned. If we must use the AND, we have a choice.
6513 Normally unsigned is faster, but for some machines signed is. */
6514 #ifdef LOAD_EXTEND_OP
6515 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6516 && !flag_syntax_only
) ? 0 : 1;
6521 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6522 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6523 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6524 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6527 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6528 inner
, size_int (bitnum
));
6530 one
= build_int_cst (intermediate_type
, 1);
6532 if (code
== EQ_EXPR
)
6533 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6535 /* Put the AND last so it can combine with more things. */
6536 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6538 /* Make sure to return the proper type. */
6539 inner
= fold_convert_loc (loc
, result_type
, inner
);
6546 /* Check whether we are allowed to reorder operands arg0 and arg1,
6547 such that the evaluation of arg1 occurs before arg0. */
6550 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6552 if (! flag_evaluation_order
)
6554 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6556 return ! TREE_SIDE_EFFECTS (arg0
)
6557 && ! TREE_SIDE_EFFECTS (arg1
);
6560 /* Test whether it is preferable two swap two operands, ARG0 and
6561 ARG1, for example because ARG0 is an integer constant and ARG1
6562 isn't. If REORDER is true, only recommend swapping if we can
6563 evaluate the operands in reverse order. */
6566 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6568 STRIP_SIGN_NOPS (arg0
);
6569 STRIP_SIGN_NOPS (arg1
);
6571 if (TREE_CODE (arg1
) == INTEGER_CST
)
6573 if (TREE_CODE (arg0
) == INTEGER_CST
)
6576 if (TREE_CODE (arg1
) == REAL_CST
)
6578 if (TREE_CODE (arg0
) == REAL_CST
)
6581 if (TREE_CODE (arg1
) == FIXED_CST
)
6583 if (TREE_CODE (arg0
) == FIXED_CST
)
6586 if (TREE_CODE (arg1
) == COMPLEX_CST
)
6588 if (TREE_CODE (arg0
) == COMPLEX_CST
)
6591 if (TREE_CONSTANT (arg1
))
6593 if (TREE_CONSTANT (arg0
))
6596 if (optimize_function_for_size_p (cfun
))
6599 if (reorder
&& flag_evaluation_order
6600 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6603 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6604 for commutative and comparison operators. Ensuring a canonical
6605 form allows the optimizers to find additional redundancies without
6606 having to explicitly check for both orderings. */
6607 if (TREE_CODE (arg0
) == SSA_NAME
6608 && TREE_CODE (arg1
) == SSA_NAME
6609 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6612 /* Put SSA_NAMEs last. */
6613 if (TREE_CODE (arg1
) == SSA_NAME
)
6615 if (TREE_CODE (arg0
) == SSA_NAME
)
6618 /* Put variables last. */
6627 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6628 ARG0 is extended to a wider type. */
6631 fold_widened_comparison (location_t loc
, enum tree_code code
,
6632 tree type
, tree arg0
, tree arg1
)
6634 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6636 tree shorter_type
, outer_type
;
6640 if (arg0_unw
== arg0
)
6642 shorter_type
= TREE_TYPE (arg0_unw
);
6644 #ifdef HAVE_canonicalize_funcptr_for_compare
6645 /* Disable this optimization if we're casting a function pointer
6646 type on targets that require function pointer canonicalization. */
6647 if (HAVE_canonicalize_funcptr_for_compare
6648 && TREE_CODE (shorter_type
) == POINTER_TYPE
6649 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6653 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6656 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6658 /* If possible, express the comparison in the shorter mode. */
6659 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6660 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6661 && (TREE_TYPE (arg1_unw
) == shorter_type
6662 || ((TYPE_PRECISION (shorter_type
)
6663 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6664 && (TYPE_UNSIGNED (shorter_type
)
6665 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6666 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6667 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6668 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6669 && int_fits_type_p (arg1_unw
, shorter_type
))))
6670 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6671 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6673 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6674 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6675 || !int_fits_type_p (arg1_unw
, shorter_type
))
6678 /* If we are comparing with the integer that does not fit into the range
6679 of the shorter type, the result is known. */
6680 outer_type
= TREE_TYPE (arg1_unw
);
6681 min
= lower_bound_in_type (outer_type
, shorter_type
);
6682 max
= upper_bound_in_type (outer_type
, shorter_type
);
6684 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6686 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6693 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6698 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6704 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6706 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6711 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6713 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6722 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6723 ARG0 just the signedness is changed. */
6726 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6727 tree arg0
, tree arg1
)
6730 tree inner_type
, outer_type
;
6732 if (!CONVERT_EXPR_P (arg0
))
6735 outer_type
= TREE_TYPE (arg0
);
6736 arg0_inner
= TREE_OPERAND (arg0
, 0);
6737 inner_type
= TREE_TYPE (arg0_inner
);
6739 #ifdef HAVE_canonicalize_funcptr_for_compare
6740 /* Disable this optimization if we're casting a function pointer
6741 type on targets that require function pointer canonicalization. */
6742 if (HAVE_canonicalize_funcptr_for_compare
6743 && TREE_CODE (inner_type
) == POINTER_TYPE
6744 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6748 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6751 if (TREE_CODE (arg1
) != INTEGER_CST
6752 && !(CONVERT_EXPR_P (arg1
)
6753 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6756 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6761 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6764 if (TREE_CODE (arg1
) == INTEGER_CST
)
6765 arg1
= force_fit_type_double (inner_type
, tree_to_double_int (arg1
),
6766 0, TREE_OVERFLOW (arg1
));
6768 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6770 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6773 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6774 step of the array. Reconstructs s and delta in the case of s *
6775 delta being an integer constant (and thus already folded). ADDR is
6776 the address. MULT is the multiplicative expression. If the
6777 function succeeds, the new address expression is returned.
6778 Otherwise NULL_TREE is returned. LOC is the location of the
6779 resulting expression. */
6782 try_move_mult_to_index (location_t loc
, tree addr
, tree op1
)
6784 tree s
, delta
, step
;
6785 tree ref
= TREE_OPERAND (addr
, 0), pref
;
6790 /* Strip the nops that might be added when converting op1 to sizetype. */
6793 /* Canonicalize op1 into a possibly non-constant delta
6794 and an INTEGER_CST s. */
6795 if (TREE_CODE (op1
) == MULT_EXPR
)
6797 tree arg0
= TREE_OPERAND (op1
, 0), arg1
= TREE_OPERAND (op1
, 1);
6802 if (TREE_CODE (arg0
) == INTEGER_CST
)
6807 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6815 else if (TREE_CODE (op1
) == INTEGER_CST
)
6822 /* Simulate we are delta * 1. */
6824 s
= integer_one_node
;
6827 /* Handle &x.array the same as we would handle &x.array[0]. */
6828 if (TREE_CODE (ref
) == COMPONENT_REF
6829 && TREE_CODE (TREE_TYPE (ref
)) == ARRAY_TYPE
)
6833 /* Remember if this was a multi-dimensional array. */
6834 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6837 domain
= TYPE_DOMAIN (TREE_TYPE (ref
));
6840 itype
= TREE_TYPE (domain
);
6842 step
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref
)));
6843 if (TREE_CODE (step
) != INTEGER_CST
)
6848 if (! tree_int_cst_equal (step
, s
))
6853 /* Try if delta is a multiple of step. */
6854 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, op1
, step
);
6860 /* Only fold here if we can verify we do not overflow one
6861 dimension of a multi-dimensional array. */
6866 if (!TYPE_MIN_VALUE (domain
)
6867 || !TYPE_MAX_VALUE (domain
)
6868 || TREE_CODE (TYPE_MAX_VALUE (domain
)) != INTEGER_CST
)
6871 tmp
= fold_binary_loc (loc
, PLUS_EXPR
, itype
,
6872 fold_convert_loc (loc
, itype
,
6873 TYPE_MIN_VALUE (domain
)),
6874 fold_convert_loc (loc
, itype
, delta
));
6875 if (TREE_CODE (tmp
) != INTEGER_CST
6876 || tree_int_cst_lt (TYPE_MAX_VALUE (domain
), tmp
))
6880 /* We found a suitable component reference. */
6882 pref
= TREE_OPERAND (addr
, 0);
6883 ret
= copy_node (pref
);
6884 SET_EXPR_LOCATION (ret
, loc
);
6886 ret
= build4_loc (loc
, ARRAY_REF
, TREE_TYPE (TREE_TYPE (ref
)), ret
,
6888 (loc
, PLUS_EXPR
, itype
,
6889 fold_convert_loc (loc
, itype
,
6891 (TYPE_DOMAIN (TREE_TYPE (ref
)))),
6892 fold_convert_loc (loc
, itype
, delta
)),
6893 NULL_TREE
, NULL_TREE
);
6894 return build_fold_addr_expr_loc (loc
, ret
);
6899 for (;; ref
= TREE_OPERAND (ref
, 0))
6901 if (TREE_CODE (ref
) == ARRAY_REF
)
6905 /* Remember if this was a multi-dimensional array. */
6906 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6909 domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
6912 itype
= TREE_TYPE (domain
);
6914 step
= array_ref_element_size (ref
);
6915 if (TREE_CODE (step
) != INTEGER_CST
)
6920 if (! tree_int_cst_equal (step
, s
))
6925 /* Try if delta is a multiple of step. */
6926 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, op1
, step
);
6932 /* Only fold here if we can verify we do not overflow one
6933 dimension of a multi-dimensional array. */
6938 if (TREE_CODE (TREE_OPERAND (ref
, 1)) != INTEGER_CST
6939 || !TYPE_MAX_VALUE (domain
)
6940 || TREE_CODE (TYPE_MAX_VALUE (domain
)) != INTEGER_CST
)
6943 tmp
= fold_binary_loc (loc
, PLUS_EXPR
, itype
,
6944 fold_convert_loc (loc
, itype
,
6945 TREE_OPERAND (ref
, 1)),
6946 fold_convert_loc (loc
, itype
, delta
));
6948 || TREE_CODE (tmp
) != INTEGER_CST
6949 || tree_int_cst_lt (TYPE_MAX_VALUE (domain
), tmp
))
6958 if (!handled_component_p (ref
))
6962 /* We found the suitable array reference. So copy everything up to it,
6963 and replace the index. */
6965 pref
= TREE_OPERAND (addr
, 0);
6966 ret
= copy_node (pref
);
6967 SET_EXPR_LOCATION (ret
, loc
);
6972 pref
= TREE_OPERAND (pref
, 0);
6973 TREE_OPERAND (pos
, 0) = copy_node (pref
);
6974 pos
= TREE_OPERAND (pos
, 0);
6977 TREE_OPERAND (pos
, 1)
6978 = fold_build2_loc (loc
, PLUS_EXPR
, itype
,
6979 fold_convert_loc (loc
, itype
, TREE_OPERAND (pos
, 1)),
6980 fold_convert_loc (loc
, itype
, delta
));
6981 return fold_build1_loc (loc
, ADDR_EXPR
, TREE_TYPE (addr
), ret
);
6985 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6986 means A >= Y && A != MAX, but in this case we know that
6987 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6990 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6992 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6994 if (TREE_CODE (bound
) == LT_EXPR
)
6995 a
= TREE_OPERAND (bound
, 0);
6996 else if (TREE_CODE (bound
) == GT_EXPR
)
6997 a
= TREE_OPERAND (bound
, 1);
7001 typea
= TREE_TYPE (a
);
7002 if (!INTEGRAL_TYPE_P (typea
)
7003 && !POINTER_TYPE_P (typea
))
7006 if (TREE_CODE (ineq
) == LT_EXPR
)
7008 a1
= TREE_OPERAND (ineq
, 1);
7009 y
= TREE_OPERAND (ineq
, 0);
7011 else if (TREE_CODE (ineq
) == GT_EXPR
)
7013 a1
= TREE_OPERAND (ineq
, 0);
7014 y
= TREE_OPERAND (ineq
, 1);
7019 if (TREE_TYPE (a1
) != typea
)
7022 if (POINTER_TYPE_P (typea
))
7024 /* Convert the pointer types into integer before taking the difference. */
7025 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7026 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7027 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7030 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7032 if (!diff
|| !integer_onep (diff
))
7035 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7038 /* Fold a sum or difference of at least one multiplication.
7039 Returns the folded tree or NULL if no simplification could be made. */
7042 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7043 tree arg0
, tree arg1
)
7045 tree arg00
, arg01
, arg10
, arg11
;
7046 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7048 /* (A * C) +- (B * C) -> (A+-B) * C.
7049 (A * C) +- A -> A * (C+-1).
7050 We are most concerned about the case where C is a constant,
7051 but other combinations show up during loop reduction. Since
7052 it is not difficult, try all four possibilities. */
7054 if (TREE_CODE (arg0
) == MULT_EXPR
)
7056 arg00
= TREE_OPERAND (arg0
, 0);
7057 arg01
= TREE_OPERAND (arg0
, 1);
7059 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7061 arg00
= build_one_cst (type
);
7066 /* We cannot generate constant 1 for fract. */
7067 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7070 arg01
= build_one_cst (type
);
7072 if (TREE_CODE (arg1
) == MULT_EXPR
)
7074 arg10
= TREE_OPERAND (arg1
, 0);
7075 arg11
= TREE_OPERAND (arg1
, 1);
7077 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7079 arg10
= build_one_cst (type
);
7080 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7081 the purpose of this canonicalization. */
7082 if (TREE_INT_CST_HIGH (arg1
) == -1
7083 && negate_expr_p (arg1
)
7084 && code
== PLUS_EXPR
)
7086 arg11
= negate_expr (arg1
);
7094 /* We cannot generate constant 1 for fract. */
7095 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7098 arg11
= build_one_cst (type
);
7102 if (operand_equal_p (arg01
, arg11
, 0))
7103 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7104 else if (operand_equal_p (arg00
, arg10
, 0))
7105 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7106 else if (operand_equal_p (arg00
, arg11
, 0))
7107 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7108 else if (operand_equal_p (arg01
, arg10
, 0))
7109 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7111 /* No identical multiplicands; see if we can find a common
7112 power-of-two factor in non-power-of-two multiplies. This
7113 can help in multi-dimensional array access. */
7114 else if (host_integerp (arg01
, 0)
7115 && host_integerp (arg11
, 0))
7117 HOST_WIDE_INT int01
, int11
, tmp
;
7120 int01
= TREE_INT_CST_LOW (arg01
);
7121 int11
= TREE_INT_CST_LOW (arg11
);
7123 /* Move min of absolute values to int11. */
7124 if (absu_hwi (int01
) < absu_hwi (int11
))
7126 tmp
= int01
, int01
= int11
, int11
= tmp
;
7127 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7134 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7135 /* The remainder should not be a constant, otherwise we
7136 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7137 increased the number of multiplications necessary. */
7138 && TREE_CODE (arg10
) != INTEGER_CST
)
7140 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7141 build_int_cst (TREE_TYPE (arg00
),
7146 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7151 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7152 fold_build2_loc (loc
, code
, type
,
7153 fold_convert_loc (loc
, type
, alt0
),
7154 fold_convert_loc (loc
, type
, alt1
)),
7155 fold_convert_loc (loc
, type
, same
));
7160 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7161 specified by EXPR into the buffer PTR of length LEN bytes.
7162 Return the number of bytes placed in the buffer, or zero
7166 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
)
7168 tree type
= TREE_TYPE (expr
);
7169 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7170 int byte
, offset
, word
, words
;
7171 unsigned char value
;
7173 if (total_bytes
> len
)
7175 words
= total_bytes
/ UNITS_PER_WORD
;
7177 for (byte
= 0; byte
< total_bytes
; byte
++)
7179 int bitpos
= byte
* BITS_PER_UNIT
;
7180 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7181 value
= (unsigned char) (TREE_INT_CST_LOW (expr
) >> bitpos
);
7183 value
= (unsigned char) (TREE_INT_CST_HIGH (expr
)
7184 >> (bitpos
- HOST_BITS_PER_WIDE_INT
));
7186 if (total_bytes
> UNITS_PER_WORD
)
7188 word
= byte
/ UNITS_PER_WORD
;
7189 if (WORDS_BIG_ENDIAN
)
7190 word
= (words
- 1) - word
;
7191 offset
= word
* UNITS_PER_WORD
;
7192 if (BYTES_BIG_ENDIAN
)
7193 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7195 offset
+= byte
% UNITS_PER_WORD
;
7198 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7199 ptr
[offset
] = value
;
7205 /* Subroutine of native_encode_expr. Encode the REAL_CST
7206 specified by EXPR into the buffer PTR of length LEN bytes.
7207 Return the number of bytes placed in the buffer, or zero
7211 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
)
7213 tree type
= TREE_TYPE (expr
);
7214 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7215 int byte
, offset
, word
, words
, bitpos
;
7216 unsigned char value
;
7218 /* There are always 32 bits in each long, no matter the size of
7219 the hosts long. We handle floating point representations with
7223 if (total_bytes
> len
)
7225 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7227 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7229 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7230 bitpos
+= BITS_PER_UNIT
)
7232 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7233 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7235 if (UNITS_PER_WORD
< 4)
7237 word
= byte
/ UNITS_PER_WORD
;
7238 if (WORDS_BIG_ENDIAN
)
7239 word
= (words
- 1) - word
;
7240 offset
= word
* UNITS_PER_WORD
;
7241 if (BYTES_BIG_ENDIAN
)
7242 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7244 offset
+= byte
% UNITS_PER_WORD
;
7247 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7248 ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)] = value
;
7253 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7254 specified by EXPR into the buffer PTR of length LEN bytes.
7255 Return the number of bytes placed in the buffer, or zero
7259 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
)
7264 part
= TREE_REALPART (expr
);
7265 rsize
= native_encode_expr (part
, ptr
, len
);
7268 part
= TREE_IMAGPART (expr
);
7269 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
);
7272 return rsize
+ isize
;
7276 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7277 specified by EXPR into the buffer PTR of length LEN bytes.
7278 Return the number of bytes placed in the buffer, or zero
7282 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
)
7289 count
= VECTOR_CST_NELTS (expr
);
7290 itype
= TREE_TYPE (TREE_TYPE (expr
));
7291 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7292 for (i
= 0; i
< count
; i
++)
7294 elem
= VECTOR_CST_ELT (expr
, i
);
7295 if (native_encode_expr (elem
, ptr
+offset
, len
-offset
) != size
)
7303 /* Subroutine of native_encode_expr. Encode the STRING_CST
7304 specified by EXPR into the buffer PTR of length LEN bytes.
7305 Return the number of bytes placed in the buffer, or zero
7309 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
)
7311 tree type
= TREE_TYPE (expr
);
7312 HOST_WIDE_INT total_bytes
;
7314 if (TREE_CODE (type
) != ARRAY_TYPE
7315 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7316 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7317 || !host_integerp (TYPE_SIZE_UNIT (type
), 0))
7319 total_bytes
= tree_low_cst (TYPE_SIZE_UNIT (type
), 0);
7320 if (total_bytes
> len
)
7322 if (TREE_STRING_LENGTH (expr
) < total_bytes
)
7324 memcpy (ptr
, TREE_STRING_POINTER (expr
), TREE_STRING_LENGTH (expr
));
7325 memset (ptr
+ TREE_STRING_LENGTH (expr
), 0,
7326 total_bytes
- TREE_STRING_LENGTH (expr
));
7329 memcpy (ptr
, TREE_STRING_POINTER (expr
), total_bytes
);
7334 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7335 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7336 buffer PTR of length LEN bytes. Return the number of bytes
7337 placed in the buffer, or zero upon failure. */
7340 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
)
7342 switch (TREE_CODE (expr
))
7345 return native_encode_int (expr
, ptr
, len
);
7348 return native_encode_real (expr
, ptr
, len
);
7351 return native_encode_complex (expr
, ptr
, len
);
7354 return native_encode_vector (expr
, ptr
, len
);
7357 return native_encode_string (expr
, ptr
, len
);
7365 /* Subroutine of native_interpret_expr. Interpret the contents of
7366 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7367 If the buffer cannot be interpreted, return NULL_TREE. */
7370 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7372 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7373 int byte
, offset
, word
, words
;
7374 unsigned char value
;
7377 if (total_bytes
> len
)
7379 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7382 result
= double_int_zero
;
7383 words
= total_bytes
/ UNITS_PER_WORD
;
7385 for (byte
= 0; byte
< total_bytes
; byte
++)
7387 int bitpos
= byte
* BITS_PER_UNIT
;
7388 if (total_bytes
> UNITS_PER_WORD
)
7390 word
= byte
/ UNITS_PER_WORD
;
7391 if (WORDS_BIG_ENDIAN
)
7392 word
= (words
- 1) - word
;
7393 offset
= word
* UNITS_PER_WORD
;
7394 if (BYTES_BIG_ENDIAN
)
7395 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7397 offset
+= byte
% UNITS_PER_WORD
;
7400 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7401 value
= ptr
[offset
];
7403 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7404 result
.low
|= (unsigned HOST_WIDE_INT
) value
<< bitpos
;
7406 result
.high
|= (unsigned HOST_WIDE_INT
) value
7407 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
7410 return double_int_to_tree (type
, result
);
7414 /* Subroutine of native_interpret_expr. Interpret the contents of
7415 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7416 If the buffer cannot be interpreted, return NULL_TREE. */
7419 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7421 enum machine_mode mode
= TYPE_MODE (type
);
7422 int total_bytes
= GET_MODE_SIZE (mode
);
7423 int byte
, offset
, word
, words
, bitpos
;
7424 unsigned char value
;
7425 /* There are always 32 bits in each long, no matter the size of
7426 the hosts long. We handle floating point representations with
7431 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7432 if (total_bytes
> len
|| total_bytes
> 24)
7434 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7436 memset (tmp
, 0, sizeof (tmp
));
7437 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7438 bitpos
+= BITS_PER_UNIT
)
7440 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7441 if (UNITS_PER_WORD
< 4)
7443 word
= byte
/ UNITS_PER_WORD
;
7444 if (WORDS_BIG_ENDIAN
)
7445 word
= (words
- 1) - word
;
7446 offset
= word
* UNITS_PER_WORD
;
7447 if (BYTES_BIG_ENDIAN
)
7448 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7450 offset
+= byte
% UNITS_PER_WORD
;
7453 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7454 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7456 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7459 real_from_target (&r
, tmp
, mode
);
7460 return build_real (type
, r
);
7464 /* Subroutine of native_interpret_expr. Interpret the contents of
7465 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7466 If the buffer cannot be interpreted, return NULL_TREE. */
7469 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7471 tree etype
, rpart
, ipart
;
7474 etype
= TREE_TYPE (type
);
7475 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7478 rpart
= native_interpret_expr (etype
, ptr
, size
);
7481 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7484 return build_complex (type
, rpart
, ipart
);
7488 /* Subroutine of native_interpret_expr. Interpret the contents of
7489 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7490 If the buffer cannot be interpreted, return NULL_TREE. */
7493 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7499 etype
= TREE_TYPE (type
);
7500 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7501 count
= TYPE_VECTOR_SUBPARTS (type
);
7502 if (size
* count
> len
)
7505 elements
= XALLOCAVEC (tree
, count
);
7506 for (i
= count
- 1; i
>= 0; i
--)
7508 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7513 return build_vector (type
, elements
);
7517 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7518 the buffer PTR of length LEN as a constant of type TYPE. For
7519 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7520 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7521 return NULL_TREE. */
7524 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7526 switch (TREE_CODE (type
))
7532 case REFERENCE_TYPE
:
7533 return native_interpret_int (type
, ptr
, len
);
7536 return native_interpret_real (type
, ptr
, len
);
7539 return native_interpret_complex (type
, ptr
, len
);
7542 return native_interpret_vector (type
, ptr
, len
);
7549 /* Returns true if we can interpret the contents of a native encoding
7553 can_native_interpret_type_p (tree type
)
7555 switch (TREE_CODE (type
))
7561 case REFERENCE_TYPE
:
7571 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7572 TYPE at compile-time. If we're unable to perform the conversion
7573 return NULL_TREE. */
7576 fold_view_convert_expr (tree type
, tree expr
)
7578 /* We support up to 512-bit values (for V8DFmode). */
7579 unsigned char buffer
[64];
7582 /* Check that the host and target are sane. */
7583 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7586 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7590 return native_interpret_expr (type
, buffer
, len
);
7593 /* Build an expression for the address of T. Folds away INDIRECT_REF
7594 to avoid confusing the gimplify process. */
7597 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7599 /* The size of the object is not relevant when talking about its address. */
7600 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7601 t
= TREE_OPERAND (t
, 0);
7603 if (TREE_CODE (t
) == INDIRECT_REF
)
7605 t
= TREE_OPERAND (t
, 0);
7607 if (TREE_TYPE (t
) != ptrtype
)
7608 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7610 else if (TREE_CODE (t
) == MEM_REF
7611 && integer_zerop (TREE_OPERAND (t
, 1)))
7612 return TREE_OPERAND (t
, 0);
7613 else if (TREE_CODE (t
) == MEM_REF
7614 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7615 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7616 TREE_OPERAND (t
, 0),
7617 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7618 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7620 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7622 if (TREE_TYPE (t
) != ptrtype
)
7623 t
= fold_convert_loc (loc
, ptrtype
, t
);
7626 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7631 /* Build an expression for the address of T. */
7634 build_fold_addr_expr_loc (location_t loc
, tree t
)
7636 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7638 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7641 static bool vec_cst_ctor_to_array (tree
, tree
*);
7643 /* Fold a unary expression of code CODE and type TYPE with operand
7644 OP0. Return the folded expression if folding is successful.
7645 Otherwise, return NULL_TREE. */
7648 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7652 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7654 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7655 && TREE_CODE_LENGTH (code
) == 1);
7660 if (CONVERT_EXPR_CODE_P (code
)
7661 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7663 /* Don't use STRIP_NOPS, because signedness of argument type
7665 STRIP_SIGN_NOPS (arg0
);
7669 /* Strip any conversions that don't change the mode. This
7670 is safe for every expression, except for a comparison
7671 expression because its signedness is derived from its
7674 Note that this is done as an internal manipulation within
7675 the constant folder, in order to find the simplest
7676 representation of the arguments so that their form can be
7677 studied. In any cases, the appropriate type conversions
7678 should be put back in the tree that will get out of the
7684 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7686 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7687 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7688 fold_build1_loc (loc
, code
, type
,
7689 fold_convert_loc (loc
, TREE_TYPE (op0
),
7690 TREE_OPERAND (arg0
, 1))));
7691 else if (TREE_CODE (arg0
) == COND_EXPR
)
7693 tree arg01
= TREE_OPERAND (arg0
, 1);
7694 tree arg02
= TREE_OPERAND (arg0
, 2);
7695 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7696 arg01
= fold_build1_loc (loc
, code
, type
,
7697 fold_convert_loc (loc
,
7698 TREE_TYPE (op0
), arg01
));
7699 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7700 arg02
= fold_build1_loc (loc
, code
, type
,
7701 fold_convert_loc (loc
,
7702 TREE_TYPE (op0
), arg02
));
7703 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7706 /* If this was a conversion, and all we did was to move into
7707 inside the COND_EXPR, bring it back out. But leave it if
7708 it is a conversion from integer to integer and the
7709 result precision is no wider than a word since such a
7710 conversion is cheap and may be optimized away by combine,
7711 while it couldn't if it were outside the COND_EXPR. Then return
7712 so we don't get into an infinite recursion loop taking the
7713 conversion out and then back in. */
7715 if ((CONVERT_EXPR_CODE_P (code
)
7716 || code
== NON_LVALUE_EXPR
)
7717 && TREE_CODE (tem
) == COND_EXPR
7718 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7719 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7720 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7721 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7722 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7723 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7724 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7726 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7727 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7728 || flag_syntax_only
))
7729 tem
= build1_loc (loc
, code
, type
,
7731 TREE_TYPE (TREE_OPERAND
7732 (TREE_OPERAND (tem
, 1), 0)),
7733 TREE_OPERAND (tem
, 0),
7734 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7735 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7744 /* Re-association barriers around constants and other re-association
7745 barriers can be removed. */
7746 if (CONSTANT_CLASS_P (op0
)
7747 || TREE_CODE (op0
) == PAREN_EXPR
)
7748 return fold_convert_loc (loc
, type
, op0
);
7753 case FIX_TRUNC_EXPR
:
7754 if (TREE_TYPE (op0
) == type
)
7757 if (COMPARISON_CLASS_P (op0
))
7759 /* If we have (type) (a CMP b) and type is an integral type, return
7760 new expression involving the new type. Canonicalize
7761 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7763 Do not fold the result as that would not simplify further, also
7764 folding again results in recursions. */
7765 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7766 return build2_loc (loc
, TREE_CODE (op0
), type
,
7767 TREE_OPERAND (op0
, 0),
7768 TREE_OPERAND (op0
, 1));
7769 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7770 && TREE_CODE (type
) != VECTOR_TYPE
)
7771 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7772 constant_boolean_node (true, type
),
7773 constant_boolean_node (false, type
));
7776 /* Handle cases of two conversions in a row. */
7777 if (CONVERT_EXPR_P (op0
))
7779 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7780 tree inter_type
= TREE_TYPE (op0
);
7781 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7782 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7783 int inside_float
= FLOAT_TYPE_P (inside_type
);
7784 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7785 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7786 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7787 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7788 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7789 int inter_float
= FLOAT_TYPE_P (inter_type
);
7790 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7791 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7792 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7793 int final_int
= INTEGRAL_TYPE_P (type
);
7794 int final_ptr
= POINTER_TYPE_P (type
);
7795 int final_float
= FLOAT_TYPE_P (type
);
7796 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7797 unsigned int final_prec
= TYPE_PRECISION (type
);
7798 int final_unsignedp
= TYPE_UNSIGNED (type
);
7800 /* In addition to the cases of two conversions in a row
7801 handled below, if we are converting something to its own
7802 type via an object of identical or wider precision, neither
7803 conversion is needed. */
7804 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7805 && (((inter_int
|| inter_ptr
) && final_int
)
7806 || (inter_float
&& final_float
))
7807 && inter_prec
>= final_prec
)
7808 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7810 /* Likewise, if the intermediate and initial types are either both
7811 float or both integer, we don't need the middle conversion if the
7812 former is wider than the latter and doesn't change the signedness
7813 (for integers). Avoid this if the final type is a pointer since
7814 then we sometimes need the middle conversion. Likewise if the
7815 final type has a precision not equal to the size of its mode. */
7816 if (((inter_int
&& inside_int
)
7817 || (inter_float
&& inside_float
)
7818 || (inter_vec
&& inside_vec
))
7819 && inter_prec
>= inside_prec
7820 && (inter_float
|| inter_vec
7821 || inter_unsignedp
== inside_unsignedp
)
7822 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7823 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7825 && (! final_vec
|| inter_prec
== inside_prec
))
7826 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7828 /* If we have a sign-extension of a zero-extended value, we can
7829 replace that by a single zero-extension. Likewise if the
7830 final conversion does not change precision we can drop the
7831 intermediate conversion. */
7832 if (inside_int
&& inter_int
&& final_int
7833 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7834 && inside_unsignedp
&& !inter_unsignedp
)
7835 || final_prec
== inter_prec
))
7836 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7838 /* Two conversions in a row are not needed unless:
7839 - some conversion is floating-point (overstrict for now), or
7840 - some conversion is a vector (overstrict for now), or
7841 - the intermediate type is narrower than both initial and
7843 - the intermediate type and innermost type differ in signedness,
7844 and the outermost type is wider than the intermediate, or
7845 - the initial type is a pointer type and the precisions of the
7846 intermediate and final types differ, or
7847 - the final type is a pointer type and the precisions of the
7848 initial and intermediate types differ. */
7849 if (! inside_float
&& ! inter_float
&& ! final_float
7850 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7851 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7852 && ! (inside_int
&& inter_int
7853 && inter_unsignedp
!= inside_unsignedp
7854 && inter_prec
< final_prec
)
7855 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7856 == (final_unsignedp
&& final_prec
> inter_prec
))
7857 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7858 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7859 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7860 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7861 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7864 /* Handle (T *)&A.B.C for A being of type T and B and C
7865 living at offset zero. This occurs frequently in
7866 C++ upcasting and then accessing the base. */
7867 if (TREE_CODE (op0
) == ADDR_EXPR
7868 && POINTER_TYPE_P (type
)
7869 && handled_component_p (TREE_OPERAND (op0
, 0)))
7871 HOST_WIDE_INT bitsize
, bitpos
;
7873 enum machine_mode mode
;
7874 int unsignedp
, volatilep
;
7875 tree base
= TREE_OPERAND (op0
, 0);
7876 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7877 &mode
, &unsignedp
, &volatilep
, false);
7878 /* If the reference was to a (constant) zero offset, we can use
7879 the address of the base if it has the same base type
7880 as the result type and the pointer type is unqualified. */
7881 if (! offset
&& bitpos
== 0
7882 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7883 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7884 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7885 return fold_convert_loc (loc
, type
,
7886 build_fold_addr_expr_loc (loc
, base
));
7889 if (TREE_CODE (op0
) == MODIFY_EXPR
7890 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7891 /* Detect assigning a bitfield. */
7892 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7894 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7896 /* Don't leave an assignment inside a conversion
7897 unless assigning a bitfield. */
7898 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7899 /* First do the assignment, then return converted constant. */
7900 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7901 TREE_NO_WARNING (tem
) = 1;
7902 TREE_USED (tem
) = 1;
7906 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7907 constants (if x has signed type, the sign bit cannot be set
7908 in c). This folds extension into the BIT_AND_EXPR.
7909 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7910 very likely don't have maximal range for their precision and this
7911 transformation effectively doesn't preserve non-maximal ranges. */
7912 if (TREE_CODE (type
) == INTEGER_TYPE
7913 && TREE_CODE (op0
) == BIT_AND_EXPR
7914 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7916 tree and_expr
= op0
;
7917 tree and0
= TREE_OPERAND (and_expr
, 0);
7918 tree and1
= TREE_OPERAND (and_expr
, 1);
7921 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7922 || (TYPE_PRECISION (type
)
7923 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7925 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7926 <= HOST_BITS_PER_WIDE_INT
7927 && host_integerp (and1
, 1))
7929 unsigned HOST_WIDE_INT cst
;
7931 cst
= tree_low_cst (and1
, 1);
7932 cst
&= (HOST_WIDE_INT
) -1
7933 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7934 change
= (cst
== 0);
7935 #ifdef LOAD_EXTEND_OP
7937 && !flag_syntax_only
7938 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7941 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7942 and0
= fold_convert_loc (loc
, uns
, and0
);
7943 and1
= fold_convert_loc (loc
, uns
, and1
);
7949 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7950 0, TREE_OVERFLOW (and1
));
7951 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7952 fold_convert_loc (loc
, type
, and0
), tem
);
7956 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7957 when one of the new casts will fold away. Conservatively we assume
7958 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7959 if (POINTER_TYPE_P (type
)
7960 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7961 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7962 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7963 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7964 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7966 tree arg00
= TREE_OPERAND (arg0
, 0);
7967 tree arg01
= TREE_OPERAND (arg0
, 1);
7969 return fold_build_pointer_plus_loc
7970 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7973 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7974 of the same precision, and X is an integer type not narrower than
7975 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7976 if (INTEGRAL_TYPE_P (type
)
7977 && TREE_CODE (op0
) == BIT_NOT_EXPR
7978 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7979 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7980 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7982 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7983 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7984 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7985 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7986 fold_convert_loc (loc
, type
, tem
));
7989 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7990 type of X and Y (integer types only). */
7991 if (INTEGRAL_TYPE_P (type
)
7992 && TREE_CODE (op0
) == MULT_EXPR
7993 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7994 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7996 /* Be careful not to introduce new overflows. */
7998 if (TYPE_OVERFLOW_WRAPS (type
))
8001 mult_type
= unsigned_type_for (type
);
8003 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8005 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
8006 fold_convert_loc (loc
, mult_type
,
8007 TREE_OPERAND (op0
, 0)),
8008 fold_convert_loc (loc
, mult_type
,
8009 TREE_OPERAND (op0
, 1)));
8010 return fold_convert_loc (loc
, type
, tem
);
8014 tem
= fold_convert_const (code
, type
, op0
);
8015 return tem
? tem
: NULL_TREE
;
8017 case ADDR_SPACE_CONVERT_EXPR
:
8018 if (integer_zerop (arg0
))
8019 return fold_convert_const (code
, type
, arg0
);
8022 case FIXED_CONVERT_EXPR
:
8023 tem
= fold_convert_const (code
, type
, arg0
);
8024 return tem
? tem
: NULL_TREE
;
8026 case VIEW_CONVERT_EXPR
:
8027 if (TREE_TYPE (op0
) == type
)
8029 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
8030 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
8031 type
, TREE_OPERAND (op0
, 0));
8032 if (TREE_CODE (op0
) == MEM_REF
)
8033 return fold_build2_loc (loc
, MEM_REF
, type
,
8034 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
8036 /* For integral conversions with the same precision or pointer
8037 conversions use a NOP_EXPR instead. */
8038 if ((INTEGRAL_TYPE_P (type
)
8039 || POINTER_TYPE_P (type
))
8040 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8041 || POINTER_TYPE_P (TREE_TYPE (op0
)))
8042 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8043 return fold_convert_loc (loc
, type
, op0
);
8045 /* Strip inner integral conversions that do not change the precision. */
8046 if (CONVERT_EXPR_P (op0
)
8047 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8048 || POINTER_TYPE_P (TREE_TYPE (op0
)))
8049 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0)))
8050 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0))))
8051 && (TYPE_PRECISION (TREE_TYPE (op0
))
8052 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0
, 0)))))
8053 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
8054 type
, TREE_OPERAND (op0
, 0));
8056 return fold_view_convert_expr (type
, op0
);
8059 tem
= fold_negate_expr (loc
, arg0
);
8061 return fold_convert_loc (loc
, type
, tem
);
8065 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
8066 return fold_abs_const (arg0
, type
);
8067 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
8068 return fold_build1_loc (loc
, ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
8069 /* Convert fabs((double)float) into (double)fabsf(float). */
8070 else if (TREE_CODE (arg0
) == NOP_EXPR
8071 && TREE_CODE (type
) == REAL_TYPE
)
8073 tree targ0
= strip_float_extensions (arg0
);
8075 return fold_convert_loc (loc
, type
,
8076 fold_build1_loc (loc
, ABS_EXPR
,
8080 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8081 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8083 else if (tree_expr_nonnegative_p (arg0
))
8086 /* Strip sign ops from argument. */
8087 if (TREE_CODE (type
) == REAL_TYPE
)
8089 tem
= fold_strip_sign_ops (arg0
);
8091 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8092 fold_convert_loc (loc
, type
, tem
));
8097 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8098 return fold_convert_loc (loc
, type
, arg0
);
8099 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8101 tree itype
= TREE_TYPE (type
);
8102 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8103 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8104 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8105 negate_expr (ipart
));
8107 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8109 tree itype
= TREE_TYPE (type
);
8110 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
8111 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
8112 return build_complex (type
, rpart
, negate_expr (ipart
));
8114 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8115 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8119 if (TREE_CODE (arg0
) == INTEGER_CST
)
8120 return fold_not_const (arg0
, type
);
8121 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
8122 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8123 /* Convert ~ (-A) to A - 1. */
8124 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8125 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8126 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8127 build_int_cst (type
, 1));
8128 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8129 else if (INTEGRAL_TYPE_P (type
)
8130 && ((TREE_CODE (arg0
) == MINUS_EXPR
8131 && integer_onep (TREE_OPERAND (arg0
, 1)))
8132 || (TREE_CODE (arg0
) == PLUS_EXPR
8133 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8134 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
8135 fold_convert_loc (loc
, type
,
8136 TREE_OPERAND (arg0
, 0)));
8137 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8138 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8139 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8140 fold_convert_loc (loc
, type
,
8141 TREE_OPERAND (arg0
, 0)))))
8142 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8143 fold_convert_loc (loc
, type
,
8144 TREE_OPERAND (arg0
, 1)));
8145 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8146 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8147 fold_convert_loc (loc
, type
,
8148 TREE_OPERAND (arg0
, 1)))))
8149 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8150 fold_convert_loc (loc
, type
,
8151 TREE_OPERAND (arg0
, 0)), tem
);
8152 /* Perform BIT_NOT_EXPR on each element individually. */
8153 else if (TREE_CODE (arg0
) == VECTOR_CST
)
8157 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
8159 elements
= XALLOCAVEC (tree
, count
);
8160 for (i
= 0; i
< count
; i
++)
8162 elem
= VECTOR_CST_ELT (arg0
, i
);
8163 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
8164 if (elem
== NULL_TREE
)
8169 return build_vector (type
, elements
);
8174 case TRUTH_NOT_EXPR
:
8175 /* The argument to invert_truthvalue must have Boolean type. */
8176 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
8177 arg0
= fold_convert_loc (loc
, boolean_type_node
, arg0
);
8179 /* Note that the operand of this must be an int
8180 and its values must be 0 or 1.
8181 ("true" is a fixed value perhaps depending on the language,
8182 but we don't handle values other than 1 correctly yet.) */
8183 tem
= fold_truth_not_expr (loc
, arg0
);
8186 return fold_convert_loc (loc
, type
, tem
);
8189 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8190 return fold_convert_loc (loc
, type
, arg0
);
8191 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8192 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
8193 TREE_OPERAND (arg0
, 1));
8194 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8195 return fold_convert_loc (loc
, type
, TREE_REALPART (arg0
));
8196 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8198 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8199 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8200 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8201 TREE_OPERAND (arg0
, 0)),
8202 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8203 TREE_OPERAND (arg0
, 1)));
8204 return fold_convert_loc (loc
, type
, tem
);
8206 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8208 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8209 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8210 TREE_OPERAND (arg0
, 0));
8211 return fold_convert_loc (loc
, type
, tem
);
8213 if (TREE_CODE (arg0
) == CALL_EXPR
)
8215 tree fn
= get_callee_fndecl (arg0
);
8216 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8217 switch (DECL_FUNCTION_CODE (fn
))
8219 CASE_FLT_FN (BUILT_IN_CEXPI
):
8220 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8222 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8232 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8233 return build_zero_cst (type
);
8234 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8235 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 1),
8236 TREE_OPERAND (arg0
, 0));
8237 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8238 return fold_convert_loc (loc
, type
, TREE_IMAGPART (arg0
));
8239 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8241 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8242 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8243 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8244 TREE_OPERAND (arg0
, 0)),
8245 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8246 TREE_OPERAND (arg0
, 1)));
8247 return fold_convert_loc (loc
, type
, tem
);
8249 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8251 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8252 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8253 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8255 if (TREE_CODE (arg0
) == CALL_EXPR
)
8257 tree fn
= get_callee_fndecl (arg0
);
8258 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8259 switch (DECL_FUNCTION_CODE (fn
))
8261 CASE_FLT_FN (BUILT_IN_CEXPI
):
8262 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8264 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8274 /* Fold *&X to X if X is an lvalue. */
8275 if (TREE_CODE (op0
) == ADDR_EXPR
)
8277 tree op00
= TREE_OPERAND (op0
, 0);
8278 if ((TREE_CODE (op00
) == VAR_DECL
8279 || TREE_CODE (op00
) == PARM_DECL
8280 || TREE_CODE (op00
) == RESULT_DECL
)
8281 && !TREE_READONLY (op00
))
8286 case VEC_UNPACK_LO_EXPR
:
8287 case VEC_UNPACK_HI_EXPR
:
8288 case VEC_UNPACK_FLOAT_LO_EXPR
:
8289 case VEC_UNPACK_FLOAT_HI_EXPR
:
8291 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8293 enum tree_code subcode
;
8295 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
8296 if (TREE_CODE (arg0
) != VECTOR_CST
)
8299 elts
= XALLOCAVEC (tree
, nelts
* 2);
8300 if (!vec_cst_ctor_to_array (arg0
, elts
))
8303 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
8304 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
8307 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
8310 subcode
= FLOAT_EXPR
;
8312 for (i
= 0; i
< nelts
; i
++)
8314 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
8315 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
8319 return build_vector (type
, elts
);
8322 case REDUC_MIN_EXPR
:
8323 case REDUC_MAX_EXPR
:
8324 case REDUC_PLUS_EXPR
:
8326 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8328 enum tree_code subcode
;
8330 if (TREE_CODE (op0
) != VECTOR_CST
)
8333 elts
= XALLOCAVEC (tree
, nelts
);
8334 if (!vec_cst_ctor_to_array (op0
, elts
))
8339 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
8340 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
8341 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
8342 default: gcc_unreachable ();
8345 for (i
= 1; i
< nelts
; i
++)
8347 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
8348 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
8350 elts
[i
] = build_zero_cst (TREE_TYPE (type
));
8353 return build_vector (type
, elts
);
8358 } /* switch (code) */
8362 /* If the operation was a conversion do _not_ mark a resulting constant
8363 with TREE_OVERFLOW if the original constant was not. These conversions
8364 have implementation defined behavior and retaining the TREE_OVERFLOW
8365 flag here would confuse later passes such as VRP. */
8367 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8368 tree type
, tree op0
)
8370 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8372 && TREE_CODE (res
) == INTEGER_CST
8373 && TREE_CODE (op0
) == INTEGER_CST
8374 && CONVERT_EXPR_CODE_P (code
))
8375 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.   Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      /* Operand reordering is only legitimate when the inner operators
	 are the non-short-circuit AND/OR forms and the outer operator
	 is too.  */
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
			    fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
			    fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
			    fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
			    fold_build2_loc (loc, code, type, a00, a10),
			    a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      /* NCODE is the non-short-circuit variant, ICODE the
	 short-circuit (IF) variant of the requested operation.  */
      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	   or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	  && simple_operand_p_2 (arg0)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
8525 /* Fold a binary expression of code CODE and type TYPE with operands
8526 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8527 Return the folded expression if folding is successful. Otherwise,
8528 return NULL_TREE. */
8531 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8533 enum tree_code compl_code
;
8535 if (code
== MIN_EXPR
)
8536 compl_code
= MAX_EXPR
;
8537 else if (code
== MAX_EXPR
)
8538 compl_code
= MIN_EXPR
;
8542 /* MIN (MAX (a, b), b) == b. */
8543 if (TREE_CODE (op0
) == compl_code
8544 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8545 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8547 /* MIN (MAX (b, a), b) == b. */
8548 if (TREE_CODE (op0
) == compl_code
8549 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8550 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8551 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8553 /* MIN (a, MAX (a, b)) == a. */
8554 if (TREE_CODE (op1
) == compl_code
8555 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8556 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8557 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8559 /* MIN (a, MAX (b, a)) == a. */
8560 if (TREE_CODE (op1
) == compl_code
8561 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8562 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8563 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  /* Whether the result should be emitted with operands swapped.  */
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* This branch relied on A +- CST not overflowing; tell the
	 caller so it can emit -Wstrict-overflow diagnostics.  */
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
8680 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8681 overflow further. Try to decrease the magnitude of constants involved
8682 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8683 and put sole constants at the second argument position.
8684 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8687 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8688 tree arg0
, tree arg1
)
8691 bool strict_overflow_p
;
8692 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8693 "when reducing constant in comparison");
8695 /* Try canonicalization by simplifying arg0. */
8696 strict_overflow_p
= false;
8697 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8698 &strict_overflow_p
);
8701 if (strict_overflow_p
)
8702 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8706 /* Try canonicalization by simplifying arg1 using the swapped
8708 code
= swap_tree_comparison (code
);
8709 strict_overflow_p
= false;
8710 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8711 &strict_overflow_p
);
8712 if (t
&& strict_overflow_p
)
8713 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8717 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8718 space. This is used to avoid issuing overflow warnings for
8719 expressions like &p->x which can not wrap. */
8722 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8724 double_int di_offset
, total
;
8726 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8732 if (offset
== NULL_TREE
)
8733 di_offset
= double_int_zero
;
8734 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8737 di_offset
= TREE_INT_CST (offset
);
8740 double_int units
= double_int::from_uhwi (bitpos
/ BITS_PER_UNIT
);
8741 total
= di_offset
.add_with_sign (units
, true, &overflow
);
8745 if (total
.high
!= 0)
8748 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8752 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8754 if (TREE_CODE (base
) == ADDR_EXPR
)
8756 HOST_WIDE_INT base_size
;
8758 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8759 if (base_size
> 0 && size
< base_size
)
8763 return total
.low
> (unsigned HOST_WIDE_INT
) size
;
8766 /* Subroutine of fold_binary. This routine performs all of the
8767 transformations that are common to the equality/inequality
8768 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8769 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8770 fold_binary should call fold_binary. Fold a comparison with
8771 tree code CODE and type TYPE with operands OP0 and OP1. Return
8772 the folded comparison or NULL_TREE. */
8775 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8778 tree arg0
, arg1
, tem
;
8783 STRIP_SIGN_NOPS (arg0
);
8784 STRIP_SIGN_NOPS (arg1
);
8786 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
8787 if (tem
!= NULL_TREE
)
8790 /* If one arg is a real or integer constant, put it last. */
8791 if (tree_swap_operands_p (arg0
, arg1
, true))
8792 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
8794 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8795 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8796 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8797 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8798 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
8799 && (TREE_CODE (arg1
) == INTEGER_CST
8800 && !TREE_OVERFLOW (arg1
)))
8802 tree const1
= TREE_OPERAND (arg0
, 1);
8804 tree variable
= TREE_OPERAND (arg0
, 0);
8807 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
8809 lhs
= fold_build2_loc (loc
, lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
8810 TREE_TYPE (arg1
), const2
, const1
);
8812 /* If the constant operation overflowed this can be
8813 simplified as a comparison against INT_MAX/INT_MIN. */
8814 if (TREE_CODE (lhs
) == INTEGER_CST
8815 && TREE_OVERFLOW (lhs
))
8817 int const1_sgn
= tree_int_cst_sgn (const1
);
8818 enum tree_code code2
= code
;
8820 /* Get the sign of the constant on the lhs if the
8821 operation were VARIABLE + CONST1. */
8822 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8823 const1_sgn
= -const1_sgn
;
8825 /* The sign of the constant determines if we overflowed
8826 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8827 Canonicalize to the INT_MIN overflow by swapping the comparison
8829 if (const1_sgn
== -1)
8830 code2
= swap_tree_comparison (code
);
8832 /* We now can look at the canonicalized case
8833 VARIABLE + 1 CODE2 INT_MIN
8834 and decide on the result. */
8835 if (code2
== LT_EXPR
8837 || code2
== EQ_EXPR
)
8838 return omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8839 else if (code2
== NE_EXPR
8841 || code2
== GT_EXPR
)
8842 return omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8845 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
8846 && (TREE_CODE (lhs
) != INTEGER_CST
8847 || !TREE_OVERFLOW (lhs
)))
8849 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
8850 fold_overflow_warning ("assuming signed overflow does not occur "
8851 "when changing X +- C1 cmp C2 to "
8853 WARN_STRICT_OVERFLOW_COMPARISON
);
8854 return fold_build2_loc (loc
, code
, type
, variable
, lhs
);
8858 /* For comparisons of pointers we can decompose it to a compile time
8859 comparison of the base objects and the offsets into the object.
8860 This requires at least one operand being an ADDR_EXPR or a
8861 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8862 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8863 && (TREE_CODE (arg0
) == ADDR_EXPR
8864 || TREE_CODE (arg1
) == ADDR_EXPR
8865 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8866 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8868 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8869 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8870 enum machine_mode mode
;
8871 int volatilep
, unsignedp
;
8872 bool indirect_base0
= false, indirect_base1
= false;
8874 /* Get base and offset for the access. Strip ADDR_EXPR for
8875 get_inner_reference, but put it back by stripping INDIRECT_REF
8876 off the base object if possible. indirect_baseN will be true
8877 if baseN is not an address but refers to the object itself. */
8879 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8881 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8882 &bitsize
, &bitpos0
, &offset0
, &mode
,
8883 &unsignedp
, &volatilep
, false);
8884 if (TREE_CODE (base0
) == INDIRECT_REF
)
8885 base0
= TREE_OPERAND (base0
, 0);
8887 indirect_base0
= true;
8889 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8891 base0
= TREE_OPERAND (arg0
, 0);
8892 STRIP_SIGN_NOPS (base0
);
8893 if (TREE_CODE (base0
) == ADDR_EXPR
)
8895 base0
= TREE_OPERAND (base0
, 0);
8896 indirect_base0
= true;
8898 offset0
= TREE_OPERAND (arg0
, 1);
8899 if (host_integerp (offset0
, 0))
8901 HOST_WIDE_INT off
= size_low_cst (offset0
);
8902 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8904 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8906 bitpos0
= off
* BITS_PER_UNIT
;
8907 offset0
= NULL_TREE
;
8913 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8915 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8916 &bitsize
, &bitpos1
, &offset1
, &mode
,
8917 &unsignedp
, &volatilep
, false);
8918 if (TREE_CODE (base1
) == INDIRECT_REF
)
8919 base1
= TREE_OPERAND (base1
, 0);
8921 indirect_base1
= true;
8923 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8925 base1
= TREE_OPERAND (arg1
, 0);
8926 STRIP_SIGN_NOPS (base1
);
8927 if (TREE_CODE (base1
) == ADDR_EXPR
)
8929 base1
= TREE_OPERAND (base1
, 0);
8930 indirect_base1
= true;
8932 offset1
= TREE_OPERAND (arg1
, 1);
8933 if (host_integerp (offset1
, 0))
8935 HOST_WIDE_INT off
= size_low_cst (offset1
);
8936 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8938 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8940 bitpos1
= off
* BITS_PER_UNIT
;
8941 offset1
= NULL_TREE
;
8946 /* A local variable can never be pointed to by
8947 the default SSA name of an incoming parameter. */
8948 if ((TREE_CODE (arg0
) == ADDR_EXPR
8950 && TREE_CODE (base0
) == VAR_DECL
8951 && auto_var_in_fn_p (base0
, current_function_decl
)
8953 && TREE_CODE (base1
) == SSA_NAME
8954 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8955 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8956 || (TREE_CODE (arg1
) == ADDR_EXPR
8958 && TREE_CODE (base1
) == VAR_DECL
8959 && auto_var_in_fn_p (base1
, current_function_decl
)
8961 && TREE_CODE (base0
) == SSA_NAME
8962 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8963 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8965 if (code
== NE_EXPR
)
8966 return constant_boolean_node (1, type
);
8967 else if (code
== EQ_EXPR
)
8968 return constant_boolean_node (0, type
);
8970 /* If we have equivalent bases we might be able to simplify. */
8971 else if (indirect_base0
== indirect_base1
8972 && operand_equal_p (base0
, base1
, 0))
8974 /* We can fold this expression to a constant if the non-constant
8975 offset parts are equal. */
8976 if ((offset0
== offset1
8977 || (offset0
&& offset1
8978 && operand_equal_p (offset0
, offset1
, 0)))
8981 || (indirect_base0
&& DECL_P (base0
))
8982 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8987 && bitpos0
!= bitpos1
8988 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8989 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8990 fold_overflow_warning (("assuming pointer wraparound does not "
8991 "occur when comparing P +- C1 with "
8993 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8998 return constant_boolean_node (bitpos0
== bitpos1
, type
);
9000 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
9002 return constant_boolean_node (bitpos0
< bitpos1
, type
);
9004 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
9006 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
9008 return constant_boolean_node (bitpos0
> bitpos1
, type
);
9012 /* We can simplify the comparison to a comparison of the variable
9013 offset parts if the constant offset parts are equal.
9014 Be careful to use signed sizetype here because otherwise we
9015 mess with array offsets in the wrong way. This is possible
9016 because pointer arithmetic is restricted to retain within an
9017 object and overflow on pointer differences is undefined as of
9018 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9019 else if (bitpos0
== bitpos1
9020 && ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9021 || (indirect_base0
&& DECL_P (base0
))
9022 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
9024 /* By converting to signed sizetype we cover middle-end pointer
9025 arithmetic which operates on unsigned pointer types of size
9026 type size and ARRAY_REF offsets which are properly sign or
9027 zero extended from their type in case it is narrower than
9029 if (offset0
== NULL_TREE
)
9030 offset0
= build_int_cst (ssizetype
, 0);
9032 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
9033 if (offset1
== NULL_TREE
)
9034 offset1
= build_int_cst (ssizetype
, 0);
9036 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9040 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9041 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9042 fold_overflow_warning (("assuming pointer wraparound does not "
9043 "occur when comparing P +- C1 with "
9045 WARN_STRICT_OVERFLOW_COMPARISON
);
9047 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9050 /* For non-equal bases we can simplify if they are addresses
9051 of local binding decls or constants. */
9052 else if (indirect_base0
&& indirect_base1
9053 /* We know that !operand_equal_p (base0, base1, 0)
9054 because the if condition was false. But make
9055 sure two decls are not the same. */
9057 && TREE_CODE (arg0
) == ADDR_EXPR
9058 && TREE_CODE (arg1
) == ADDR_EXPR
9059 && (((TREE_CODE (base0
) == VAR_DECL
9060 || TREE_CODE (base0
) == PARM_DECL
)
9061 && (targetm
.binds_local_p (base0
)
9062 || CONSTANT_CLASS_P (base1
)))
9063 || CONSTANT_CLASS_P (base0
))
9064 && (((TREE_CODE (base1
) == VAR_DECL
9065 || TREE_CODE (base1
) == PARM_DECL
)
9066 && (targetm
.binds_local_p (base1
)
9067 || CONSTANT_CLASS_P (base0
)))
9068 || CONSTANT_CLASS_P (base1
)))
9070 if (code
== EQ_EXPR
)
9071 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
9073 else if (code
== NE_EXPR
)
9074 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
9077 /* For equal offsets we can simplify to a comparison of the
9079 else if (bitpos0
== bitpos1
9081 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9083 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9084 && ((offset0
== offset1
)
9085 || (offset0
&& offset1
9086 && operand_equal_p (offset0
, offset1
, 0))))
9089 base0
= build_fold_addr_expr_loc (loc
, base0
);
9091 base1
= build_fold_addr_expr_loc (loc
, base1
);
9092 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9096 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9097 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9098 the resulting offset is smaller in absolute value than the
9100 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9101 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9102 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9103 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9104 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9105 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9106 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9108 tree const1
= TREE_OPERAND (arg0
, 1);
9109 tree const2
= TREE_OPERAND (arg1
, 1);
9110 tree variable1
= TREE_OPERAND (arg0
, 0);
9111 tree variable2
= TREE_OPERAND (arg1
, 0);
9113 const char * const warnmsg
= G_("assuming signed overflow does not "
9114 "occur when combining constants around "
9117 /* Put the constant on the side where it doesn't overflow and is
9118 of lower absolute value than before. */
9119 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9120 ? MINUS_EXPR
: PLUS_EXPR
,
9122 if (!TREE_OVERFLOW (cst
)
9123 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
))
9125 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9126 return fold_build2_loc (loc
, code
, type
,
9128 fold_build2_loc (loc
,
9129 TREE_CODE (arg1
), TREE_TYPE (arg1
),
9133 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9134 ? MINUS_EXPR
: PLUS_EXPR
,
9136 if (!TREE_OVERFLOW (cst
)
9137 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
))
9139 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9140 return fold_build2_loc (loc
, code
, type
,
9141 fold_build2_loc (loc
, TREE_CODE (arg0
), TREE_TYPE (arg0
),
9147 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9148 signed arithmetic case. That form is created by the compiler
9149 often enough for folding it to be of value. One example is in
9150 computing loop trip counts after Operator Strength Reduction. */
9151 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9152 && TREE_CODE (arg0
) == MULT_EXPR
9153 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9154 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9155 && integer_zerop (arg1
))
9157 tree const1
= TREE_OPERAND (arg0
, 1);
9158 tree const2
= arg1
; /* zero */
9159 tree variable1
= TREE_OPERAND (arg0
, 0);
9160 enum tree_code cmp_code
= code
;
9162 /* Handle unfolded multiplication by zero. */
9163 if (integer_zerop (const1
))
9164 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9166 fold_overflow_warning (("assuming signed overflow does not occur when "
9167 "eliminating multiplication in comparison "
9169 WARN_STRICT_OVERFLOW_COMPARISON
);
9171 /* If const1 is negative we swap the sense of the comparison. */
9172 if (tree_int_cst_sgn (const1
) < 0)
9173 cmp_code
= swap_tree_comparison (cmp_code
);
9175 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9178 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9182 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9184 tree targ0
= strip_float_extensions (arg0
);
9185 tree targ1
= strip_float_extensions (arg1
);
9186 tree newtype
= TREE_TYPE (targ0
);
9188 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9189 newtype
= TREE_TYPE (targ1
);
9191 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9192 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9193 return fold_build2_loc (loc
, code
, type
,
9194 fold_convert_loc (loc
, newtype
, targ0
),
9195 fold_convert_loc (loc
, newtype
, targ1
));
9197 /* (-a) CMP (-b) -> b CMP a */
9198 if (TREE_CODE (arg0
) == NEGATE_EXPR
9199 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9200 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9201 TREE_OPERAND (arg0
, 0));
9203 if (TREE_CODE (arg1
) == REAL_CST
)
9205 REAL_VALUE_TYPE cst
;
9206 cst
= TREE_REAL_CST (arg1
);
9208 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9209 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9210 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9211 TREE_OPERAND (arg0
, 0),
9212 build_real (TREE_TYPE (arg1
),
9213 real_value_negate (&cst
)));
9215 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9216 /* a CMP (-0) -> a CMP 0 */
9217 if (REAL_VALUE_MINUS_ZERO (cst
))
9218 return fold_build2_loc (loc
, code
, type
, arg0
,
9219 build_real (TREE_TYPE (arg1
), dconst0
));
9221 /* x != NaN is always true, other ops are always false. */
9222 if (REAL_VALUE_ISNAN (cst
)
9223 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9225 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9226 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9229 /* Fold comparisons against infinity. */
9230 if (REAL_VALUE_ISINF (cst
)
9231 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9233 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9234 if (tem
!= NULL_TREE
)
9239 /* If this is a comparison of a real constant with a PLUS_EXPR
9240 or a MINUS_EXPR of a real constant, we can convert it into a
9241 comparison with a revised real constant as long as no overflow
9242 occurs when unsafe_math_optimizations are enabled. */
9243 if (flag_unsafe_math_optimizations
9244 && TREE_CODE (arg1
) == REAL_CST
9245 && (TREE_CODE (arg0
) == PLUS_EXPR
9246 || TREE_CODE (arg0
) == MINUS_EXPR
)
9247 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9248 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9249 ? MINUS_EXPR
: PLUS_EXPR
,
9250 arg1
, TREE_OPERAND (arg0
, 1)))
9251 && !TREE_OVERFLOW (tem
))
9252 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9254 /* Likewise, we can simplify a comparison of a real constant with
9255 a MINUS_EXPR whose first operand is also a real constant, i.e.
9256 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9257 floating-point types only if -fassociative-math is set. */
9258 if (flag_associative_math
9259 && TREE_CODE (arg1
) == REAL_CST
9260 && TREE_CODE (arg0
) == MINUS_EXPR
9261 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9262 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9264 && !TREE_OVERFLOW (tem
))
9265 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9266 TREE_OPERAND (arg0
, 1), tem
);
9268 /* Fold comparisons against built-in math functions. */
9269 if (TREE_CODE (arg1
) == REAL_CST
9270 && flag_unsafe_math_optimizations
9271 && ! flag_errno_math
)
9273 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9275 if (fcode
!= END_BUILTINS
)
9277 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9278 if (tem
!= NULL_TREE
)
9284 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9285 && CONVERT_EXPR_P (arg0
))
9287 /* If we are widening one operand of an integer comparison,
9288 see if the other operand is similarly being widened. Perhaps we
9289 can do the comparison in the narrower type. */
9290 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9294 /* Or if we are changing signedness. */
9295 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9300 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9301 constant, we can simplify it. */
9302 if (TREE_CODE (arg1
) == INTEGER_CST
9303 && (TREE_CODE (arg0
) == MIN_EXPR
9304 || TREE_CODE (arg0
) == MAX_EXPR
)
9305 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9307 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9312 /* Simplify comparison of something with itself. (For IEEE
9313 floating-point, we can only do some of these simplifications.) */
9314 if (operand_equal_p (arg0
, arg1
, 0))
9319 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9320 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9321 return constant_boolean_node (1, type
);
9326 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9327 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9328 return constant_boolean_node (1, type
);
9329 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9332 /* For NE, we can only do this simplification if integer
9333 or we don't honor IEEE floating point NaNs. */
9334 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9335 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9337 /* ... fall through ... */
9340 return constant_boolean_node (0, type
);
9346 /* If we are comparing an expression that just has comparisons
9347 of two integer values, arithmetic expressions of those comparisons,
9348 and constants, we can simplify it. There are only three cases
9349 to check: the two values can either be equal, the first can be
9350 greater, or the second can be greater. Fold the expression for
9351 those three values. Since each value must be 0 or 1, we have
9352 eight possibilities, each of which corresponds to the constant 0
9353 or 1 or one of the six possible comparisons.
9355 This handles common cases like (a > b) == 0 but also handles
9356 expressions like ((x > y) - (y > x)) > 0, which supposedly
9357 occur in macroized code. */
9359 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9361 tree cval1
= 0, cval2
= 0;
9364 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9365 /* Don't handle degenerate cases here; they should already
9366 have been handled anyway. */
9367 && cval1
!= 0 && cval2
!= 0
9368 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9369 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9370 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9371 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9372 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9373 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9374 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9376 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9377 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9379 /* We can't just pass T to eval_subst in case cval1 or cval2
9380 was the same as ARG1. */
9383 = fold_build2_loc (loc
, code
, type
,
9384 eval_subst (loc
, arg0
, cval1
, maxval
,
9388 = fold_build2_loc (loc
, code
, type
,
9389 eval_subst (loc
, arg0
, cval1
, maxval
,
9393 = fold_build2_loc (loc
, code
, type
,
9394 eval_subst (loc
, arg0
, cval1
, minval
,
9398 /* All three of these results should be 0 or 1. Confirm they are.
9399 Then use those values to select the proper code to use. */
9401 if (TREE_CODE (high_result
) == INTEGER_CST
9402 && TREE_CODE (equal_result
) == INTEGER_CST
9403 && TREE_CODE (low_result
) == INTEGER_CST
)
9405 /* Make a 3-bit mask with the high-order bit being the
9406 value for `>', the next for '=', and the low for '<'. */
9407 switch ((integer_onep (high_result
) * 4)
9408 + (integer_onep (equal_result
) * 2)
9409 + integer_onep (low_result
))
9413 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9434 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9439 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9440 SET_EXPR_LOCATION (tem
, loc
);
9443 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9448 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9449 into a single range test. */
9450 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9451 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9452 && TREE_CODE (arg1
) == INTEGER_CST
9453 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9454 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9455 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9456 && !TREE_OVERFLOW (arg1
))
9458 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9459 if (tem
!= NULL_TREE
)
9463 /* Fold ~X op ~Y as Y op X. */
9464 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9465 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9467 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9468 return fold_build2_loc (loc
, code
, type
,
9469 fold_convert_loc (loc
, cmp_type
,
9470 TREE_OPERAND (arg1
, 0)),
9471 TREE_OPERAND (arg0
, 0));
9474 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9475 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9476 && TREE_CODE (arg1
) == INTEGER_CST
)
9478 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9479 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9480 TREE_OPERAND (arg0
, 0),
9481 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9482 fold_convert_loc (loc
, cmp_type
, arg1
)));
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  /* ITYPE is the scalar component type of the complex type TYPE.  */
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      /* EXPR is referenced once per component below, so wrap it in
	 save_expr to avoid evaluating any side effects twice.  */
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  /* Each part is multiplied by itself, i.e. used twice, so protect
     both with save_expr as well.  */
  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  /* Result is the complex value (rpart*rpart + ipart*ipart, 0).  */
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      /* get_object_alignment_1 reports alignment and misalignment in
	 bits; convert both to byte units for our caller.  */
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      /* Recurse on the base pointer to obtain its modulus/residue.  */
      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  /* A constant offset only shifts the residue; the modulus is
	     unchanged.  */
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  ELTS
   must have room for TYPE_VECTOR_SUBPARTS (TREE_TYPE (ARG)) trees;
   elements not supplied by ARG are zero-filled.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      /* Fail on excess initializers and on initializers that are
	 themselves vectors (i.e. do not map 1:1 onto lanes).  */
      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;

  /* Zero-fill any trailing elements ARG did not provide.  */
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  /* ELTS holds three consecutive groups of NELTS trees: the elements
     of ARG0, the elements of ARG1, and the selected result.  SEL thus
     indexes the first 2*NELTS entries.  */
  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      /* Any non-constant selected element forces a CONSTRUCTOR result
	 rather than a VECTOR_CST.  */
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE two address expressions of
   array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  Note the first two arms
     assign the computed difference to BASE_OFFSET inside the
     condition; a NULL_TREE result falls through to the next arm.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      /* The result is base difference plus index difference scaled by
	 the element size of AREF0.  */
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      /* exact_real_inverse overwrites R with 1/R on success.  */
      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      /* Every element must have an exact inverse, otherwise fail.  */
      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
9755 /* Fold a binary expression of code CODE and type TYPE with operands
9756 OP0 and OP1. LOC is the location of the resulting expression.
9757 Return the folded expression if folding is successful. Otherwise,
9758 return NULL_TREE. */
9761 fold_binary_loc (location_t loc
,
9762 enum tree_code code
, tree type
, tree op0
, tree op1
)
9764 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9765 tree arg0
, arg1
, tem
;
9766 tree t1
= NULL_TREE
;
9767 bool strict_overflow_p
;
9769 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9770 && TREE_CODE_LENGTH (code
) == 2
9772 && op1
!= NULL_TREE
);
9777 /* Strip any conversions that don't change the mode. This is
9778 safe for every expression, except for a comparison expression
9779 because its signedness is derived from its operands. So, in
9780 the latter case, only strip conversions that don't change the
9781 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9784 Note that this is done as an internal manipulation within the
9785 constant folder, in order to find the simplest representation
9786 of the arguments so that their form can be studied. In any
9787 cases, the appropriate type conversions should be put back in
9788 the tree that will get out of the constant folder. */
9790 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9792 STRIP_SIGN_NOPS (arg0
);
9793 STRIP_SIGN_NOPS (arg1
);
9801 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9802 constant but we can't do arithmetic on them. */
9803 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9804 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9805 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9806 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9807 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9808 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
9810 if (kind
== tcc_binary
)
9812 /* Make sure type and arg0 have the same saturating flag. */
9813 gcc_assert (TYPE_SATURATING (type
)
9814 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9815 tem
= const_binop (code
, arg0
, arg1
);
9817 else if (kind
== tcc_comparison
)
9818 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9822 if (tem
!= NULL_TREE
)
9824 if (TREE_TYPE (tem
) != type
)
9825 tem
= fold_convert_loc (loc
, type
, tem
);
9830 /* If this is a commutative operation, and ARG0 is a constant, move it
9831 to ARG1 to reduce the number of tests below. */
9832 if (commutative_tree_code (code
)
9833 && tree_swap_operands_p (arg0
, arg1
, true))
9834 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9836 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9838 First check for cases where an arithmetic operation is applied to a
9839 compound, conditional, or comparison operation. Push the arithmetic
9840 operation inside the compound or conditional to see if any folding
9841 can then be done. Convert comparison to conditional for this purpose.
9842 The also optimizes non-constant cases that used to be done in
9845 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9846 one of the operands is a comparison and the other is a comparison, a
9847 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9848 code below would make the expression more complex. Change it to a
9849 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9850 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9852 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9853 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9854 && TREE_CODE (type
) != VECTOR_TYPE
9855 && ((truth_value_p (TREE_CODE (arg0
))
9856 && (truth_value_p (TREE_CODE (arg1
))
9857 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9858 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9859 || (truth_value_p (TREE_CODE (arg1
))
9860 && (truth_value_p (TREE_CODE (arg0
))
9861 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9862 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9864 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9865 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9868 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9869 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9871 if (code
== EQ_EXPR
)
9872 tem
= invert_truthvalue_loc (loc
, tem
);
9874 return fold_convert_loc (loc
, type
, tem
);
9877 if (TREE_CODE_CLASS (code
) == tcc_binary
9878 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9880 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9882 tem
= fold_build2_loc (loc
, code
, type
,
9883 fold_convert_loc (loc
, TREE_TYPE (op0
),
9884 TREE_OPERAND (arg0
, 1)), op1
);
9885 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9888 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9889 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9891 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9892 fold_convert_loc (loc
, TREE_TYPE (op1
),
9893 TREE_OPERAND (arg1
, 1)));
9894 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9898 if (TREE_CODE (arg0
) == COND_EXPR
9899 || TREE_CODE (arg0
) == VEC_COND_EXPR
9900 || COMPARISON_CLASS_P (arg0
))
9902 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9904 /*cond_first_p=*/1);
9905 if (tem
!= NULL_TREE
)
9909 if (TREE_CODE (arg1
) == COND_EXPR
9910 || TREE_CODE (arg1
) == VEC_COND_EXPR
9911 || COMPARISON_CLASS_P (arg1
))
9913 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9915 /*cond_first_p=*/0);
9916 if (tem
!= NULL_TREE
)
9924 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9925 if (TREE_CODE (arg0
) == ADDR_EXPR
9926 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9928 tree iref
= TREE_OPERAND (arg0
, 0);
9929 return fold_build2 (MEM_REF
, type
,
9930 TREE_OPERAND (iref
, 0),
9931 int_const_binop (PLUS_EXPR
, arg1
,
9932 TREE_OPERAND (iref
, 1)));
9935 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9936 if (TREE_CODE (arg0
) == ADDR_EXPR
9937 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9940 HOST_WIDE_INT coffset
;
9941 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9945 return fold_build2 (MEM_REF
, type
,
9946 build_fold_addr_expr (base
),
9947 int_const_binop (PLUS_EXPR
, arg1
,
9948 size_int (coffset
)));
9953 case POINTER_PLUS_EXPR
:
9954 /* 0 +p index -> (type)index */
9955 if (integer_zerop (arg0
))
9956 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
9958 /* PTR +p 0 -> PTR */
9959 if (integer_zerop (arg1
))
9960 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9962 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9963 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9964 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9965 return fold_convert_loc (loc
, type
,
9966 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9967 fold_convert_loc (loc
, sizetype
,
9969 fold_convert_loc (loc
, sizetype
,
9972 /* (PTR +p B) +p A -> PTR +p (B + A) */
9973 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9976 tree arg01
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (arg0
, 1));
9977 tree arg00
= TREE_OPERAND (arg0
, 0);
9978 inner
= fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9979 arg01
, fold_convert_loc (loc
, sizetype
, arg1
));
9980 return fold_convert_loc (loc
, type
,
9981 fold_build_pointer_plus_loc (loc
,
9985 /* PTR_CST +p CST -> CST1 */
9986 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9987 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
9988 fold_convert_loc (loc
, type
, arg1
));
9990 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9991 of the array. Loop optimizer sometimes produce this type of
9993 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9995 tem
= try_move_mult_to_index (loc
, arg0
,
9996 fold_convert_loc (loc
,
9999 return fold_convert_loc (loc
, type
, tem
);
10005 /* A + (-B) -> A - B */
10006 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
10007 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10008 fold_convert_loc (loc
, type
, arg0
),
10009 fold_convert_loc (loc
, type
,
10010 TREE_OPERAND (arg1
, 0)));
10011 /* (-A) + B -> B - A */
10012 if (TREE_CODE (arg0
) == NEGATE_EXPR
10013 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
10014 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10015 fold_convert_loc (loc
, type
, arg1
),
10016 fold_convert_loc (loc
, type
,
10017 TREE_OPERAND (arg0
, 0)));
10019 if (INTEGRAL_TYPE_P (type
))
10021 /* Convert ~A + 1 to -A. */
10022 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10023 && integer_onep (arg1
))
10024 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
10025 fold_convert_loc (loc
, type
,
10026 TREE_OPERAND (arg0
, 0)));
10028 /* ~X + X is -1. */
10029 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10030 && !TYPE_OVERFLOW_TRAPS (type
))
10032 tree tem
= TREE_OPERAND (arg0
, 0);
10035 if (operand_equal_p (tem
, arg1
, 0))
10037 t1
= build_int_cst_type (type
, -1);
10038 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10042 /* X + ~X is -1. */
10043 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10044 && !TYPE_OVERFLOW_TRAPS (type
))
10046 tree tem
= TREE_OPERAND (arg1
, 0);
10049 if (operand_equal_p (arg0
, tem
, 0))
10051 t1
= build_int_cst_type (type
, -1);
10052 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10056 /* X + (X / CST) * -CST is X % CST. */
10057 if (TREE_CODE (arg1
) == MULT_EXPR
10058 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10059 && operand_equal_p (arg0
,
10060 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10062 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10063 tree cst1
= TREE_OPERAND (arg1
, 1);
10064 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10066 if (sum
&& integer_zerop (sum
))
10067 return fold_convert_loc (loc
, type
,
10068 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10069 TREE_TYPE (arg0
), arg0
,
10074 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10075 one. Make sure the type is not saturating and has the signedness of
10076 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10077 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10078 if ((TREE_CODE (arg0
) == MULT_EXPR
10079 || TREE_CODE (arg1
) == MULT_EXPR
)
10080 && !TYPE_SATURATING (type
)
10081 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10082 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10083 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10085 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10090 if (! FLOAT_TYPE_P (type
))
10092 if (integer_zerop (arg1
))
10093 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10095 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10096 with a constant, and the two constants have no bits in common,
10097 we should treat this as a BIT_IOR_EXPR since this may produce more
10098 simplifications. */
10099 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10100 && TREE_CODE (arg1
) == BIT_AND_EXPR
10101 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10102 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10103 && integer_zerop (const_binop (BIT_AND_EXPR
,
10104 TREE_OPERAND (arg0
, 1),
10105 TREE_OPERAND (arg1
, 1))))
10107 code
= BIT_IOR_EXPR
;
10111 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10112 (plus (plus (mult) (mult)) (foo)) so that we can
10113 take advantage of the factoring cases below. */
10114 if (TYPE_OVERFLOW_WRAPS (type
)
10115 && (((TREE_CODE (arg0
) == PLUS_EXPR
10116 || TREE_CODE (arg0
) == MINUS_EXPR
)
10117 && TREE_CODE (arg1
) == MULT_EXPR
)
10118 || ((TREE_CODE (arg1
) == PLUS_EXPR
10119 || TREE_CODE (arg1
) == MINUS_EXPR
)
10120 && TREE_CODE (arg0
) == MULT_EXPR
)))
10122 tree parg0
, parg1
, parg
, marg
;
10123 enum tree_code pcode
;
10125 if (TREE_CODE (arg1
) == MULT_EXPR
)
10126 parg
= arg0
, marg
= arg1
;
10128 parg
= arg1
, marg
= arg0
;
10129 pcode
= TREE_CODE (parg
);
10130 parg0
= TREE_OPERAND (parg
, 0);
10131 parg1
= TREE_OPERAND (parg
, 1);
10132 STRIP_NOPS (parg0
);
10133 STRIP_NOPS (parg1
);
10135 if (TREE_CODE (parg0
) == MULT_EXPR
10136 && TREE_CODE (parg1
) != MULT_EXPR
)
10137 return fold_build2_loc (loc
, pcode
, type
,
10138 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10139 fold_convert_loc (loc
, type
,
10141 fold_convert_loc (loc
, type
,
10143 fold_convert_loc (loc
, type
, parg1
));
10144 if (TREE_CODE (parg0
) != MULT_EXPR
10145 && TREE_CODE (parg1
) == MULT_EXPR
)
10147 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10148 fold_convert_loc (loc
, type
, parg0
),
10149 fold_build2_loc (loc
, pcode
, type
,
10150 fold_convert_loc (loc
, type
, marg
),
10151 fold_convert_loc (loc
, type
,
10157 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10158 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
10159 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10161 /* Likewise if the operands are reversed. */
10162 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10163 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10165 /* Convert X + -C into X - C. */
10166 if (TREE_CODE (arg1
) == REAL_CST
10167 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
10169 tem
= fold_negate_const (arg1
, type
);
10170 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
10171 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10172 fold_convert_loc (loc
, type
, arg0
),
10173 fold_convert_loc (loc
, type
, tem
));
10176 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10177 to __complex__ ( x, y ). This is not the same for SNaNs or
10178 if signed zeros are involved. */
10179 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10180 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10181 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10183 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10184 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10185 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10186 bool arg0rz
= false, arg0iz
= false;
10187 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10188 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10190 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10191 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10192 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10194 tree rp
= arg1r
? arg1r
10195 : build1 (REALPART_EXPR
, rtype
, arg1
);
10196 tree ip
= arg0i
? arg0i
10197 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10198 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10200 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10202 tree rp
= arg0r
? arg0r
10203 : build1 (REALPART_EXPR
, rtype
, arg0
);
10204 tree ip
= arg1i
? arg1i
10205 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10206 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10211 if (flag_unsafe_math_optimizations
10212 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10213 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10214 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10217 /* Convert x+x into x*2.0. */
10218 if (operand_equal_p (arg0
, arg1
, 0)
10219 && SCALAR_FLOAT_TYPE_P (type
))
10220 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
,
10221 build_real (type
, dconst2
));
10223 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10224 We associate floats only if the user has specified
10225 -fassociative-math. */
10226 if (flag_associative_math
10227 && TREE_CODE (arg1
) == PLUS_EXPR
10228 && TREE_CODE (arg0
) != MULT_EXPR
)
10230 tree tree10
= TREE_OPERAND (arg1
, 0);
10231 tree tree11
= TREE_OPERAND (arg1
, 1);
10232 if (TREE_CODE (tree11
) == MULT_EXPR
10233 && TREE_CODE (tree10
) == MULT_EXPR
)
10236 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10237 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10240 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10241 We associate floats only if the user has specified
10242 -fassociative-math. */
10243 if (flag_associative_math
10244 && TREE_CODE (arg0
) == PLUS_EXPR
10245 && TREE_CODE (arg1
) != MULT_EXPR
)
10247 tree tree00
= TREE_OPERAND (arg0
, 0);
10248 tree tree01
= TREE_OPERAND (arg0
, 1);
10249 if (TREE_CODE (tree01
) == MULT_EXPR
10250 && TREE_CODE (tree00
) == MULT_EXPR
)
10253 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10254 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10260 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10261 is a rotate of A by C1 bits. */
10262 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10263 is a rotate of A by B bits. */
10265 enum tree_code code0
, code1
;
10267 code0
= TREE_CODE (arg0
);
10268 code1
= TREE_CODE (arg1
);
10269 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10270 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10271 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10272 TREE_OPERAND (arg1
, 0), 0)
10273 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10274 TYPE_UNSIGNED (rtype
))
10275 /* Only create rotates in complete modes. Other cases are not
10276 expanded properly. */
10277 && TYPE_PRECISION (rtype
) == GET_MODE_PRECISION (TYPE_MODE (rtype
)))
10279 tree tree01
, tree11
;
10280 enum tree_code code01
, code11
;
10282 tree01
= TREE_OPERAND (arg0
, 1);
10283 tree11
= TREE_OPERAND (arg1
, 1);
10284 STRIP_NOPS (tree01
);
10285 STRIP_NOPS (tree11
);
10286 code01
= TREE_CODE (tree01
);
10287 code11
= TREE_CODE (tree11
);
10288 if (code01
== INTEGER_CST
10289 && code11
== INTEGER_CST
10290 && TREE_INT_CST_HIGH (tree01
) == 0
10291 && TREE_INT_CST_HIGH (tree11
) == 0
10292 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
10293 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
10295 tem
= build2_loc (loc
, LROTATE_EXPR
,
10296 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10297 TREE_OPERAND (arg0
, 0),
10298 code0
== LSHIFT_EXPR
? tree01
: tree11
);
10299 return fold_convert_loc (loc
, type
, tem
);
10301 else if (code11
== MINUS_EXPR
)
10303 tree tree110
, tree111
;
10304 tree110
= TREE_OPERAND (tree11
, 0);
10305 tree111
= TREE_OPERAND (tree11
, 1);
10306 STRIP_NOPS (tree110
);
10307 STRIP_NOPS (tree111
);
10308 if (TREE_CODE (tree110
) == INTEGER_CST
10309 && 0 == compare_tree_int (tree110
,
10311 (TREE_TYPE (TREE_OPERAND
10313 && operand_equal_p (tree01
, tree111
, 0))
10315 fold_convert_loc (loc
, type
,
10316 build2 ((code0
== LSHIFT_EXPR
10319 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10320 TREE_OPERAND (arg0
, 0), tree01
));
10322 else if (code01
== MINUS_EXPR
)
10324 tree tree010
, tree011
;
10325 tree010
= TREE_OPERAND (tree01
, 0);
10326 tree011
= TREE_OPERAND (tree01
, 1);
10327 STRIP_NOPS (tree010
);
10328 STRIP_NOPS (tree011
);
10329 if (TREE_CODE (tree010
) == INTEGER_CST
10330 && 0 == compare_tree_int (tree010
,
10332 (TREE_TYPE (TREE_OPERAND
10334 && operand_equal_p (tree11
, tree011
, 0))
10335 return fold_convert_loc
10337 build2 ((code0
!= LSHIFT_EXPR
10340 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10341 TREE_OPERAND (arg0
, 0), tree11
));
10347 /* In most languages, can't associate operations on floats through
10348 parentheses. Rather than remember where the parentheses were, we
10349 don't associate floats at all, unless the user has specified
10350 -fassociative-math.
10351 And, we need to make sure type is not saturating. */
10353 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10354 && !TYPE_SATURATING (type
))
10356 tree var0
, con0
, lit0
, minus_lit0
;
10357 tree var1
, con1
, lit1
, minus_lit1
;
10361 /* Split both trees into variables, constants, and literals. Then
10362 associate each group together, the constants with literals,
10363 then the result with variables. This increases the chances of
10364 literals being recombined later and of generating relocatable
10365 expressions for the sum of a constant and literal. */
10366 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10367 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10368 code
== MINUS_EXPR
);
10370 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10371 if (code
== MINUS_EXPR
)
10374 /* With undefined overflow prefer doing association in a type
10375 which wraps on overflow, if that is one of the operand types. */
10376 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10377 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10379 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10380 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10381 atype
= TREE_TYPE (arg0
);
10382 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10383 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10384 atype
= TREE_TYPE (arg1
);
10385 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10388 /* With undefined overflow we can only associate constants with one
10389 variable, and constants whose association doesn't overflow. */
10390 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10391 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
10398 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10399 tmp0
= TREE_OPERAND (tmp0
, 0);
10400 if (CONVERT_EXPR_P (tmp0
)
10401 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10402 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10403 <= TYPE_PRECISION (atype
)))
10404 tmp0
= TREE_OPERAND (tmp0
, 0);
10405 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10406 tmp1
= TREE_OPERAND (tmp1
, 0);
10407 if (CONVERT_EXPR_P (tmp1
)
10408 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10409 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10410 <= TYPE_PRECISION (atype
)))
10411 tmp1
= TREE_OPERAND (tmp1
, 0);
10412 /* The only case we can still associate with two variables
10413 is if they are the same, modulo negation and bit-pattern
10414 preserving conversions. */
10415 if (!operand_equal_p (tmp0
, tmp1
, 0))
10420 /* Only do something if we found more than two objects. Otherwise,
10421 nothing has changed and we risk infinite recursion. */
10423 && (2 < ((var0
!= 0) + (var1
!= 0)
10424 + (con0
!= 0) + (con1
!= 0)
10425 + (lit0
!= 0) + (lit1
!= 0)
10426 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10428 bool any_overflows
= false;
10429 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
10430 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
10431 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
10432 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
10433 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10434 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10435 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10436 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10439 /* Preserve the MINUS_EXPR if the negative part of the literal is
10440 greater than the positive part. Otherwise, the multiplicative
10441 folding code (i.e extract_muldiv) may be fooled in case
10442 unsigned constants are subtracted, like in the following
10443 example: ((X*2 + 4) - 8U)/2. */
10444 if (minus_lit0
&& lit0
)
10446 if (TREE_CODE (lit0
) == INTEGER_CST
10447 && TREE_CODE (minus_lit0
) == INTEGER_CST
10448 && tree_int_cst_lt (lit0
, minus_lit0
))
10450 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10451 MINUS_EXPR
, atype
);
10456 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10457 MINUS_EXPR
, atype
);
10462 /* Don't introduce overflows through reassociation. */
10464 && ((lit0
&& TREE_OVERFLOW (lit0
))
10465 || (minus_lit0
&& TREE_OVERFLOW (minus_lit0
))))
10472 fold_convert_loc (loc
, type
,
10473 associate_trees (loc
, var0
, minus_lit0
,
10474 MINUS_EXPR
, atype
));
10477 con0
= associate_trees (loc
, con0
, minus_lit0
,
10478 MINUS_EXPR
, atype
);
10480 fold_convert_loc (loc
, type
,
10481 associate_trees (loc
, var0
, con0
,
10482 PLUS_EXPR
, atype
));
10486 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10488 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10496 /* Pointer simplifications for subtraction, simple reassociations. */
10497 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10499 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10500 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10501 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10503 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10504 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10505 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10506 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10507 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10508 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10510 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10513 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10514 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10516 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10517 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10518 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10519 fold_convert_loc (loc
, type
, arg1
));
10521 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10524 /* A - (-B) -> A + B */
10525 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
10526 return fold_build2_loc (loc
, PLUS_EXPR
, type
, op0
,
10527 fold_convert_loc (loc
, type
,
10528 TREE_OPERAND (arg1
, 0)));
10529 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10530 if (TREE_CODE (arg0
) == NEGATE_EXPR
10531 && (FLOAT_TYPE_P (type
)
10532 || INTEGRAL_TYPE_P (type
))
10533 && negate_expr_p (arg1
)
10534 && reorder_operands_p (arg0
, arg1
))
10535 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10536 fold_convert_loc (loc
, type
,
10537 negate_expr (arg1
)),
10538 fold_convert_loc (loc
, type
,
10539 TREE_OPERAND (arg0
, 0)));
10540 /* Convert -A - 1 to ~A. */
10541 if (INTEGRAL_TYPE_P (type
)
10542 && TREE_CODE (arg0
) == NEGATE_EXPR
10543 && integer_onep (arg1
)
10544 && !TYPE_OVERFLOW_TRAPS (type
))
10545 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10546 fold_convert_loc (loc
, type
,
10547 TREE_OPERAND (arg0
, 0)));
10549 /* Convert -1 - A to ~A. */
10550 if (INTEGRAL_TYPE_P (type
)
10551 && integer_all_onesp (arg0
))
10552 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op1
);
10555 /* X - (X / CST) * CST is X % CST. */
10556 if (INTEGRAL_TYPE_P (type
)
10557 && TREE_CODE (arg1
) == MULT_EXPR
10558 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10559 && operand_equal_p (arg0
,
10560 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10561 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10562 TREE_OPERAND (arg1
, 1), 0))
10564 fold_convert_loc (loc
, type
,
10565 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10566 arg0
, TREE_OPERAND (arg1
, 1)));
10568 if (! FLOAT_TYPE_P (type
))
10570 if (integer_zerop (arg0
))
10571 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10572 if (integer_zerop (arg1
))
10573 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10575 /* Fold A - (A & B) into ~B & A. */
10576 if (!TREE_SIDE_EFFECTS (arg0
)
10577 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10579 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10581 tree arg10
= fold_convert_loc (loc
, type
,
10582 TREE_OPERAND (arg1
, 0));
10583 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10584 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10586 fold_convert_loc (loc
, type
, arg0
));
10588 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10590 tree arg11
= fold_convert_loc (loc
,
10591 type
, TREE_OPERAND (arg1
, 1));
10592 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10593 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10595 fold_convert_loc (loc
, type
, arg0
));
10599 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10600 any power of 2 minus 1. */
10601 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10602 && TREE_CODE (arg1
) == BIT_AND_EXPR
10603 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10604 TREE_OPERAND (arg1
, 0), 0))
10606 tree mask0
= TREE_OPERAND (arg0
, 1);
10607 tree mask1
= TREE_OPERAND (arg1
, 1);
10608 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10610 if (operand_equal_p (tem
, mask1
, 0))
10612 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10613 TREE_OPERAND (arg0
, 0), mask1
);
10614 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10619 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10620 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
10621 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10623 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10624 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10625 (-ARG1 + ARG0) reduces to -ARG1. */
10626 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10627 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10629 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10630 __complex__ ( x, -y ). This is not the same for SNaNs or if
10631 signed zeros are involved. */
10632 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10633 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10634 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10636 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10637 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10638 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10639 bool arg0rz
= false, arg0iz
= false;
10640 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10641 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10643 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10644 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10645 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10647 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10649 : build1 (REALPART_EXPR
, rtype
, arg1
));
10650 tree ip
= arg0i
? arg0i
10651 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10652 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10654 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10656 tree rp
= arg0r
? arg0r
10657 : build1 (REALPART_EXPR
, rtype
, arg0
);
10658 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10660 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10661 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10666 /* Fold &x - &x. This can happen from &x.foo - &x.
10667 This is unsafe for certain floats even in non-IEEE formats.
10668 In IEEE, it is unsafe because it does wrong for NaNs.
10669 Also note that operand_equal_p is always false if an operand
10672 if ((!FLOAT_TYPE_P (type
) || !HONOR_NANS (TYPE_MODE (type
)))
10673 && operand_equal_p (arg0
, arg1
, 0))
10674 return build_zero_cst (type
);
10676 /* A - B -> A + (-B) if B is easily negatable. */
10677 if (negate_expr_p (arg1
)
10678 && ((FLOAT_TYPE_P (type
)
10679 /* Avoid this transformation if B is a positive REAL_CST. */
10680 && (TREE_CODE (arg1
) != REAL_CST
10681 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10682 || INTEGRAL_TYPE_P (type
)))
10683 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10684 fold_convert_loc (loc
, type
, arg0
),
10685 fold_convert_loc (loc
, type
,
10686 negate_expr (arg1
)));
10688 /* Try folding difference of addresses. */
10690 HOST_WIDE_INT diff
;
10692 if ((TREE_CODE (arg0
) == ADDR_EXPR
10693 || TREE_CODE (arg1
) == ADDR_EXPR
)
10694 && ptr_difference_const (arg0
, arg1
, &diff
))
10695 return build_int_cst_type (type
, diff
);
10698 /* Fold &a[i] - &a[j] to i-j. */
10699 if (TREE_CODE (arg0
) == ADDR_EXPR
10700 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10701 && TREE_CODE (arg1
) == ADDR_EXPR
10702 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10704 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10705 TREE_OPERAND (arg0
, 0),
10706 TREE_OPERAND (arg1
, 0));
10711 if (FLOAT_TYPE_P (type
)
10712 && flag_unsafe_math_optimizations
10713 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10714 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10715 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10718 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10719 one. Make sure the type is not saturating and has the signedness of
10720 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10721 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10722 if ((TREE_CODE (arg0
) == MULT_EXPR
10723 || TREE_CODE (arg1
) == MULT_EXPR
)
10724 && !TYPE_SATURATING (type
)
10725 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10726 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10727 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10729 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10737 /* (-A) * (-B) -> A * B */
10738 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10739 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10740 fold_convert_loc (loc
, type
,
10741 TREE_OPERAND (arg0
, 0)),
10742 fold_convert_loc (loc
, type
,
10743 negate_expr (arg1
)));
10744 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10745 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10746 fold_convert_loc (loc
, type
,
10747 negate_expr (arg0
)),
10748 fold_convert_loc (loc
, type
,
10749 TREE_OPERAND (arg1
, 0)));
10751 if (! FLOAT_TYPE_P (type
))
10753 if (integer_zerop (arg1
))
10754 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10755 if (integer_onep (arg1
))
10756 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10757 /* Transform x * -1 into -x. Make sure to do the negation
10758 on the original operand with conversions not stripped
10759 because we can only strip non-sign-changing conversions. */
10760 if (integer_all_onesp (arg1
))
10761 return fold_convert_loc (loc
, type
, negate_expr (op0
));
10762 /* Transform x * -C into -x * C if x is easily negatable. */
10763 if (TREE_CODE (arg1
) == INTEGER_CST
10764 && tree_int_cst_sgn (arg1
) == -1
10765 && negate_expr_p (arg0
)
10766 && (tem
= negate_expr (arg1
)) != arg1
10767 && !TREE_OVERFLOW (tem
))
10768 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10769 fold_convert_loc (loc
, type
,
10770 negate_expr (arg0
)),
10773 /* (a * (1 << b)) is (a << b) */
10774 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10775 && integer_onep (TREE_OPERAND (arg1
, 0)))
10776 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10777 TREE_OPERAND (arg1
, 1));
10778 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10779 && integer_onep (TREE_OPERAND (arg0
, 0)))
10780 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10781 TREE_OPERAND (arg0
, 1));
10783 /* (A + A) * C -> A * 2 * C */
10784 if (TREE_CODE (arg0
) == PLUS_EXPR
10785 && TREE_CODE (arg1
) == INTEGER_CST
10786 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10787 TREE_OPERAND (arg0
, 1), 0))
10788 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10789 omit_one_operand_loc (loc
, type
,
10790 TREE_OPERAND (arg0
, 0),
10791 TREE_OPERAND (arg0
, 1)),
10792 fold_build2_loc (loc
, MULT_EXPR
, type
,
10793 build_int_cst (type
, 2) , arg1
));
10795 strict_overflow_p
= false;
10796 if (TREE_CODE (arg1
) == INTEGER_CST
10797 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10798 &strict_overflow_p
)))
10800 if (strict_overflow_p
)
10801 fold_overflow_warning (("assuming signed overflow does not "
10802 "occur when simplifying "
10804 WARN_STRICT_OVERFLOW_MISC
);
10805 return fold_convert_loc (loc
, type
, tem
);
10808 /* Optimize z * conj(z) for integer complex numbers. */
10809 if (TREE_CODE (arg0
) == CONJ_EXPR
10810 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10811 return fold_mult_zconjz (loc
, type
, arg1
);
10812 if (TREE_CODE (arg1
) == CONJ_EXPR
10813 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10814 return fold_mult_zconjz (loc
, type
, arg0
);
10818 /* Maybe fold x * 0 to 0. The expressions aren't the same
10819 when x is NaN, since x * 0 is also NaN. Nor are they the
10820 same in modes with signed zeros, since multiplying a
10821 negative value by 0 gives -0, not +0. */
10822 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10823 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10824 && real_zerop (arg1
))
10825 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10826 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10827 Likewise for complex arithmetic with signed zeros. */
10828 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10829 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10830 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10831 && real_onep (arg1
))
10832 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10834 /* Transform x * -1.0 into -x. */
10835 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10836 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10837 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10838 && real_minus_onep (arg1
))
10839 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
10841 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10842 the result for floating point types due to rounding so it is applied
10843 only if -fassociative-math was specify. */
10844 if (flag_associative_math
10845 && TREE_CODE (arg0
) == RDIV_EXPR
10846 && TREE_CODE (arg1
) == REAL_CST
10847 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10849 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10852 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10853 TREE_OPERAND (arg0
, 1));
10856 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10857 if (operand_equal_p (arg0
, arg1
, 0))
10859 tree tem
= fold_strip_sign_ops (arg0
);
10860 if (tem
!= NULL_TREE
)
10862 tem
= fold_convert_loc (loc
, type
, tem
);
10863 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10867 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10868 This is not the same for NaNs or if signed zeros are
10870 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10871 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10872 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10873 && TREE_CODE (arg1
) == COMPLEX_CST
10874 && real_zerop (TREE_REALPART (arg1
)))
10876 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10877 if (real_onep (TREE_IMAGPART (arg1
)))
10879 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10880 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10882 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10883 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10885 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10886 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10887 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10891 /* Optimize z * conj(z) for floating point complex numbers.
10892 Guarded by flag_unsafe_math_optimizations as non-finite
10893 imaginary components don't produce scalar results. */
10894 if (flag_unsafe_math_optimizations
10895 && TREE_CODE (arg0
) == CONJ_EXPR
10896 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10897 return fold_mult_zconjz (loc
, type
, arg1
);
10898 if (flag_unsafe_math_optimizations
10899 && TREE_CODE (arg1
) == CONJ_EXPR
10900 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10901 return fold_mult_zconjz (loc
, type
, arg0
);
10903 if (flag_unsafe_math_optimizations
)
10905 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10906 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10908 /* Optimizations of root(...)*root(...). */
10909 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10912 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10913 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10915 /* Optimize sqrt(x)*sqrt(x) as x. */
10916 if (BUILTIN_SQRT_P (fcode0
)
10917 && operand_equal_p (arg00
, arg10
, 0)
10918 && ! HONOR_SNANS (TYPE_MODE (type
)))
10921 /* Optimize root(x)*root(y) as root(x*y). */
10922 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10923 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10924 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10927 /* Optimize expN(x)*expN(y) as expN(x+y). */
10928 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10930 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10931 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10932 CALL_EXPR_ARG (arg0
, 0),
10933 CALL_EXPR_ARG (arg1
, 0));
10934 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10937 /* Optimizations of pow(...)*pow(...). */
10938 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10939 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10940 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10942 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10943 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10944 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10945 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10947 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10948 if (operand_equal_p (arg01
, arg11
, 0))
10950 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10951 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10953 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10956 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10957 if (operand_equal_p (arg00
, arg10
, 0))
10959 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10960 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10962 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10966 /* Optimize tan(x)*cos(x) as sin(x). */
10967 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10968 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10969 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10970 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10971 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10972 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10973 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10974 CALL_EXPR_ARG (arg1
, 0), 0))
10976 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10978 if (sinfn
!= NULL_TREE
)
10979 return build_call_expr_loc (loc
, sinfn
, 1,
10980 CALL_EXPR_ARG (arg0
, 0));
10983 /* Optimize x*pow(x,c) as pow(x,c+1). */
10984 if (fcode1
== BUILT_IN_POW
10985 || fcode1
== BUILT_IN_POWF
10986 || fcode1
== BUILT_IN_POWL
)
10988 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10989 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10990 if (TREE_CODE (arg11
) == REAL_CST
10991 && !TREE_OVERFLOW (arg11
)
10992 && operand_equal_p (arg0
, arg10
, 0))
10994 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10998 c
= TREE_REAL_CST (arg11
);
10999 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
11000 arg
= build_real (type
, c
);
11001 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
11005 /* Optimize pow(x,c)*x as pow(x,c+1). */
11006 if (fcode0
== BUILT_IN_POW
11007 || fcode0
== BUILT_IN_POWF
11008 || fcode0
== BUILT_IN_POWL
)
11010 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11011 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11012 if (TREE_CODE (arg01
) == REAL_CST
11013 && !TREE_OVERFLOW (arg01
)
11014 && operand_equal_p (arg1
, arg00
, 0))
11016 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11020 c
= TREE_REAL_CST (arg01
);
11021 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
11022 arg
= build_real (type
, c
);
11023 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11027 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11028 if (!in_gimple_form
11030 && operand_equal_p (arg0
, arg1
, 0))
11032 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
11036 tree arg
= build_real (type
, dconst2
);
11037 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
11046 if (integer_all_onesp (arg1
))
11047 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11048 if (integer_zerop (arg1
))
11049 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11050 if (operand_equal_p (arg0
, arg1
, 0))
11051 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11053 /* ~X | X is -1. */
11054 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11055 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11057 t1
= build_zero_cst (type
);
11058 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11059 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11062 /* X | ~X is -1. */
11063 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11064 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11066 t1
= build_zero_cst (type
);
11067 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11068 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11071 /* Canonicalize (X & C1) | C2. */
11072 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11073 && TREE_CODE (arg1
) == INTEGER_CST
11074 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11076 double_int c1
, c2
, c3
, msk
;
11077 int width
= TYPE_PRECISION (type
), w
;
11078 c1
= tree_to_double_int (TREE_OPERAND (arg0
, 1));
11079 c2
= tree_to_double_int (arg1
);
11081 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11082 if ((c1
& c2
) == c1
)
11083 return omit_one_operand_loc (loc
, type
, arg1
,
11084 TREE_OPERAND (arg0
, 0));
11086 msk
= double_int::mask (width
);
11088 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11089 if (msk
.and_not (c1
| c2
).is_zero ())
11090 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11091 TREE_OPERAND (arg0
, 0), arg1
);
11093 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11094 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11095 mode which allows further optimizations. */
11098 c3
= c1
.and_not (c2
);
11099 for (w
= BITS_PER_UNIT
;
11100 w
<= width
&& w
<= HOST_BITS_PER_WIDE_INT
;
11103 unsigned HOST_WIDE_INT mask
11104 = (unsigned HOST_WIDE_INT
) -1 >> (HOST_BITS_PER_WIDE_INT
- w
);
11105 if (((c1
.low
| c2
.low
) & mask
) == mask
11106 && (c1
.low
& ~mask
) == 0 && c1
.high
== 0)
11108 c3
= double_int::from_uhwi (mask
);
11113 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11114 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11115 TREE_OPERAND (arg0
, 0),
11116 double_int_to_tree (type
,
11121 /* (X & Y) | Y is (X, Y). */
11122 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11123 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11124 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11125 /* (X & Y) | X is (Y, X). */
11126 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11127 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11128 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11129 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11130 /* X | (X & Y) is (Y, X). */
11131 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11132 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11133 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11134 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11135 /* X | (Y & X) is (Y, X). */
11136 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11137 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11138 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11139 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11141 /* (X & ~Y) | (~X & Y) is X ^ Y */
11142 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11143 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11145 tree a0
, a1
, l0
, l1
, n0
, n1
;
11147 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11148 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11150 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11151 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11153 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11154 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11156 if ((operand_equal_p (n0
, a0
, 0)
11157 && operand_equal_p (n1
, a1
, 0))
11158 || (operand_equal_p (n0
, a1
, 0)
11159 && operand_equal_p (n1
, a0
, 0)))
11160 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11163 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11164 if (t1
!= NULL_TREE
)
11167 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11169 This results in more efficient code for machines without a NAND
11170 instruction. Combine will canonicalize to the first form
11171 which will allow use of NAND instructions provided by the
11172 backend if they exist. */
11173 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11174 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11177 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11178 build2 (BIT_AND_EXPR
, type
,
11179 fold_convert_loc (loc
, type
,
11180 TREE_OPERAND (arg0
, 0)),
11181 fold_convert_loc (loc
, type
,
11182 TREE_OPERAND (arg1
, 0))));
11185 /* See if this can be simplified into a rotate first. If that
11186 is unsuccessful continue in the association code. */
11190 if (integer_zerop (arg1
))
11191 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11192 if (integer_all_onesp (arg1
))
11193 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op0
);
11194 if (operand_equal_p (arg0
, arg1
, 0))
11195 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11197 /* ~X ^ X is -1. */
11198 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11201 t1
= build_zero_cst (type
);
11202 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11203 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11206 /* X ^ ~X is -1. */
11207 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11208 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11210 t1
= build_zero_cst (type
);
11211 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11212 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11215 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11216 with a constant, and the two constants have no bits in common,
11217 we should treat this as a BIT_IOR_EXPR since this may produce more
11218 simplifications. */
11219 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11220 && TREE_CODE (arg1
) == BIT_AND_EXPR
11221 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11222 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11223 && integer_zerop (const_binop (BIT_AND_EXPR
,
11224 TREE_OPERAND (arg0
, 1),
11225 TREE_OPERAND (arg1
, 1))))
11227 code
= BIT_IOR_EXPR
;
11231 /* (X | Y) ^ X -> Y & ~ X*/
11232 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11233 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11235 tree t2
= TREE_OPERAND (arg0
, 1);
11236 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11238 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11239 fold_convert_loc (loc
, type
, t2
),
11240 fold_convert_loc (loc
, type
, t1
));
11244 /* (Y | X) ^ X -> Y & ~ X*/
11245 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11246 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11248 tree t2
= TREE_OPERAND (arg0
, 0);
11249 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11251 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11252 fold_convert_loc (loc
, type
, t2
),
11253 fold_convert_loc (loc
, type
, t1
));
11257 /* X ^ (X | Y) -> Y & ~ X*/
11258 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11259 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11261 tree t2
= TREE_OPERAND (arg1
, 1);
11262 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11264 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11265 fold_convert_loc (loc
, type
, t2
),
11266 fold_convert_loc (loc
, type
, t1
));
11270 /* X ^ (Y | X) -> Y & ~ X*/
11271 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11272 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11274 tree t2
= TREE_OPERAND (arg1
, 0);
11275 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11277 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11278 fold_convert_loc (loc
, type
, t2
),
11279 fold_convert_loc (loc
, type
, t1
));
11283 /* Convert ~X ^ ~Y to X ^ Y. */
11284 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11285 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11286 return fold_build2_loc (loc
, code
, type
,
11287 fold_convert_loc (loc
, type
,
11288 TREE_OPERAND (arg0
, 0)),
11289 fold_convert_loc (loc
, type
,
11290 TREE_OPERAND (arg1
, 0)));
11292 /* Convert ~X ^ C to X ^ ~C. */
11293 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11294 && TREE_CODE (arg1
) == INTEGER_CST
)
11295 return fold_build2_loc (loc
, code
, type
,
11296 fold_convert_loc (loc
, type
,
11297 TREE_OPERAND (arg0
, 0)),
11298 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11300 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11301 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11302 && integer_onep (TREE_OPERAND (arg0
, 1))
11303 && integer_onep (arg1
))
11304 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11305 build_zero_cst (TREE_TYPE (arg0
)));
11307 /* Fold (X & Y) ^ Y as ~X & Y. */
11308 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11309 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11311 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11312 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11313 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11314 fold_convert_loc (loc
, type
, arg1
));
11316 /* Fold (X & Y) ^ X as ~Y & X. */
11317 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11318 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11319 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11321 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11322 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11323 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11324 fold_convert_loc (loc
, type
, arg1
));
11326 /* Fold X ^ (X & Y) as X & ~Y. */
11327 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11328 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11330 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11331 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11332 fold_convert_loc (loc
, type
, arg0
),
11333 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11335 /* Fold X ^ (Y & X) as ~Y & X. */
11336 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11337 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11338 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11340 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11341 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11342 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11343 fold_convert_loc (loc
, type
, arg0
));
11346 /* See if this can be simplified into a rotate first. If that
11347 is unsuccessful continue in the association code. */
11351 if (integer_all_onesp (arg1
))
11352 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11353 if (integer_zerop (arg1
))
11354 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11355 if (operand_equal_p (arg0
, arg1
, 0))
11356 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11358 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11359 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11360 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11361 || (TREE_CODE (arg0
) == EQ_EXPR
11362 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11363 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11364 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11366 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11367 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11368 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11369 || (TREE_CODE (arg1
) == EQ_EXPR
11370 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11371 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11372 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11374 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11375 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11376 && TREE_CODE (arg1
) == INTEGER_CST
11377 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11379 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11380 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11381 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11382 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11383 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11385 fold_convert_loc (loc
, type
,
11386 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11387 type
, tmp2
, tmp3
));
11390 /* (X | Y) & Y is (X, Y). */
11391 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11392 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11393 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11394 /* (X | Y) & X is (Y, X). */
11395 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11396 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11397 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11398 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11399 /* X & (X | Y) is (Y, X). */
11400 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11401 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11402 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11403 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11404 /* X & (Y | X) is (Y, X). */
11405 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11406 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11407 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11408 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11410 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11411 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11412 && integer_onep (TREE_OPERAND (arg0
, 1))
11413 && integer_onep (arg1
))
11416 tem
= TREE_OPERAND (arg0
, 0);
11417 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11418 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11420 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11421 build_zero_cst (TREE_TYPE (tem
)));
11423 /* Fold ~X & 1 as (X & 1) == 0. */
11424 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11425 && integer_onep (arg1
))
11428 tem
= TREE_OPERAND (arg0
, 0);
11429 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11430 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11432 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11433 build_zero_cst (TREE_TYPE (tem
)));
11435 /* Fold !X & 1 as X == 0. */
11436 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11437 && integer_onep (arg1
))
11439 tem
= TREE_OPERAND (arg0
, 0);
11440 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11441 build_zero_cst (TREE_TYPE (tem
)));
11444 /* Fold (X ^ Y) & Y as ~X & Y. */
11445 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11446 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11448 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11449 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11450 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11451 fold_convert_loc (loc
, type
, arg1
));
11453 /* Fold (X ^ Y) & X as ~Y & X. */
11454 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11455 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11456 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11458 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11459 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11460 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11461 fold_convert_loc (loc
, type
, arg1
));
11463 /* Fold X & (X ^ Y) as X & ~Y. */
11464 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11465 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11467 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11468 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11469 fold_convert_loc (loc
, type
, arg0
),
11470 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11472 /* Fold X & (Y ^ X) as ~Y & X. */
11473 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11474 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11475 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11477 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11478 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11479 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11480 fold_convert_loc (loc
, type
, arg0
));
11483 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11484 multiple of 1 << CST. */
11485 if (TREE_CODE (arg1
) == INTEGER_CST
)
11487 double_int cst1
= tree_to_double_int (arg1
);
11488 double_int ncst1
= (-cst1
).ext(TYPE_PRECISION (TREE_TYPE (arg1
)),
11489 TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11490 if ((cst1
& ncst1
) == ncst1
11491 && multiple_of_p (type
, arg0
,
11492 double_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11493 return fold_convert_loc (loc
, type
, arg0
);
11496 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11498 if (TREE_CODE (arg1
) == INTEGER_CST
11499 && TREE_CODE (arg0
) == MULT_EXPR
11500 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11503 = tree_to_double_int (TREE_OPERAND (arg0
, 1)).trailing_zeros ();
11506 double_int arg1mask
, masked
;
11507 arg1mask
= ~double_int::mask (arg1tz
);
11508 arg1mask
= arg1mask
.ext (TYPE_PRECISION (type
),
11509 TYPE_UNSIGNED (type
));
11510 masked
= arg1mask
& tree_to_double_int (arg1
);
11511 if (masked
.is_zero ())
11512 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11514 else if (masked
!= tree_to_double_int (arg1
))
11515 return fold_build2_loc (loc
, code
, type
, op0
,
11516 double_int_to_tree (type
, masked
));
11520 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11521 ((A & N) + B) & M -> (A + B) & M
11522 Similarly if (N & M) == 0,
11523 ((A | N) + B) & M -> (A + B) & M
11524 and for - instead of + (or unary - instead of +)
11525 and/or ^ instead of |.
11526 If B is constant and (B & M) == 0, fold into A & M. */
11527 if (host_integerp (arg1
, 1))
11529 unsigned HOST_WIDE_INT cst1
= tree_low_cst (arg1
, 1);
11530 if (~cst1
&& (cst1
& (cst1
+ 1)) == 0
11531 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11532 && (TREE_CODE (arg0
) == PLUS_EXPR
11533 || TREE_CODE (arg0
) == MINUS_EXPR
11534 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11535 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11536 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11540 unsigned HOST_WIDE_INT cst0
;
11542 /* Now we know that arg0 is (C + D) or (C - D) or
11543 -C and arg1 (M) is == (1LL << cst) - 1.
11544 Store C into PMOP[0] and D into PMOP[1]. */
11545 pmop
[0] = TREE_OPERAND (arg0
, 0);
11547 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11549 pmop
[1] = TREE_OPERAND (arg0
, 1);
11553 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11554 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11558 for (; which
>= 0; which
--)
11559 switch (TREE_CODE (pmop
[which
]))
11564 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11567 /* tree_low_cst not used, because we don't care about
11569 cst0
= TREE_INT_CST_LOW (TREE_OPERAND (pmop
[which
], 1));
11571 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11576 else if (cst0
!= 0)
11578 /* If C or D is of the form (A & N) where
11579 (N & M) == M, or of the form (A | N) or
11580 (A ^ N) where (N & M) == 0, replace it with A. */
11581 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11584 /* If C or D is a N where (N & M) == 0, it can be
11585 omitted (assumed 0). */
11586 if ((TREE_CODE (arg0
) == PLUS_EXPR
11587 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11588 && (TREE_INT_CST_LOW (pmop
[which
]) & cst1
) == 0)
11589 pmop
[which
] = NULL
;
11595 /* Only build anything new if we optimized one or both arguments
11597 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11598 || (TREE_CODE (arg0
) != NEGATE_EXPR
11599 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11601 tree utype
= TREE_TYPE (arg0
);
11602 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11604 /* Perform the operations in a type that has defined
11605 overflow behavior. */
11606 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11607 if (pmop
[0] != NULL
)
11608 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11609 if (pmop
[1] != NULL
)
11610 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11613 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11614 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11615 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11617 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11618 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11620 else if (pmop
[0] != NULL
)
11622 else if (pmop
[1] != NULL
)
11625 return build_int_cst (type
, 0);
11627 else if (pmop
[0] == NULL
)
11628 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11630 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11632 /* TEM is now the new binary +, - or unary - replacement. */
11633 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11634 fold_convert_loc (loc
, utype
, arg1
));
11635 return fold_convert_loc (loc
, type
, tem
);
11640 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11641 if (t1
!= NULL_TREE
)
11643 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11644 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11645 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11648 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11650 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
11651 && (~TREE_INT_CST_LOW (arg1
)
11652 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
11654 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11657 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11659 This results in more efficient code for machines without a NOR
11660 instruction. Combine will canonicalize to the first form
11661 which will allow use of NOR instructions provided by the
11662 backend if they exist. */
11663 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11664 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11666 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11667 build2 (BIT_IOR_EXPR
, type
,
11668 fold_convert_loc (loc
, type
,
11669 TREE_OPERAND (arg0
, 0)),
11670 fold_convert_loc (loc
, type
,
11671 TREE_OPERAND (arg1
, 0))));
11674 /* If arg0 is derived from the address of an object or function, we may
11675 be able to fold this expression using the object or function's
11677 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && host_integerp (arg1
, 1))
11679 unsigned HOST_WIDE_INT modulus
, residue
;
11680 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (arg1
);
11682 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11683 integer_onep (arg1
));
11685 /* This works because modulus is a power of 2. If this weren't the
11686 case, we'd have to replace it by its greatest power-of-2
11687 divisor: modulus & -modulus. */
11689 return build_int_cst (type
, residue
& low
);
11692 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11693 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11694 if the new mask might be further optimized. */
11695 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11696 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11697 && host_integerp (TREE_OPERAND (arg0
, 1), 1)
11698 && host_integerp (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)))
11699 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1)
11700 < TYPE_PRECISION (TREE_TYPE (arg0
))
11701 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11702 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1) > 0)
11704 unsigned int shiftc
= tree_low_cst (TREE_OPERAND (arg0
, 1), 1);
11705 unsigned HOST_WIDE_INT mask
11706 = tree_low_cst (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11707 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11708 tree shift_type
= TREE_TYPE (arg0
);
11710 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11711 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11712 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11713 && TYPE_PRECISION (TREE_TYPE (arg0
))
11714 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0
))))
11716 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11717 tree arg00
= TREE_OPERAND (arg0
, 0);
11718 /* See if more bits can be proven as zero because of
11720 if (TREE_CODE (arg00
) == NOP_EXPR
11721 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11723 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11724 if (TYPE_PRECISION (inner_type
)
11725 == GET_MODE_BITSIZE (TYPE_MODE (inner_type
))
11726 && TYPE_PRECISION (inner_type
) < prec
)
11728 prec
= TYPE_PRECISION (inner_type
);
11729 /* See if we can shorten the right shift. */
11731 shift_type
= inner_type
;
11734 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11735 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11736 zerobits
<<= prec
- shiftc
;
11737 /* For arithmetic shift if sign bit could be set, zerobits
11738 can contain actually sign bits, so no transformation is
11739 possible, unless MASK masks them all away. In that
11740 case the shift needs to be converted into logical shift. */
11741 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11742 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11744 if ((mask
& zerobits
) == 0)
11745 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11751 /* ((X << 16) & 0xff00) is (X, 0). */
11752 if ((mask
& zerobits
) == mask
)
11753 return omit_one_operand_loc (loc
, type
,
11754 build_int_cst (type
, 0), arg0
);
11756 newmask
= mask
| zerobits
;
11757 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11761 /* Only do the transformation if NEWMASK is some integer
11763 for (prec
= BITS_PER_UNIT
;
11764 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11765 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11767 if (prec
< HOST_BITS_PER_WIDE_INT
11768 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11772 if (shift_type
!= TREE_TYPE (arg0
))
11774 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11775 fold_convert_loc (loc
, shift_type
,
11776 TREE_OPERAND (arg0
, 0)),
11777 TREE_OPERAND (arg0
, 1));
11778 tem
= fold_convert_loc (loc
, type
, tem
);
11782 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11783 if (!tree_int_cst_equal (newmaskt
, arg1
))
11784 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11792 /* Don't touch a floating-point divide by zero unless the mode
11793 of the constant can represent infinity. */
11794 if (TREE_CODE (arg1
) == REAL_CST
11795 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11796 && real_zerop (arg1
))
11799 /* Optimize A / A to 1.0 if we don't care about
11800 NaNs or Infinities. Skip the transformation
11801 for non-real operands. */
11802 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11803 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
11804 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
11805 && operand_equal_p (arg0
, arg1
, 0))
11807 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
11809 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11812 /* The complex version of the above A / A optimization. */
11813 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11814 && operand_equal_p (arg0
, arg1
, 0))
11816 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
11817 if (! HONOR_NANS (TYPE_MODE (elem_type
))
11818 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
11820 tree r
= build_real (elem_type
, dconst1
);
11821 /* omit_two_operands will call fold_convert for us. */
11822 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11826 /* (-A) / (-B) -> A / B */
11827 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11828 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11829 TREE_OPERAND (arg0
, 0),
11830 negate_expr (arg1
));
11831 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11832 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11833 negate_expr (arg0
),
11834 TREE_OPERAND (arg1
, 0));
11836 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11837 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11838 && real_onep (arg1
))
11839 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11841 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11842 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11843 && real_minus_onep (arg1
))
11844 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
,
11845 negate_expr (arg0
)));
11847 /* If ARG1 is a constant, we can convert this to a multiply by the
11848 reciprocal. This does not have the same rounding properties,
11849 so only do this if -freciprocal-math. We can actually
11850 always safely do it if ARG1 is a power of two, but it's hard to
11851 tell if it is or not in a portable manner. */
11853 && (TREE_CODE (arg1
) == REAL_CST
11854 || (TREE_CODE (arg1
) == COMPLEX_CST
11855 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1
)))
11856 || (TREE_CODE (arg1
) == VECTOR_CST
11857 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1
)))))
11859 if (flag_reciprocal_math
11860 && 0 != (tem
= const_binop (code
, build_one_cst (type
), arg1
)))
11861 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tem
);
11862 /* Find the reciprocal if optimizing and the result is exact.
11863 TODO: Complex reciprocal not implemented. */
11864 if (TREE_CODE (arg1
) != COMPLEX_CST
)
11866 tree inverse
= exact_inverse (TREE_TYPE (arg0
), arg1
);
11869 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, inverse
);
11872 /* Convert A/B/C to A/(B*C). */
11873 if (flag_reciprocal_math
11874 && TREE_CODE (arg0
) == RDIV_EXPR
)
11875 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11876 fold_build2_loc (loc
, MULT_EXPR
, type
,
11877 TREE_OPERAND (arg0
, 1), arg1
));
11879 /* Convert A/(B/C) to (A/B)*C. */
11880 if (flag_reciprocal_math
11881 && TREE_CODE (arg1
) == RDIV_EXPR
)
11882 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11883 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11884 TREE_OPERAND (arg1
, 0)),
11885 TREE_OPERAND (arg1
, 1));
11887 /* Convert C1/(X*C2) into (C1/C2)/X. */
11888 if (flag_reciprocal_math
11889 && TREE_CODE (arg1
) == MULT_EXPR
11890 && TREE_CODE (arg0
) == REAL_CST
11891 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11893 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11894 TREE_OPERAND (arg1
, 1));
11896 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11897 TREE_OPERAND (arg1
, 0));
11900 if (flag_unsafe_math_optimizations
)
11902 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11903 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11905 /* Optimize sin(x)/cos(x) as tan(x). */
11906 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11907 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11908 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11909 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11910 CALL_EXPR_ARG (arg1
, 0), 0))
11912 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11914 if (tanfn
!= NULL_TREE
)
11915 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11918 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11919 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11920 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11921 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11922 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11923 CALL_EXPR_ARG (arg1
, 0), 0))
11925 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11927 if (tanfn
!= NULL_TREE
)
11929 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11930 CALL_EXPR_ARG (arg0
, 0));
11931 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11932 build_real (type
, dconst1
), tmp
);
11936 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11937 NaNs or Infinities. */
11938 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11939 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11940 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11942 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11943 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11945 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11946 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11947 && operand_equal_p (arg00
, arg01
, 0))
11949 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11951 if (cosfn
!= NULL_TREE
)
11952 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11956 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11957 NaNs or Infinities. */
11958 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11959 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11960 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11962 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11963 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11965 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11966 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11967 && operand_equal_p (arg00
, arg01
, 0))
11969 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11971 if (cosfn
!= NULL_TREE
)
11973 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11974 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11975 build_real (type
, dconst1
),
11981 /* Optimize pow(x,c)/x as pow(x,c-1). */
11982 if (fcode0
== BUILT_IN_POW
11983 || fcode0
== BUILT_IN_POWF
11984 || fcode0
== BUILT_IN_POWL
)
11986 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11987 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11988 if (TREE_CODE (arg01
) == REAL_CST
11989 && !TREE_OVERFLOW (arg01
)
11990 && operand_equal_p (arg1
, arg00
, 0))
11992 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11996 c
= TREE_REAL_CST (arg01
);
11997 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11998 arg
= build_real (type
, c
);
11999 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
12003 /* Optimize a/root(b/c) into a*root(c/b). */
12004 if (BUILTIN_ROOT_P (fcode1
))
12006 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
12008 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
12010 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
12011 tree b
= TREE_OPERAND (rootarg
, 0);
12012 tree c
= TREE_OPERAND (rootarg
, 1);
12014 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
12016 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
12017 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
12021 /* Optimize x/expN(y) into x*expN(-y). */
12022 if (BUILTIN_EXPONENT_P (fcode1
))
12024 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
12025 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
12026 arg1
= build_call_expr_loc (loc
,
12028 fold_convert_loc (loc
, type
, arg
));
12029 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
12032 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12033 if (fcode1
== BUILT_IN_POW
12034 || fcode1
== BUILT_IN_POWF
12035 || fcode1
== BUILT_IN_POWL
)
12037 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
12038 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
12039 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
12040 tree neg11
= fold_convert_loc (loc
, type
,
12041 negate_expr (arg11
));
12042 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
12043 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
12048 case TRUNC_DIV_EXPR
:
12049 /* Optimize (X & (-A)) / A where A is a power of 2,
12051 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12052 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
12053 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
12055 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
12056 arg1
, TREE_OPERAND (arg0
, 1));
12057 if (sum
&& integer_zerop (sum
)) {
12058 unsigned long pow2
;
12060 if (TREE_INT_CST_LOW (arg1
))
12061 pow2
= exact_log2 (TREE_INT_CST_LOW (arg1
));
12063 pow2
= exact_log2 (TREE_INT_CST_HIGH (arg1
))
12064 + HOST_BITS_PER_WIDE_INT
;
12066 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
12067 TREE_OPERAND (arg0
, 0),
12068 build_int_cst (integer_type_node
, pow2
));
12074 case FLOOR_DIV_EXPR
:
12075 /* Simplify A / (B << N) where A and B are positive and B is
12076 a power of 2, to A >> (N + log2(B)). */
12077 strict_overflow_p
= false;
12078 if (TREE_CODE (arg1
) == LSHIFT_EXPR
12079 && (TYPE_UNSIGNED (type
)
12080 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
12082 tree sval
= TREE_OPERAND (arg1
, 0);
12083 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
12085 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
12086 unsigned long pow2
;
12088 if (TREE_INT_CST_LOW (sval
))
12089 pow2
= exact_log2 (TREE_INT_CST_LOW (sval
));
12091 pow2
= exact_log2 (TREE_INT_CST_HIGH (sval
))
12092 + HOST_BITS_PER_WIDE_INT
;
12094 if (strict_overflow_p
)
12095 fold_overflow_warning (("assuming signed overflow does not "
12096 "occur when simplifying A / (B << N)"),
12097 WARN_STRICT_OVERFLOW_MISC
);
12099 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
12101 build_int_cst (TREE_TYPE (sh_cnt
),
12103 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
12104 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
12108 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12109 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12110 if (INTEGRAL_TYPE_P (type
)
12111 && TYPE_UNSIGNED (type
)
12112 && code
== FLOOR_DIV_EXPR
)
12113 return fold_build2_loc (loc
, TRUNC_DIV_EXPR
, type
, op0
, op1
);
12117 case ROUND_DIV_EXPR
:
12118 case CEIL_DIV_EXPR
:
12119 case EXACT_DIV_EXPR
:
12120 if (integer_onep (arg1
))
12121 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12122 if (integer_zerop (arg1
))
12124 /* X / -1 is -X. */
12125 if (!TYPE_UNSIGNED (type
)
12126 && TREE_CODE (arg1
) == INTEGER_CST
12127 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
12128 && TREE_INT_CST_HIGH (arg1
) == -1)
12129 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
12131 /* Convert -A / -B to A / B when the type is signed and overflow is
12133 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12134 && TREE_CODE (arg0
) == NEGATE_EXPR
12135 && negate_expr_p (arg1
))
12137 if (INTEGRAL_TYPE_P (type
))
12138 fold_overflow_warning (("assuming signed overflow does not occur "
12139 "when distributing negation across "
12141 WARN_STRICT_OVERFLOW_MISC
);
12142 return fold_build2_loc (loc
, code
, type
,
12143 fold_convert_loc (loc
, type
,
12144 TREE_OPERAND (arg0
, 0)),
12145 fold_convert_loc (loc
, type
,
12146 negate_expr (arg1
)));
12148 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12149 && TREE_CODE (arg1
) == NEGATE_EXPR
12150 && negate_expr_p (arg0
))
12152 if (INTEGRAL_TYPE_P (type
))
12153 fold_overflow_warning (("assuming signed overflow does not occur "
12154 "when distributing negation across "
12156 WARN_STRICT_OVERFLOW_MISC
);
12157 return fold_build2_loc (loc
, code
, type
,
12158 fold_convert_loc (loc
, type
,
12159 negate_expr (arg0
)),
12160 fold_convert_loc (loc
, type
,
12161 TREE_OPERAND (arg1
, 0)));
12164 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12165 operation, EXACT_DIV_EXPR.
12167 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12168 At one time others generated faster code, it's not clear if they do
12169 after the last round to changes to the DIV code in expmed.c. */
12170 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
12171 && multiple_of_p (type
, arg0
, arg1
))
12172 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
12174 strict_overflow_p
= false;
12175 if (TREE_CODE (arg1
) == INTEGER_CST
12176 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
12177 &strict_overflow_p
)))
12179 if (strict_overflow_p
)
12180 fold_overflow_warning (("assuming signed overflow does not occur "
12181 "when simplifying division"),
12182 WARN_STRICT_OVERFLOW_MISC
);
12183 return fold_convert_loc (loc
, type
, tem
);
12188 case CEIL_MOD_EXPR
:
12189 case FLOOR_MOD_EXPR
:
12190 case ROUND_MOD_EXPR
:
12191 case TRUNC_MOD_EXPR
:
12192 /* X % 1 is always zero, but be sure to preserve any side
12194 if (integer_onep (arg1
))
12195 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12197 /* X % 0, return X % 0 unchanged so that we can get the
12198 proper warnings and errors. */
12199 if (integer_zerop (arg1
))
12202 /* 0 % X is always zero, but be sure to preserve any side
12203 effects in X. Place this after checking for X == 0. */
12204 if (integer_zerop (arg0
))
12205 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12207 /* X % -1 is zero. */
12208 if (!TYPE_UNSIGNED (type
)
12209 && TREE_CODE (arg1
) == INTEGER_CST
12210 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
12211 && TREE_INT_CST_HIGH (arg1
) == -1)
12212 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12214 /* X % -C is the same as X % C. */
12215 if (code
== TRUNC_MOD_EXPR
12216 && !TYPE_UNSIGNED (type
)
12217 && TREE_CODE (arg1
) == INTEGER_CST
12218 && !TREE_OVERFLOW (arg1
)
12219 && TREE_INT_CST_HIGH (arg1
) < 0
12220 && !TYPE_OVERFLOW_TRAPS (type
)
12221 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12222 && !sign_bit_p (arg1
, arg1
))
12223 return fold_build2_loc (loc
, code
, type
,
12224 fold_convert_loc (loc
, type
, arg0
),
12225 fold_convert_loc (loc
, type
,
12226 negate_expr (arg1
)));
12228 /* X % -Y is the same as X % Y. */
12229 if (code
== TRUNC_MOD_EXPR
12230 && !TYPE_UNSIGNED (type
)
12231 && TREE_CODE (arg1
) == NEGATE_EXPR
12232 && !TYPE_OVERFLOW_TRAPS (type
))
12233 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, arg0
),
12234 fold_convert_loc (loc
, type
,
12235 TREE_OPERAND (arg1
, 0)));
12237 strict_overflow_p
= false;
12238 if (TREE_CODE (arg1
) == INTEGER_CST
12239 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
12240 &strict_overflow_p
)))
12242 if (strict_overflow_p
)
12243 fold_overflow_warning (("assuming signed overflow does not occur "
12244 "when simplifying modulus"),
12245 WARN_STRICT_OVERFLOW_MISC
);
12246 return fold_convert_loc (loc
, type
, tem
);
12249 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12250 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12251 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
12252 && (TYPE_UNSIGNED (type
)
12253 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
12256 /* Also optimize A % (C << N) where C is a power of 2,
12257 to A & ((C << N) - 1). */
12258 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
12259 c
= TREE_OPERAND (arg1
, 0);
12261 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
12264 = fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
12265 build_int_cst (TREE_TYPE (arg1
), 1));
12266 if (strict_overflow_p
)
12267 fold_overflow_warning (("assuming signed overflow does not "
12268 "occur when simplifying "
12269 "X % (power of two)"),
12270 WARN_STRICT_OVERFLOW_MISC
);
12271 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12272 fold_convert_loc (loc
, type
, arg0
),
12273 fold_convert_loc (loc
, type
, mask
));
12281 if (integer_all_onesp (arg0
))
12282 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12286 /* Optimize -1 >> x for arithmetic right shifts. */
12287 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
)
12288 && tree_expr_nonnegative_p (arg1
))
12289 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12290 /* ... fall through ... */
12294 if (integer_zerop (arg1
))
12295 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12296 if (integer_zerop (arg0
))
12297 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12299 /* Since negative shift count is not well-defined,
12300 don't try to compute it in the compiler. */
12301 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
12304 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12305 if (TREE_CODE (op0
) == code
&& host_integerp (arg1
, false)
12306 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
12307 && host_integerp (TREE_OPERAND (arg0
, 1), false)
12308 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
12310 HOST_WIDE_INT low
= (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
12311 + TREE_INT_CST_LOW (arg1
));
12313 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12314 being well defined. */
12315 if (low
>= TYPE_PRECISION (type
))
12317 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
12318 low
= low
% TYPE_PRECISION (type
);
12319 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
12320 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 0),
12321 TREE_OPERAND (arg0
, 0));
12323 low
= TYPE_PRECISION (type
) - 1;
12326 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12327 build_int_cst (type
, low
));
12330 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12331 into x & ((unsigned)-1 >> c) for unsigned types. */
12332 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
12333 || (TYPE_UNSIGNED (type
)
12334 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
12335 && host_integerp (arg1
, false)
12336 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
12337 && host_integerp (TREE_OPERAND (arg0
, 1), false)
12338 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
12340 HOST_WIDE_INT low0
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
12341 HOST_WIDE_INT low1
= TREE_INT_CST_LOW (arg1
);
12347 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12349 lshift
= build_int_cst (type
, -1);
12350 lshift
= int_const_binop (code
, lshift
, arg1
);
12352 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
12356 /* Rewrite an LROTATE_EXPR by a constant into an
12357 RROTATE_EXPR by a new constant. */
12358 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
12360 tree tem
= build_int_cst (TREE_TYPE (arg1
),
12361 TYPE_PRECISION (type
));
12362 tem
= const_binop (MINUS_EXPR
, tem
, arg1
);
12363 return fold_build2_loc (loc
, RROTATE_EXPR
, type
, op0
, tem
);
12366 /* If we have a rotate of a bit operation with the rotate count and
12367 the second operand of the bit operation both constant,
12368 permute the two operations. */
12369 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12370 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12371 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12372 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12373 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12374 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12375 fold_build2_loc (loc
, code
, type
,
12376 TREE_OPERAND (arg0
, 0), arg1
),
12377 fold_build2_loc (loc
, code
, type
,
12378 TREE_OPERAND (arg0
, 1), arg1
));
12380 /* Two consecutive rotates adding up to the precision of the
12381 type can be ignored. */
12382 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12383 && TREE_CODE (arg0
) == RROTATE_EXPR
12384 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12385 && TREE_INT_CST_HIGH (arg1
) == 0
12386 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
12387 && ((TREE_INT_CST_LOW (arg1
)
12388 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
12389 == (unsigned int) TYPE_PRECISION (type
)))
12390 return TREE_OPERAND (arg0
, 0);
12392 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12393 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12394 if the latter can be further optimized. */
12395 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12396 && TREE_CODE (arg0
) == BIT_AND_EXPR
12397 && TREE_CODE (arg1
) == INTEGER_CST
12398 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12400 tree mask
= fold_build2_loc (loc
, code
, type
,
12401 fold_convert_loc (loc
, type
,
12402 TREE_OPERAND (arg0
, 1)),
12404 tree shift
= fold_build2_loc (loc
, code
, type
,
12405 fold_convert_loc (loc
, type
,
12406 TREE_OPERAND (arg0
, 0)),
12408 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12416 if (operand_equal_p (arg0
, arg1
, 0))
12417 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12418 if (INTEGRAL_TYPE_P (type
)
12419 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
12420 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12421 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12427 if (operand_equal_p (arg0
, arg1
, 0))
12428 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12429 if (INTEGRAL_TYPE_P (type
)
12430 && TYPE_MAX_VALUE (type
)
12431 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
12432 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12433 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12438 case TRUTH_ANDIF_EXPR
:
12439 /* Note that the operands of this must be ints
12440 and their values must be 0 or 1.
12441 ("true" is a fixed value perhaps depending on the language.) */
12442 /* If first arg is constant zero, return it. */
12443 if (integer_zerop (arg0
))
12444 return fold_convert_loc (loc
, type
, arg0
);
12445 case TRUTH_AND_EXPR
:
12446 /* If either arg is constant true, drop it. */
12447 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12448 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12449 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12450 /* Preserve sequence points. */
12451 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12452 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12453 /* If second arg is constant zero, result is zero, but first arg
12454 must be evaluated. */
12455 if (integer_zerop (arg1
))
12456 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12457 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12458 case will be handled here. */
12459 if (integer_zerop (arg0
))
12460 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12462 /* !X && X is always false. */
12463 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12464 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12465 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12466 /* X && !X is always false. */
12467 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12468 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12469 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12471 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12472 means A >= Y && A != MAX, but in this case we know that
12475 if (!TREE_SIDE_EFFECTS (arg0
)
12476 && !TREE_SIDE_EFFECTS (arg1
))
12478 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12479 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12480 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12482 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12483 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12484 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12487 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12493 case TRUTH_ORIF_EXPR
:
12494 /* Note that the operands of this must be ints
12495 and their values must be 0 or true.
12496 ("true" is a fixed value perhaps depending on the language.) */
12497 /* If first arg is constant true, return it. */
12498 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12499 return fold_convert_loc (loc
, type
, arg0
);
12500 case TRUTH_OR_EXPR
:
12501 /* If either arg is constant zero, drop it. */
12502 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12503 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12504 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12505 /* Preserve sequence points. */
12506 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12507 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12508 /* If second arg is constant true, result is true, but we must
12509 evaluate first arg. */
12510 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12511 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12512 /* Likewise for first arg, but note this only occurs here for
12514 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12515 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12517 /* !X || X is always true. */
12518 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12519 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12520 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12521 /* X || !X is always true. */
12522 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12523 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12524 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12526 /* (X && !Y) || (!X && Y) is X ^ Y */
12527 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12528 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12530 tree a0
, a1
, l0
, l1
, n0
, n1
;
12532 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12533 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12535 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12536 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12538 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12539 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12541 if ((operand_equal_p (n0
, a0
, 0)
12542 && operand_equal_p (n1
, a1
, 0))
12543 || (operand_equal_p (n0
, a1
, 0)
12544 && operand_equal_p (n1
, a0
, 0)))
12545 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12548 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12554 case TRUTH_XOR_EXPR
:
12555 /* If the second arg is constant zero, drop it. */
12556 if (integer_zerop (arg1
))
12557 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12558 /* If the second arg is constant true, this is a logical inversion. */
12559 if (integer_onep (arg1
))
12561 /* Only call invert_truthvalue if operand is a truth value. */
12562 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
12563 tem
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
12565 tem
= invert_truthvalue_loc (loc
, arg0
);
12566 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12568 /* Identical arguments cancel to zero. */
12569 if (operand_equal_p (arg0
, arg1
, 0))
12570 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12572 /* !X ^ X is always true. */
12573 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12574 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12575 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12577 /* X ^ !X is always true. */
12578 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12579 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12580 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12589 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12590 if (tem
!= NULL_TREE
)
12593 /* bool_var != 0 becomes bool_var. */
12594 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12595 && code
== NE_EXPR
)
12596 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12598 /* bool_var == 1 becomes bool_var. */
12599 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12600 && code
== EQ_EXPR
)
12601 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12603 /* bool_var != 1 becomes !bool_var. */
12604 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12605 && code
== NE_EXPR
)
12606 return fold_convert_loc (loc
, type
,
12607 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12608 TREE_TYPE (arg0
), arg0
));
12610 /* bool_var == 0 becomes !bool_var. */
12611 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12612 && code
== EQ_EXPR
)
12613 return fold_convert_loc (loc
, type
,
12614 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12615 TREE_TYPE (arg0
), arg0
));
12617 /* !exp != 0 becomes !exp */
12618 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12619 && code
== NE_EXPR
)
12620 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12622 /* If this is an equality comparison of the address of two non-weak,
12623 unaliased symbols neither of which are extern (since we do not
12624 have access to attributes for externs), then we know the result. */
12625 if (TREE_CODE (arg0
) == ADDR_EXPR
12626 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
12627 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
12628 && ! lookup_attribute ("alias",
12629 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
12630 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
12631 && TREE_CODE (arg1
) == ADDR_EXPR
12632 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
12633 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
12634 && ! lookup_attribute ("alias",
12635 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
12636 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
12638 /* We know that we're looking at the address of two
12639 non-weak, unaliased, static _DECL nodes.
12641 It is both wasteful and incorrect to call operand_equal_p
12642 to compare the two ADDR_EXPR nodes. It is wasteful in that
12643 all we need to do is test pointer equality for the arguments
12644 to the two ADDR_EXPR nodes. It is incorrect to use
12645 operand_equal_p as that function is NOT equivalent to a
12646 C equality test. It can in fact return false for two
12647 objects which would test as equal using the C equality
12649 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12650 return constant_boolean_node (equal
12651 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12655 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12656 a MINUS_EXPR of a constant, we can convert it into a comparison with
12657 a revised constant as long as no overflow occurs. */
12658 if (TREE_CODE (arg1
) == INTEGER_CST
12659 && (TREE_CODE (arg0
) == PLUS_EXPR
12660 || TREE_CODE (arg0
) == MINUS_EXPR
)
12661 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12662 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
12663 ? MINUS_EXPR
: PLUS_EXPR
,
12664 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12666 TREE_OPERAND (arg0
, 1)))
12667 && !TREE_OVERFLOW (tem
))
12668 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12670 /* Similarly for a NEGATE_EXPR. */
12671 if (TREE_CODE (arg0
) == NEGATE_EXPR
12672 && TREE_CODE (arg1
) == INTEGER_CST
12673 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12675 && TREE_CODE (tem
) == INTEGER_CST
12676 && !TREE_OVERFLOW (tem
))
12677 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12679 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12680 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12681 && TREE_CODE (arg1
) == INTEGER_CST
12682 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12683 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12684 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12685 fold_convert_loc (loc
,
12688 TREE_OPERAND (arg0
, 1)));
12690 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12691 if ((TREE_CODE (arg0
) == PLUS_EXPR
12692 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12693 || TREE_CODE (arg0
) == MINUS_EXPR
)
12694 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12697 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12698 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12700 tree val
= TREE_OPERAND (arg0
, 1);
12701 return omit_two_operands_loc (loc
, type
,
12702 fold_build2_loc (loc
, code
, type
,
12704 build_int_cst (TREE_TYPE (val
),
12706 TREE_OPERAND (arg0
, 0), arg1
);
12709 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12710 if (TREE_CODE (arg0
) == MINUS_EXPR
12711 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12712 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12715 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 0)) & 1) == 1)
12717 return omit_two_operands_loc (loc
, type
,
12719 ? boolean_true_node
: boolean_false_node
,
12720 TREE_OPERAND (arg0
, 1), arg1
);
12723 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12724 for !=. Don't do this for ordered comparisons due to overflow. */
12725 if (TREE_CODE (arg0
) == MINUS_EXPR
12726 && integer_zerop (arg1
))
12727 return fold_build2_loc (loc
, code
, type
,
12728 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
12730 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12731 if (TREE_CODE (arg0
) == ABS_EXPR
12732 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12733 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12735 /* If this is an EQ or NE comparison with zero and ARG0 is
12736 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12737 two operations, but the latter can be done in one less insn
12738 on machines that have only two-operand insns or on which a
12739 constant cannot be the first operand. */
12740 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12741 && integer_zerop (arg1
))
12743 tree arg00
= TREE_OPERAND (arg0
, 0);
12744 tree arg01
= TREE_OPERAND (arg0
, 1);
12745 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12746 && integer_onep (TREE_OPERAND (arg00
, 0)))
12748 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12749 arg01
, TREE_OPERAND (arg00
, 1));
12750 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12751 build_int_cst (TREE_TYPE (arg0
), 1));
12752 return fold_build2_loc (loc
, code
, type
,
12753 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12756 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12757 && integer_onep (TREE_OPERAND (arg01
, 0)))
12759 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12760 arg00
, TREE_OPERAND (arg01
, 1));
12761 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12762 build_int_cst (TREE_TYPE (arg0
), 1));
12763 return fold_build2_loc (loc
, code
, type
,
12764 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12769 /* If this is an NE or EQ comparison of zero against the result of a
12770 signed MOD operation whose second operand is a power of 2, make
12771 the MOD operation unsigned since it is simpler and equivalent. */
12772 if (integer_zerop (arg1
)
12773 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12774 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12775 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12776 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12777 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12778 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12780 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12781 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12782 fold_convert_loc (loc
, newtype
,
12783 TREE_OPERAND (arg0
, 0)),
12784 fold_convert_loc (loc
, newtype
,
12785 TREE_OPERAND (arg0
, 1)));
12787 return fold_build2_loc (loc
, code
, type
, newmod
,
12788 fold_convert_loc (loc
, newtype
, arg1
));
12791 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12792 C1 is a valid shift constant, and C2 is a power of two, i.e.
12794 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12795 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12796 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12798 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12799 && integer_zerop (arg1
))
12801 tree itype
= TREE_TYPE (arg0
);
12802 unsigned HOST_WIDE_INT prec
= TYPE_PRECISION (itype
);
12803 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12805 /* Check for a valid shift count. */
12806 if (TREE_INT_CST_HIGH (arg001
) == 0
12807 && TREE_INT_CST_LOW (arg001
) < prec
)
12809 tree arg01
= TREE_OPERAND (arg0
, 1);
12810 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12811 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12812 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12813 can be rewritten as (X & (C2 << C1)) != 0. */
12814 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12816 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12817 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12818 return fold_build2_loc (loc
, code
, type
, tem
,
12819 fold_convert_loc (loc
, itype
, arg1
));
12821 /* Otherwise, for signed (arithmetic) shifts,
12822 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12823 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12824 else if (!TYPE_UNSIGNED (itype
))
12825 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12826 arg000
, build_int_cst (itype
, 0));
12827 /* Otherwise, of unsigned (logical) shifts,
12828 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12829 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12831 return omit_one_operand_loc (loc
, type
,
12832 code
== EQ_EXPR
? integer_one_node
12833 : integer_zero_node
,
12838 /* If we have (A & C) == C where C is a power of 2, convert this into
12839 (A & C) != 0. Similarly for NE_EXPR. */
12840 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12841 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12842 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12843 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12844 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12845 integer_zero_node
));
12847 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12848 bit, then fold the expression into A < 0 or A >= 0. */
12849 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12853 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12854 Similarly for NE_EXPR. */
12855 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12856 && TREE_CODE (arg1
) == INTEGER_CST
12857 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12859 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12860 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12861 TREE_OPERAND (arg0
, 1));
12863 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12864 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12866 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12867 if (integer_nonzerop (dandnotc
))
12868 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12871 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12872 Similarly for NE_EXPR. */
12873 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12874 && TREE_CODE (arg1
) == INTEGER_CST
12875 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12877 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12879 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12880 TREE_OPERAND (arg0
, 1),
12881 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12882 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12883 if (integer_nonzerop (candnotd
))
12884 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12887 /* If this is a comparison of a field, we may be able to simplify it. */
12888 if ((TREE_CODE (arg0
) == COMPONENT_REF
12889 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12890 /* Handle the constant case even without -O
12891 to make sure the warnings are given. */
12892 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12894 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12899 /* Optimize comparisons of strlen vs zero to a compare of the
12900 first character of the string vs zero. To wit,
12901 strlen(ptr) == 0 => *ptr == 0
12902 strlen(ptr) != 0 => *ptr != 0
12903 Other cases should reduce to one of these two (or a constant)
12904 due to the return value of strlen being unsigned. */
12905 if (TREE_CODE (arg0
) == CALL_EXPR
12906 && integer_zerop (arg1
))
12908 tree fndecl
= get_callee_fndecl (arg0
);
12911 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12912 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12913 && call_expr_nargs (arg0
) == 1
12914 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12916 tree iref
= build_fold_indirect_ref_loc (loc
,
12917 CALL_EXPR_ARG (arg0
, 0));
12918 return fold_build2_loc (loc
, code
, type
, iref
,
12919 build_int_cst (TREE_TYPE (iref
), 0));
12923 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12924 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12925 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12926 && integer_zerop (arg1
)
12927 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12929 tree arg00
= TREE_OPERAND (arg0
, 0);
12930 tree arg01
= TREE_OPERAND (arg0
, 1);
12931 tree itype
= TREE_TYPE (arg00
);
12932 if (TREE_INT_CST_HIGH (arg01
) == 0
12933 && TREE_INT_CST_LOW (arg01
)
12934 == (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (itype
) - 1))
12936 if (TYPE_UNSIGNED (itype
))
12938 itype
= signed_type_for (itype
);
12939 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12941 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12942 type
, arg00
, build_zero_cst (itype
));
12946 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12947 if (integer_zerop (arg1
)
12948 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12949 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12950 TREE_OPERAND (arg0
, 1));
12952 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12953 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12954 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12955 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12956 build_zero_cst (TREE_TYPE (arg0
)));
12957 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12958 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12959 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12960 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12961 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12962 build_zero_cst (TREE_TYPE (arg0
)));
12964 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12965 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12966 && TREE_CODE (arg1
) == INTEGER_CST
12967 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12968 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12969 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12970 TREE_OPERAND (arg0
, 1), arg1
));
12972 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12973 (X & C) == 0 when C is a single bit. */
12974 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12975 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12976 && integer_zerop (arg1
)
12977 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12979 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12980 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12981 TREE_OPERAND (arg0
, 1));
12982 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12984 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12988 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12989 constant C is a power of two, i.e. a single bit. */
12990 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12991 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12992 && integer_zerop (arg1
)
12993 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12994 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12995 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12997 tree arg00
= TREE_OPERAND (arg0
, 0);
12998 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12999 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
13002 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13003 when is C is a power of two, i.e. a single bit. */
13004 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13005 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
13006 && integer_zerop (arg1
)
13007 && integer_pow2p (TREE_OPERAND (arg0
, 1))
13008 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13009 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
13011 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
13012 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
13013 arg000
, TREE_OPERAND (arg0
, 1));
13014 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
13015 tem
, build_int_cst (TREE_TYPE (tem
), 0));
13018 if (integer_zerop (arg1
)
13019 && tree_expr_nonzero_p (arg0
))
13021 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
13022 return omit_one_operand_loc (loc
, type
, res
, arg0
);
13025 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13026 if (TREE_CODE (arg0
) == NEGATE_EXPR
13027 && TREE_CODE (arg1
) == NEGATE_EXPR
)
13028 return fold_build2_loc (loc
, code
, type
,
13029 TREE_OPERAND (arg0
, 0),
13030 fold_convert_loc (loc
, TREE_TYPE (arg0
),
13031 TREE_OPERAND (arg1
, 0)));
13033 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13034 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13035 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
13037 tree arg00
= TREE_OPERAND (arg0
, 0);
13038 tree arg01
= TREE_OPERAND (arg0
, 1);
13039 tree arg10
= TREE_OPERAND (arg1
, 0);
13040 tree arg11
= TREE_OPERAND (arg1
, 1);
13041 tree itype
= TREE_TYPE (arg0
);
13043 if (operand_equal_p (arg01
, arg11
, 0))
13044 return fold_build2_loc (loc
, code
, type
,
13045 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13046 fold_build2_loc (loc
,
13047 BIT_XOR_EXPR
, itype
,
13050 build_zero_cst (itype
));
13052 if (operand_equal_p (arg01
, arg10
, 0))
13053 return fold_build2_loc (loc
, code
, type
,
13054 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13055 fold_build2_loc (loc
,
13056 BIT_XOR_EXPR
, itype
,
13059 build_zero_cst (itype
));
13061 if (operand_equal_p (arg00
, arg11
, 0))
13062 return fold_build2_loc (loc
, code
, type
,
13063 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13064 fold_build2_loc (loc
,
13065 BIT_XOR_EXPR
, itype
,
13068 build_zero_cst (itype
));
13070 if (operand_equal_p (arg00
, arg10
, 0))
13071 return fold_build2_loc (loc
, code
, type
,
13072 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13073 fold_build2_loc (loc
,
13074 BIT_XOR_EXPR
, itype
,
13077 build_zero_cst (itype
));
13080 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
13081 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
13083 tree arg00
= TREE_OPERAND (arg0
, 0);
13084 tree arg01
= TREE_OPERAND (arg0
, 1);
13085 tree arg10
= TREE_OPERAND (arg1
, 0);
13086 tree arg11
= TREE_OPERAND (arg1
, 1);
13087 tree itype
= TREE_TYPE (arg0
);
13089 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13090 operand_equal_p guarantees no side-effects so we don't need
13091 to use omit_one_operand on Z. */
13092 if (operand_equal_p (arg01
, arg11
, 0))
13093 return fold_build2_loc (loc
, code
, type
, arg00
,
13094 fold_convert_loc (loc
, TREE_TYPE (arg00
),
13096 if (operand_equal_p (arg01
, arg10
, 0))
13097 return fold_build2_loc (loc
, code
, type
, arg00
,
13098 fold_convert_loc (loc
, TREE_TYPE (arg00
),
13100 if (operand_equal_p (arg00
, arg11
, 0))
13101 return fold_build2_loc (loc
, code
, type
, arg01
,
13102 fold_convert_loc (loc
, TREE_TYPE (arg01
),
13104 if (operand_equal_p (arg00
, arg10
, 0))
13105 return fold_build2_loc (loc
, code
, type
, arg01
,
13106 fold_convert_loc (loc
, TREE_TYPE (arg01
),
13109 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13110 if (TREE_CODE (arg01
) == INTEGER_CST
13111 && TREE_CODE (arg11
) == INTEGER_CST
)
13113 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
13114 fold_convert_loc (loc
, itype
, arg11
));
13115 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
13116 return fold_build2_loc (loc
, code
, type
, tem
,
13117 fold_convert_loc (loc
, itype
, arg10
));
13121 /* Attempt to simplify equality/inequality comparisons of complex
13122 values. Only lower the comparison if the result is known or
13123 can be simplified to a single scalar comparison. */
13124 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
13125 || TREE_CODE (arg0
) == COMPLEX_CST
)
13126 && (TREE_CODE (arg1
) == COMPLEX_EXPR
13127 || TREE_CODE (arg1
) == COMPLEX_CST
))
13129 tree real0
, imag0
, real1
, imag1
;
13132 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
13134 real0
= TREE_OPERAND (arg0
, 0);
13135 imag0
= TREE_OPERAND (arg0
, 1);
13139 real0
= TREE_REALPART (arg0
);
13140 imag0
= TREE_IMAGPART (arg0
);
13143 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
13145 real1
= TREE_OPERAND (arg1
, 0);
13146 imag1
= TREE_OPERAND (arg1
, 1);
13150 real1
= TREE_REALPART (arg1
);
13151 imag1
= TREE_IMAGPART (arg1
);
13154 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
13155 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
13157 if (integer_zerop (rcond
))
13159 if (code
== EQ_EXPR
)
13160 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
13162 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
13166 if (code
== NE_EXPR
)
13167 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
13169 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
13173 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
13174 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
13176 if (integer_zerop (icond
))
13178 if (code
== EQ_EXPR
)
13179 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
13181 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
13185 if (code
== NE_EXPR
)
13186 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
13188 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
13199 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
13200 if (tem
!= NULL_TREE
)
13203 /* Transform comparisons of the form X +- C CMP X. */
13204 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
13205 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
13206 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
13207 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
13208 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
13209 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
13211 tree arg01
= TREE_OPERAND (arg0
, 1);
13212 enum tree_code code0
= TREE_CODE (arg0
);
13215 if (TREE_CODE (arg01
) == REAL_CST
)
13216 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
13218 is_positive
= tree_int_cst_sgn (arg01
);
13220 /* (X - c) > X becomes false. */
13221 if (code
== GT_EXPR
13222 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
13223 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
13225 if (TREE_CODE (arg01
) == INTEGER_CST
13226 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13227 fold_overflow_warning (("assuming signed overflow does not "
13228 "occur when assuming that (X - c) > X "
13229 "is always false"),
13230 WARN_STRICT_OVERFLOW_ALL
);
13231 return constant_boolean_node (0, type
);
13234 /* Likewise (X + c) < X becomes false. */
13235 if (code
== LT_EXPR
13236 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
13237 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
13239 if (TREE_CODE (arg01
) == INTEGER_CST
13240 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13241 fold_overflow_warning (("assuming signed overflow does not "
13242 "occur when assuming that "
13243 "(X + c) < X is always false"),
13244 WARN_STRICT_OVERFLOW_ALL
);
13245 return constant_boolean_node (0, type
);
13248 /* Convert (X - c) <= X to true. */
13249 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
13251 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
13252 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
13254 if (TREE_CODE (arg01
) == INTEGER_CST
13255 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13256 fold_overflow_warning (("assuming signed overflow does not "
13257 "occur when assuming that "
13258 "(X - c) <= X is always true"),
13259 WARN_STRICT_OVERFLOW_ALL
);
13260 return constant_boolean_node (1, type
);
13263 /* Convert (X + c) >= X to true. */
13264 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
13266 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
13267 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
13269 if (TREE_CODE (arg01
) == INTEGER_CST
13270 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13271 fold_overflow_warning (("assuming signed overflow does not "
13272 "occur when assuming that "
13273 "(X + c) >= X is always true"),
13274 WARN_STRICT_OVERFLOW_ALL
);
13275 return constant_boolean_node (1, type
);
13278 if (TREE_CODE (arg01
) == INTEGER_CST
)
13280 /* Convert X + c > X and X - c < X to true for integers. */
13281 if (code
== GT_EXPR
13282 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
13283 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
13285 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13286 fold_overflow_warning (("assuming signed overflow does "
13287 "not occur when assuming that "
13288 "(X + c) > X is always true"),
13289 WARN_STRICT_OVERFLOW_ALL
);
13290 return constant_boolean_node (1, type
);
13293 if (code
== LT_EXPR
13294 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
13295 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
13297 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13298 fold_overflow_warning (("assuming signed overflow does "
13299 "not occur when assuming that "
13300 "(X - c) < X is always true"),
13301 WARN_STRICT_OVERFLOW_ALL
);
13302 return constant_boolean_node (1, type
);
13305 /* Convert X + c <= X and X - c >= X to false for integers. */
13306 if (code
== LE_EXPR
13307 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
13308 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
13310 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13311 fold_overflow_warning (("assuming signed overflow does "
13312 "not occur when assuming that "
13313 "(X + c) <= X is always false"),
13314 WARN_STRICT_OVERFLOW_ALL
);
13315 return constant_boolean_node (0, type
);
13318 if (code
== GE_EXPR
13319 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
13320 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
13322 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13323 fold_overflow_warning (("assuming signed overflow does "
13324 "not occur when assuming that "
13325 "(X - c) >= X is always false"),
13326 WARN_STRICT_OVERFLOW_ALL
);
13327 return constant_boolean_node (0, type
);
13332 /* Comparisons with the highest or lowest possible integer of
13333 the specified precision will have known values. */
13335 tree arg1_type
= TREE_TYPE (arg1
);
13336 unsigned int width
= TYPE_PRECISION (arg1_type
);
13338 if (TREE_CODE (arg1
) == INTEGER_CST
13339 && width
<= HOST_BITS_PER_DOUBLE_INT
13340 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
13342 HOST_WIDE_INT signed_max_hi
;
13343 unsigned HOST_WIDE_INT signed_max_lo
;
13344 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
13346 if (width
<= HOST_BITS_PER_WIDE_INT
)
13348 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
13353 if (TYPE_UNSIGNED (arg1_type
))
13355 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13361 max_lo
= signed_max_lo
;
13362 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13368 width
-= HOST_BITS_PER_WIDE_INT
;
13369 signed_max_lo
= -1;
13370 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
13375 if (TYPE_UNSIGNED (arg1_type
))
13377 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13382 max_hi
= signed_max_hi
;
13383 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13387 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
13388 && TREE_INT_CST_LOW (arg1
) == max_lo
)
13392 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13395 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13398 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13401 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13403 /* The GE_EXPR and LT_EXPR cases above are not normally
13404 reached because of previous transformations. */
13409 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13411 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
13415 arg1
= const_binop (PLUS_EXPR
, arg1
,
13416 build_int_cst (TREE_TYPE (arg1
), 1));
13417 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13418 fold_convert_loc (loc
,
13419 TREE_TYPE (arg1
), arg0
),
13422 arg1
= const_binop (PLUS_EXPR
, arg1
,
13423 build_int_cst (TREE_TYPE (arg1
), 1));
13424 return fold_build2_loc (loc
, NE_EXPR
, type
,
13425 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13431 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13433 && TREE_INT_CST_LOW (arg1
) == min_lo
)
13437 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13440 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13443 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13446 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13451 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13453 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
13457 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13458 return fold_build2_loc (loc
, NE_EXPR
, type
,
13459 fold_convert_loc (loc
,
13460 TREE_TYPE (arg1
), arg0
),
13463 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13464 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13465 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13472 else if (TREE_INT_CST_HIGH (arg1
) == signed_max_hi
13473 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
13474 && TYPE_UNSIGNED (arg1_type
)
13475 /* We will flip the signedness of the comparison operator
13476 associated with the mode of arg1, so the sign bit is
13477 specified by this mode. Check that arg1 is the signed
13478 max associated with this sign bit. */
13479 && width
== GET_MODE_BITSIZE (TYPE_MODE (arg1_type
))
13480 /* signed_type does not work on pointer types. */
13481 && INTEGRAL_TYPE_P (arg1_type
))
13483 /* The following case also applies to X < signed_max+1
13484 and X >= signed_max+1 because previous transformations. */
13485 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13488 st
= signed_type_for (TREE_TYPE (arg1
));
13489 return fold_build2_loc (loc
,
13490 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13491 type
, fold_convert_loc (loc
, st
, arg0
),
13492 build_int_cst (st
, 0));
13498 /* If we are comparing an ABS_EXPR with a constant, we can
13499 convert all the cases into explicit comparisons, but they may
13500 well not be faster than doing the ABS and one comparison.
13501 But ABS (X) <= C is a range comparison, which becomes a subtraction
13502 and a comparison, and is probably faster. */
13503 if (code
== LE_EXPR
13504 && TREE_CODE (arg1
) == INTEGER_CST
13505 && TREE_CODE (arg0
) == ABS_EXPR
13506 && ! TREE_SIDE_EFFECTS (arg0
)
13507 && (0 != (tem
= negate_expr (arg1
)))
13508 && TREE_CODE (tem
) == INTEGER_CST
13509 && !TREE_OVERFLOW (tem
))
13510 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13511 build2 (GE_EXPR
, type
,
13512 TREE_OPERAND (arg0
, 0), tem
),
13513 build2 (LE_EXPR
, type
,
13514 TREE_OPERAND (arg0
, 0), arg1
));
13516 /* Convert ABS_EXPR<x> >= 0 to true. */
13517 strict_overflow_p
= false;
13518 if (code
== GE_EXPR
13519 && (integer_zerop (arg1
)
13520 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
13521 && real_zerop (arg1
)))
13522 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13524 if (strict_overflow_p
)
13525 fold_overflow_warning (("assuming signed overflow does not occur "
13526 "when simplifying comparison of "
13527 "absolute value and zero"),
13528 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13529 return omit_one_operand_loc (loc
, type
,
13530 constant_boolean_node (true, type
),
13534 /* Convert ABS_EXPR<x> < 0 to false. */
13535 strict_overflow_p
= false;
13536 if (code
== LT_EXPR
13537 && (integer_zerop (arg1
) || real_zerop (arg1
))
13538 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13540 if (strict_overflow_p
)
13541 fold_overflow_warning (("assuming signed overflow does not occur "
13542 "when simplifying comparison of "
13543 "absolute value and zero"),
13544 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13545 return omit_one_operand_loc (loc
, type
,
13546 constant_boolean_node (false, type
),
13550 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13551 and similarly for >= into !=. */
13552 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13553 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13554 && TREE_CODE (arg1
) == LSHIFT_EXPR
13555 && integer_onep (TREE_OPERAND (arg1
, 0)))
13556 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13557 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13558 TREE_OPERAND (arg1
, 1)),
13559 build_zero_cst (TREE_TYPE (arg0
)));
13561 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13562 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13563 && CONVERT_EXPR_P (arg1
)
13564 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13565 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13567 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13568 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13569 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13570 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13571 build_zero_cst (TREE_TYPE (arg0
)));
13576 case UNORDERED_EXPR
:
13584 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13586 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13587 if (t1
!= NULL_TREE
)
13591 /* If the first operand is NaN, the result is constant. */
13592 if (TREE_CODE (arg0
) == REAL_CST
13593 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13594 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13596 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13597 ? integer_zero_node
13598 : integer_one_node
;
13599 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13602 /* If the second operand is NaN, the result is constant. */
13603 if (TREE_CODE (arg1
) == REAL_CST
13604 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13605 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13607 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13608 ? integer_zero_node
13609 : integer_one_node
;
13610 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13613 /* Simplify unordered comparison of something with itself. */
13614 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13615 && operand_equal_p (arg0
, arg1
, 0))
13616 return constant_boolean_node (1, type
);
13618 if (code
== LTGT_EXPR
13619 && !flag_trapping_math
13620 && operand_equal_p (arg0
, arg1
, 0))
13621 return constant_boolean_node (0, type
);
13623 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13625 tree targ0
= strip_float_extensions (arg0
);
13626 tree targ1
= strip_float_extensions (arg1
);
13627 tree newtype
= TREE_TYPE (targ0
);
13629 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13630 newtype
= TREE_TYPE (targ1
);
13632 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13633 return fold_build2_loc (loc
, code
, type
,
13634 fold_convert_loc (loc
, newtype
, targ0
),
13635 fold_convert_loc (loc
, newtype
, targ1
));
13640 case COMPOUND_EXPR
:
13641 /* When pedantic, a compound expression can be neither an lvalue
13642 nor an integer constant expression. */
13643 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13645 /* Don't let (0, 0) be null pointer constant. */
13646 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13647 : fold_convert_loc (loc
, type
, arg1
);
13648 return pedantic_non_lvalue_loc (loc
, tem
);
13651 if ((TREE_CODE (arg0
) == REAL_CST
13652 && TREE_CODE (arg1
) == REAL_CST
)
13653 || (TREE_CODE (arg0
) == INTEGER_CST
13654 && TREE_CODE (arg1
) == INTEGER_CST
))
13655 return build_complex (type
, arg0
, arg1
);
13656 if (TREE_CODE (arg0
) == REALPART_EXPR
13657 && TREE_CODE (arg1
) == IMAGPART_EXPR
13658 && TREE_TYPE (TREE_OPERAND (arg0
, 0)) == type
13659 && operand_equal_p (TREE_OPERAND (arg0
, 0),
13660 TREE_OPERAND (arg1
, 0), 0))
13661 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
13662 TREE_OPERAND (arg1
, 0));
13666 /* An ASSERT_EXPR should never be passed to fold_binary. */
13667 gcc_unreachable ();
13669 case VEC_PACK_TRUNC_EXPR
:
13670 case VEC_PACK_FIX_TRUNC_EXPR
:
13672 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13675 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
/ 2
13676 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2);
13677 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13680 elts
= XALLOCAVEC (tree
, nelts
);
13681 if (!vec_cst_ctor_to_array (arg0
, elts
)
13682 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
/ 2))
13685 for (i
= 0; i
< nelts
; i
++)
13687 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
13688 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
13689 TREE_TYPE (type
), elts
[i
]);
13690 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
13694 return build_vector (type
, elts
);
13697 case VEC_WIDEN_MULT_LO_EXPR
:
13698 case VEC_WIDEN_MULT_HI_EXPR
:
13699 case VEC_WIDEN_MULT_EVEN_EXPR
:
13700 case VEC_WIDEN_MULT_ODD_EXPR
:
13702 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
13703 unsigned int out
, ofs
, scale
;
13706 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2
13707 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2);
13708 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13711 elts
= XALLOCAVEC (tree
, nelts
* 4);
13712 if (!vec_cst_ctor_to_array (arg0
, elts
)
13713 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
* 2))
13716 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
13717 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
13718 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
13719 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
13720 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
13721 scale
= 1, ofs
= 0;
13722 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13723 scale
= 1, ofs
= 1;
13725 for (out
= 0; out
< nelts
; out
++)
13727 unsigned int in1
= (out
<< scale
) + ofs
;
13728 unsigned int in2
= in1
+ nelts
* 2;
13731 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
13732 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
13734 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13736 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
13737 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
13741 return build_vector (type
, elts
);
13746 } /* switch (code) */
13749 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13750 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13754 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13756 switch (TREE_CODE (*tp
))
13762 *walk_subtrees
= 0;
13764 /* ... fall through ... */
13771 /* Return whether the sub-tree ST contains a label which is accessible from
13772 outside the sub-tree. */
13775 contains_label_p (tree st
)
13778 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13781 /* Fold a ternary expression of code CODE and type TYPE with operands
13782 OP0, OP1, and OP2. Return the folded expression if folding is
13783 successful. Otherwise, return NULL_TREE. */
13786 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13787 tree op0
, tree op1
, tree op2
)
13790 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13791 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13793 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13794 && TREE_CODE_LENGTH (code
) == 3);
13796 /* Strip any conversions that don't change the mode. This is safe
13797 for every expression, except for a comparison expression because
13798 its signedness is derived from its operands. So, in the latter
13799 case, only strip conversions that don't change the signedness.
13801 Note that this is done as an internal manipulation within the
13802 constant folder, in order to find the simplest representation of
13803 the arguments so that their form can be studied. In any cases,
13804 the appropriate type conversions should be put back in the tree
13805 that will get out of the constant folder. */
13826 case COMPONENT_REF
:
13827 if (TREE_CODE (arg0
) == CONSTRUCTOR
13828 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13830 unsigned HOST_WIDE_INT idx
;
13832 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13839 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13840 so all simple results must be passed through pedantic_non_lvalue. */
13841 if (TREE_CODE (arg0
) == INTEGER_CST
)
13843 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13844 tem
= integer_zerop (arg0
) ? op2
: op1
;
13845 /* Only optimize constant conditions when the selected branch
13846 has the same type as the COND_EXPR. This avoids optimizing
13847 away "c ? x : throw", where the throw has a void type.
13848 Avoid throwing away that operand which contains label. */
13849 if ((!TREE_SIDE_EFFECTS (unused_op
)
13850 || !contains_label_p (unused_op
))
13851 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13852 || VOID_TYPE_P (type
)))
13853 return pedantic_non_lvalue_loc (loc
, tem
);
13856 if (operand_equal_p (arg1
, op2
, 0))
13857 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg0
);
13859 /* If we have A op B ? A : C, we may be able to convert this to a
13860 simpler expression, depending on the operation and the values
13861 of B and C. Signed zeros prevent all of these transformations,
13862 for reasons given above each one.
13864 Also try swapping the arguments and inverting the conditional. */
13865 if (COMPARISON_CLASS_P (arg0
)
13866 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13867 arg1
, TREE_OPERAND (arg0
, 1))
13868 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
13870 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13875 if (COMPARISON_CLASS_P (arg0
)
13876 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13878 TREE_OPERAND (arg0
, 1))
13879 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
13881 location_t loc0
= expr_location_or (arg0
, loc
);
13882 tem
= fold_truth_not_expr (loc0
, arg0
);
13883 if (tem
&& COMPARISON_CLASS_P (tem
))
13885 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13891 /* If the second operand is simpler than the third, swap them
13892 since that produces better jump optimization results. */
13893 if (truth_value_p (TREE_CODE (arg0
))
13894 && tree_swap_operands_p (op1
, op2
, false))
13896 location_t loc0
= expr_location_or (arg0
, loc
);
13897 /* See if this can be inverted. If it can't, possibly because
13898 it was a floating-point inequality comparison, don't do
13900 tem
= fold_truth_not_expr (loc0
, arg0
);
13902 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13905 /* Convert A ? 1 : 0 to simply A. */
13906 if (integer_onep (op1
)
13907 && integer_zerop (op2
)
13908 /* If we try to convert OP0 to our type, the
13909 call to fold will try to move the conversion inside
13910 a COND, which will recurse. In that case, the COND_EXPR
13911 is probably the best choice, so leave it alone. */
13912 && type
== TREE_TYPE (arg0
))
13913 return pedantic_non_lvalue_loc (loc
, arg0
);
13915 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13916 over COND_EXPR in cases such as floating point comparisons. */
13917 if (integer_zerop (op1
)
13918 && integer_onep (op2
)
13919 && truth_value_p (TREE_CODE (arg0
)))
13920 return pedantic_non_lvalue_loc (loc
,
13921 fold_convert_loc (loc
, type
,
13922 invert_truthvalue_loc (loc
,
13925 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13926 if (TREE_CODE (arg0
) == LT_EXPR
13927 && integer_zerop (TREE_OPERAND (arg0
, 1))
13928 && integer_zerop (op2
)
13929 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13931 /* sign_bit_p only checks ARG1 bits within A's precision.
13932 If <sign bit of A> has wider type than A, bits outside
13933 of A's precision in <sign bit of A> need to be checked.
13934 If they are all 0, this optimization needs to be done
13935 in unsigned A's type, if they are all 1 in signed A's type,
13936 otherwise this can't be done. */
13937 if (TYPE_PRECISION (TREE_TYPE (tem
))
13938 < TYPE_PRECISION (TREE_TYPE (arg1
))
13939 && TYPE_PRECISION (TREE_TYPE (tem
))
13940 < TYPE_PRECISION (type
))
13942 unsigned HOST_WIDE_INT mask_lo
;
13943 HOST_WIDE_INT mask_hi
;
13944 int inner_width
, outer_width
;
13947 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13948 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13949 if (outer_width
> TYPE_PRECISION (type
))
13950 outer_width
= TYPE_PRECISION (type
);
13952 if (outer_width
> HOST_BITS_PER_WIDE_INT
)
13954 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
13955 >> (HOST_BITS_PER_DOUBLE_INT
- outer_width
));
13961 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
13962 >> (HOST_BITS_PER_WIDE_INT
- outer_width
));
13964 if (inner_width
> HOST_BITS_PER_WIDE_INT
)
13966 mask_hi
&= ~((unsigned HOST_WIDE_INT
) -1
13967 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13971 mask_lo
&= ~((unsigned HOST_WIDE_INT
) -1
13972 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13974 if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == mask_hi
13975 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == mask_lo
)
13977 tem_type
= signed_type_for (TREE_TYPE (tem
));
13978 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13980 else if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == 0
13981 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == 0)
13983 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13984 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13992 fold_convert_loc (loc
, type
,
13993 fold_build2_loc (loc
, BIT_AND_EXPR
,
13994 TREE_TYPE (tem
), tem
,
13995 fold_convert_loc (loc
,
14000 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14001 already handled above. */
14002 if (TREE_CODE (arg0
) == BIT_AND_EXPR
14003 && integer_onep (TREE_OPERAND (arg0
, 1))
14004 && integer_zerop (op2
)
14005 && integer_pow2p (arg1
))
14007 tree tem
= TREE_OPERAND (arg0
, 0);
14009 if (TREE_CODE (tem
) == RSHIFT_EXPR
14010 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
14011 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
14012 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
14013 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
14014 TREE_OPERAND (tem
, 0), arg1
);
14017 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14018 is probably obsolete because the first operand should be a
14019 truth value (that's why we have the two cases above), but let's
14020 leave it in until we can confirm this for all front-ends. */
14021 if (integer_zerop (op2
)
14022 && TREE_CODE (arg0
) == NE_EXPR
14023 && integer_zerop (TREE_OPERAND (arg0
, 1))
14024 && integer_pow2p (arg1
)
14025 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
14026 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
14027 arg1
, OEP_ONLY_CONST
))
14028 return pedantic_non_lvalue_loc (loc
,
14029 fold_convert_loc (loc
, type
,
14030 TREE_OPERAND (arg0
, 0)));
14032 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14033 if (integer_zerop (op2
)
14034 && truth_value_p (TREE_CODE (arg0
))
14035 && truth_value_p (TREE_CODE (arg1
)))
14036 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
14037 fold_convert_loc (loc
, type
, arg0
),
14040 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14041 if (integer_onep (op2
)
14042 && truth_value_p (TREE_CODE (arg0
))
14043 && truth_value_p (TREE_CODE (arg1
)))
14045 location_t loc0
= expr_location_or (arg0
, loc
);
14046 /* Only perform transformation if ARG0 is easily inverted. */
14047 tem
= fold_truth_not_expr (loc0
, arg0
);
14049 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
14050 fold_convert_loc (loc
, type
, tem
),
14054 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14055 if (integer_zerop (arg1
)
14056 && truth_value_p (TREE_CODE (arg0
))
14057 && truth_value_p (TREE_CODE (op2
)))
14059 location_t loc0
= expr_location_or (arg0
, loc
);
14060 /* Only perform transformation if ARG0 is easily inverted. */
14061 tem
= fold_truth_not_expr (loc0
, arg0
);
14063 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
14064 fold_convert_loc (loc
, type
, tem
),
14068 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14069 if (integer_onep (arg1
)
14070 && truth_value_p (TREE_CODE (arg0
))
14071 && truth_value_p (TREE_CODE (op2
)))
14072 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
14073 fold_convert_loc (loc
, type
, arg0
),
14078 case VEC_COND_EXPR
:
14079 if (TREE_CODE (arg0
) == VECTOR_CST
)
14081 if (integer_all_onesp (arg0
) && !TREE_SIDE_EFFECTS (op2
))
14082 return pedantic_non_lvalue_loc (loc
, op1
);
14083 if (integer_zerop (arg0
) && !TREE_SIDE_EFFECTS (op1
))
14084 return pedantic_non_lvalue_loc (loc
, op2
);
14089 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14090 of fold_ternary on them. */
14091 gcc_unreachable ();
14093 case BIT_FIELD_REF
:
14094 if ((TREE_CODE (arg0
) == VECTOR_CST
14095 || (TREE_CODE (arg0
) == CONSTRUCTOR
14096 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
14097 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
14098 || (TREE_CODE (type
) == VECTOR_TYPE
14099 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
14101 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
14102 unsigned HOST_WIDE_INT width
= tree_low_cst (TYPE_SIZE (eltype
), 1);
14103 unsigned HOST_WIDE_INT n
= tree_low_cst (arg1
, 1);
14104 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
14107 && (idx
% width
) == 0
14108 && (n
% width
) == 0
14109 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
14114 if (TREE_CODE (arg0
) == VECTOR_CST
)
14117 return VECTOR_CST_ELT (arg0
, idx
);
14119 tree
*vals
= XALLOCAVEC (tree
, n
);
14120 for (unsigned i
= 0; i
< n
; ++i
)
14121 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
14122 return build_vector (type
, vals
);
14125 /* Constructor elements can be subvectors. */
14126 unsigned HOST_WIDE_INT k
= 1;
14127 if (CONSTRUCTOR_NELTS (arg0
) != 0)
14129 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
14130 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
14131 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
14134 /* We keep an exact subset of the constructor elements. */
14135 if ((idx
% k
) == 0 && (n
% k
) == 0)
14137 if (CONSTRUCTOR_NELTS (arg0
) == 0)
14138 return build_constructor (type
, NULL
);
14143 if (idx
< CONSTRUCTOR_NELTS (arg0
))
14144 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
14145 return build_zero_cst (type
);
14148 vec
<constructor_elt
, va_gc
> *vals
;
14149 vec_alloc (vals
, n
);
14150 for (unsigned i
= 0;
14151 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
14153 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
14155 (arg0
, idx
+ i
)->value
);
14156 return build_constructor (type
, vals
);
14158 /* The bitfield references a single constructor element. */
14159 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
14161 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
14162 return build_zero_cst (type
);
14164 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
14166 return fold_build3_loc (loc
, code
, type
,
14167 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
14168 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
14173 /* A bit-field-ref that referenced the full argument can be stripped. */
14174 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
14175 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_low_cst (arg1
, 1)
14176 && integer_zerop (op2
))
14177 return fold_convert_loc (loc
, type
, arg0
);
14179 /* On constants we can use native encode/interpret to constant
14180 fold (nearly) all BIT_FIELD_REFs. */
14181 if (CONSTANT_CLASS_P (arg0
)
14182 && can_native_interpret_type_p (type
)
14183 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)), 1)
14184 /* This limitation should not be necessary, we just need to
14185 round this up to mode size. */
14186 && tree_low_cst (op1
, 1) % BITS_PER_UNIT
== 0
14187 /* Need bit-shifting of the buffer to relax the following. */
14188 && tree_low_cst (op2
, 1) % BITS_PER_UNIT
== 0)
14190 unsigned HOST_WIDE_INT bitpos
= tree_low_cst (op2
, 1);
14191 unsigned HOST_WIDE_INT bitsize
= tree_low_cst (op1
, 1);
14192 unsigned HOST_WIDE_INT clen
;
14193 clen
= tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)), 1);
14194 /* ??? We cannot tell native_encode_expr to start at
14195 some random byte only. So limit us to a reasonable amount
14199 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
14200 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
14202 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
14204 tree v
= native_interpret_expr (type
,
14205 b
+ bitpos
/ BITS_PER_UNIT
,
14206 bitsize
/ BITS_PER_UNIT
);
14216 /* For integers we can decompose the FMA if possible. */
14217 if (TREE_CODE (arg0
) == INTEGER_CST
14218 && TREE_CODE (arg1
) == INTEGER_CST
)
14219 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
14220 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
14221 if (integer_zerop (arg2
))
14222 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
14224 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
14226 case VEC_PERM_EXPR
:
14227 if (TREE_CODE (arg2
) == VECTOR_CST
)
14229 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
;
14230 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
14232 bool need_mask_canon
= false;
14233 bool all_in_vec0
= true;
14234 bool all_in_vec1
= true;
14235 bool maybe_identity
= true;
14236 bool single_arg
= (op0
== op1
);
14237 bool changed
= false;
14239 mask
= single_arg
? (nelts
- 1) : (2 * nelts
- 1);
14240 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
14241 for (i
= 0; i
< nelts
; i
++)
14243 tree val
= VECTOR_CST_ELT (arg2
, i
);
14244 if (TREE_CODE (val
) != INTEGER_CST
)
14247 sel
[i
] = TREE_INT_CST_LOW (val
) & mask
;
14248 if (TREE_INT_CST_HIGH (val
)
14249 || ((unsigned HOST_WIDE_INT
)
14250 TREE_INT_CST_LOW (val
) != sel
[i
]))
14251 need_mask_canon
= true;
14253 if (sel
[i
] < nelts
)
14254 all_in_vec1
= false;
14256 all_in_vec0
= false;
14258 if ((sel
[i
] & (nelts
-1)) != i
)
14259 maybe_identity
= false;
14262 if (maybe_identity
)
14272 else if (all_in_vec1
)
14275 for (i
= 0; i
< nelts
; i
++)
14277 need_mask_canon
= true;
14280 if ((TREE_CODE (op0
) == VECTOR_CST
14281 || TREE_CODE (op0
) == CONSTRUCTOR
)
14282 && (TREE_CODE (op1
) == VECTOR_CST
14283 || TREE_CODE (op1
) == CONSTRUCTOR
))
14285 t
= fold_vec_perm (type
, op0
, op1
, sel
);
14286 if (t
!= NULL_TREE
)
14290 if (op0
== op1
&& !single_arg
)
14293 if (need_mask_canon
&& arg2
== op2
)
14295 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
14296 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
14297 for (i
= 0; i
< nelts
; i
++)
14298 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
14299 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
14304 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
14310 } /* switch (code) */
14313 /* Perform constant folding and related simplification of EXPR.
14314 The related simplifications include x*1 => x, x*0 => 0, etc.,
14315 and application of the associative law.
14316 NOP_EXPR conversions may be removed freely (as long as we
14317 are careful not to change the type of the overall expression).
14318 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14319 but we can constant-fold them if they have constant operands. */
14321 #ifdef ENABLE_FOLD_CHECKING
14322 # define fold(x) fold_1 (x)
14323 static tree
fold_1 (tree
);
14329 const tree t
= expr
;
14330 enum tree_code code
= TREE_CODE (t
);
14331 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
14333 location_t loc
= EXPR_LOCATION (expr
);
14335 /* Return right away if a constant. */
14336 if (kind
== tcc_constant
)
14339 /* CALL_EXPR-like objects with variable numbers of operands are
14340 treated specially. */
14341 if (kind
== tcc_vl_exp
)
14343 if (code
== CALL_EXPR
)
14345 tem
= fold_call_expr (loc
, expr
, false);
14346 return tem
? tem
: expr
;
14351 if (IS_EXPR_CODE_CLASS (kind
))
14353 tree type
= TREE_TYPE (t
);
14354 tree op0
, op1
, op2
;
14356 switch (TREE_CODE_LENGTH (code
))
14359 op0
= TREE_OPERAND (t
, 0);
14360 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14361 return tem
? tem
: expr
;
14363 op0
= TREE_OPERAND (t
, 0);
14364 op1
= TREE_OPERAND (t
, 1);
14365 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14366 return tem
? tem
: expr
;
14368 op0
= TREE_OPERAND (t
, 0);
14369 op1
= TREE_OPERAND (t
, 1);
14370 op2
= TREE_OPERAND (t
, 2);
14371 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14372 return tem
? tem
: expr
;
14382 tree op0
= TREE_OPERAND (t
, 0);
14383 tree op1
= TREE_OPERAND (t
, 1);
14385 if (TREE_CODE (op1
) == INTEGER_CST
14386 && TREE_CODE (op0
) == CONSTRUCTOR
14387 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
14389 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
14390 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
14391 unsigned HOST_WIDE_INT begin
= 0;
14393 /* Find a matching index by means of a binary search. */
14394 while (begin
!= end
)
14396 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
14397 tree index
= (*elts
)[middle
].index
;
14399 if (TREE_CODE (index
) == INTEGER_CST
14400 && tree_int_cst_lt (index
, op1
))
14401 begin
= middle
+ 1;
14402 else if (TREE_CODE (index
) == INTEGER_CST
14403 && tree_int_cst_lt (op1
, index
))
14405 else if (TREE_CODE (index
) == RANGE_EXPR
14406 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
14407 begin
= middle
+ 1;
14408 else if (TREE_CODE (index
) == RANGE_EXPR
14409 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
14412 return (*elts
)[middle
].value
;
14419 /* Return a VECTOR_CST if possible. */
14422 tree type
= TREE_TYPE (t
);
14423 if (TREE_CODE (type
) != VECTOR_TYPE
)
14426 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
14427 unsigned HOST_WIDE_INT idx
, pos
= 0;
14430 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
14432 if (!CONSTANT_CLASS_P (value
))
14434 if (TREE_CODE (value
) == VECTOR_CST
)
14436 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
14437 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
14440 vec
[pos
++] = value
;
14442 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
14443 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
14445 return build_vector (type
, vec
);
14449 return fold (DECL_INITIAL (t
));
14453 } /* switch (code) */
14456 #ifdef ENABLE_FOLD_CHECKING
14459 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
14460 hash_table
<pointer_hash
<tree_node
> >);
14461 static void fold_check_failed (const_tree
, const_tree
);
14462 void print_fold_checksum (const_tree
);
14464 /* When --enable-checking=fold, compute a digest of expr before
14465 and after actual fold call to see if fold did not accidentally
14466 change original expr. */
14472 struct md5_ctx ctx
;
14473 unsigned char checksum_before
[16], checksum_after
[16];
14474 hash_table
<pointer_hash
<tree_node
> > ht
;
14477 md5_init_ctx (&ctx
);
14478 fold_checksum_tree (expr
, &ctx
, ht
);
14479 md5_finish_ctx (&ctx
, checksum_before
);
14482 ret
= fold_1 (expr
);
14484 md5_init_ctx (&ctx
);
14485 fold_checksum_tree (expr
, &ctx
, ht
);
14486 md5_finish_ctx (&ctx
, checksum_after
);
14489 if (memcmp (checksum_before
, checksum_after
, 16))
14490 fold_check_failed (expr
, ret
);
14496 print_fold_checksum (const_tree expr
)
14498 struct md5_ctx ctx
;
14499 unsigned char checksum
[16], cnt
;
14500 hash_table
<pointer_hash
<tree_node
> > ht
;
14503 md5_init_ctx (&ctx
);
14504 fold_checksum_tree (expr
, &ctx
, ht
);
14505 md5_finish_ctx (&ctx
, checksum
);
14507 for (cnt
= 0; cnt
< 16; ++cnt
)
14508 fprintf (stderr
, "%02x", checksum
[cnt
]);
14509 putc ('\n', stderr
);
14513 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14515 internal_error ("fold check: original tree changed by fold");
14519 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
14520 hash_table
<pointer_hash
<tree_node
> > ht
)
14523 enum tree_code code
;
14524 union tree_node buf
;
14530 slot
= ht
.find_slot (expr
, INSERT
);
14533 *slot
= CONST_CAST_TREE (expr
);
14534 code
= TREE_CODE (expr
);
14535 if (TREE_CODE_CLASS (code
) == tcc_declaration
14536 && DECL_ASSEMBLER_NAME_SET_P (expr
))
14538 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14539 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14540 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14541 expr
= (tree
) &buf
;
14543 else if (TREE_CODE_CLASS (code
) == tcc_type
14544 && (TYPE_POINTER_TO (expr
)
14545 || TYPE_REFERENCE_TO (expr
)
14546 || TYPE_CACHED_VALUES_P (expr
)
14547 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14548 || TYPE_NEXT_VARIANT (expr
)))
14550 /* Allow these fields to be modified. */
14552 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14553 expr
= tmp
= (tree
) &buf
;
14554 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14555 TYPE_POINTER_TO (tmp
) = NULL
;
14556 TYPE_REFERENCE_TO (tmp
) = NULL
;
14557 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14558 if (TYPE_CACHED_VALUES_P (tmp
))
14560 TYPE_CACHED_VALUES_P (tmp
) = 0;
14561 TYPE_CACHED_VALUES (tmp
) = NULL
;
14564 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14565 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14566 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14567 if (TREE_CODE_CLASS (code
) != tcc_type
14568 && TREE_CODE_CLASS (code
) != tcc_declaration
14569 && code
!= TREE_LIST
14570 && code
!= SSA_NAME
14571 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14572 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
14573 switch (TREE_CODE_CLASS (code
))
14579 md5_process_bytes (TREE_STRING_POINTER (expr
),
14580 TREE_STRING_LENGTH (expr
), ctx
);
14583 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14584 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14587 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14588 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14594 case tcc_exceptional
:
14598 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14599 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14600 expr
= TREE_CHAIN (expr
);
14601 goto recursive_label
;
14604 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14605 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14611 case tcc_expression
:
14612 case tcc_reference
:
14613 case tcc_comparison
:
14616 case tcc_statement
:
14618 len
= TREE_OPERAND_LENGTH (expr
);
14619 for (i
= 0; i
< len
; ++i
)
14620 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14622 case tcc_declaration
:
14623 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14624 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14625 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14627 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14628 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14629 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14630 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14631 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14633 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
14634 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
14636 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14638 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14639 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14640 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
14644 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14645 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14646 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14647 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14648 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14649 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14650 if (INTEGRAL_TYPE_P (expr
)
14651 || SCALAR_FLOAT_TYPE_P (expr
))
14653 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14654 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14656 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14657 if (TREE_CODE (expr
) == RECORD_TYPE
14658 || TREE_CODE (expr
) == UNION_TYPE
14659 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14660 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14661 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14668 /* Helper function for outputting the checksum of a tree T. When
14669 debugging with gdb, you can "define mynext" to be "next" followed
14670 by "call debug_fold_checksum (op0)", then just trace down till the
14673 DEBUG_FUNCTION
void
14674 debug_fold_checksum (const_tree t
)
14677 unsigned char checksum
[16];
14678 struct md5_ctx ctx
;
14679 hash_table
<pointer_hash
<tree_node
> > ht
;
14682 md5_init_ctx (&ctx
);
14683 fold_checksum_tree (t
, &ctx
, ht
);
14684 md5_finish_ctx (&ctx
, checksum
);
14687 for (i
= 0; i
< 16; i
++)
14688 fprintf (stderr
, "%d ", checksum
[i
]);
14690 fprintf (stderr
, "\n");
14695 /* Fold a unary tree expression with code CODE of type TYPE with an
14696 operand OP0. LOC is the location of the resulting expression.
14697 Return a folded expression if successful. Otherwise, return a tree
14698 expression with code CODE of type TYPE with an operand OP0. */
14701 fold_build1_stat_loc (location_t loc
,
14702 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14705 #ifdef ENABLE_FOLD_CHECKING
14706 unsigned char checksum_before
[16], checksum_after
[16];
14707 struct md5_ctx ctx
;
14708 hash_table
<pointer_hash
<tree_node
> > ht
;
14711 md5_init_ctx (&ctx
);
14712 fold_checksum_tree (op0
, &ctx
, ht
);
14713 md5_finish_ctx (&ctx
, checksum_before
);
14717 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14719 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14721 #ifdef ENABLE_FOLD_CHECKING
14722 md5_init_ctx (&ctx
);
14723 fold_checksum_tree (op0
, &ctx
, ht
);
14724 md5_finish_ctx (&ctx
, checksum_after
);
14727 if (memcmp (checksum_before
, checksum_after
, 16))
14728 fold_check_failed (op0
, tem
);
14733 /* Fold a binary tree expression with code CODE of type TYPE with
14734 operands OP0 and OP1. LOC is the location of the resulting
14735 expression. Return a folded expression if successful. Otherwise,
14736 return a tree expression with code CODE of type TYPE with operands
14740 fold_build2_stat_loc (location_t loc
,
14741 enum tree_code code
, tree type
, tree op0
, tree op1
14745 #ifdef ENABLE_FOLD_CHECKING
14746 unsigned char checksum_before_op0
[16],
14747 checksum_before_op1
[16],
14748 checksum_after_op0
[16],
14749 checksum_after_op1
[16];
14750 struct md5_ctx ctx
;
14751 hash_table
<pointer_hash
<tree_node
> > ht
;
14754 md5_init_ctx (&ctx
);
14755 fold_checksum_tree (op0
, &ctx
, ht
);
14756 md5_finish_ctx (&ctx
, checksum_before_op0
);
14759 md5_init_ctx (&ctx
);
14760 fold_checksum_tree (op1
, &ctx
, ht
);
14761 md5_finish_ctx (&ctx
, checksum_before_op1
);
14765 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14767 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14769 #ifdef ENABLE_FOLD_CHECKING
14770 md5_init_ctx (&ctx
);
14771 fold_checksum_tree (op0
, &ctx
, ht
);
14772 md5_finish_ctx (&ctx
, checksum_after_op0
);
14775 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14776 fold_check_failed (op0
, tem
);
14778 md5_init_ctx (&ctx
);
14779 fold_checksum_tree (op1
, &ctx
, ht
);
14780 md5_finish_ctx (&ctx
, checksum_after_op1
);
14783 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14784 fold_check_failed (op1
, tem
);
14789 /* Fold a ternary tree expression with code CODE of type TYPE with
14790 operands OP0, OP1, and OP2. Return a folded expression if
14791 successful. Otherwise, return a tree expression with code CODE of
14792 type TYPE with operands OP0, OP1, and OP2. */
14795 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14796 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14799 #ifdef ENABLE_FOLD_CHECKING
14800 unsigned char checksum_before_op0
[16],
14801 checksum_before_op1
[16],
14802 checksum_before_op2
[16],
14803 checksum_after_op0
[16],
14804 checksum_after_op1
[16],
14805 checksum_after_op2
[16];
14806 struct md5_ctx ctx
;
14807 hash_table
<pointer_hash
<tree_node
> > ht
;
14810 md5_init_ctx (&ctx
);
14811 fold_checksum_tree (op0
, &ctx
, ht
);
14812 md5_finish_ctx (&ctx
, checksum_before_op0
);
14815 md5_init_ctx (&ctx
);
14816 fold_checksum_tree (op1
, &ctx
, ht
);
14817 md5_finish_ctx (&ctx
, checksum_before_op1
);
14820 md5_init_ctx (&ctx
);
14821 fold_checksum_tree (op2
, &ctx
, ht
);
14822 md5_finish_ctx (&ctx
, checksum_before_op2
);
14826 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14827 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14829 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14831 #ifdef ENABLE_FOLD_CHECKING
14832 md5_init_ctx (&ctx
);
14833 fold_checksum_tree (op0
, &ctx
, ht
);
14834 md5_finish_ctx (&ctx
, checksum_after_op0
);
14837 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14838 fold_check_failed (op0
, tem
);
14840 md5_init_ctx (&ctx
);
14841 fold_checksum_tree (op1
, &ctx
, ht
);
14842 md5_finish_ctx (&ctx
, checksum_after_op1
);
14845 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14846 fold_check_failed (op1
, tem
);
14848 md5_init_ctx (&ctx
);
14849 fold_checksum_tree (op2
, &ctx
, ht
);
14850 md5_finish_ctx (&ctx
, checksum_after_op2
);
14853 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14854 fold_check_failed (op2
, tem
);
14859 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14860 arguments in ARGARRAY, and a null static chain.
14861 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14862 of type TYPE from the given operands as constructed by build_call_array. */
14865 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14866 int nargs
, tree
*argarray
)
14869 #ifdef ENABLE_FOLD_CHECKING
14870 unsigned char checksum_before_fn
[16],
14871 checksum_before_arglist
[16],
14872 checksum_after_fn
[16],
14873 checksum_after_arglist
[16];
14874 struct md5_ctx ctx
;
14875 hash_table
<pointer_hash
<tree_node
> > ht
;
14879 md5_init_ctx (&ctx
);
14880 fold_checksum_tree (fn
, &ctx
, ht
);
14881 md5_finish_ctx (&ctx
, checksum_before_fn
);
14884 md5_init_ctx (&ctx
);
14885 for (i
= 0; i
< nargs
; i
++)
14886 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14887 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14891 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14893 #ifdef ENABLE_FOLD_CHECKING
14894 md5_init_ctx (&ctx
);
14895 fold_checksum_tree (fn
, &ctx
, ht
);
14896 md5_finish_ctx (&ctx
, checksum_after_fn
);
14899 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14900 fold_check_failed (fn
, tem
);
14902 md5_init_ctx (&ctx
);
14903 for (i
= 0; i
< nargs
; i
++)
14904 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14905 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14908 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14909 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

/* NOTE(review): the extraction dropped the `flag_trapv = 0;` line between
   `flag_rounding_math = 0;` and `folding_initializer = 1;`; it is restored
   here so that END_FOLD_INIT's restore of flag_trapv has a matching clear.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
14938 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14939 tree type
, tree op
)
14944 result
= fold_build1_loc (loc
, code
, type
, op
);
14951 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14952 tree type
, tree op0
, tree op1
)
14957 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14964 fold_build3_initializer_loc (location_t loc
, enum tree_code code
,
14965 tree type
, tree op0
, tree op1
, tree op2
)
14970 result
= fold_build3_loc (loc
, code
, type
, op0
, op1
, op2
);
14977 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14978 int nargs
, tree
*argarray
)
14983 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14989 #undef START_FOLD_INIT
14990 #undef END_FOLD_INIT
14992 /* Determine if first argument is a multiple of second argument. Return 0 if
14993 it is not, or we cannot easily determined it to be.
14995 An example of the sort of thing we care about (at this point; this routine
14996 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14997 fold cases do now) is discovering that
14999 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15005 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15007 This code also handles discovering that
15009 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15011 is a multiple of 8 so we don't have to worry about dealing with a
15012 possible remainder.
15014 Note that we *look* inside a SAVE_EXPR only to determine how it was
15015 calculated; it is not safe for fold to do much of anything else with the
15016 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15017 at run time. For example, the latter example above *cannot* be implemented
15018 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15019 evaluation time of the original SAVE_EXPR is not necessarily the same at
15020 the time the new expression is evaluated. The only optimization of this
15021 sort that would be valid is changing
15023 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15027 SAVE_EXPR (I) * SAVE_EXPR (J)
15029 (where the same SAVE_EXPR (J) is used in the original and the
15030 transformed version). */
15033 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
15035 if (operand_equal_p (top
, bottom
, 0))
15038 if (TREE_CODE (type
) != INTEGER_TYPE
)
15041 switch (TREE_CODE (top
))
15044 /* Bitwise and provides a power of two multiple. If the mask is
15045 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15046 if (!integer_pow2p (bottom
))
15051 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
15052 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
15056 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
15057 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
15060 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
15064 op1
= TREE_OPERAND (top
, 1);
15065 /* const_binop may not detect overflow correctly,
15066 so check for it explicitly here. */
15067 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
15068 > TREE_INT_CST_LOW (op1
)
15069 && TREE_INT_CST_HIGH (op1
) == 0
15070 && 0 != (t1
= fold_convert (type
,
15071 const_binop (LSHIFT_EXPR
,
15074 && !TREE_OVERFLOW (t1
))
15075 return multiple_of_p (type
, t1
, bottom
);
15080 /* Can't handle conversions from non-integral or wider integral type. */
15081 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
15082 || (TYPE_PRECISION (type
)
15083 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
15086 /* .. fall through ... */
15089 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
15092 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
15093 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
15096 if (TREE_CODE (bottom
) != INTEGER_CST
15097 || integer_zerop (bottom
)
15098 || (TYPE_UNSIGNED (type
)
15099 && (tree_int_cst_sgn (top
) < 0
15100 || tree_int_cst_sgn (bottom
) < 0)))
15102 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR
,
15110 /* Return true if CODE or TYPE is known to be non-negative. */
15113 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
15115 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
15116 && truth_value_p (code
))
15117 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15118 have a signed:1 type (where the value is -1 and 0). */
15123 /* Return true if (CODE OP0) is known to be non-negative. If the return
15124 value is based on the assumption that signed overflow is undefined,
15125 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15126 *STRICT_OVERFLOW_P. */
15129 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
15130 bool *strict_overflow_p
)
15132 if (TYPE_UNSIGNED (type
))
15138 /* We can't return 1 if flag_wrapv is set because
15139 ABS_EXPR<INT_MIN> = INT_MIN. */
15140 if (!INTEGRAL_TYPE_P (type
))
15142 if (TYPE_OVERFLOW_UNDEFINED (type
))
15144 *strict_overflow_p
= true;
15149 case NON_LVALUE_EXPR
:
15151 case FIX_TRUNC_EXPR
:
15152 return tree_expr_nonnegative_warnv_p (op0
,
15153 strict_overflow_p
);
15157 tree inner_type
= TREE_TYPE (op0
);
15158 tree outer_type
= type
;
15160 if (TREE_CODE (outer_type
) == REAL_TYPE
)
15162 if (TREE_CODE (inner_type
) == REAL_TYPE
)
15163 return tree_expr_nonnegative_warnv_p (op0
,
15164 strict_overflow_p
);
15165 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
15167 if (TYPE_UNSIGNED (inner_type
))
15169 return tree_expr_nonnegative_warnv_p (op0
,
15170 strict_overflow_p
);
15173 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
15175 if (TREE_CODE (inner_type
) == REAL_TYPE
)
15176 return tree_expr_nonnegative_warnv_p (op0
,
15177 strict_overflow_p
);
15178 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
15179 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
15180 && TYPE_UNSIGNED (inner_type
);
15186 return tree_simple_nonnegative_warnv_p (code
, type
);
15189 /* We don't know sign of `t', so be conservative and return false. */
15193 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15194 value is based on the assumption that signed overflow is undefined,
15195 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15196 *STRICT_OVERFLOW_P. */
15199 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
15200 tree op1
, bool *strict_overflow_p
)
15202 if (TYPE_UNSIGNED (type
))
15207 case POINTER_PLUS_EXPR
:
15209 if (FLOAT_TYPE_P (type
))
15210 return (tree_expr_nonnegative_warnv_p (op0
,
15212 && tree_expr_nonnegative_warnv_p (op1
,
15213 strict_overflow_p
));
15215 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15216 both unsigned and at least 2 bits shorter than the result. */
15217 if (TREE_CODE (type
) == INTEGER_TYPE
15218 && TREE_CODE (op0
) == NOP_EXPR
15219 && TREE_CODE (op1
) == NOP_EXPR
)
15221 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
15222 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
15223 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
15224 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
15226 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
15227 TYPE_PRECISION (inner2
)) + 1;
15228 return prec
< TYPE_PRECISION (type
);
15234 if (FLOAT_TYPE_P (type
))
15236 /* x * x for floating point x is always non-negative. */
15237 if (operand_equal_p (op0
, op1
, 0))
15239 return (tree_expr_nonnegative_warnv_p (op0
,
15241 && tree_expr_nonnegative_warnv_p (op1
,
15242 strict_overflow_p
));
15245 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15246 both unsigned and their total bits is shorter than the result. */
15247 if (TREE_CODE (type
) == INTEGER_TYPE
15248 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
15249 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
15251 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
15252 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
15254 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
15255 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
15258 bool unsigned0
= TYPE_UNSIGNED (inner0
);
15259 bool unsigned1
= TYPE_UNSIGNED (inner1
);
15261 if (TREE_CODE (op0
) == INTEGER_CST
)
15262 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
15264 if (TREE_CODE (op1
) == INTEGER_CST
)
15265 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
15267 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
15268 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
15270 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
15271 ? tree_int_cst_min_precision (op0
, /*unsignedp=*/true)
15272 : TYPE_PRECISION (inner0
);
15274 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
15275 ? tree_int_cst_min_precision (op1
, /*unsignedp=*/true)
15276 : TYPE_PRECISION (inner1
);
15278 return precision0
+ precision1
< TYPE_PRECISION (type
);
15285 return (tree_expr_nonnegative_warnv_p (op0
,
15287 || tree_expr_nonnegative_warnv_p (op1
,
15288 strict_overflow_p
));
15294 case TRUNC_DIV_EXPR
:
15295 case CEIL_DIV_EXPR
:
15296 case FLOOR_DIV_EXPR
:
15297 case ROUND_DIV_EXPR
:
15298 return (tree_expr_nonnegative_warnv_p (op0
,
15300 && tree_expr_nonnegative_warnv_p (op1
,
15301 strict_overflow_p
));
15303 case TRUNC_MOD_EXPR
:
15304 case CEIL_MOD_EXPR
:
15305 case FLOOR_MOD_EXPR
:
15306 case ROUND_MOD_EXPR
:
15307 return tree_expr_nonnegative_warnv_p (op0
,
15308 strict_overflow_p
);
15310 return tree_simple_nonnegative_warnv_p (code
, type
);
15313 /* We don't know sign of `t', so be conservative and return false. */
15317 /* Return true if T is known to be non-negative. If the return
15318 value is based on the assumption that signed overflow is undefined,
15319 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15320 *STRICT_OVERFLOW_P. */
15323 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15325 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15328 switch (TREE_CODE (t
))
15331 return tree_int_cst_sgn (t
) >= 0;
15334 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
15337 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
15340 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15342 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
15343 strict_overflow_p
));
15345 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15348 /* We don't know sign of `t', so be conservative and return false. */
15352 /* Return true if T is known to be non-negative. If the return
15353 value is based on the assumption that signed overflow is undefined,
15354 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15355 *STRICT_OVERFLOW_P. */
15358 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
15359 tree arg0
, tree arg1
, bool *strict_overflow_p
)
15361 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
15362 switch (DECL_FUNCTION_CODE (fndecl
))
15364 CASE_FLT_FN (BUILT_IN_ACOS
):
15365 CASE_FLT_FN (BUILT_IN_ACOSH
):
15366 CASE_FLT_FN (BUILT_IN_CABS
):
15367 CASE_FLT_FN (BUILT_IN_COSH
):
15368 CASE_FLT_FN (BUILT_IN_ERFC
):
15369 CASE_FLT_FN (BUILT_IN_EXP
):
15370 CASE_FLT_FN (BUILT_IN_EXP10
):
15371 CASE_FLT_FN (BUILT_IN_EXP2
):
15372 CASE_FLT_FN (BUILT_IN_FABS
):
15373 CASE_FLT_FN (BUILT_IN_FDIM
):
15374 CASE_FLT_FN (BUILT_IN_HYPOT
):
15375 CASE_FLT_FN (BUILT_IN_POW10
):
15376 CASE_INT_FN (BUILT_IN_FFS
):
15377 CASE_INT_FN (BUILT_IN_PARITY
):
15378 CASE_INT_FN (BUILT_IN_POPCOUNT
):
15379 case BUILT_IN_BSWAP32
:
15380 case BUILT_IN_BSWAP64
:
15384 CASE_FLT_FN (BUILT_IN_SQRT
):
15385 /* sqrt(-0.0) is -0.0. */
15386 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
15388 return tree_expr_nonnegative_warnv_p (arg0
,
15389 strict_overflow_p
);
15391 CASE_FLT_FN (BUILT_IN_ASINH
):
15392 CASE_FLT_FN (BUILT_IN_ATAN
):
15393 CASE_FLT_FN (BUILT_IN_ATANH
):
15394 CASE_FLT_FN (BUILT_IN_CBRT
):
15395 CASE_FLT_FN (BUILT_IN_CEIL
):
15396 CASE_FLT_FN (BUILT_IN_ERF
):
15397 CASE_FLT_FN (BUILT_IN_EXPM1
):
15398 CASE_FLT_FN (BUILT_IN_FLOOR
):
15399 CASE_FLT_FN (BUILT_IN_FMOD
):
15400 CASE_FLT_FN (BUILT_IN_FREXP
):
15401 CASE_FLT_FN (BUILT_IN_ICEIL
):
15402 CASE_FLT_FN (BUILT_IN_IFLOOR
):
15403 CASE_FLT_FN (BUILT_IN_IRINT
):
15404 CASE_FLT_FN (BUILT_IN_IROUND
):
15405 CASE_FLT_FN (BUILT_IN_LCEIL
):
15406 CASE_FLT_FN (BUILT_IN_LDEXP
):
15407 CASE_FLT_FN (BUILT_IN_LFLOOR
):
15408 CASE_FLT_FN (BUILT_IN_LLCEIL
):
15409 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
15410 CASE_FLT_FN (BUILT_IN_LLRINT
):
15411 CASE_FLT_FN (BUILT_IN_LLROUND
):
15412 CASE_FLT_FN (BUILT_IN_LRINT
):
15413 CASE_FLT_FN (BUILT_IN_LROUND
):
15414 CASE_FLT_FN (BUILT_IN_MODF
):
15415 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
15416 CASE_FLT_FN (BUILT_IN_RINT
):
15417 CASE_FLT_FN (BUILT_IN_ROUND
):
15418 CASE_FLT_FN (BUILT_IN_SCALB
):
15419 CASE_FLT_FN (BUILT_IN_SCALBLN
):
15420 CASE_FLT_FN (BUILT_IN_SCALBN
):
15421 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
15422 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
15423 CASE_FLT_FN (BUILT_IN_SINH
):
15424 CASE_FLT_FN (BUILT_IN_TANH
):
15425 CASE_FLT_FN (BUILT_IN_TRUNC
):
15426 /* True if the 1st argument is nonnegative. */
15427 return tree_expr_nonnegative_warnv_p (arg0
,
15428 strict_overflow_p
);
15430 CASE_FLT_FN (BUILT_IN_FMAX
):
15431 /* True if the 1st OR 2nd arguments are nonnegative. */
15432 return (tree_expr_nonnegative_warnv_p (arg0
,
15434 || (tree_expr_nonnegative_warnv_p (arg1
,
15435 strict_overflow_p
)));
15437 CASE_FLT_FN (BUILT_IN_FMIN
):
15438 /* True if the 1st AND 2nd arguments are nonnegative. */
15439 return (tree_expr_nonnegative_warnv_p (arg0
,
15441 && (tree_expr_nonnegative_warnv_p (arg1
,
15442 strict_overflow_p
)));
15444 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
15445 /* True if the 2nd argument is nonnegative. */
15446 return tree_expr_nonnegative_warnv_p (arg1
,
15447 strict_overflow_p
);
15449 CASE_FLT_FN (BUILT_IN_POWI
):
15450 /* True if the 1st argument is nonnegative or the second
15451 argument is an even integer. */
15452 if (TREE_CODE (arg1
) == INTEGER_CST
15453 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
15455 return tree_expr_nonnegative_warnv_p (arg0
,
15456 strict_overflow_p
);
15458 CASE_FLT_FN (BUILT_IN_POW
):
15459 /* True if the 1st argument is nonnegative or the second
15460 argument is an even integer valued real. */
15461 if (TREE_CODE (arg1
) == REAL_CST
)
15466 c
= TREE_REAL_CST (arg1
);
15467 n
= real_to_integer (&c
);
15470 REAL_VALUE_TYPE cint
;
15471 real_from_integer (&cint
, VOIDmode
, n
,
15472 n
< 0 ? -1 : 0, 0);
15473 if (real_identical (&c
, &cint
))
15477 return tree_expr_nonnegative_warnv_p (arg0
,
15478 strict_overflow_p
);
15483 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
15487 /* Return true if T is known to be non-negative. If the return
15488 value is based on the assumption that signed overflow is undefined,
15489 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15490 *STRICT_OVERFLOW_P. */
15493 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15495 enum tree_code code
= TREE_CODE (t
);
15496 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15503 tree temp
= TARGET_EXPR_SLOT (t
);
15504 t
= TARGET_EXPR_INITIAL (t
);
15506 /* If the initializer is non-void, then it's a normal expression
15507 that will be assigned to the slot. */
15508 if (!VOID_TYPE_P (t
))
15509 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
15511 /* Otherwise, the initializer sets the slot in some way. One common
15512 way is an assignment statement at the end of the initializer. */
15515 if (TREE_CODE (t
) == BIND_EXPR
)
15516 t
= expr_last (BIND_EXPR_BODY (t
));
15517 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15518 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15519 t
= expr_last (TREE_OPERAND (t
, 0));
15520 else if (TREE_CODE (t
) == STATEMENT_LIST
)
15525 if (TREE_CODE (t
) == MODIFY_EXPR
15526 && TREE_OPERAND (t
, 0) == temp
)
15527 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15528 strict_overflow_p
);
15535 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15536 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15538 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15539 get_callee_fndecl (t
),
15542 strict_overflow_p
);
15544 case COMPOUND_EXPR
:
15546 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15547 strict_overflow_p
);
15549 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
15550 strict_overflow_p
);
15552 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
15553 strict_overflow_p
);
15556 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15560 /* We don't know sign of `t', so be conservative and return false. */
15564 /* Return true if T is known to be non-negative. If the return
15565 value is based on the assumption that signed overflow is undefined,
15566 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15567 *STRICT_OVERFLOW_P. */
15570 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15572 enum tree_code code
;
15573 if (t
== error_mark_node
)
15576 code
= TREE_CODE (t
);
15577 switch (TREE_CODE_CLASS (code
))
15580 case tcc_comparison
:
15581 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15583 TREE_OPERAND (t
, 0),
15584 TREE_OPERAND (t
, 1),
15585 strict_overflow_p
);
15588 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15590 TREE_OPERAND (t
, 0),
15591 strict_overflow_p
);
15594 case tcc_declaration
:
15595 case tcc_reference
:
15596 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15604 case TRUTH_AND_EXPR
:
15605 case TRUTH_OR_EXPR
:
15606 case TRUTH_XOR_EXPR
:
15607 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15609 TREE_OPERAND (t
, 0),
15610 TREE_OPERAND (t
, 1),
15611 strict_overflow_p
);
15612 case TRUTH_NOT_EXPR
:
15613 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15615 TREE_OPERAND (t
, 0),
15616 strict_overflow_p
);
15623 case WITH_SIZE_EXPR
:
15625 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15628 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
15632 /* Return true if `t' is known to be non-negative. Handle warnings
15633 about undefined signed overflow. */
15636 tree_expr_nonnegative_p (tree t
)
15638 bool ret
, strict_overflow_p
;
15640 strict_overflow_p
= false;
15641 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15642 if (strict_overflow_p
)
15643 fold_overflow_warning (("assuming signed overflow does not occur when "
15644 "determining that expression is always "
15646 WARN_STRICT_OVERFLOW_MISC
);
15651 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15652 For floating point we further ensure that T is not denormal.
15653 Similar logic is present in nonzero_address in rtlanal.h.
15655 If the return value is based on the assumption that signed overflow
15656 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15657 change *STRICT_OVERFLOW_P. */
15660 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15661 bool *strict_overflow_p
)
15666 return tree_expr_nonzero_warnv_p (op0
,
15667 strict_overflow_p
);
15671 tree inner_type
= TREE_TYPE (op0
);
15672 tree outer_type
= type
;
15674 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15675 && tree_expr_nonzero_warnv_p (op0
,
15676 strict_overflow_p
));
15680 case NON_LVALUE_EXPR
:
15681 return tree_expr_nonzero_warnv_p (op0
,
15682 strict_overflow_p
);
15691 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15692 For floating point we further ensure that T is not denormal.
15693 Similar logic is present in nonzero_address in rtlanal.h.
15695 If the return value is based on the assumption that signed overflow
15696 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15697 change *STRICT_OVERFLOW_P. */
15700 tree_binary_nonzero_warnv_p (enum tree_code code
,
15703 tree op1
, bool *strict_overflow_p
)
15705 bool sub_strict_overflow_p
;
15708 case POINTER_PLUS_EXPR
:
15710 if (TYPE_OVERFLOW_UNDEFINED (type
))
15712 /* With the presence of negative values it is hard
15713 to say something. */
15714 sub_strict_overflow_p
= false;
15715 if (!tree_expr_nonnegative_warnv_p (op0
,
15716 &sub_strict_overflow_p
)
15717 || !tree_expr_nonnegative_warnv_p (op1
,
15718 &sub_strict_overflow_p
))
15720 /* One of operands must be positive and the other non-negative. */
15721 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15722 overflows, on a twos-complement machine the sum of two
15723 nonnegative numbers can never be zero. */
15724 return (tree_expr_nonzero_warnv_p (op0
,
15726 || tree_expr_nonzero_warnv_p (op1
,
15727 strict_overflow_p
));
15732 if (TYPE_OVERFLOW_UNDEFINED (type
))
15734 if (tree_expr_nonzero_warnv_p (op0
,
15736 && tree_expr_nonzero_warnv_p (op1
,
15737 strict_overflow_p
))
15739 *strict_overflow_p
= true;
15746 sub_strict_overflow_p
= false;
15747 if (tree_expr_nonzero_warnv_p (op0
,
15748 &sub_strict_overflow_p
)
15749 && tree_expr_nonzero_warnv_p (op1
,
15750 &sub_strict_overflow_p
))
15752 if (sub_strict_overflow_p
)
15753 *strict_overflow_p
= true;
15758 sub_strict_overflow_p
= false;
15759 if (tree_expr_nonzero_warnv_p (op0
,
15760 &sub_strict_overflow_p
))
15762 if (sub_strict_overflow_p
)
15763 *strict_overflow_p
= true;
15765 /* When both operands are nonzero, then MAX must be too. */
15766 if (tree_expr_nonzero_warnv_p (op1
,
15767 strict_overflow_p
))
15770 /* MAX where operand 0 is positive is positive. */
15771 return tree_expr_nonnegative_warnv_p (op0
,
15772 strict_overflow_p
);
15774 /* MAX where operand 1 is positive is positive. */
15775 else if (tree_expr_nonzero_warnv_p (op1
,
15776 &sub_strict_overflow_p
)
15777 && tree_expr_nonnegative_warnv_p (op1
,
15778 &sub_strict_overflow_p
))
15780 if (sub_strict_overflow_p
)
15781 *strict_overflow_p
= true;
15787 return (tree_expr_nonzero_warnv_p (op1
,
15789 || tree_expr_nonzero_warnv_p (op0
,
15790 strict_overflow_p
));
15799 /* Return true when T is an address and is known to be nonzero.
15800 For floating point we further ensure that T is not denormal.
15801 Similar logic is present in nonzero_address in rtlanal.h.
15803 If the return value is based on the assumption that signed overflow
15804 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15805 change *STRICT_OVERFLOW_P. */
15808 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15810 bool sub_strict_overflow_p
;
15811 switch (TREE_CODE (t
))
15814 return !integer_zerop (t
);
15818 tree base
= TREE_OPERAND (t
, 0);
15819 if (!DECL_P (base
))
15820 base
= get_base_address (base
);
15825 /* Weak declarations may link to NULL. Other things may also be NULL
15826 so protect with -fdelete-null-pointer-checks; but not variables
15827 allocated on the stack. */
15829 && (flag_delete_null_pointer_checks
15830 || (DECL_CONTEXT (base
)
15831 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15832 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
)))))
15833 return !VAR_OR_FUNCTION_DECL_P (base
) || !DECL_WEAK (base
);
15835 /* Constants are never weak. */
15836 if (CONSTANT_CLASS_P (base
))
15843 sub_strict_overflow_p
= false;
15844 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15845 &sub_strict_overflow_p
)
15846 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15847 &sub_strict_overflow_p
))
15849 if (sub_strict_overflow_p
)
15850 *strict_overflow_p
= true;
15861 /* Return true when T is an address and is known to be nonzero.
15862 For floating point we further ensure that T is not denormal.
15863 Similar logic is present in nonzero_address in rtlanal.h.
15865 If the return value is based on the assumption that signed overflow
15866 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15867 change *STRICT_OVERFLOW_P. */
15870 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15872 tree type
= TREE_TYPE (t
);
15873 enum tree_code code
;
15875 /* Doing something useful for floating point would need more work. */
15876 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
15879 code
= TREE_CODE (t
);
15880 switch (TREE_CODE_CLASS (code
))
15883 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
15884 strict_overflow_p
);
15886 case tcc_comparison
:
15887 return tree_binary_nonzero_warnv_p (code
, type
,
15888 TREE_OPERAND (t
, 0),
15889 TREE_OPERAND (t
, 1),
15890 strict_overflow_p
);
15892 case tcc_declaration
:
15893 case tcc_reference
:
15894 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
15902 case TRUTH_NOT_EXPR
:
15903 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
15904 strict_overflow_p
);
15906 case TRUTH_AND_EXPR
:
15907 case TRUTH_OR_EXPR
:
15908 case TRUTH_XOR_EXPR
:
15909 return tree_binary_nonzero_warnv_p (code
, type
,
15910 TREE_OPERAND (t
, 0),
15911 TREE_OPERAND (t
, 1),
15912 strict_overflow_p
);
15919 case WITH_SIZE_EXPR
:
15921 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
15923 case COMPOUND_EXPR
:
15926 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15927 strict_overflow_p
);
15930 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
15931 strict_overflow_p
);
15934 return alloca_call_p (t
);
15942 /* Return true when T is an address and is known to be nonzero.
15943 Handle warnings about undefined signed overflow. */
15946 tree_expr_nonzero_p (tree t
)
15948 bool ret
, strict_overflow_p
;
15950 strict_overflow_p
= false;
15951 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
15952 if (strict_overflow_p
)
15953 fold_overflow_warning (("assuming signed overflow does not occur when "
15954 "determining that expression is always "
15956 WARN_STRICT_OVERFLOW_MISC
);
15960 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15961 attempt to fold the expression to a constant without modifying TYPE,
15964 If the expression could be simplified to a constant, then return
15965 the constant. If the expression would not be simplified to a
15966 constant, then return NULL_TREE. */
15969 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15971 tree tem
= fold_binary (code
, type
, op0
, op1
);
15972 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15975 /* Given the components of a unary expression CODE, TYPE and OP0,
15976 attempt to fold the expression to a constant without modifying
15979 If the expression could be simplified to a constant, then return
15980 the constant. If the expression would not be simplified to a
15981 constant, then return NULL_TREE. */
15984 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15986 tree tem
= fold_unary (code
, type
, op0
);
15987 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15990 /* If EXP represents referencing an element in a constant string
15991 (either via pointer arithmetic or array indexing), return the
15992 tree representing the value accessed, otherwise return NULL. */
15995 fold_read_from_constant_string (tree exp
)
15997 if ((TREE_CODE (exp
) == INDIRECT_REF
15998 || TREE_CODE (exp
) == ARRAY_REF
)
15999 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
16001 tree exp1
= TREE_OPERAND (exp
, 0);
16004 location_t loc
= EXPR_LOCATION (exp
);
16006 if (TREE_CODE (exp
) == INDIRECT_REF
)
16007 string
= string_constant (exp1
, &index
);
16010 tree low_bound
= array_ref_low_bound (exp
);
16011 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
16013 /* Optimize the special-case of a zero lower bound.
16015 We convert the low_bound to sizetype to avoid some problems
16016 with constant folding. (E.g. suppose the lower bound is 1,
16017 and its mode is QI. Without the conversion,l (ARRAY
16018 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16019 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16020 if (! integer_zerop (low_bound
))
16021 index
= size_diffop_loc (loc
, index
,
16022 fold_convert_loc (loc
, sizetype
, low_bound
));
16028 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
16029 && TREE_CODE (string
) == STRING_CST
16030 && TREE_CODE (index
) == INTEGER_CST
16031 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
16032 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
16034 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
16035 return build_int_cst_type (TREE_TYPE (exp
),
16036 (TREE_STRING_POINTER (string
)
16037 [TREE_INT_CST_LOW (index
)]));
16042 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16043 an integer constant, real, or fixed-point constant.
16045 TYPE is the type of the result. */
16048 fold_negate_const (tree arg0
, tree type
)
16050 tree t
= NULL_TREE
;
16052 switch (TREE_CODE (arg0
))
16056 double_int val
= tree_to_double_int (arg0
);
16058 val
= val
.neg_with_overflow (&overflow
);
16059 t
= force_fit_type_double (type
, val
, 1,
16060 (overflow
| TREE_OVERFLOW (arg0
))
16061 && !TYPE_UNSIGNED (type
));
16066 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
16071 FIXED_VALUE_TYPE f
;
16072 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
16073 &(TREE_FIXED_CST (arg0
)), NULL
,
16074 TYPE_SATURATING (type
));
16075 t
= build_fixed (type
, f
);
16076 /* Propagate overflow flags. */
16077 if (overflow_p
| TREE_OVERFLOW (arg0
))
16078 TREE_OVERFLOW (t
) = 1;
16083 gcc_unreachable ();
16089 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16090 an integer constant or real constant.
16092 TYPE is the type of the result. */
16095 fold_abs_const (tree arg0
, tree type
)
16097 tree t
= NULL_TREE
;
16099 switch (TREE_CODE (arg0
))
16103 double_int val
= tree_to_double_int (arg0
);
16105 /* If the value is unsigned or non-negative, then the absolute value
16106 is the same as the ordinary value. */
16107 if (TYPE_UNSIGNED (type
)
16108 || !val
.is_negative ())
16111 /* If the value is negative, then the absolute value is
16116 val
= val
.neg_with_overflow (&overflow
);
16117 t
= force_fit_type_double (type
, val
, -1,
16118 overflow
| TREE_OVERFLOW (arg0
));
16124 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
16125 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
16131 gcc_unreachable ();
16137 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16138 constant. TYPE is the type of the result. */
16141 fold_not_const (const_tree arg0
, tree type
)
16145 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
16147 val
= ~tree_to_double_int (arg0
);
16148 return force_fit_type_double (type
, val
, 0, TREE_OVERFLOW (arg0
));
16151 /* Given CODE, a relational operator, the target type, TYPE and two
16152 constant operands OP0 and OP1, return the result of the
16153 relational operation. If the result is not a compile time
16154 constant, then return NULL_TREE. */
16157 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
16159 int result
, invert
;
16161 /* From here on, the only cases we handle are when the result is
16162 known to be a constant. */
16164 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
16166 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
16167 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
16169 /* Handle the cases where either operand is a NaN. */
16170 if (real_isnan (c0
) || real_isnan (c1
))
16180 case UNORDERED_EXPR
:
16194 if (flag_trapping_math
)
16200 gcc_unreachable ();
16203 return constant_boolean_node (result
, type
);
16206 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
16209 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
16211 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
16212 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
16213 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
16216 /* Handle equality/inequality of complex constants. */
16217 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
16219 tree rcond
= fold_relational_const (code
, type
,
16220 TREE_REALPART (op0
),
16221 TREE_REALPART (op1
));
16222 tree icond
= fold_relational_const (code
, type
,
16223 TREE_IMAGPART (op0
),
16224 TREE_IMAGPART (op1
));
16225 if (code
== EQ_EXPR
)
16226 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
16227 else if (code
== NE_EXPR
)
16228 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
16233 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
16235 unsigned count
= VECTOR_CST_NELTS (op0
);
16236 tree
*elts
= XALLOCAVEC (tree
, count
);
16237 gcc_assert (VECTOR_CST_NELTS (op1
) == count
16238 && TYPE_VECTOR_SUBPARTS (type
) == count
);
16240 for (unsigned i
= 0; i
< count
; i
++)
16242 tree elem_type
= TREE_TYPE (type
);
16243 tree elem0
= VECTOR_CST_ELT (op0
, i
);
16244 tree elem1
= VECTOR_CST_ELT (op1
, i
);
16246 tree tem
= fold_relational_const (code
, elem_type
,
16249 if (tem
== NULL_TREE
)
16252 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
16255 return build_vector (type
, elts
);
16258 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16260 To compute GT, swap the arguments and do LT.
16261 To compute GE, do LT and invert the result.
16262 To compute LE, swap the arguments, do LT and invert the result.
16263 To compute NE, do EQ and invert the result.
16265 Therefore, the code below must handle only EQ and LT. */
16267 if (code
== LE_EXPR
|| code
== GT_EXPR
)
16272 code
= swap_tree_comparison (code
);
16275 /* Note that it is safe to invert for real values here because we
16276 have already handled the one case that it matters. */
16279 if (code
== NE_EXPR
|| code
== GE_EXPR
)
16282 code
= invert_tree_comparison (code
, false);
16285 /* Compute a result for LT or EQ if args permit;
16286 Otherwise return T. */
16287 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
16289 if (code
== EQ_EXPR
)
16290 result
= tree_int_cst_equal (op0
, op1
);
16291 else if (TYPE_UNSIGNED (TREE_TYPE (op0
)))
16292 result
= INT_CST_LT_UNSIGNED (op0
, op1
);
16294 result
= INT_CST_LT (op0
, op1
);
16301 return constant_boolean_node (result
, type
);
16304 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16305 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16309 fold_build_cleanup_point_expr (tree type
, tree expr
)
16311 /* If the expression does not have side effects then we don't have to wrap
16312 it with a cleanup point expression. */
16313 if (!TREE_SIDE_EFFECTS (expr
))
16316 /* If the expression is a return, check to see if the expression inside the
16317 return has no side effects or the right hand side of the modify expression
16318 inside the return. If either don't have side effects set we don't need to
16319 wrap the expression in a cleanup point expression. Note we don't check the
16320 left hand side of the modify because it should always be a return decl. */
16321 if (TREE_CODE (expr
) == RETURN_EXPR
)
16323 tree op
= TREE_OPERAND (expr
, 0);
16324 if (!op
|| !TREE_SIDE_EFFECTS (op
))
16326 op
= TREE_OPERAND (op
, 1);
16327 if (!TREE_SIDE_EFFECTS (op
))
16331 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
16334 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16335 of an indirection through OP0, or NULL_TREE if no simplification is
16339 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
16345 subtype
= TREE_TYPE (sub
);
16346 if (!POINTER_TYPE_P (subtype
))
16349 if (TREE_CODE (sub
) == ADDR_EXPR
)
16351 tree op
= TREE_OPERAND (sub
, 0);
16352 tree optype
= TREE_TYPE (op
);
16353 /* *&CONST_DECL -> to the value of the const decl. */
16354 if (TREE_CODE (op
) == CONST_DECL
)
16355 return DECL_INITIAL (op
);
16356 /* *&p => p; make sure to handle *&"str"[cst] here. */
16357 if (type
== optype
)
16359 tree fop
= fold_read_from_constant_string (op
);
16365 /* *(foo *)&fooarray => fooarray[0] */
16366 else if (TREE_CODE (optype
) == ARRAY_TYPE
16367 && type
== TREE_TYPE (optype
)
16368 && (!in_gimple_form
16369 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16371 tree type_domain
= TYPE_DOMAIN (optype
);
16372 tree min_val
= size_zero_node
;
16373 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16374 min_val
= TYPE_MIN_VALUE (type_domain
);
16376 && TREE_CODE (min_val
) != INTEGER_CST
)
16378 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
16379 NULL_TREE
, NULL_TREE
);
16381 /* *(foo *)&complexfoo => __real__ complexfoo */
16382 else if (TREE_CODE (optype
) == COMPLEX_TYPE
16383 && type
== TREE_TYPE (optype
))
16384 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
16385 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16386 else if (TREE_CODE (optype
) == VECTOR_TYPE
16387 && type
== TREE_TYPE (optype
))
16389 tree part_width
= TYPE_SIZE (type
);
16390 tree index
= bitsize_int (0);
16391 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
16395 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
16396 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
16398 tree op00
= TREE_OPERAND (sub
, 0);
16399 tree op01
= TREE_OPERAND (sub
, 1);
16402 if (TREE_CODE (op00
) == ADDR_EXPR
)
16405 op00
= TREE_OPERAND (op00
, 0);
16406 op00type
= TREE_TYPE (op00
);
16408 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16409 if (TREE_CODE (op00type
) == VECTOR_TYPE
16410 && type
== TREE_TYPE (op00type
))
16412 HOST_WIDE_INT offset
= tree_low_cst (op01
, 0);
16413 tree part_width
= TYPE_SIZE (type
);
16414 unsigned HOST_WIDE_INT part_widthi
= tree_low_cst (part_width
, 0)/BITS_PER_UNIT
;
16415 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
16416 tree index
= bitsize_int (indexi
);
16418 if (offset
/part_widthi
<= TYPE_VECTOR_SUBPARTS (op00type
))
16419 return fold_build3_loc (loc
,
16420 BIT_FIELD_REF
, type
, op00
,
16421 part_width
, index
);
16424 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16425 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
16426 && type
== TREE_TYPE (op00type
))
16428 tree size
= TYPE_SIZE_UNIT (type
);
16429 if (tree_int_cst_equal (size
, op01
))
16430 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
16432 /* ((foo *)&fooarray)[1] => fooarray[1] */
16433 else if (TREE_CODE (op00type
) == ARRAY_TYPE
16434 && type
== TREE_TYPE (op00type
))
16436 tree type_domain
= TYPE_DOMAIN (op00type
);
16437 tree min_val
= size_zero_node
;
16438 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16439 min_val
= TYPE_MIN_VALUE (type_domain
);
16440 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
16441 TYPE_SIZE_UNIT (type
));
16442 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
16443 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
16444 NULL_TREE
, NULL_TREE
);
16449 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16450 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
16451 && type
== TREE_TYPE (TREE_TYPE (subtype
))
16452 && (!in_gimple_form
16453 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16456 tree min_val
= size_zero_node
;
16457 sub
= build_fold_indirect_ref_loc (loc
, sub
);
16458 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
16459 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16460 min_val
= TYPE_MIN_VALUE (type_domain
);
16462 && TREE_CODE (min_val
) != INTEGER_CST
)
16464 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
16471 /* Builds an expression for an indirection through T, simplifying some
16475 build_fold_indirect_ref_loc (location_t loc
, tree t
)
16477 tree type
= TREE_TYPE (TREE_TYPE (t
));
16478 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
16483 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
16486 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16489 fold_indirect_ref_loc (location_t loc
, tree t
)
16491 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
16499 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16500 whose result is ignored. The type of the returned tree need not be
16501 the same as the original expression. */
16504 fold_ignored_result (tree t
)
16506 if (!TREE_SIDE_EFFECTS (t
))
16507 return integer_zero_node
;
16510 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
16513 t
= TREE_OPERAND (t
, 0);
16517 case tcc_comparison
:
16518 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16519 t
= TREE_OPERAND (t
, 0);
16520 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
16521 t
= TREE_OPERAND (t
, 1);
16526 case tcc_expression
:
16527 switch (TREE_CODE (t
))
16529 case COMPOUND_EXPR
:
16530 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16532 t
= TREE_OPERAND (t
, 0);
16536 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
16537 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
16539 t
= TREE_OPERAND (t
, 0);
16552 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16553 This can only be applied to objects of a sizetype. */
16556 round_up_loc (location_t loc
, tree value
, int divisor
)
16558 tree div
= NULL_TREE
;
16560 gcc_assert (divisor
> 0);
16564 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16565 have to do anything. Only do this when we are not given a const,
16566 because in that case, this check is more expensive than just
16568 if (TREE_CODE (value
) != INTEGER_CST
)
16570 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16572 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16576 /* If divisor is a power of two, simplify this to bit manipulation. */
16577 if (divisor
== (divisor
& -divisor
))
16579 if (TREE_CODE (value
) == INTEGER_CST
)
16581 double_int val
= tree_to_double_int (value
);
16584 if ((val
.low
& (divisor
- 1)) == 0)
16587 overflow_p
= TREE_OVERFLOW (value
);
16588 val
.low
&= ~(divisor
- 1);
16589 val
.low
+= divisor
;
16597 return force_fit_type_double (TREE_TYPE (value
), val
,
16604 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16605 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16606 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16607 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16613 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16614 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16615 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16621 /* Likewise, but round down. */
16624 round_down_loc (location_t loc
, tree value
, int divisor
)
16626 tree div
= NULL_TREE
;
16628 gcc_assert (divisor
> 0);
16632 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16633 have to do anything. Only do this when we are not given a const,
16634 because in that case, this check is more expensive than just
16636 if (TREE_CODE (value
) != INTEGER_CST
)
16638 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16640 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16644 /* If divisor is a power of two, simplify this to bit manipulation. */
16645 if (divisor
== (divisor
& -divisor
))
16649 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16650 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16655 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16656 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16657 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16663 /* Returns the pointer to the base of the object addressed by EXP and
16664 extracts the information about the offset of the access, storing it
16665 to PBITPOS and POFFSET. */
16668 split_address_to_core_and_offset (tree exp
,
16669 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16672 enum machine_mode mode
;
16673 int unsignedp
, volatilep
;
16674 HOST_WIDE_INT bitsize
;
16675 location_t loc
= EXPR_LOCATION (exp
);
16677 if (TREE_CODE (exp
) == ADDR_EXPR
)
16679 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16680 poffset
, &mode
, &unsignedp
, &volatilep
,
16682 core
= build_fold_addr_expr_loc (loc
, core
);
16688 *poffset
= NULL_TREE
;
16694 /* Returns true if addresses of E1 and E2 differ by a constant, false
16695 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16698 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16701 HOST_WIDE_INT bitpos1
, bitpos2
;
16702 tree toffset1
, toffset2
, tdiff
, type
;
16704 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16705 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16707 if (bitpos1
% BITS_PER_UNIT
!= 0
16708 || bitpos2
% BITS_PER_UNIT
!= 0
16709 || !operand_equal_p (core1
, core2
, 0))
16712 if (toffset1
&& toffset2
)
16714 type
= TREE_TYPE (toffset1
);
16715 if (type
!= TREE_TYPE (toffset2
))
16716 toffset2
= fold_convert (type
, toffset2
);
16718 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16719 if (!cst_and_fits_in_hwi (tdiff
))
16722 *diff
= int_cst_value (tdiff
);
16724 else if (toffset1
|| toffset2
)
16726 /* If only one of the offsets is non-constant, the difference cannot
16733 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16737 /* Simplify the floating point expression EXP when the sign of the
16738 result is not significant. Return NULL_TREE if no simplification
16742 fold_strip_sign_ops (tree exp
)
16745 location_t loc
= EXPR_LOCATION (exp
);
16747 switch (TREE_CODE (exp
))
16751 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16752 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16756 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
16758 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16759 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16760 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16761 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16762 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16763 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16766 case COMPOUND_EXPR
:
16767 arg0
= TREE_OPERAND (exp
, 0);
16768 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16770 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16774 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16775 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16777 return fold_build3_loc (loc
,
16778 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16779 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16780 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16785 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16788 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16789 /* Strip copysign function call, return the 1st argument. */
16790 arg0
= CALL_EXPR_ARG (exp
, 0);
16791 arg1
= CALL_EXPR_ARG (exp
, 1);
16792 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16795 /* Strip sign ops from the argument of "odd" math functions. */
16796 if (negate_mathfn_p (fcode
))
16798 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16800 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);