1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
49 #include "stor-layout.h"
51 #include "tree-iterator.h"
57 #include "diagnostic-core.h"
59 #include "langhooks.h"
66 #include "hard-reg-set.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
73 #include "gimple-expr.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
81 #include "plugin-api.h"
84 #include "generic-match.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code
{
113 static bool negate_mathfn_p (enum built_in_function
);
114 static bool negate_expr_p (tree
);
115 static tree
negate_expr (tree
);
116 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
117 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
118 static enum comparison_code
comparison_to_compcode (enum tree_code
);
119 static enum tree_code
compcode_to_comparison (enum comparison_code
);
120 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
121 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
122 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
123 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
124 static tree
make_bit_field_ref (location_t
, tree
, tree
,
125 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
126 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
128 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
130 machine_mode
*, int *, int *,
132 static int simple_operand_p (const_tree
);
133 static bool simple_operand_p_2 (tree
);
134 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
135 static tree
range_predecessor (tree
);
136 static tree
range_successor (tree
);
137 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
138 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
139 static tree
unextend (tree
, int, int, tree
);
140 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
142 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
143 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
144 static tree
fold_binary_op_with_conditional_arg (location_t
,
145 enum tree_code
, tree
,
148 static tree
fold_mathfn_compare (location_t
,
149 enum built_in_function
, enum tree_code
,
151 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
152 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
153 static bool reorder_operands_p (const_tree
, const_tree
);
154 static tree
fold_negate_const (tree
, tree
);
155 static tree
fold_not_const (const_tree
, tree
);
156 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
157 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
158 static tree
fold_view_convert_expr (tree
, tree
);
159 static bool vec_cst_ctor_to_array (tree
, tree
*);
162 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
163 Otherwise, return LOC. */
166 expr_location_or (tree t
, location_t loc
)
168 location_t tloc
= EXPR_LOCATION (t
);
169 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
172 /* Similar to protected_set_expr_location, but never modify x in place,
173 if location can and needs to be set, unshare it. */
176 protected_set_expr_location_unshare (tree x
, location_t loc
)
178 if (CAN_HAVE_LOCATION_P (x
)
179 && EXPR_LOCATION (x
) != loc
180 && !(TREE_CODE (x
) == SAVE_EXPR
181 || TREE_CODE (x
) == TARGET_EXPR
182 || TREE_CODE (x
) == BIND_EXPR
))
185 SET_EXPR_LOCATION (x
, loc
);
190 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
191 division and returns the quotient. Otherwise returns
195 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
199 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
201 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
206 /* This is nonzero if we should defer warnings about undefined
207 overflow. This facility exists because these warnings are a
208 special case. The code to estimate loop iterations does not want
209 to issue any warnings, since it works with expressions which do not
210 occur in user code. Various bits of cleanup code call fold(), but
211 only use the result if it has certain characteristics (e.g., is a
212 constant); that code only wants to issue a warning if the result is
215 static int fold_deferring_overflow_warnings
;
217 /* If a warning about undefined overflow is deferred, this is the
218 warning. Note that this may cause us to turn two warnings into
219 one, but that is fine since it is sufficient to only give one
220 warning per expression. */
222 static const char* fold_deferred_overflow_warning
;
224 /* If a warning about undefined overflow is deferred, this is the
225 level at which the warning should be emitted. */
227 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
229 /* Start deferring overflow warnings. We could use a stack here to
230 permit nested calls, but at present it is not necessary. */
233 fold_defer_overflow_warnings (void)
235 ++fold_deferring_overflow_warnings
;
238 /* Stop deferring overflow warnings. If there is a pending warning,
239 and ISSUE is true, then issue the warning if appropriate. STMT is
240 the statement with which the warning should be associated (used for
241 location information); STMT may be NULL. CODE is the level of the
242 warning--a warn_strict_overflow_code value. This function will use
243 the smaller of CODE and the deferred code when deciding whether to
244 issue the warning. CODE may be zero to mean to always use the
248 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
253 gcc_assert (fold_deferring_overflow_warnings
> 0);
254 --fold_deferring_overflow_warnings
;
255 if (fold_deferring_overflow_warnings
> 0)
257 if (fold_deferred_overflow_warning
!= NULL
259 && code
< (int) fold_deferred_overflow_code
)
260 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
264 warnmsg
= fold_deferred_overflow_warning
;
265 fold_deferred_overflow_warning
= NULL
;
267 if (!issue
|| warnmsg
== NULL
)
270 if (gimple_no_warning_p (stmt
))
273 /* Use the smallest code level when deciding to issue the
275 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
276 code
= fold_deferred_overflow_code
;
278 if (!issue_strict_overflow_warning (code
))
282 locus
= input_location
;
284 locus
= gimple_location (stmt
);
285 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
288 /* Stop deferring overflow warnings, ignoring any deferred
292 fold_undefer_and_ignore_overflow_warnings (void)
294 fold_undefer_overflow_warnings (false, NULL
, 0);
297 /* Whether we are deferring overflow warnings. */
300 fold_deferring_overflow_warnings_p (void)
302 return fold_deferring_overflow_warnings
> 0;
305 /* This is called when we fold something based on the fact that signed
306 overflow is undefined. */
309 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
311 if (fold_deferring_overflow_warnings
> 0)
313 if (fold_deferred_overflow_warning
== NULL
314 || wc
< fold_deferred_overflow_code
)
316 fold_deferred_overflow_warning
= gmsgid
;
317 fold_deferred_overflow_code
= wc
;
320 else if (issue_strict_overflow_warning (wc
))
321 warning (OPT_Wstrict_overflow
, gmsgid
);
324 /* Return true if the built-in mathematical function specified by CODE
325 is odd, i.e. -f(x) == f(-x). */
328 negate_mathfn_p (enum built_in_function code
)
332 CASE_FLT_FN (BUILT_IN_ASIN
):
333 CASE_FLT_FN (BUILT_IN_ASINH
):
334 CASE_FLT_FN (BUILT_IN_ATAN
):
335 CASE_FLT_FN (BUILT_IN_ATANH
):
336 CASE_FLT_FN (BUILT_IN_CASIN
):
337 CASE_FLT_FN (BUILT_IN_CASINH
):
338 CASE_FLT_FN (BUILT_IN_CATAN
):
339 CASE_FLT_FN (BUILT_IN_CATANH
):
340 CASE_FLT_FN (BUILT_IN_CBRT
):
341 CASE_FLT_FN (BUILT_IN_CPROJ
):
342 CASE_FLT_FN (BUILT_IN_CSIN
):
343 CASE_FLT_FN (BUILT_IN_CSINH
):
344 CASE_FLT_FN (BUILT_IN_CTAN
):
345 CASE_FLT_FN (BUILT_IN_CTANH
):
346 CASE_FLT_FN (BUILT_IN_ERF
):
347 CASE_FLT_FN (BUILT_IN_LLROUND
):
348 CASE_FLT_FN (BUILT_IN_LROUND
):
349 CASE_FLT_FN (BUILT_IN_ROUND
):
350 CASE_FLT_FN (BUILT_IN_SIN
):
351 CASE_FLT_FN (BUILT_IN_SINH
):
352 CASE_FLT_FN (BUILT_IN_TAN
):
353 CASE_FLT_FN (BUILT_IN_TANH
):
354 CASE_FLT_FN (BUILT_IN_TRUNC
):
357 CASE_FLT_FN (BUILT_IN_LLRINT
):
358 CASE_FLT_FN (BUILT_IN_LRINT
):
359 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
360 CASE_FLT_FN (BUILT_IN_RINT
):
361 return !flag_rounding_math
;
369 /* Check whether we may negate an integer constant T without causing
373 may_negate_without_overflow_p (const_tree t
)
377 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
379 type
= TREE_TYPE (t
);
380 if (TYPE_UNSIGNED (type
))
383 return !wi::only_sign_bit_p (t
);
386 /* Determine whether an expression T can be cheaply negated using
387 the function negate_expr without introducing undefined overflow. */
390 negate_expr_p (tree t
)
397 type
= TREE_TYPE (t
);
400 switch (TREE_CODE (t
))
403 if (TYPE_OVERFLOW_WRAPS (type
))
406 /* Check that -CST will not overflow type. */
407 return may_negate_without_overflow_p (t
);
409 return (INTEGRAL_TYPE_P (type
)
410 && TYPE_OVERFLOW_WRAPS (type
));
416 return !TYPE_OVERFLOW_SANITIZED (type
);
419 /* We want to canonicalize to positive real constants. Pretend
420 that only negative ones can be easily negated. */
421 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
424 return negate_expr_p (TREE_REALPART (t
))
425 && negate_expr_p (TREE_IMAGPART (t
));
429 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
432 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
434 for (i
= 0; i
< count
; i
++)
435 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
442 return negate_expr_p (TREE_OPERAND (t
, 0))
443 && negate_expr_p (TREE_OPERAND (t
, 1));
446 return negate_expr_p (TREE_OPERAND (t
, 0));
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
450 || HONOR_SIGNED_ZEROS (element_mode (type
)))
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t
, 1))
454 && reorder_operands_p (TREE_OPERAND (t
, 0),
455 TREE_OPERAND (t
, 1)))
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t
, 0));
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
463 && !HONOR_SIGNED_ZEROS (element_mode (type
))
464 && reorder_operands_p (TREE_OPERAND (t
, 0),
465 TREE_OPERAND (t
, 1));
468 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
475 return negate_expr_p (TREE_OPERAND (t
, 1))
476 || negate_expr_p (TREE_OPERAND (t
, 0));
482 /* In general we can't negate A / B, because if A is INT_MIN and
483 B is 1, we may turn this into INT_MIN / -1 which is undefined
484 and actually traps on some architectures. But if overflow is
485 undefined, we can negate, because - (INT_MIN / 1) is an
487 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
489 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
491 /* If overflow is undefined then we have to be careful because
492 we ask whether it's ok to associate the negate with the
493 division which is not ok for example for
494 -((a - b) / c) where (-(a - b)) / c may invoke undefined
495 overflow because of negating INT_MIN. So do not use
496 negate_expr_p here but open-code the two important cases. */
497 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
498 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
499 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
502 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
504 return negate_expr_p (TREE_OPERAND (t
, 1));
507 /* Negate -((double)float) as (double)(-float). */
508 if (TREE_CODE (type
) == REAL_TYPE
)
510 tree tem
= strip_float_extensions (t
);
512 return negate_expr_p (tem
);
517 /* Negate -f(x) as f(-x). */
518 if (negate_mathfn_p (builtin_mathfn_code (t
)))
519 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
523 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
524 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
526 tree op1
= TREE_OPERAND (t
, 1);
527 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
538 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
539 simplification is possible.
540 If negate_expr_p would return true for T, NULL_TREE will never be
544 fold_negate_expr (location_t loc
, tree t
)
546 tree type
= TREE_TYPE (t
);
549 switch (TREE_CODE (t
))
551 /* Convert - (~A) to A + 1. */
553 if (INTEGRAL_TYPE_P (type
))
554 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
555 build_one_cst (type
));
559 tem
= fold_negate_const (t
, type
);
560 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
561 || (!TYPE_OVERFLOW_TRAPS (type
)
562 && TYPE_OVERFLOW_WRAPS (type
))
563 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
568 tem
= fold_negate_const (t
, type
);
572 tem
= fold_negate_const (t
, type
);
577 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
578 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
580 return build_complex (type
, rpart
, ipart
);
586 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
587 tree
*elts
= XALLOCAVEC (tree
, count
);
589 for (i
= 0; i
< count
; i
++)
591 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
592 if (elts
[i
] == NULL_TREE
)
596 return build_vector (type
, elts
);
600 if (negate_expr_p (t
))
601 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
602 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
603 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
607 if (negate_expr_p (t
))
608 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
609 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
613 if (!TYPE_OVERFLOW_SANITIZED (type
))
614 return TREE_OPERAND (t
, 0);
618 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
619 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
621 /* -(A + B) -> (-B) - A. */
622 if (negate_expr_p (TREE_OPERAND (t
, 1))
623 && reorder_operands_p (TREE_OPERAND (t
, 0),
624 TREE_OPERAND (t
, 1)))
626 tem
= negate_expr (TREE_OPERAND (t
, 1));
627 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
628 tem
, TREE_OPERAND (t
, 0));
631 /* -(A + B) -> (-A) - B. */
632 if (negate_expr_p (TREE_OPERAND (t
, 0)))
634 tem
= negate_expr (TREE_OPERAND (t
, 0));
635 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
636 tem
, TREE_OPERAND (t
, 1));
642 /* - (A - B) -> B - A */
643 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
644 && !HONOR_SIGNED_ZEROS (element_mode (type
))
645 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
646 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
647 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
651 if (TYPE_UNSIGNED (type
))
657 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
659 tem
= TREE_OPERAND (t
, 1);
660 if (negate_expr_p (tem
))
661 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
662 TREE_OPERAND (t
, 0), negate_expr (tem
));
663 tem
= TREE_OPERAND (t
, 0);
664 if (negate_expr_p (tem
))
665 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
666 negate_expr (tem
), TREE_OPERAND (t
, 1));
673 /* In general we can't negate A / B, because if A is INT_MIN and
674 B is 1, we may turn this into INT_MIN / -1 which is undefined
675 and actually traps on some architectures. But if overflow is
676 undefined, we can negate, because - (INT_MIN / 1) is an
678 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
680 const char * const warnmsg
= G_("assuming signed overflow does not "
681 "occur when negating a division");
682 tem
= TREE_OPERAND (t
, 1);
683 if (negate_expr_p (tem
))
685 if (INTEGRAL_TYPE_P (type
)
686 && (TREE_CODE (tem
) != INTEGER_CST
687 || integer_onep (tem
)))
688 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
689 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
690 TREE_OPERAND (t
, 0), negate_expr (tem
));
692 /* If overflow is undefined then we have to be careful because
693 we ask whether it's ok to associate the negate with the
694 division which is not ok for example for
695 -((a - b) / c) where (-(a - b)) / c may invoke undefined
696 overflow because of negating INT_MIN. So do not use
697 negate_expr_p here but open-code the two important cases. */
698 tem
= TREE_OPERAND (t
, 0);
699 if ((INTEGRAL_TYPE_P (type
)
700 && (TREE_CODE (tem
) == NEGATE_EXPR
701 || (TREE_CODE (tem
) == INTEGER_CST
702 && may_negate_without_overflow_p (tem
))))
703 || !INTEGRAL_TYPE_P (type
))
704 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
705 negate_expr (tem
), TREE_OPERAND (t
, 1));
710 /* Convert -((double)float) into (double)(-float). */
711 if (TREE_CODE (type
) == REAL_TYPE
)
713 tem
= strip_float_extensions (t
);
714 if (tem
!= t
&& negate_expr_p (tem
))
715 return fold_convert_loc (loc
, type
, negate_expr (tem
));
720 /* Negate -f(x) as f(-x). */
721 if (negate_mathfn_p (builtin_mathfn_code (t
))
722 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
726 fndecl
= get_callee_fndecl (t
);
727 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
728 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
733 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
734 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
736 tree op1
= TREE_OPERAND (t
, 1);
737 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
739 tree ntype
= TYPE_UNSIGNED (type
)
740 ? signed_type_for (type
)
741 : unsigned_type_for (type
);
742 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
743 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
744 return fold_convert_loc (loc
, type
, temp
);
756 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
757 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
769 loc
= EXPR_LOCATION (t
);
770 type
= TREE_TYPE (t
);
773 tem
= fold_negate_expr (loc
, t
);
775 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
776 return fold_convert_loc (loc
, type
, tem
);
779 /* Split a tree IN into a constant, literal and variable parts that could be
780 combined with CODE to make IN. "constant" means an expression with
781 TREE_CONSTANT but that isn't an actual constant. CODE must be a
782 commutative arithmetic operation. Store the constant part into *CONP,
783 the literal in *LITP and return the variable part. If a part isn't
784 present, set it to null. If the tree does not decompose in this way,
785 return the entire tree as the variable part and the other parts as null.
787 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
788 case, we negate an operand that was subtracted. Except if it is a
789 literal for which we use *MINUS_LITP instead.
791 If NEGATE_P is true, we are negating all of IN, again except a literal
792 for which we use *MINUS_LITP instead.
794 If IN is itself a literal or constant, return it as appropriate.
796 Note that we do not guarantee that any of the three values will be the
797 same type as IN, but they will have the same signedness and mode. */
800 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
801 tree
*minus_litp
, int negate_p
)
809 /* Strip any conversions that don't change the machine mode or signedness. */
810 STRIP_SIGN_NOPS (in
);
812 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
813 || TREE_CODE (in
) == FIXED_CST
)
815 else if (TREE_CODE (in
) == code
816 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
817 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
818 /* We can associate addition and subtraction together (even
819 though the C standard doesn't say so) for integers because
820 the value is not affected. For reals, the value might be
821 affected, so we can't. */
822 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
823 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
825 tree op0
= TREE_OPERAND (in
, 0);
826 tree op1
= TREE_OPERAND (in
, 1);
827 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
828 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
830 /* First see if either of the operands is a literal, then a constant. */
831 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
832 || TREE_CODE (op0
) == FIXED_CST
)
833 *litp
= op0
, op0
= 0;
834 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
835 || TREE_CODE (op1
) == FIXED_CST
)
836 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
838 if (op0
!= 0 && TREE_CONSTANT (op0
))
839 *conp
= op0
, op0
= 0;
840 else if (op1
!= 0 && TREE_CONSTANT (op1
))
841 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
843 /* If we haven't dealt with either operand, this is not a case we can
844 decompose. Otherwise, VAR is either of the ones remaining, if any. */
845 if (op0
!= 0 && op1
!= 0)
850 var
= op1
, neg_var_p
= neg1_p
;
852 /* Now do any needed negations. */
854 *minus_litp
= *litp
, *litp
= 0;
856 *conp
= negate_expr (*conp
);
858 var
= negate_expr (var
);
860 else if (TREE_CODE (in
) == BIT_NOT_EXPR
861 && code
== PLUS_EXPR
)
863 /* -X - 1 is folded to ~X, undo that here. */
864 *minus_litp
= build_one_cst (TREE_TYPE (in
));
865 var
= negate_expr (TREE_OPERAND (in
, 0));
867 else if (TREE_CONSTANT (in
))
875 *minus_litp
= *litp
, *litp
= 0;
876 else if (*minus_litp
)
877 *litp
= *minus_litp
, *minus_litp
= 0;
878 *conp
= negate_expr (*conp
);
879 var
= negate_expr (var
);
885 /* Re-associate trees split by the above function. T1 and T2 are
886 either expressions to associate or null. Return the new
887 expression, if any. LOC is the location of the new expression. If
888 we build an operation, do it in TYPE and with CODE. */
891 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
898 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
899 try to fold this since we will have infinite recursion. But do
900 deal with any NEGATE_EXPRs. */
901 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
902 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
904 if (code
== PLUS_EXPR
)
906 if (TREE_CODE (t1
) == NEGATE_EXPR
)
907 return build2_loc (loc
, MINUS_EXPR
, type
,
908 fold_convert_loc (loc
, type
, t2
),
909 fold_convert_loc (loc
, type
,
910 TREE_OPERAND (t1
, 0)));
911 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
912 return build2_loc (loc
, MINUS_EXPR
, type
,
913 fold_convert_loc (loc
, type
, t1
),
914 fold_convert_loc (loc
, type
,
915 TREE_OPERAND (t2
, 0)));
916 else if (integer_zerop (t2
))
917 return fold_convert_loc (loc
, type
, t1
);
919 else if (code
== MINUS_EXPR
)
921 if (integer_zerop (t2
))
922 return fold_convert_loc (loc
, type
, t1
);
925 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
926 fold_convert_loc (loc
, type
, t2
));
929 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
930 fold_convert_loc (loc
, type
, t2
));
933 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
934 for use in int_const_binop, size_binop and size_diffop. */
937 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
939 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
941 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
956 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
957 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
958 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
962 /* Combine two integer constants ARG1 and ARG2 under operation CODE
963 to produce a new constant. Return NULL_TREE if we don't know how
964 to evaluate CODE at compile-time. */
967 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
972 tree type
= TREE_TYPE (arg1
);
973 signop sign
= TYPE_SIGN (type
);
974 bool overflow
= false;
976 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
977 TYPE_SIGN (TREE_TYPE (parg2
)));
982 res
= wi::bit_or (arg1
, arg2
);
986 res
= wi::bit_xor (arg1
, arg2
);
990 res
= wi::bit_and (arg1
, arg2
);
995 if (wi::neg_p (arg2
))
998 if (code
== RSHIFT_EXPR
)
1004 if (code
== RSHIFT_EXPR
)
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res
= wi::rshift (arg1
, arg2
, sign
);
1010 res
= wi::lshift (arg1
, arg2
);
1015 if (wi::neg_p (arg2
))
1018 if (code
== RROTATE_EXPR
)
1019 code
= LROTATE_EXPR
;
1021 code
= RROTATE_EXPR
;
1024 if (code
== RROTATE_EXPR
)
1025 res
= wi::rrotate (arg1
, arg2
);
1027 res
= wi::lrotate (arg1
, arg2
);
1031 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1035 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1039 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1042 case MULT_HIGHPART_EXPR
:
1043 res
= wi::mul_high (arg1
, arg2
, sign
);
1046 case TRUNC_DIV_EXPR
:
1047 case EXACT_DIV_EXPR
:
1050 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1053 case FLOOR_DIV_EXPR
:
1056 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1062 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1065 case ROUND_DIV_EXPR
:
1068 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1071 case TRUNC_MOD_EXPR
:
1074 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1077 case FLOOR_MOD_EXPR
:
1080 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1086 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1089 case ROUND_MOD_EXPR
:
1092 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1096 res
= wi::min (arg1
, arg2
, sign
);
1100 res
= wi::max (arg1
, arg2
, sign
);
1107 t
= force_fit_type (type
, res
, overflowable
,
1108 (((sign
== SIGNED
|| overflowable
== -1)
1110 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1116 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1118 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1121 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1122 constant. We assume ARG1 and ARG2 have the same data type, or at least
1123 are the same kind of constant and the same machine mode. Return zero if
1124 combining the constants is not allowed in the current operating mode. */
1127 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1129 /* Sanity check for the recursive cases. */
1136 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1137 return int_const_binop (code
, arg1
, arg2
);
1139 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1144 REAL_VALUE_TYPE value
;
1145 REAL_VALUE_TYPE result
;
1149 /* The following codes are handled by real_arithmetic. */
1164 d1
= TREE_REAL_CST (arg1
);
1165 d2
= TREE_REAL_CST (arg2
);
1167 type
= TREE_TYPE (arg1
);
1168 mode
= TYPE_MODE (type
);
1170 /* Don't perform operation if we honor signaling NaNs and
1171 either operand is a NaN. */
1172 if (HONOR_SNANS (mode
)
1173 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1176 /* Don't perform operation if it would raise a division
1177 by zero exception. */
1178 if (code
== RDIV_EXPR
1179 && REAL_VALUES_EQUAL (d2
, dconst0
)
1180 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1183 /* If either operand is a NaN, just return it. Otherwise, set up
1184 for floating-point trap; we return an overflow. */
1185 if (REAL_VALUE_ISNAN (d1
))
1187 else if (REAL_VALUE_ISNAN (d2
))
1190 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1191 real_convert (&result
, mode
, &value
);
1193 /* Don't constant fold this floating point operation if
1194 the result has overflowed and flag_trapping_math. */
1195 if (flag_trapping_math
1196 && MODE_HAS_INFINITIES (mode
)
1197 && REAL_VALUE_ISINF (result
)
1198 && !REAL_VALUE_ISINF (d1
)
1199 && !REAL_VALUE_ISINF (d2
))
1202 /* Don't constant fold this floating point operation if the
1203 result may dependent upon the run-time rounding mode and
1204 flag_rounding_math is set, or if GCC's software emulation
1205 is unable to accurately represent the result. */
1206 if ((flag_rounding_math
1207 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1208 && (inexact
|| !real_identical (&result
, &value
)))
1211 t
= build_real (type
, result
);
1213 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1217 if (TREE_CODE (arg1
) == FIXED_CST
&& TREE_CODE (arg2
) == FIXED_CST
)
1219 FIXED_VALUE_TYPE f1
;
1220 FIXED_VALUE_TYPE f2
;
1221 FIXED_VALUE_TYPE result
;
1226 /* The following codes are handled by fixed_arithmetic. */
1232 case TRUNC_DIV_EXPR
:
1233 f2
= TREE_FIXED_CST (arg2
);
1240 f2
.data
.high
= w2
.elt (1);
1241 f2
.data
.low
= w2
.elt (0);
1250 f1
= TREE_FIXED_CST (arg1
);
1251 type
= TREE_TYPE (arg1
);
1252 sat_p
= TYPE_SATURATING (type
);
1253 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1254 t
= build_fixed (type
, result
);
1255 /* Propagate overflow flags. */
1256 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1257 TREE_OVERFLOW (t
) = 1;
1261 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1263 tree type
= TREE_TYPE (arg1
);
1264 tree r1
= TREE_REALPART (arg1
);
1265 tree i1
= TREE_IMAGPART (arg1
);
1266 tree r2
= TREE_REALPART (arg2
);
1267 tree i2
= TREE_IMAGPART (arg2
);
1274 real
= const_binop (code
, r1
, r2
);
1275 imag
= const_binop (code
, i1
, i2
);
1279 if (COMPLEX_FLOAT_TYPE_P (type
))
1280 return do_mpc_arg2 (arg1
, arg2
, type
,
1281 /* do_nonfinite= */ folding_initializer
,
1284 real
= const_binop (MINUS_EXPR
,
1285 const_binop (MULT_EXPR
, r1
, r2
),
1286 const_binop (MULT_EXPR
, i1
, i2
));
1287 imag
= const_binop (PLUS_EXPR
,
1288 const_binop (MULT_EXPR
, r1
, i2
),
1289 const_binop (MULT_EXPR
, i1
, r2
));
1293 if (COMPLEX_FLOAT_TYPE_P (type
))
1294 return do_mpc_arg2 (arg1
, arg2
, type
,
1295 /* do_nonfinite= */ folding_initializer
,
1298 case TRUNC_DIV_EXPR
:
1300 case FLOOR_DIV_EXPR
:
1301 case ROUND_DIV_EXPR
:
1302 if (flag_complex_method
== 0)
1304 /* Keep this algorithm in sync with
1305 tree-complex.c:expand_complex_div_straight().
1307 Expand complex division to scalars, straightforward algorithm.
1308 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1312 = const_binop (PLUS_EXPR
,
1313 const_binop (MULT_EXPR
, r2
, r2
),
1314 const_binop (MULT_EXPR
, i2
, i2
));
1316 = const_binop (PLUS_EXPR
,
1317 const_binop (MULT_EXPR
, r1
, r2
),
1318 const_binop (MULT_EXPR
, i1
, i2
));
1320 = const_binop (MINUS_EXPR
,
1321 const_binop (MULT_EXPR
, i1
, r2
),
1322 const_binop (MULT_EXPR
, r1
, i2
));
1324 real
= const_binop (code
, t1
, magsquared
);
1325 imag
= const_binop (code
, t2
, magsquared
);
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_wide().
1332 Expand complex division to scalars, modified algorithm to minimize
1333 overflow with wide input ranges. */
1334 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1335 fold_abs_const (r2
, TREE_TYPE (type
)),
1336 fold_abs_const (i2
, TREE_TYPE (type
)));
1338 if (integer_nonzerop (compare
))
1340 /* In the TRUE branch, we compute
1342 div = (br * ratio) + bi;
1343 tr = (ar * ratio) + ai;
1344 ti = (ai * ratio) - ar;
1347 tree ratio
= const_binop (code
, r2
, i2
);
1348 tree div
= const_binop (PLUS_EXPR
, i2
,
1349 const_binop (MULT_EXPR
, r2
, ratio
));
1350 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1351 real
= const_binop (PLUS_EXPR
, real
, i1
);
1352 real
= const_binop (code
, real
, div
);
1354 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1355 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1356 imag
= const_binop (code
, imag
, div
);
1360 /* In the FALSE branch, we compute
1362 divisor = (d * ratio) + c;
1363 tr = (b * ratio) + a;
1364 ti = b - (a * ratio);
1367 tree ratio
= const_binop (code
, i2
, r2
);
1368 tree div
= const_binop (PLUS_EXPR
, r2
,
1369 const_binop (MULT_EXPR
, i2
, ratio
));
1371 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1372 real
= const_binop (PLUS_EXPR
, real
, r1
);
1373 real
= const_binop (code
, real
, div
);
1375 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1376 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1377 imag
= const_binop (code
, imag
, div
);
1387 return build_complex (type
, real
, imag
);
1390 if (TREE_CODE (arg1
) == VECTOR_CST
1391 && TREE_CODE (arg2
) == VECTOR_CST
)
1393 tree type
= TREE_TYPE (arg1
);
1394 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1395 tree
*elts
= XALLOCAVEC (tree
, count
);
1397 for (i
= 0; i
< count
; i
++)
1399 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1400 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1402 elts
[i
] = const_binop (code
, elem1
, elem2
);
1404 /* It is possible that const_binop cannot handle the given
1405 code and return NULL_TREE */
1406 if (elts
[i
] == NULL_TREE
)
1410 return build_vector (type
, elts
);
1413 /* Shifts allow a scalar offset for a vector. */
1414 if (TREE_CODE (arg1
) == VECTOR_CST
1415 && TREE_CODE (arg2
) == INTEGER_CST
)
1417 tree type
= TREE_TYPE (arg1
);
1418 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1419 tree
*elts
= XALLOCAVEC (tree
, count
);
1421 for (i
= 0; i
< count
; i
++)
1423 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1425 elts
[i
] = const_binop (code
, elem1
, arg2
);
1427 /* It is possible that const_binop cannot handle the given
1428 code and return NULL_TREE. */
1429 if (elts
[i
] == NULL_TREE
)
1433 return build_vector (type
, elts
);
1438 /* Overload that adds a TYPE parameter to be able to dispatch
1439 to fold_relational_const. */
1442 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
/* Comparison codes need the boolean result type TYPE, so they are
   folded by fold_relational_const rather than the 2-operand overload.  */
1444 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1445 return fold_relational_const (code
, type
, arg1
, arg2
);
/* Every other code ignores TYPE; the 2-operand const_binop derives
   the result type from its operands.  */
1447 return const_binop (code
, arg1
, arg2
);
1450 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1451 Return zero if computing the constants is not possible. */
1454 const_unop (enum tree_code code
, tree type
, tree arg0
)
/* Conversion-like codes all funnel through fold_convert_const.  */
1460 case FIX_TRUNC_EXPR
:
1461 case FIXED_CONVERT_EXPR
:
1462 return fold_convert_const (code
, type
, arg0
);
/* Address-space conversion: only a zero pointer is folded here.  */
1464 case ADDR_SPACE_CONVERT_EXPR
:
1465 if (integer_zerop (arg0
))
1466 return fold_convert_const (code
, type
, arg0
);
1469 case VIEW_CONVERT_EXPR
:
1470 return fold_view_convert_expr (type
, arg0
);
1474 /* Can't call fold_negate_const directly here as that doesn't
1475 handle all cases and we might not be able to negate some
1477 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
/* Only accept the negation if it folded all the way to a constant.  */
1478 if (tem
&& CONSTANT_CLASS_P (tem
))
1484 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1485 return fold_abs_const (arg0
, type
);
/* CONJ of a complex constant: keep the real part, negate the
   imaginary part.  */
1489 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1491 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1493 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1498 if (TREE_CODE (arg0
) == INTEGER_CST
)
1499 return fold_not_const (arg0
, type
);
1500 /* Perform BIT_NOT_EXPR on each element individually. */
1501 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1505 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1507 elements
= XALLOCAVEC (tree
, count
);
1508 for (i
= 0; i
< count
; i
++)
1510 elem
= VECTOR_CST_ELT (arg0
, i
);
/* Recurse element-wise; TREE_TYPE (type) is the element type.  */
1511 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1512 if (elem
== NULL_TREE
)
1517 return build_vector (type
, elements
);
1521 case TRUTH_NOT_EXPR
:
1522 if (TREE_CODE (arg0
) == INTEGER_CST
)
1523 return constant_boolean_node (integer_zerop (arg0
), type
);
1527 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1528 return fold_convert (type
, TREE_REALPART (arg0
));
1532 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1533 return fold_convert (type
, TREE_IMAGPART (arg0
));
/* Unpack one half of ARG0's elements and widen them (NOP or FLOAT)
   into TYPE's element type; ARG0 has twice TYPE's element count.  */
1536 case VEC_UNPACK_LO_EXPR
:
1537 case VEC_UNPACK_HI_EXPR
:
1538 case VEC_UNPACK_FLOAT_LO_EXPR
:
1539 case VEC_UNPACK_FLOAT_HI_EXPR
:
1541 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1543 enum tree_code subcode
;
1545 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1546 if (TREE_CODE (arg0
) != VECTOR_CST
)
1549 elts
= XALLOCAVEC (tree
, nelts
* 2);
1550 if (!vec_cst_ctor_to_array (arg0
, elts
))
/* Which half is "LO" depends on target endianness.  */
1553 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1554 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
/* Plain unpacks widen with NOP_EXPR, FLOAT variants with FLOAT_EXPR.  */
1557 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1560 subcode
= FLOAT_EXPR
;
1562 for (i
= 0; i
< nelts
; i
++)
1564 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1565 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1569 return build_vector (type
, elts
);
/* Reductions: fold all vector elements pairwise into elts[0].  */
1572 case REDUC_MIN_EXPR
:
1573 case REDUC_MAX_EXPR
:
1574 case REDUC_PLUS_EXPR
:
1576 unsigned int nelts
, i
;
1578 enum tree_code subcode
;
1580 if (TREE_CODE (arg0
) != VECTOR_CST
)
1582 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1584 elts
= XALLOCAVEC (tree
, nelts
);
1585 if (!vec_cst_ctor_to_array (arg0
, elts
))
1590 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1591 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1592 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1593 default: gcc_unreachable ();
1596 for (i
= 1; i
< nelts
; i
++)
/* Accumulate into elts[0]; bail out if a step cannot be folded.  */
1598 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1599 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1613 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1614 indicates which particular sizetype to create. */
1617 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
/* sizetype_tab is indexed by size_type_kind and holds the
   corresponding sizetype variant.  */
1619 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1622 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1623 is a tree code. The type of the result is taken from the operands.
1624 Both must be equivalent integer types, ala int_binop_types_match_p.
1625 If the operands are constant, so is the result. */
1628 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1630 tree type
= TREE_TYPE (arg0
);
/* Error operands propagate unchanged.  */
1632 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1633 return error_mark_node
;
1635 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1638 /* Handle the special case of two integer constants faster. */
1639 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1641 /* And some specific cases even faster than that. */
/* Identity fast paths (x+0, 0+x, x-0, 1*x), taken only when the
   tested operand carries no overflow flag.  */
1642 if (code
== PLUS_EXPR
)
1644 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1646 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1649 else if (code
== MINUS_EXPR
)
1651 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1654 else if (code
== MULT_EXPR
)
1656 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1660 /* Handle general case of two integer constants. For sizetype
1661 constant calculations we always want to know about overflow,
1662 even in the unsigned case. */
1663 return int_const_binop_1 (code
, arg0
, arg1
, -1);
/* Non-constant operands: build a (possibly folded) expression tree.  */
1666 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1669 /* Given two values, either both of sizetype or both of bitsizetype,
1670 compute the difference between the two values. Return the value
1671 in signed type corresponding to the type of the operands. */
1674 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1676 tree type
= TREE_TYPE (arg0
);
1679 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1682 /* If the type is already signed, just do the simple thing. */
1683 if (!TYPE_UNSIGNED (type
))
1684 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
/* Pick CTYPE, the signed counterpart of TYPE (ssizetype,
   sbitsizetype, or the generic signed variant).  */
1686 if (type
== sizetype
)
1688 else if (type
== bitsizetype
)
1689 ctype
= sbitsizetype
;
1691 ctype
= signed_type_for (type
);
1693 /* If either operand is not a constant, do the conversions to the signed
1694 type and subtract. The hardware will do the right thing with any
1695 overflow in the subtraction. */
1696 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1697 return size_binop_loc (loc
, MINUS_EXPR
,
1698 fold_convert_loc (loc
, ctype
, arg0
),
1699 fold_convert_loc (loc
, ctype
, arg1
));
1701 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1702 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1703 overflow) and negate (which can't either). Special-case a result
1704 of zero while we're here. */
1705 if (tree_int_cst_equal (arg0
, arg1
))
1706 return build_int_cst (ctype
, 0);
1707 else if (tree_int_cst_lt (arg1
, arg0
))
1708 return fold_convert_loc (loc
, ctype
,
1709 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
/* arg1 > arg0: negate by subtracting from zero in the signed type.  */
1711 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1712 fold_convert_loc (loc
, ctype
,
1713 size_binop_loc (loc
,
1718 /* A subroutine of fold_convert_const handling conversions of an
1719 INTEGER_CST to another integer type. */
1722 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1724 /* Given an integer constant, make new constant with new type,
1725 appropriately sign-extended or truncated. Use widest_int
1726 so that any extension is done according ARG1's type. */
/* NOTE(review): the !POINTER_TYPE_P operand feeds force_fit_type's
   overflowable flag -- confirm against force_fit_type's declaration.
   ARG1's overflow flag is propagated as the final argument.  */
1727 return force_fit_type (type
, wi::to_widest (arg1
),
1728 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1729 TREE_OVERFLOW (arg1
));
1732 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1733 to an integer type. */
1736 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1738 bool overflow
= false;
1741 /* The following code implements the floating point to integer
1742 conversion rules required by the Java Language Specification,
1743 that IEEE NaNs are mapped to zero and values that overflow
1744 the target precision saturate, i.e. values greater than
1745 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1746 are mapped to INT_MIN. These semantics are allowed by the
1747 C and C++ standards that simply state that the behavior of
1748 FP-to-integer conversion is unspecified upon overflow. */
/* Work on a local copy of ARG1's value; real_trunc writes its
   rounded result into r.  */
1752 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1756 case FIX_TRUNC_EXPR
:
1757 real_trunc (&r
, VOIDmode
, &x
);
1764 /* If R is NaN, return zero and show we have an overflow. */
1765 if (REAL_VALUE_ISNAN (r
))
1768 val
= wi::zero (TYPE_PRECISION (type
));
1771 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE's minimum when R underflows the range.  */
1776 tree lt
= TYPE_MIN_VALUE (type
);
1777 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1778 if (REAL_VALUES_LESS (r
, l
))
/* Saturate at TYPE's maximum when R overflows the range.  */
1787 tree ut
= TYPE_MAX_VALUE (type
);
1790 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1791 if (REAL_VALUES_LESS (u
, r
))
/* In-range value: do the actual conversion, recording overflow.  */
1800 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1802 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1806 /* A subroutine of fold_convert_const handling conversions of a
1807 FIXED_CST to an integer type. */
1810 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1813 double_int temp
, temp_trunc
;
1816 /* Right shift FIXED_CST to temp by fbit. */
1817 temp
= TREE_FIXED_CST (arg1
).data
;
1818 mode
= TREE_FIXED_CST (arg1
).mode
;
/* Guard: shifting by >= HOST_BITS_PER_DOUBLE_INT would be undefined.  */
1819 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1821 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1822 HOST_BITS_PER_DOUBLE_INT
,
1823 SIGNED_FIXED_POINT_MODE_P (mode
));
1825 /* Left shift temp to temp_trunc by fbit. */
/* temp_trunc reconstructs the truncated value so the fractional
   bits can be detected below.  */
1826 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1827 HOST_BITS_PER_DOUBLE_INT
,
1828 SIGNED_FIXED_POINT_MODE_P (mode
));
1832 temp
= double_int_zero
;
1833 temp_trunc
= double_int_zero
;
1836 /* If FIXED_CST is negative, we need to round the value toward 0.
1837 By checking if the fractional bits are not zero to add 1 to temp. */
1838 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1839 && temp_trunc
.is_negative ()
1840 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1841 temp
+= double_int_one
;
1843 /* Given a fixed-point constant, make new constant with new type,
1844 appropriately sign-extended or truncated. */
/* Overflow flag: a negative value converted to a less-unsigned type,
   OR'd with ARG1's existing overflow flag.  */
1845 t
= force_fit_type (type
, temp
, -1,
1846 (temp
.is_negative ()
1847 && (TYPE_UNSIGNED (type
)
1848 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1849 | TREE_OVERFLOW (arg1
));
1854 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1855 to another floating point type. */
1858 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1860 REAL_VALUE_TYPE value
;
/* Re-round ARG1's value into TYPE's mode, then build the new REAL_CST.  */
1863 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1864 t
= build_real (type
, value
);
1866 /* If converting an infinity or NAN to a representation that doesn't
1867 have one, set the overflow bit so that we can produce some kind of
1868 error message at the appropriate point if necessary. It's not the
1869 most user-friendly message, but it's better than nothing. */
1870 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1871 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1872 TREE_OVERFLOW (t
) = 1;
1873 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1874 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1875 TREE_OVERFLOW (t
) = 1;
1876 /* Regular overflow, conversion produced an infinity in a mode that
1877 can't represent them. */
1878 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1879 && REAL_VALUE_ISINF (value
)
1880 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1881 TREE_OVERFLOW (t
) = 1;
/* Otherwise just inherit ARG1's overflow flag.  */
1883 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1887 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1888 to a floating point type. */
1891 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1893 REAL_VALUE_TYPE value
;
/* Convert the fixed-point value into TYPE's floating mode and build
   the REAL_CST from it.  */
1896 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
1897 t
= build_real (type
, value
);
/* Propagate ARG1's overflow flag to the result.  */
1899 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1903 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1904 to another fixed-point type. */
1907 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
1909 FIXED_VALUE_TYPE value
;
/* fixed_convert honours TYPE's saturating attribute and reports
   overflow via its return value (captured in overflow_p).  */
1913 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
1914 TYPE_SATURATING (type
));
1915 t
= build_fixed (type
, value
);
1917 /* Propagate overflow flags. */
1918 if (overflow_p
| TREE_OVERFLOW (arg1
))
1919 TREE_OVERFLOW (t
) = 1;
1923 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1924 to a fixed-point type. */
1927 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
1929 FIXED_VALUE_TYPE value
;
/* The wide-int constant must fit in a double_int (at most 2 elements).  */
1934 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
1936 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
1937 if (TREE_INT_CST_NUNITS (arg1
) == 1)
/* Single-element constant: sign-extend the low word into the high word.  */
1938 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
1940 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
1942 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
1943 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
1944 TYPE_SATURATING (type
));
1945 t
= build_fixed (type
, value
);
1947 /* Propagate overflow flags. */
1948 if (overflow_p
| TREE_OVERFLOW (arg1
))
1949 TREE_OVERFLOW (t
) = 1;
1953 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1954 to a fixed-point type. */
1957 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
1959 FIXED_VALUE_TYPE value
;
/* fixed_convert_from_real honours TYPE's saturating attribute and
   reports overflow via its return value.  */
1963 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
1964 &TREE_REAL_CST (arg1
),
1965 TYPE_SATURATING (type
));
1966 t
= build_fixed (type
, value
);
1968 /* Propagate overflow flags. */
1969 if (overflow_p
| TREE_OVERFLOW (arg1
))
1970 TREE_OVERFLOW (t
) = 1;
1974 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1975 type TYPE. If no simplification can be done return NULL_TREE. */
1978 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
/* No-op conversion: the constant already has the requested type.  */
1980 if (TREE_TYPE (arg1
) == type
)
/* Dispatch on the TARGET type first, then on ARG1's constant kind.  */
1983 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
1984 || TREE_CODE (type
) == OFFSET_TYPE
)
1986 if (TREE_CODE (arg1
) == INTEGER_CST
)
1987 return fold_convert_const_int_from_int (type
, arg1
);
1988 else if (TREE_CODE (arg1
) == REAL_CST
)
1989 return fold_convert_const_int_from_real (code
, type
, arg1
);
1990 else if (TREE_CODE (arg1
) == FIXED_CST
)
1991 return fold_convert_const_int_from_fixed (type
, arg1
);
/* Floating-point target type.  */
1993 else if (TREE_CODE (type
) == REAL_TYPE
)
1995 if (TREE_CODE (arg1
) == INTEGER_CST
)
1996 return build_real_from_int_cst (type
, arg1
);
1997 else if (TREE_CODE (arg1
) == REAL_CST
)
1998 return fold_convert_const_real_from_real (type
, arg1
);
1999 else if (TREE_CODE (arg1
) == FIXED_CST
)
2000 return fold_convert_const_real_from_fixed (type
, arg1
);
/* Fixed-point target type.  */
2002 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2004 if (TREE_CODE (arg1
) == FIXED_CST
)
2005 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2006 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2007 return fold_convert_const_fixed_from_int (type
, arg1
);
2008 else if (TREE_CODE (arg1
) == REAL_CST
)
2009 return fold_convert_const_fixed_from_real (type
, arg1
);
2014 /* Construct a vector of zero elements of vector type TYPE. */
2017 build_zero_vector (tree type
)
/* Fold a zero constant into the element type, then splat it across
   every lane of the vector.  */
2021 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2022 return build_vector_from_val (type
, t
);
2025 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2028 fold_convertible_p (const_tree type
, const_tree arg
)
2030 tree orig
= TREE_TYPE (arg
);
/* Error nodes anywhere make the conversion invalid.  */
2035 if (TREE_CODE (arg
) == ERROR_MARK
2036 || TREE_CODE (type
) == ERROR_MARK
2037 || TREE_CODE (orig
) == ERROR_MARK
)
/* Identical main variants are trivially convertible.  */
2040 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2043 switch (TREE_CODE (type
))
2045 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2046 case POINTER_TYPE
: case REFERENCE_TYPE
:
2048 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2049 || TREE_CODE (orig
) == OFFSET_TYPE
)
/* Vector source: only if it has the same size as the target type.  */
2051 return (TREE_CODE (orig
) == VECTOR_TYPE
2052 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2055 case FIXED_POINT_TYPE
:
/* Remaining cases: NOP-convertible only between like tree codes.  */
2059 return TREE_CODE (type
) == TREE_CODE (orig
);
2066 /* Convert expression ARG to type TYPE. Used by the middle-end for
2067 simple conversions in preference to calling the front-end's convert. */
2070 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2072 tree orig
= TREE_TYPE (arg
);
/* Error nodes propagate as error_mark_node.  */
2078 if (TREE_CODE (arg
) == ERROR_MARK
2079 || TREE_CODE (type
) == ERROR_MARK
2080 || TREE_CODE (orig
) == ERROR_MARK
)
2081 return error_mark_node
;
/* Dispatch on the TARGET type.  */
2083 switch (TREE_CODE (type
))
2086 case REFERENCE_TYPE
:
2087 /* Handle conversions between pointers to different address spaces. */
2088 if (POINTER_TYPE_P (orig
)
2089 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2090 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2091 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2094 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
/* Try constant folding first; otherwise build a NOP_EXPR below.  */
2096 if (TREE_CODE (arg
) == INTEGER_CST
)
2098 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2099 if (tem
!= NULL_TREE
)
2102 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2103 || TREE_CODE (orig
) == OFFSET_TYPE
)
2104 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
/* Complex source: convert via its real part.  */
2105 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2106 return fold_convert_loc (loc
, type
,
2107 fold_build1_loc (loc
, REALPART_EXPR
,
2108 TREE_TYPE (orig
), arg
));
2109 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2110 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2111 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
/* Floating-point target: constant-fold each constant kind first.  */
2114 if (TREE_CODE (arg
) == INTEGER_CST
)
2116 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2117 if (tem
!= NULL_TREE
)
2120 else if (TREE_CODE (arg
) == REAL_CST
)
2122 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2123 if (tem
!= NULL_TREE
)
2126 else if (TREE_CODE (arg
) == FIXED_CST
)
2128 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2129 if (tem
!= NULL_TREE
)
/* Non-constant: pick the conversion tree code from the source type.  */
2133 switch (TREE_CODE (orig
))
2136 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2137 case POINTER_TYPE
: case REFERENCE_TYPE
:
2138 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2141 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2143 case FIXED_POINT_TYPE
:
2144 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
/* Complex source: take the real part and convert that.  */
2147 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2148 return fold_convert_loc (loc
, type
, tem
);
2154 case FIXED_POINT_TYPE
:
2155 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2156 || TREE_CODE (arg
) == REAL_CST
)
2158 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2159 if (tem
!= NULL_TREE
)
2160 goto fold_convert_exit
;
2163 switch (TREE_CODE (orig
))
2165 case FIXED_POINT_TYPE
:
2170 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2173 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2174 return fold_convert_loc (loc
, type
, tem
);
/* Complex target type.  */
2181 switch (TREE_CODE (orig
))
2184 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2185 case POINTER_TYPE
: case REFERENCE_TYPE
:
2187 case FIXED_POINT_TYPE
:
/* Scalar source: build (complex) {convert (arg), 0}.  */
2188 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2189 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2190 fold_convert_loc (loc
, TREE_TYPE (type
),
2191 integer_zero_node
));
/* Complex source: convert real and imaginary parts separately.  */
2196 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2198 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2199 TREE_OPERAND (arg
, 0));
2200 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2201 TREE_OPERAND (arg
, 1));
2202 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
/* ARG is used twice below; wrap it in a SAVE_EXPR so it is only
   evaluated once.  */
2205 arg
= save_expr (arg
);
2206 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2207 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2208 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2209 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2210 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
/* Vector target: zero splats specially, otherwise a VIEW_CONVERT of
   an equal-sized object.  */
2218 if (integer_zerop (arg
))
2219 return build_zero_vector (type
);
2220 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2221 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2222 || TREE_CODE (orig
) == VECTOR_TYPE
)
2223 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
/* void target: discard the value but keep side effects.  */
2226 tem
= fold_ignored_result (arg
);
2227 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2230 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2231 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2235 protected_set_expr_location_unshare (tem
, loc
);
2239 /* Return false if expr can be assumed not to be an lvalue, true
2243 maybe_lvalue_p (const_tree x
)
2245 /* We only need to wrap lvalue tree codes. */
2246 switch (TREE_CODE (x
))
/* Codes listed below may designate storage and so must be wrapped
   by the caller.  NOTE(review): the case list is abridged in this
   extract; further lvalue codes fall on missing lines.  */
2259 case ARRAY_RANGE_REF
:
2265 case PREINCREMENT_EXPR
:
2266 case PREDECREMENT_EXPR
:
2268 case TRY_CATCH_EXPR
:
2269 case WITH_CLEANUP_EXPR
:
2278 /* Assume the worst for front-end tree codes. */
2279 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2287 /* Return an expr equal to X but certainly not valid as an lvalue. */
2290 non_lvalue_loc (location_t loc
, tree x
)
2292 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* If X cannot be an lvalue anyway, there is nothing to wrap.  */
2297 if (! maybe_lvalue_p (x
))
/* Otherwise wrap X in a NON_LVALUE_EXPR of the same type.  */
2299 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2302 /* When pedantic, return an expr equal to X but certainly not valid as a
2303 pedantic lvalue. Otherwise, return X. */
2306 pedantic_non_lvalue_loc (location_t loc
, tree x
)
/* Only ensures location LOC is set on X (unsharing it if needed);
   no wrapper node is built here.  */
2308 return protected_set_expr_location_unshare (x
, loc
);
2311 /* Given a tree comparison code, return the code that is the logical inverse.
2312 It is generally not safe to do this for floating-point comparisons, except
2313 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2314 ERROR_MARK in this case. */
2317 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
/* With trapping math, inverting an ordered comparison would change
   which inputs trap -- refuse except for the always-safe codes.  */
2319 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2320 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
/* With NaNs, the inverse of an ordered test is the unordered-or
   variant (e.g. GT -> UNLE); without NaNs the plain opposite.  */
2330 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2332 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2334 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2336 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2350 return UNORDERED_EXPR
;
2351 case UNORDERED_EXPR
:
2352 return ORDERED_EXPR
;
2358 /* Similar, but return the comparison that results if the operands are
2359 swapped. This is safe for floating-point. */
2362 swap_tree_comparison (enum tree_code code
)
/* NOTE(review): UNORDERED is symmetric, so it presumably maps to
   itself; the case bodies fall on lines missing from this extract.  */
2369 case UNORDERED_EXPR
:
2395 /* Convert a comparison tree code from an enum tree_code representation
2396 into a compcode bit-based encoding. This function is the inverse of
2397 compcode_to_comparison. */
2399 static enum comparison_code
2400 comparison_to_compcode (enum tree_code code
)
/* Each (un)ordered comparison code maps to one COMPCODE_* bit pattern,
   which lets combine_comparisons intersect/union them as bit sets.  */
2417 return COMPCODE_ORD
;
2418 case UNORDERED_EXPR
:
2419 return COMPCODE_UNORD
;
2421 return COMPCODE_UNLT
;
2423 return COMPCODE_UNEQ
;
2425 return COMPCODE_UNLE
;
2427 return COMPCODE_UNGT
;
2429 return COMPCODE_LTGT
;
2431 return COMPCODE_UNGE
;
2437 /* Convert a compcode bit-based encoding of a comparison operator back
2438 to GCC's enum tree_code representation. This function is the
2439 inverse of comparison_to_compcode. */
2441 static enum tree_code
2442 compcode_to_comparison (enum comparison_code code
)
/* Reverse mapping of comparison_to_compcode above.  */
2459 return ORDERED_EXPR
;
2460 case COMPCODE_UNORD
:
2461 return UNORDERED_EXPR
;
2479 /* Return a tree for the comparison which is the combination of
2480 doing the AND or OR (depending on CODE) of the two operations LCODE
2481 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2482 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2483 if this makes the transformation invalid. */
2486 combine_comparisons (location_t loc
,
2487 enum tree_code code
, enum tree_code lcode
,
2488 enum tree_code rcode
, tree truth_type
,
2489 tree ll_arg
, tree lr_arg
)
2491 bool honor_nans
= HONOR_NANS (element_mode (ll_arg
));
2492 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2493 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
/* Compcodes are truth bit-sets: AND intersects them, OR unions them.  */
2498 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2499 compcode
= lcompcode
& rcompcode
;
2502 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2503 compcode
= lcompcode
| rcompcode
;
2512 /* Eliminate unordered comparisons, as well as LTGT and ORD
2513 which are not used unless the mode has NaNs. */
2514 compcode
&= ~COMPCODE_UNORD
;
2515 if (compcode
== COMPCODE_LTGT
)
2516 compcode
= COMPCODE_NE
;
2517 else if (compcode
== COMPCODE_ORD
)
2518 compcode
= COMPCODE_TRUE
;
2520 else if (flag_trapping_math
)
2522 /* Check that the original operation and the optimized ones will trap
2523 under the same condition. */
/* A comparison traps on NaN input unless it is EQ/NE (quiet) or has
   the unordered bit set.  */
2524 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2525 && (lcompcode
!= COMPCODE_EQ
)
2526 && (lcompcode
!= COMPCODE_ORD
);
2527 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2528 && (rcompcode
!= COMPCODE_EQ
)
2529 && (rcompcode
!= COMPCODE_ORD
);
2530 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2531 && (compcode
!= COMPCODE_EQ
)
2532 && (compcode
!= COMPCODE_ORD
);
2534 /* In a short-circuited boolean expression the LHS might be
2535 such that the RHS, if evaluated, will never trap. For
2536 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2537 if neither x nor y is NaN. (This is a mixed blessing: for
2538 example, the expression above will never trap, hence
2539 optimizing it to x < y would be invalid). */
2540 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2541 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2544 /* If the comparison was short-circuited, and only the RHS
2545 trapped, we may now generate a spurious trap. */
2547 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2550 /* If we changed the conditions that cause a trap, we lose. */
2551 if ((ltrap
|| rtrap
) != trap
)
/* Degenerate combined results fold to boolean constants.  */
2555 if (compcode
== COMPCODE_TRUE
)
2556 return constant_boolean_node (true, truth_type
)
2557 else if (compcode
== COMPCODE_FALSE
)
2558 return constant_boolean_node (false, truth_type
);
/* Otherwise translate the combined compcode back to a tree code and
   build the single replacement comparison.  */
2561 enum tree_code tcode
;
2563 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2564 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2568 /* Return nonzero if two operands (typically of the same tree node)
2569 are necessarily equal. If either argument has side-effects this
2570 function returns zero. FLAGS modifies behavior as follows:
2572 If OEP_ONLY_CONST is set, only return nonzero for constants.
2573 This function tests whether the operands are indistinguishable;
2574 it does not test whether they are equal using C's == operation.
2575 The distinction is important for IEEE floating point, because
2576 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2577 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2579 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2580 even though it may hold multiple values during a function.
2581 This is because a GCC tree node guarantees that nothing else is
2582 executed between the evaluation of its "operands" (which may often
2583 be evaluated in arbitrary order). Hence if the operands themselves
2584 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2585 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2586 unset means assuming isochronic (or instantaneous) tree equivalence.
2587 Unless comparing arbitrary expression trees, such as from different
2588 statements, this flag can usually be left unset.
2590 If OEP_PURE_SAME is set, then pure functions with identical arguments
2591 are considered the same. It is used when the caller has other ways
2592 to ensure that global memory is unchanged in between. */
2595 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2597 /* If either is ERROR_MARK, they aren't equal. */
2598 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2599 || TREE_TYPE (arg0
) == error_mark_node
2600 || TREE_TYPE (arg1
) == error_mark_node
)
2603 /* Similar, if either does not have a type (like a released SSA name),
2604 they aren't equal. */
2605 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2608 /* Check equality of integer constants before bailing out due to
2609 precision differences. */
2610 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2611 return tree_int_cst_equal (arg0
, arg1
);
2613 /* If both types don't have the same signedness, then we can't consider
2614 them equal. We must check this before the STRIP_NOPS calls
2615 because they may change the signedness of the arguments. As pointers
2616 strictly don't have a signedness, require either two pointers or
2617 two non-pointers as well. */
2618 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2619 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2622 /* We cannot consider pointers to different address space equal. */
2623 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2624 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2625 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2628 /* If both types don't have the same precision, then it is not safe
2630 if (element_precision (TREE_TYPE (arg0
))
2631 != element_precision (TREE_TYPE (arg1
)))
2637 /* In case both args are comparisons but with different comparison
2638 code, try to swap the comparison operands of one arg to produce
2639 a match and compare that variant. */
2640 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2641 && COMPARISON_CLASS_P (arg0
)
2642 && COMPARISON_CLASS_P (arg1
))
2644 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2646 if (TREE_CODE (arg0
) == swap_code
)
2647 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2648 TREE_OPERAND (arg1
, 1), flags
)
2649 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2650 TREE_OPERAND (arg1
, 0), flags
);
2653 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2654 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2655 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2658 /* This is needed for conversions and for COMPONENT_REF.
2659 Might as well play it safe and always test this. */
2660 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2661 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2662 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2665 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2666 We don't care about side effects in that case because the SAVE_EXPR
2667 takes care of that for us. In all other cases, two expressions are
2668 equal if they have no side effects. If we have two identical
2669 expressions with side effects that should be treated the same due
2670 to the only side effects being identical SAVE_EXPR's, that will
2671 be detected in the recursive calls below.
2672 If we are taking an invariant address of two identical objects
2673 they are necessarily equal as well. */
2674 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2675 && (TREE_CODE (arg0
) == SAVE_EXPR
2676 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2677 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2680 /* Next handle constant cases, those for which we can return 1 even
2681 if ONLY_CONST is set. */
2682 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2683 switch (TREE_CODE (arg0
))
2686 return tree_int_cst_equal (arg0
, arg1
);
2689 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2690 TREE_FIXED_CST (arg1
));
2693 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2694 TREE_REAL_CST (arg1
)))
2698 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2700 /* If we do not distinguish between signed and unsigned zero,
2701 consider them equal. */
2702 if (real_zerop (arg0
) && real_zerop (arg1
))
2711 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2714 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2716 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2717 VECTOR_CST_ELT (arg1
, i
), flags
))
2724 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2726 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2730 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2731 && ! memcmp (TREE_STRING_POINTER (arg0
),
2732 TREE_STRING_POINTER (arg1
),
2733 TREE_STRING_LENGTH (arg0
)));
2736 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2737 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2738 ? OEP_CONSTANT_ADDRESS_OF
: 0);
2743 if (flags
& OEP_ONLY_CONST
)
2746 /* Define macros to test an operand from arg0 and arg1 for equality and a
2747 variant that allows null and views null as being different from any
2748 non-null value. In the latter case, if either is null, the both
2749 must be; otherwise, do the normal comparison. */
2750 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2751 TREE_OPERAND (arg1, N), flags)
2753 #define OP_SAME_WITH_NULL(N) \
2754 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2755 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2757 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2760 /* Two conversions are equal only if signedness and modes match. */
2761 switch (TREE_CODE (arg0
))
2764 case FIX_TRUNC_EXPR
:
2765 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2766 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2776 case tcc_comparison
:
2778 if (OP_SAME (0) && OP_SAME (1))
2781 /* For commutative ops, allow the other order. */
2782 return (commutative_tree_code (TREE_CODE (arg0
))
2783 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2784 TREE_OPERAND (arg1
, 1), flags
)
2785 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2786 TREE_OPERAND (arg1
, 0), flags
));
2789 /* If either of the pointer (or reference) expressions we are
2790 dereferencing contain a side effect, these cannot be equal,
2791 but their addresses can be. */
2792 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2793 && (TREE_SIDE_EFFECTS (arg0
)
2794 || TREE_SIDE_EFFECTS (arg1
)))
2797 switch (TREE_CODE (arg0
))
2800 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2807 case TARGET_MEM_REF
:
2808 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2809 /* Require equal extra operands and then fall through to MEM_REF
2810 handling of the two common operands. */
2811 if (!OP_SAME_WITH_NULL (2)
2812 || !OP_SAME_WITH_NULL (3)
2813 || !OP_SAME_WITH_NULL (4))
2817 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2818 /* Require equal access sizes, and similar pointer types.
2819 We can have incomplete types for array references of
2820 variable-sized arrays from the Fortran frontend
2821 though. Also verify the types are compatible. */
2822 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2823 || (TYPE_SIZE (TREE_TYPE (arg0
))
2824 && TYPE_SIZE (TREE_TYPE (arg1
))
2825 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2826 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2827 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2828 && alias_ptr_types_compatible_p
2829 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2830 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2831 && OP_SAME (0) && OP_SAME (1));
2834 case ARRAY_RANGE_REF
:
2835 /* Operands 2 and 3 may be null.
2836 Compare the array index by value if it is constant first as we
2837 may have different types but same value here. */
2840 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2841 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2842 TREE_OPERAND (arg1
, 1))
2844 && OP_SAME_WITH_NULL (2)
2845 && OP_SAME_WITH_NULL (3));
2848 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2849 may be NULL when we're called to compare MEM_EXPRs. */
2850 if (!OP_SAME_WITH_NULL (0)
2853 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2854 return OP_SAME_WITH_NULL (2);
2859 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2860 return OP_SAME (1) && OP_SAME (2);
2866 case tcc_expression
:
2867 switch (TREE_CODE (arg0
))
2870 case TRUTH_NOT_EXPR
:
2873 case TRUTH_ANDIF_EXPR
:
2874 case TRUTH_ORIF_EXPR
:
2875 return OP_SAME (0) && OP_SAME (1);
2878 case WIDEN_MULT_PLUS_EXPR
:
2879 case WIDEN_MULT_MINUS_EXPR
:
2882 /* The multiplcation operands are commutative. */
2885 case TRUTH_AND_EXPR
:
2887 case TRUTH_XOR_EXPR
:
2888 if (OP_SAME (0) && OP_SAME (1))
2891 /* Otherwise take into account this is a commutative operation. */
2892 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2893 TREE_OPERAND (arg1
, 1), flags
)
2894 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2895 TREE_OPERAND (arg1
, 0), flags
));
2900 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2907 switch (TREE_CODE (arg0
))
2910 /* If the CALL_EXPRs call different functions, then they
2911 clearly can not be equal. */
2912 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
2917 unsigned int cef
= call_expr_flags (arg0
);
2918 if (flags
& OEP_PURE_SAME
)
2919 cef
&= ECF_CONST
| ECF_PURE
;
2926 /* Now see if all the arguments are the same. */
2928 const_call_expr_arg_iterator iter0
, iter1
;
2930 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
2931 a1
= first_const_call_expr_arg (arg1
, &iter1
);
2933 a0
= next_const_call_expr_arg (&iter0
),
2934 a1
= next_const_call_expr_arg (&iter1
))
2935 if (! operand_equal_p (a0
, a1
, flags
))
2938 /* If we get here and both argument lists are exhausted
2939 then the CALL_EXPRs are equal. */
2940 return ! (a0
|| a1
);
2946 case tcc_declaration
:
2947 /* Consider __builtin_sqrt equal to sqrt. */
2948 return (TREE_CODE (arg0
) == FUNCTION_DECL
2949 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2950 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2951 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2958 #undef OP_SAME_WITH_NULL
2961 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2962 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2964 When in doubt, return 0. */
2967 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2969 int unsignedp1
, unsignedpo
;
2970 tree primarg0
, primarg1
, primother
;
2971 unsigned int correct_width
;
2973 if (operand_equal_p (arg0
, arg1
, 0))
2976 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2977 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2980 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2981 and see if the inner values are the same. This removes any
2982 signedness comparison, which doesn't matter here. */
2983 primarg0
= arg0
, primarg1
= arg1
;
2984 STRIP_NOPS (primarg0
);
2985 STRIP_NOPS (primarg1
);
2986 if (operand_equal_p (primarg0
, primarg1
, 0))
2989 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2990 actual comparison operand, ARG0.
2992 First throw away any conversions to wider types
2993 already present in the operands. */
2995 primarg1
= get_narrower (arg1
, &unsignedp1
);
2996 primother
= get_narrower (other
, &unsignedpo
);
2998 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2999 if (unsignedp1
== unsignedpo
3000 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3001 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3003 tree type
= TREE_TYPE (arg0
);
3005 /* Make sure shorter operand is extended the right way
3006 to match the longer operand. */
3007 primarg1
= fold_convert (signed_or_unsigned_type_for
3008 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3010 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3017 /* See if ARG is an expression that is either a comparison or is performing
3018 arithmetic on comparisons. The comparisons must only be comparing
3019 two different values, which will be stored in *CVAL1 and *CVAL2; if
3020 they are nonzero it means that some operands have already been found.
3021 No variables may be used anywhere else in the expression except in the
3022 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3023 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3025 If this is true, return 1. Otherwise, return zero. */
3028 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3030 enum tree_code code
= TREE_CODE (arg
);
3031 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3033 /* We can handle some of the tcc_expression cases here. */
3034 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3036 else if (tclass
== tcc_expression
3037 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3038 || code
== COMPOUND_EXPR
))
3039 tclass
= tcc_binary
;
3041 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3042 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3044 /* If we've already found a CVAL1 or CVAL2, this expression is
3045 two complex to handle. */
3046 if (*cval1
|| *cval2
)
3056 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3059 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3060 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3061 cval1
, cval2
, save_p
));
3066 case tcc_expression
:
3067 if (code
== COND_EXPR
)
3068 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3069 cval1
, cval2
, save_p
)
3070 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3071 cval1
, cval2
, save_p
)
3072 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3073 cval1
, cval2
, save_p
));
3076 case tcc_comparison
:
3077 /* First see if we can handle the first operand, then the second. For
3078 the second operand, we know *CVAL1 can't be zero. It must be that
3079 one side of the comparison is each of the values; test for the
3080 case where this isn't true by failing if the two operands
3083 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3084 TREE_OPERAND (arg
, 1), 0))
3088 *cval1
= TREE_OPERAND (arg
, 0);
3089 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3091 else if (*cval2
== 0)
3092 *cval2
= TREE_OPERAND (arg
, 0);
3093 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3098 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3100 else if (*cval2
== 0)
3101 *cval2
= TREE_OPERAND (arg
, 1);
3102 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3114 /* ARG is a tree that is known to contain just arithmetic operations and
3115 comparisons. Evaluate the operations in the tree substituting NEW0 for
3116 any occurrence of OLD0 as an operand of a comparison and likewise for
3120 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3121 tree old1
, tree new1
)
3123 tree type
= TREE_TYPE (arg
);
3124 enum tree_code code
= TREE_CODE (arg
);
3125 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3127 /* We can handle some of the tcc_expression cases here. */
3128 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3130 else if (tclass
== tcc_expression
3131 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3132 tclass
= tcc_binary
;
3137 return fold_build1_loc (loc
, code
, type
,
3138 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3139 old0
, new0
, old1
, new1
));
3142 return fold_build2_loc (loc
, code
, type
,
3143 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3144 old0
, new0
, old1
, new1
),
3145 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3146 old0
, new0
, old1
, new1
));
3148 case tcc_expression
:
3152 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3156 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3160 return fold_build3_loc (loc
, code
, type
,
3161 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3162 old0
, new0
, old1
, new1
),
3163 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3164 old0
, new0
, old1
, new1
),
3165 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3166 old0
, new0
, old1
, new1
));
3170 /* Fall through - ??? */
3172 case tcc_comparison
:
3174 tree arg0
= TREE_OPERAND (arg
, 0);
3175 tree arg1
= TREE_OPERAND (arg
, 1);
3177 /* We need to check both for exact equality and tree equality. The
3178 former will be true if the operand has a side-effect. In that
3179 case, we know the operand occurred exactly once. */
3181 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3183 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3186 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3188 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3191 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3199 /* Return a tree for the case when the result of an expression is RESULT
3200 converted to TYPE and OMITTED was previously an operand of the expression
3201 but is now not needed (e.g., we folded OMITTED * 0).
3203 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3204 the conversion of RESULT to TYPE. */
3207 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3209 tree t
= fold_convert_loc (loc
, type
, result
);
3211 /* If the resulting operand is an empty statement, just return the omitted
3212 statement casted to void. */
3213 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3214 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3215 fold_ignored_result (omitted
));
3217 if (TREE_SIDE_EFFECTS (omitted
))
3218 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3219 fold_ignored_result (omitted
), t
);
3221 return non_lvalue_loc (loc
, t
);
3224 /* Return a tree for the case when the result of an expression is RESULT
3225 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3226 of the expression but are now not needed.
3228 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3229 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3230 evaluated before OMITTED2. Otherwise, if neither has side effects,
3231 just do the conversion of RESULT to TYPE. */
3234 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3235 tree omitted1
, tree omitted2
)
3237 tree t
= fold_convert_loc (loc
, type
, result
);
3239 if (TREE_SIDE_EFFECTS (omitted2
))
3240 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3241 if (TREE_SIDE_EFFECTS (omitted1
))
3242 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3244 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3248 /* Return a simplified tree node for the truth-negation of ARG. This
3249 never alters ARG itself. We assume that ARG is an operation that
3250 returns a truth value (0 or 1).
3252 FIXME: one would think we would fold the result, but it causes
3253 problems with the dominator optimizer. */
3256 fold_truth_not_expr (location_t loc
, tree arg
)
3258 tree type
= TREE_TYPE (arg
);
3259 enum tree_code code
= TREE_CODE (arg
);
3260 location_t loc1
, loc2
;
3262 /* If this is a comparison, we can simply invert it, except for
3263 floating-point non-equality comparisons, in which case we just
3264 enclose a TRUTH_NOT_EXPR around what we have. */
3266 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3268 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3269 if (FLOAT_TYPE_P (op_type
)
3270 && flag_trapping_math
3271 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3272 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3275 code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (op_type
)));
3276 if (code
== ERROR_MARK
)
3279 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3280 TREE_OPERAND (arg
, 1));
3286 return constant_boolean_node (integer_zerop (arg
), type
);
3288 case TRUTH_AND_EXPR
:
3289 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3290 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3291 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3292 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3293 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3296 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3297 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3298 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3299 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3300 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3302 case TRUTH_XOR_EXPR
:
3303 /* Here we can invert either operand. We invert the first operand
3304 unless the second operand is a TRUTH_NOT_EXPR in which case our
3305 result is the XOR of the first operand with the inside of the
3306 negation of the second operand. */
3308 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3309 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3310 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3312 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3313 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3314 TREE_OPERAND (arg
, 1));
3316 case TRUTH_ANDIF_EXPR
:
3317 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3318 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3319 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3320 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3321 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3323 case TRUTH_ORIF_EXPR
:
3324 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3325 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3326 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3327 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3328 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3330 case TRUTH_NOT_EXPR
:
3331 return TREE_OPERAND (arg
, 0);
3335 tree arg1
= TREE_OPERAND (arg
, 1);
3336 tree arg2
= TREE_OPERAND (arg
, 2);
3338 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3339 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3341 /* A COND_EXPR may have a throw as one operand, which
3342 then has void type. Just leave void operands
3344 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3345 VOID_TYPE_P (TREE_TYPE (arg1
))
3346 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3347 VOID_TYPE_P (TREE_TYPE (arg2
))
3348 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3352 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3353 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3354 TREE_OPERAND (arg
, 0),
3355 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3357 case NON_LVALUE_EXPR
:
3358 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3359 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3362 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3363 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3365 /* ... fall through ... */
3368 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3369 return build1_loc (loc
, TREE_CODE (arg
), type
,
3370 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3373 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3375 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3378 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3380 case CLEANUP_POINT_EXPR
:
3381 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3382 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3383 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3390 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3391 assume that ARG is an operation that returns a truth value (0 or 1
3392 for scalars, 0 or -1 for vectors). Return the folded expression if
3393 folding is successful. Otherwise, return NULL_TREE. */
3396 fold_invert_truthvalue (location_t loc
, tree arg
)
3398 tree type
= TREE_TYPE (arg
);
3399 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3405 /* Return a simplified tree node for the truth-negation of ARG. This
3406 never alters ARG itself. We assume that ARG is an operation that
3407 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3410 invert_truthvalue_loc (location_t loc
, tree arg
)
3412 if (TREE_CODE (arg
) == ERROR_MARK
)
3415 tree type
= TREE_TYPE (arg
);
3416 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3422 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3423 operands are another bit-wise operation with a common input. If so,
3424 distribute the bit operations to save an operation and possibly two if
3425 constants are involved. For example, convert
3426 (A | B) & (A | C) into A | (B & C)
3427 Further simplification will occur if B and C are constants.
3429 If this optimization cannot be done, 0 will be returned. */
3432 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3433 tree arg0
, tree arg1
)
3438 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3439 || TREE_CODE (arg0
) == code
3440 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3441 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3444 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3446 common
= TREE_OPERAND (arg0
, 0);
3447 left
= TREE_OPERAND (arg0
, 1);
3448 right
= TREE_OPERAND (arg1
, 1);
3450 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3452 common
= TREE_OPERAND (arg0
, 0);
3453 left
= TREE_OPERAND (arg0
, 1);
3454 right
= TREE_OPERAND (arg1
, 0);
3456 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3458 common
= TREE_OPERAND (arg0
, 1);
3459 left
= TREE_OPERAND (arg0
, 0);
3460 right
= TREE_OPERAND (arg1
, 1);
3462 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3464 common
= TREE_OPERAND (arg0
, 1);
3465 left
= TREE_OPERAND (arg0
, 0);
3466 right
= TREE_OPERAND (arg1
, 0);
3471 common
= fold_convert_loc (loc
, type
, common
);
3472 left
= fold_convert_loc (loc
, type
, left
);
3473 right
= fold_convert_loc (loc
, type
, right
);
3474 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3475 fold_build2_loc (loc
, code
, type
, left
, right
));
3478 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3479 with code CODE. This optimization is unsafe. */
3481 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3482 tree arg0
, tree arg1
)
3484 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3485 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3487 /* (A / C) +- (B / C) -> (A +- B) / C. */
3489 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3490 TREE_OPERAND (arg1
, 1), 0))
3491 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3492 fold_build2_loc (loc
, code
, type
,
3493 TREE_OPERAND (arg0
, 0),
3494 TREE_OPERAND (arg1
, 0)),
3495 TREE_OPERAND (arg0
, 1));
3497 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3498 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3499 TREE_OPERAND (arg1
, 0), 0)
3500 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3501 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3503 REAL_VALUE_TYPE r0
, r1
;
3504 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3505 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3507 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3509 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3510 real_arithmetic (&r0
, code
, &r0
, &r1
);
3511 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3512 TREE_OPERAND (arg0
, 0),
3513 build_real (type
, r0
));
3519 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3520 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3523 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3524 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3526 tree result
, bftype
;
3530 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3531 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3532 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3533 && tree_fits_shwi_p (size
)
3534 && tree_to_shwi (size
) == bitsize
)
3535 return fold_convert_loc (loc
, type
, inner
);
3539 if (TYPE_PRECISION (bftype
) != bitsize
3540 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3541 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3543 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3544 size_int (bitsize
), bitsize_int (bitpos
));
3547 result
= fold_convert_loc (loc
, type
, result
);
3552 /* Optimize a bit-field compare.
3554 There are two cases: First is a compare against a constant and the
3555 second is a comparison of two items where the fields are at the same
3556 bit position relative to the start of a chunk (byte, halfword, word)
3557 large enough to contain it. In these cases we can avoid the shift
3558 implicit in bitfield extractions.
3560 For constants, we emit a compare of the shifted constant with the
3561 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3562 compared. For two fields at the same position, we do the ANDs with the
3563 similar mask and compare the result of the ANDs.
3565 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3566 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3567 are the left and right operands of the comparison, respectively.
3569 If the optimization described above can be done, we return the resulting
3570 tree. Otherwise we return zero. */
3573 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3574 tree compare_type
, tree lhs
, tree rhs
)
3576 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3577 tree type
= TREE_TYPE (lhs
);
3579 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3580 machine_mode lmode
, rmode
, nmode
;
3581 int lunsignedp
, runsignedp
;
3582 int lvolatilep
= 0, rvolatilep
= 0;
3583 tree linner
, rinner
= NULL_TREE
;
3587 /* Get all the information about the extractions being done. If the bit size
3588 if the same as the size of the underlying object, we aren't doing an
3589 extraction at all and so can do nothing. We also don't want to
3590 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3591 then will no longer be able to replace it. */
3592 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3593 &lunsignedp
, &lvolatilep
, false);
3594 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3595 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3600 /* If this is not a constant, we can only do something if bit positions,
3601 sizes, and signedness are the same. */
3602 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3603 &runsignedp
, &rvolatilep
, false);
3605 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3606 || lunsignedp
!= runsignedp
|| offset
!= 0
3607 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3611 /* See if we can find a mode to refer to this field. We should be able to,
3612 but fail if we can't. */
3613 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3614 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3615 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3616 TYPE_ALIGN (TREE_TYPE (rinner
))),
3618 if (nmode
== VOIDmode
)
3621 /* Set signed and unsigned types of the precision of this mode for the
3623 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3625 /* Compute the bit position and size for the new reference and our offset
3626 within it. If the new reference is the same size as the original, we
3627 won't optimize anything, so return zero. */
3628 nbitsize
= GET_MODE_BITSIZE (nmode
);
3629 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3631 if (nbitsize
== lbitsize
)
3634 if (BYTES_BIG_ENDIAN
)
3635 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3637 /* Make the mask to be used against the extracted field. */
3638 mask
= build_int_cst_type (unsigned_type
, -1);
3639 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3640 mask
= const_binop (RSHIFT_EXPR
, mask
,
3641 size_int (nbitsize
- lbitsize
- lbitpos
));
3644 /* If not comparing with constant, just rework the comparison
3646 return fold_build2_loc (loc
, code
, compare_type
,
3647 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3648 make_bit_field_ref (loc
, linner
,
3653 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3654 make_bit_field_ref (loc
, rinner
,
3660 /* Otherwise, we are handling the constant case. See if the constant is too
3661 big for the field. Warn and return a tree of for 0 (false) if so. We do
3662 this not only for its own sake, but to avoid having to test for this
3663 error case below. If we didn't, we might generate wrong code.
3665 For unsigned fields, the constant shifted right by the field length should
3666 be all zero. For signed fields, the high-order bits should agree with
3671 if (wi::lrshift (rhs
, lbitsize
) != 0)
3673 warning (0, "comparison is always %d due to width of bit-field",
3675 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3680 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3681 if (tem
!= 0 && tem
!= -1)
3683 warning (0, "comparison is always %d due to width of bit-field",
3685 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3689 /* Single-bit compares should always be against zero. */
3690 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3692 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3693 rhs
= build_int_cst (type
, 0);
3696 /* Make a new bitfield reference, shift the constant over the
3697 appropriate number of bits and mask it with the computed mask
3698 (in case this was a signed field). If we changed it, make a new one. */
3699 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3701 rhs
= const_binop (BIT_AND_EXPR
,
3702 const_binop (LSHIFT_EXPR
,
3703 fold_convert_loc (loc
, unsigned_type
, rhs
),
3704 size_int (lbitpos
)),
3707 lhs
= build2_loc (loc
, code
, compare_type
,
3708 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3712 /* Subroutine for fold_truth_andor_1: decode a field reference.
3714 If EXP is a comparison reference, we return the innermost reference.
3716 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3717 set to the starting bit number.
3719 If the innermost field can be completely contained in a mode-sized
3720 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3722 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3723 otherwise it is not changed.
3725 *PUNSIGNEDP is set to the signedness of the field.
3727 *PMASK is set to the mask used. This is either contained in a
3728 BIT_AND_EXPR or derived from the width of the field.
3730 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3732 Return 0 if this is not a component reference or is one that we can't
3733 do anything with. */
3736 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3737 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3738 int *punsignedp
, int *pvolatilep
,
3739 tree
*pmask
, tree
*pand_mask
)
3741 tree outer_type
= 0;
3743 tree mask
, inner
, offset
;
3745 unsigned int precision
;
3747 /* All the optimizations using this function assume integer fields.
3748 There are problems with FP fields since the type_for_size call
3749 below can fail for, e.g., XFmode. */
3750 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3753 /* We are interested in the bare arrangement of bits, so strip everything
3754 that doesn't affect the machine mode. However, record the type of the
3755 outermost expression if it may matter below. */
3756 if (CONVERT_EXPR_P (exp
)
3757 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3758 outer_type
= TREE_TYPE (exp
);
3761 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3763 and_mask
= TREE_OPERAND (exp
, 1);
3764 exp
= TREE_OPERAND (exp
, 0);
3765 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3766 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3770 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3771 punsignedp
, pvolatilep
, false);
3772 if ((inner
== exp
&& and_mask
== 0)
3773 || *pbitsize
< 0 || offset
!= 0
3774 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3777 /* If the number of bits in the reference is the same as the bitsize of
3778 the outer type, then the outer type gives the signedness. Otherwise
3779 (in case of a small bitfield) the signedness is unchanged. */
3780 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3781 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3783 /* Compute the mask to access the bitfield. */
3784 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3785 precision
= TYPE_PRECISION (unsigned_type
);
3787 mask
= build_int_cst_type (unsigned_type
, -1);
3789 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3790 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3792 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3794 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3795 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3798 *pand_mask
= and_mask
;
3802 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3803 bit positions and MASK is SIGNED. */
3806 all_ones_mask_p (const_tree mask
, unsigned int size
)
3808 tree type
= TREE_TYPE (mask
);
3809 unsigned int precision
= TYPE_PRECISION (type
);
3811 /* If this function returns true when the type of the mask is
3812 UNSIGNED, then there will be errors. In particular see
3813 gcc.c-torture/execute/990326-1.c. There does not appear to be
3814 any documentation paper trail as to why this is so. But the pre
3815 wide-int worked with that restriction and it has been preserved
3817 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3820 return wi::mask (size
, false, precision
) == mask
;
3823 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3824 represents the sign bit of EXP's type. If EXP represents a sign
3825 or zero extension, also test VAL against the unextended type.
3826 The return value is the (sub)expression whose sign bit is VAL,
3827 or NULL_TREE otherwise. */
3830 sign_bit_p (tree exp
, const_tree val
)
3835 /* Tree EXP must have an integral type. */
3836 t
= TREE_TYPE (exp
);
3837 if (! INTEGRAL_TYPE_P (t
))
3840 /* Tree VAL must be an integer constant. */
3841 if (TREE_CODE (val
) != INTEGER_CST
3842 || TREE_OVERFLOW (val
))
3845 width
= TYPE_PRECISION (t
);
3846 if (wi::only_sign_bit_p (val
, width
))
3849 /* Handle extension from a narrower type. */
3850 if (TREE_CODE (exp
) == NOP_EXPR
3851 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3852 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3857 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3858 to be evaluated unconditionally. */
3861 simple_operand_p (const_tree exp
)
3863 /* Strip any conversions that don't change the machine mode. */
3866 return (CONSTANT_CLASS_P (exp
)
3867 || TREE_CODE (exp
) == SSA_NAME
3869 && ! TREE_ADDRESSABLE (exp
)
3870 && ! TREE_THIS_VOLATILE (exp
)
3871 && ! DECL_NONLOCAL (exp
)
3872 /* Don't regard global variables as simple. They may be
3873 allocated in ways unknown to the compiler (shared memory,
3874 #pragma weak, etc). */
3875 && ! TREE_PUBLIC (exp
)
3876 && ! DECL_EXTERNAL (exp
)
3877 /* Weakrefs are not safe to be read, since they can be NULL.
3878 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3879 have DECL_WEAK flag set. */
3880 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
3881 /* Loading a static variable is unduly expensive, but global
3882 registers aren't expensive. */
3883 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3886 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3887 to be evaluated unconditionally.
3888 I addition to simple_operand_p, we assume that comparisons, conversions,
3889 and logic-not operations are simple, if their operands are simple, too. */
3892 simple_operand_p_2 (tree exp
)
3894 enum tree_code code
;
3896 if (TREE_SIDE_EFFECTS (exp
)
3897 || tree_could_trap_p (exp
))
3900 while (CONVERT_EXPR_P (exp
))
3901 exp
= TREE_OPERAND (exp
, 0);
3903 code
= TREE_CODE (exp
);
3905 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3906 return (simple_operand_p (TREE_OPERAND (exp
, 0))
3907 && simple_operand_p (TREE_OPERAND (exp
, 1)));
3909 if (code
== TRUTH_NOT_EXPR
)
3910 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
3912 return simple_operand_p (exp
);
3916 /* The following functions are subroutines to fold_range_test and allow it to
3917 try to change a logical combination of comparisons into a range test.
3920 X == 2 || X == 3 || X == 4 || X == 5
3924 (unsigned) (X - 2) <= 3
3926 We describe each set of comparisons as being either inside or outside
3927 a range, using a variable named like IN_P, and then describe the
3928 range with a lower and upper bound. If one of the bounds is omitted,
3929 it represents either the highest or lowest value of the type.
3931 In the comments below, we represent a range by two numbers in brackets
3932 preceded by a "+" to designate being inside that range, or a "-" to
3933 designate being outside that range, so the condition can be inverted by
3934 flipping the prefix. An omitted bound is represented by a "-". For
3935 example, "- [-, 10]" means being outside the range starting at the lowest
3936 possible value and ending at 10, in other words, being greater than 10.
3937 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3940 We set up things so that the missing bounds are handled in a consistent
3941 manner so neither a missing bound nor "true" and "false" need to be
3942 handled using a special case. */
3944 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3945 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3946 and UPPER1_P are nonzero if the respective argument is an upper bound
3947 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3948 must be specified for a comparison. ARG1 will be converted to ARG0's
3949 type if both are specified. */
3952 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3953 tree arg1
, int upper1_p
)
3959 /* If neither arg represents infinity, do the normal operation.
3960 Else, if not a comparison, return infinity. Else handle the special
3961 comparison rules. Note that most of the cases below won't occur, but
3962 are handled for consistency. */
3964 if (arg0
!= 0 && arg1
!= 0)
3966 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3967 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3969 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3972 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3975 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3976 for neither. In real maths, we cannot assume open ended ranges are
3977 the same. But, this is computer arithmetic, where numbers are finite.
3978 We can therefore make the transformation of any unbounded range with
3979 the value Z, Z being greater than any representable number. This permits
3980 us to treat unbounded ranges as equal. */
3981 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3982 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3986 result
= sgn0
== sgn1
;
3989 result
= sgn0
!= sgn1
;
3992 result
= sgn0
< sgn1
;
3995 result
= sgn0
<= sgn1
;
3998 result
= sgn0
> sgn1
;
4001 result
= sgn0
>= sgn1
;
4007 return constant_boolean_node (result
, type
);
4010 /* Helper routine for make_range. Perform one step for it, return
4011 new expression if the loop should continue or NULL_TREE if it should
4015 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4016 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4017 bool *strict_overflow_p
)
4019 tree arg0_type
= TREE_TYPE (arg0
);
4020 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4021 int in_p
= *p_in_p
, n_in_p
;
4025 case TRUTH_NOT_EXPR
:
4026 /* We can only do something if the range is testing for zero. */
4027 if (low
== NULL_TREE
|| high
== NULL_TREE
4028 || ! integer_zerop (low
) || ! integer_zerop (high
))
4033 case EQ_EXPR
: case NE_EXPR
:
4034 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4035 /* We can only do something if the range is testing for zero
4036 and if the second operand is an integer constant. Note that
4037 saying something is "in" the range we make is done by
4038 complementing IN_P since it will set in the initial case of
4039 being not equal to zero; "out" is leaving it alone. */
4040 if (low
== NULL_TREE
|| high
== NULL_TREE
4041 || ! integer_zerop (low
) || ! integer_zerop (high
)
4042 || TREE_CODE (arg1
) != INTEGER_CST
)
4047 case NE_EXPR
: /* - [c, c] */
4050 case EQ_EXPR
: /* + [c, c] */
4051 in_p
= ! in_p
, low
= high
= arg1
;
4053 case GT_EXPR
: /* - [-, c] */
4054 low
= 0, high
= arg1
;
4056 case GE_EXPR
: /* + [c, -] */
4057 in_p
= ! in_p
, low
= arg1
, high
= 0;
4059 case LT_EXPR
: /* - [c, -] */
4060 low
= arg1
, high
= 0;
4062 case LE_EXPR
: /* + [-, c] */
4063 in_p
= ! in_p
, low
= 0, high
= arg1
;
4069 /* If this is an unsigned comparison, we also know that EXP is
4070 greater than or equal to zero. We base the range tests we make
4071 on that fact, so we record it here so we can parse existing
4072 range tests. We test arg0_type since often the return type
4073 of, e.g. EQ_EXPR, is boolean. */
4074 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4076 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4078 build_int_cst (arg0_type
, 0),
4082 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4084 /* If the high bound is missing, but we have a nonzero low
4085 bound, reverse the range so it goes from zero to the low bound
4087 if (high
== 0 && low
&& ! integer_zerop (low
))
4090 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4091 build_int_cst (TREE_TYPE (low
), 1), 0);
4092 low
= build_int_cst (arg0_type
, 0);
4102 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4103 low and high are non-NULL, then normalize will DTRT. */
4104 if (!TYPE_UNSIGNED (arg0_type
)
4105 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4107 if (low
== NULL_TREE
)
4108 low
= TYPE_MIN_VALUE (arg0_type
);
4109 if (high
== NULL_TREE
)
4110 high
= TYPE_MAX_VALUE (arg0_type
);
4113 /* (-x) IN [a,b] -> x in [-b, -a] */
4114 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4115 build_int_cst (exp_type
, 0),
4117 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4118 build_int_cst (exp_type
, 0),
4120 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4126 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4127 build_int_cst (exp_type
, 1));
4131 if (TREE_CODE (arg1
) != INTEGER_CST
)
4134 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4135 move a constant to the other side. */
4136 if (!TYPE_UNSIGNED (arg0_type
)
4137 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4140 /* If EXP is signed, any overflow in the computation is undefined,
4141 so we don't worry about it so long as our computations on
4142 the bounds don't overflow. For unsigned, overflow is defined
4143 and this is exactly the right thing. */
4144 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4145 arg0_type
, low
, 0, arg1
, 0);
4146 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4147 arg0_type
, high
, 1, arg1
, 0);
4148 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4149 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4152 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4153 *strict_overflow_p
= true;
4156 /* Check for an unsigned range which has wrapped around the maximum
4157 value thus making n_high < n_low, and normalize it. */
4158 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4160 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4161 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4162 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4163 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4165 /* If the range is of the form +/- [ x+1, x ], we won't
4166 be able to normalize it. But then, it represents the
4167 whole range or the empty set, so make it
4169 if (tree_int_cst_equal (n_low
, low
)
4170 && tree_int_cst_equal (n_high
, high
))
4176 low
= n_low
, high
= n_high
;
4184 case NON_LVALUE_EXPR
:
4185 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4188 if (! INTEGRAL_TYPE_P (arg0_type
)
4189 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4190 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4193 n_low
= low
, n_high
= high
;
4196 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4199 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4201 /* If we're converting arg0 from an unsigned type, to exp,
4202 a signed type, we will be doing the comparison as unsigned.
4203 The tests above have already verified that LOW and HIGH
4206 So we have to ensure that we will handle large unsigned
4207 values the same way that the current signed bounds treat
4210 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4214 /* For fixed-point modes, we need to pass the saturating flag
4215 as the 2nd parameter. */
4216 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4218 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4219 TYPE_SATURATING (arg0_type
));
4222 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4224 /* A range without an upper bound is, naturally, unbounded.
4225 Since convert would have cropped a very large value, use
4226 the max value for the destination type. */
4228 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4229 : TYPE_MAX_VALUE (arg0_type
);
4231 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4232 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4233 fold_convert_loc (loc
, arg0_type
,
4235 build_int_cst (arg0_type
, 1));
4237 /* If the low bound is specified, "and" the range with the
4238 range for which the original unsigned value will be
4242 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4243 1, fold_convert_loc (loc
, arg0_type
,
4248 in_p
= (n_in_p
== in_p
);
4252 /* Otherwise, "or" the range with the range of the input
4253 that will be interpreted as negative. */
4254 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4255 1, fold_convert_loc (loc
, arg0_type
,
4260 in_p
= (in_p
!= n_in_p
);
4274 /* Given EXP, a logical expression, set the range it is testing into
4275 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4276 actually being tested. *PLOW and *PHIGH will be made of the same
4277 type as the returned expression. If EXP is not a comparison, we
4278 will most likely not be returning a useful value and range. Set
4279 *STRICT_OVERFLOW_P to true if the return value is only valid
4280 because signed overflow is undefined; otherwise, do not change
4281 *STRICT_OVERFLOW_P. */
4284 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4285 bool *strict_overflow_p
)
4287 enum tree_code code
;
4288 tree arg0
, arg1
= NULL_TREE
;
4289 tree exp_type
, nexp
;
4292 location_t loc
= EXPR_LOCATION (exp
);
4294 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4295 and see if we can refine the range. Some of the cases below may not
4296 happen, but it doesn't seem worth worrying about this. We "continue"
4297 the outer loop when we've changed something; otherwise we "break"
4298 the switch, which will "break" the while. */
4301 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4305 code
= TREE_CODE (exp
);
4306 exp_type
= TREE_TYPE (exp
);
4309 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4311 if (TREE_OPERAND_LENGTH (exp
) > 0)
4312 arg0
= TREE_OPERAND (exp
, 0);
4313 if (TREE_CODE_CLASS (code
) == tcc_binary
4314 || TREE_CODE_CLASS (code
) == tcc_comparison
4315 || (TREE_CODE_CLASS (code
) == tcc_expression
4316 && TREE_OPERAND_LENGTH (exp
) > 1))
4317 arg1
= TREE_OPERAND (exp
, 1);
4319 if (arg0
== NULL_TREE
)
4322 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4323 &high
, &in_p
, strict_overflow_p
);
4324 if (nexp
== NULL_TREE
)
4329 /* If EXP is a constant, we can evaluate whether this is true or false. */
4330 if (TREE_CODE (exp
) == INTEGER_CST
)
4332 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4334 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4340 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4344 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4345 type, TYPE, return an expression to test if EXP is in (or out of, depending
4346 on IN_P) the range. Return 0 if the test couldn't be created. */
4349 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4350 tree low
, tree high
)
4352 tree etype
= TREE_TYPE (exp
), value
;
4354 #ifdef HAVE_canonicalize_funcptr_for_compare
4355 /* Disable this optimization for function pointer expressions
4356 on targets that require function pointer canonicalization. */
4357 if (HAVE_canonicalize_funcptr_for_compare
4358 && TREE_CODE (etype
) == POINTER_TYPE
4359 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4365 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4367 return invert_truthvalue_loc (loc
, value
);
4372 if (low
== 0 && high
== 0)
4373 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4376 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4377 fold_convert_loc (loc
, etype
, high
));
4380 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4381 fold_convert_loc (loc
, etype
, low
));
4383 if (operand_equal_p (low
, high
, 0))
4384 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4385 fold_convert_loc (loc
, etype
, low
));
4387 if (integer_zerop (low
))
4389 if (! TYPE_UNSIGNED (etype
))
4391 etype
= unsigned_type_for (etype
);
4392 high
= fold_convert_loc (loc
, etype
, high
);
4393 exp
= fold_convert_loc (loc
, etype
, exp
);
4395 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4398 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4399 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4401 int prec
= TYPE_PRECISION (etype
);
4403 if (wi::mask (prec
- 1, false, prec
) == high
)
4405 if (TYPE_UNSIGNED (etype
))
4407 tree signed_etype
= signed_type_for (etype
);
4408 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4410 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4412 etype
= signed_etype
;
4413 exp
= fold_convert_loc (loc
, etype
, exp
);
4415 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4416 build_int_cst (etype
, 0));
4420 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4421 This requires wrap-around arithmetics for the type of the expression.
4422 First make sure that arithmetics in this type is valid, then make sure
4423 that it wraps around. */
4424 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4425 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4426 TYPE_UNSIGNED (etype
));
4428 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4430 tree utype
, minv
, maxv
;
4432 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4433 for the type in question, as we rely on this here. */
4434 utype
= unsigned_type_for (etype
);
4435 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4436 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4437 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4438 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4440 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4447 high
= fold_convert_loc (loc
, etype
, high
);
4448 low
= fold_convert_loc (loc
, etype
, low
);
4449 exp
= fold_convert_loc (loc
, etype
, exp
);
4451 value
= const_binop (MINUS_EXPR
, high
, low
);
4454 if (POINTER_TYPE_P (etype
))
4456 if (value
!= 0 && !TREE_OVERFLOW (value
))
4458 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4459 return build_range_check (loc
, type
,
4460 fold_build_pointer_plus_loc (loc
, exp
, low
),
4461 1, build_int_cst (etype
, 0), value
);
4466 if (value
!= 0 && !TREE_OVERFLOW (value
))
4467 return build_range_check (loc
, type
,
4468 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4469 1, build_int_cst (etype
, 0), value
);
4474 /* Return the predecessor of VAL in its type, handling the infinite case. */
4477 range_predecessor (tree val
)
4479 tree type
= TREE_TYPE (val
);
4481 if (INTEGRAL_TYPE_P (type
)
4482 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4485 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4486 build_int_cst (TREE_TYPE (val
), 1), 0);
4489 /* Return the successor of VAL in its type, handling the infinite case. */
4492 range_successor (tree val
)
4494 tree type
= TREE_TYPE (val
);
4496 if (INTEGRAL_TYPE_P (type
)
4497 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4500 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4501 build_int_cst (TREE_TYPE (val
), 1), 0);
4504 /* Given two ranges, see if we can merge them into one. Return 1 if we
4505 can, 0 if we can't. Set the output range into the specified parameters. */
4508 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4509 tree high0
, int in1_p
, tree low1
, tree high1
)
4517 int lowequal
= ((low0
== 0 && low1
== 0)
4518 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4519 low0
, 0, low1
, 0)));
4520 int highequal
= ((high0
== 0 && high1
== 0)
4521 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4522 high0
, 1, high1
, 1)));
4524 /* Make range 0 be the range that starts first, or ends last if they
4525 start at the same value. Swap them if it isn't. */
4526 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4529 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4530 high1
, 1, high0
, 1))))
4532 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4533 tem
= low0
, low0
= low1
, low1
= tem
;
4534 tem
= high0
, high0
= high1
, high1
= tem
;
4537 /* Now flag two cases, whether the ranges are disjoint or whether the
4538 second range is totally subsumed in the first. Note that the tests
4539 below are simplified by the ones above. */
4540 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4541 high0
, 1, low1
, 0));
4542 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4543 high1
, 1, high0
, 1));
4545 /* We now have four cases, depending on whether we are including or
4546 excluding the two ranges. */
4549 /* If they don't overlap, the result is false. If the second range
4550 is a subset it is the result. Otherwise, the range is from the start
4551 of the second to the end of the first. */
4553 in_p
= 0, low
= high
= 0;
4555 in_p
= 1, low
= low1
, high
= high1
;
4557 in_p
= 1, low
= low1
, high
= high0
;
4560 else if (in0_p
&& ! in1_p
)
4562 /* If they don't overlap, the result is the first range. If they are
4563 equal, the result is false. If the second range is a subset of the
4564 first, and the ranges begin at the same place, we go from just after
4565 the end of the second range to the end of the first. If the second
4566 range is not a subset of the first, or if it is a subset and both
4567 ranges end at the same place, the range starts at the start of the
4568 first range and ends just before the second range.
4569 Otherwise, we can't describe this as a single range. */
4571 in_p
= 1, low
= low0
, high
= high0
;
4572 else if (lowequal
&& highequal
)
4573 in_p
= 0, low
= high
= 0;
4574 else if (subset
&& lowequal
)
4576 low
= range_successor (high1
);
4581 /* We are in the weird situation where high0 > high1 but
4582 high1 has no successor. Punt. */
4586 else if (! subset
|| highequal
)
4589 high
= range_predecessor (low1
);
4593 /* low0 < low1 but low1 has no predecessor. Punt. */
4601 else if (! in0_p
&& in1_p
)
4603 /* If they don't overlap, the result is the second range. If the second
4604 is a subset of the first, the result is false. Otherwise,
4605 the range starts just after the first range and ends at the
4606 end of the second. */
4608 in_p
= 1, low
= low1
, high
= high1
;
4609 else if (subset
|| highequal
)
4610 in_p
= 0, low
= high
= 0;
4613 low
= range_successor (high0
);
4618 /* high1 > high0 but high0 has no successor. Punt. */
4626 /* The case where we are excluding both ranges. Here the complex case
4627 is if they don't overlap. In that case, the only time we have a
4628 range is if they are adjacent. If the second is a subset of the
4629 first, the result is the first. Otherwise, the range to exclude
4630 starts at the beginning of the first range and ends at the end of the
4634 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4635 range_successor (high0
),
4637 in_p
= 0, low
= low0
, high
= high1
;
4640 /* Canonicalize - [min, x] into - [-, x]. */
4641 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4642 switch (TREE_CODE (TREE_TYPE (low0
)))
4645 if (TYPE_PRECISION (TREE_TYPE (low0
))
4646 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4650 if (tree_int_cst_equal (low0
,
4651 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4655 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4656 && integer_zerop (low0
))
4663 /* Canonicalize - [x, max] into - [x, -]. */
4664 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4665 switch (TREE_CODE (TREE_TYPE (high1
)))
4668 if (TYPE_PRECISION (TREE_TYPE (high1
))
4669 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4673 if (tree_int_cst_equal (high1
,
4674 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4678 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4679 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4681 build_int_cst (TREE_TYPE (high1
), 1),
4689 /* The ranges might be also adjacent between the maximum and
4690 minimum values of the given type. For
4691 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4692 return + [x + 1, y - 1]. */
4693 if (low0
== 0 && high1
== 0)
4695 low
= range_successor (high0
);
4696 high
= range_predecessor (low1
);
4697 if (low
== 0 || high
== 0)
4707 in_p
= 0, low
= low0
, high
= high0
;
4709 in_p
= 0, low
= low0
, high
= high1
;
4712 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4717 /* Subroutine of fold, looking inside expressions of the form
4718 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4719 of the COND_EXPR. This function is being used also to optimize
4720 A op B ? C : A, by reversing the comparison first.
4722 Return a folded expression whose code is not a COND_EXPR
4723 anymore, or NULL_TREE if no folding opportunity is found. */
4726 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4727 tree arg0
, tree arg1
, tree arg2
)
4729 enum tree_code comp_code
= TREE_CODE (arg0
);
4730 tree arg00
= TREE_OPERAND (arg0
, 0);
4731 tree arg01
= TREE_OPERAND (arg0
, 1);
4732 tree arg1_type
= TREE_TYPE (arg1
);
4738 /* If we have A op 0 ? A : -A, consider applying the following
4741 A == 0? A : -A same as -A
4742 A != 0? A : -A same as A
4743 A >= 0? A : -A same as abs (A)
4744 A > 0? A : -A same as abs (A)
4745 A <= 0? A : -A same as -abs (A)
4746 A < 0? A : -A same as -abs (A)
4748 None of these transformations work for modes with signed
4749 zeros. If A is +/-0, the first two transformations will
4750 change the sign of the result (from +0 to -0, or vice
4751 versa). The last four will fix the sign of the result,
4752 even though the original expressions could be positive or
4753 negative, depending on the sign of A.
4755 Note that all these transformations are correct if A is
4756 NaN, since the two alternatives (A and -A) are also NaNs. */
4757 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4758 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4759 ? real_zerop (arg01
)
4760 : integer_zerop (arg01
))
4761 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4762 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4763 /* In the case that A is of the form X-Y, '-A' (arg2) may
4764 have already been folded to Y-X, check for that. */
4765 || (TREE_CODE (arg1
) == MINUS_EXPR
4766 && TREE_CODE (arg2
) == MINUS_EXPR
4767 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4768 TREE_OPERAND (arg2
, 1), 0)
4769 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4770 TREE_OPERAND (arg2
, 0), 0))))
4775 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4776 return pedantic_non_lvalue_loc (loc
,
4777 fold_convert_loc (loc
, type
,
4778 negate_expr (tem
)));
4781 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4784 if (flag_trapping_math
)
4789 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4790 arg1
= fold_convert_loc (loc
, signed_type_for
4791 (TREE_TYPE (arg1
)), arg1
);
4792 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4793 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4796 if (flag_trapping_math
)
4800 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4801 arg1
= fold_convert_loc (loc
, signed_type_for
4802 (TREE_TYPE (arg1
)), arg1
);
4803 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4804 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4806 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4810 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4811 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4812 both transformations are correct when A is NaN: A != 0
4813 is then true, and A == 0 is false. */
4815 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4816 && integer_zerop (arg01
) && integer_zerop (arg2
))
4818 if (comp_code
== NE_EXPR
)
4819 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4820 else if (comp_code
== EQ_EXPR
)
4821 return build_zero_cst (type
);
4824 /* Try some transformations of A op B ? A : B.
4826 A == B? A : B same as B
4827 A != B? A : B same as A
4828 A >= B? A : B same as max (A, B)
4829 A > B? A : B same as max (B, A)
4830 A <= B? A : B same as min (A, B)
4831 A < B? A : B same as min (B, A)
4833 As above, these transformations don't work in the presence
4834 of signed zeros. For example, if A and B are zeros of
4835 opposite sign, the first two transformations will change
4836 the sign of the result. In the last four, the original
4837 expressions give different results for (A=+0, B=-0) and
4838 (A=-0, B=+0), but the transformed expressions do not.
4840 The first two transformations are correct if either A or B
4841 is a NaN. In the first transformation, the condition will
4842 be false, and B will indeed be chosen. In the case of the
4843 second transformation, the condition A != B will be true,
4844 and A will be chosen.
4846 The conversions to max() and min() are not correct if B is
4847 a number and A is not. The conditions in the original
4848 expressions will be false, so all four give B. The min()
4849 and max() versions would give a NaN instead. */
4850 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4851 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4852 /* Avoid these transformations if the COND_EXPR may be used
4853 as an lvalue in the C++ front-end. PR c++/19199. */
4855 || VECTOR_TYPE_P (type
)
4856 || (! lang_GNU_CXX ()
4857 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4858 || ! maybe_lvalue_p (arg1
)
4859 || ! maybe_lvalue_p (arg2
)))
4861 tree comp_op0
= arg00
;
4862 tree comp_op1
= arg01
;
4863 tree comp_type
= TREE_TYPE (comp_op0
);
4865 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4866 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4876 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
4878 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4883 /* In C++ a ?: expression can be an lvalue, so put the
4884 operand which will be used if they are equal first
4885 so that we can convert this back to the
4886 corresponding COND_EXPR. */
4887 if (!HONOR_NANS (element_mode (arg1
)))
4889 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4890 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4891 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4892 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4893 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
4894 comp_op1
, comp_op0
);
4895 return pedantic_non_lvalue_loc (loc
,
4896 fold_convert_loc (loc
, type
, tem
));
4903 if (!HONOR_NANS (element_mode (arg1
)))
4905 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4906 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4907 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4908 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4909 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
4910 comp_op1
, comp_op0
);
4911 return pedantic_non_lvalue_loc (loc
,
4912 fold_convert_loc (loc
, type
, tem
));
4916 if (!HONOR_NANS (element_mode (arg1
)))
4917 return pedantic_non_lvalue_loc (loc
,
4918 fold_convert_loc (loc
, type
, arg2
));
4921 if (!HONOR_NANS (element_mode (arg1
)))
4922 return pedantic_non_lvalue_loc (loc
,
4923 fold_convert_loc (loc
, type
, arg1
));
4926 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4931 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4932 we might still be able to simplify this. For example,
4933 if C1 is one less or one more than C2, this might have started
4934 out as a MIN or MAX and been transformed by this function.
4935 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4937 if (INTEGRAL_TYPE_P (type
)
4938 && TREE_CODE (arg01
) == INTEGER_CST
4939 && TREE_CODE (arg2
) == INTEGER_CST
)
4943 if (TREE_CODE (arg1
) == INTEGER_CST
)
4945 /* We can replace A with C1 in this case. */
4946 arg1
= fold_convert_loc (loc
, type
, arg01
);
4947 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
4950 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4951 MIN_EXPR, to preserve the signedness of the comparison. */
4952 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4954 && operand_equal_p (arg01
,
4955 const_binop (PLUS_EXPR
, arg2
,
4956 build_int_cst (type
, 1)),
4959 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4960 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4962 return pedantic_non_lvalue_loc (loc
,
4963 fold_convert_loc (loc
, type
, tem
));
4968 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4970 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4972 && operand_equal_p (arg01
,
4973 const_binop (MINUS_EXPR
, arg2
,
4974 build_int_cst (type
, 1)),
4977 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4978 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4980 return pedantic_non_lvalue_loc (loc
,
4981 fold_convert_loc (loc
, type
, tem
));
4986 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4987 MAX_EXPR, to preserve the signedness of the comparison. */
4988 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4990 && operand_equal_p (arg01
,
4991 const_binop (MINUS_EXPR
, arg2
,
4992 build_int_cst (type
, 1)),
4995 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4996 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4998 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5003 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5004 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5006 && operand_equal_p (arg01
,
5007 const_binop (PLUS_EXPR
, arg2
,
5008 build_int_cst (type
, 1)),
5011 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5012 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5014 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
/* Nonzero when it is preferable to turn a short-circuit logical operation
   (TRUTH_ANDIF_EXPR / TRUTH_ORIF_EXPR) into its non-short-circuit form,
   evaluating both operands unconditionally.  Targets may override this;
   the default says "do it when branches are expensive" per BRANCH_COST.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
5034 /* EXP is some logical combination of boolean tests. See if we can
5035 merge it into some range test. Return the new tree if so. */
5038 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5041 int or_op
= (code
== TRUTH_ORIF_EXPR
5042 || code
== TRUTH_OR_EXPR
);
5043 int in0_p
, in1_p
, in_p
;
5044 tree low0
, low1
, low
, high0
, high1
, high
;
5045 bool strict_overflow_p
= false;
5047 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5048 "when simplifying range test");
5050 if (!INTEGRAL_TYPE_P (type
))
5053 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5054 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5056 /* If this is an OR operation, invert both sides; we will invert
5057 again at the end. */
5059 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5061 /* If both expressions are the same, if we can merge the ranges, and we
5062 can build the range test, return it or it inverted. If one of the
5063 ranges is always true or always false, consider it to be the same
5064 expression as the other. */
5065 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5066 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5068 && 0 != (tem
= (build_range_check (loc
, type
,
5070 : rhs
!= 0 ? rhs
: integer_zero_node
,
5073 if (strict_overflow_p
)
5074 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5075 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5078 /* On machines where the branch cost is expensive, if this is a
5079 short-circuited branch and the underlying object on both sides
5080 is the same, make a non-short-circuit operation. */
5081 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5082 && lhs
!= 0 && rhs
!= 0
5083 && (code
== TRUTH_ANDIF_EXPR
5084 || code
== TRUTH_ORIF_EXPR
)
5085 && operand_equal_p (lhs
, rhs
, 0))
5087 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5088 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5089 which cases we can't do this. */
5090 if (simple_operand_p (lhs
))
5091 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5092 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5095 else if (!lang_hooks
.decls
.global_bindings_p ()
5096 && !CONTAINS_PLACEHOLDER_P (lhs
))
5098 tree common
= save_expr (lhs
);
5100 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5101 or_op
? ! in0_p
: in0_p
,
5103 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5104 or_op
? ! in1_p
: in1_p
,
5107 if (strict_overflow_p
)
5108 fold_overflow_warning (warnmsg
,
5109 WARN_STRICT_OVERFLOW_COMPARISON
);
5110 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5111 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5120 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5121 bit value. Arrange things so the extra bits will be set to zero if and
5122 only if C is signed-extended to its full width. If MASK is nonzero,
5123 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5126 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5128 tree type
= TREE_TYPE (c
);
5129 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5132 if (p
== modesize
|| unsignedp
)
5135 /* We work by getting just the sign bit into the low-order bit, then
5136 into the high-order bit, then sign-extend. We then XOR that value
5138 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5140 /* We must use a signed type in order to get an arithmetic right shift.
5141 However, we must also avoid introducing accidental overflows, so that
5142 a subsequent call to integer_zerop will work. Hence we must
5143 do the type conversion here. At this point, the constant is either
5144 zero or one, and the conversion to a signed type can never overflow.
5145 We could get an overflow if this conversion is done anywhere else. */
5146 if (TYPE_UNSIGNED (type
))
5147 temp
= fold_convert (signed_type_for (type
), temp
);
5149 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5150 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5152 temp
= const_binop (BIT_AND_EXPR
, temp
,
5153 fold_convert (TREE_TYPE (c
), mask
));
5154 /* If necessary, convert the type back to match the type of C. */
5155 if (TYPE_UNSIGNED (type
))
5156 temp
= fold_convert (type
, temp
);
5158 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5161 /* For an expression that has the form
5165 we can drop one of the inner expressions and simplify to
5169 LOC is the location of the resulting expression. OP is the inner
5170 logical operation; the left-hand side in the examples above, while CMPOP
5171 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5172 removing a condition that guards another, as in
5173 (A != NULL && A->...) || A == NULL
5174 which we must not transform. If RHS_ONLY is true, only eliminate the
5175 right-most operand of the inner logical operation. */
5178 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5181 tree type
= TREE_TYPE (cmpop
);
5182 enum tree_code code
= TREE_CODE (cmpop
);
5183 enum tree_code truthop_code
= TREE_CODE (op
);
5184 tree lhs
= TREE_OPERAND (op
, 0);
5185 tree rhs
= TREE_OPERAND (op
, 1);
5186 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5187 enum tree_code rhs_code
= TREE_CODE (rhs
);
5188 enum tree_code lhs_code
= TREE_CODE (lhs
);
5189 enum tree_code inv_code
;
5191 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5194 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5197 if (rhs_code
== truthop_code
)
5199 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5200 if (newrhs
!= NULL_TREE
)
5203 rhs_code
= TREE_CODE (rhs
);
5206 if (lhs_code
== truthop_code
&& !rhs_only
)
5208 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5209 if (newlhs
!= NULL_TREE
)
5212 lhs_code
= TREE_CODE (lhs
);
5216 inv_code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (type
)));
5217 if (inv_code
== rhs_code
5218 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5219 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5221 if (!rhs_only
&& inv_code
== lhs_code
5222 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5223 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5225 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5226 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5231 /* Find ways of folding logical expressions of LHS and RHS:
5232 Try to merge two comparisons to the same innermost item.
5233 Look for range tests like "ch >= '0' && ch <= '9'".
5234 Look for combinations of simple terms on machines with expensive branches
5235 and evaluate the RHS unconditionally.
5237 For example, if we have p->a == 2 && p->b == 4 and we can make an
5238 object large enough to span both A and B, we can do this with a comparison
5239 against the object ANDed with the a mask.
5241 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5242 operations to do this with one comparison.
5244 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5245 function and the one above.
5247 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5248 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5250 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5253 We return the simplified tree or 0 if no optimization is possible. */
5256 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5259 /* If this is the "or" of two comparisons, we can do something if
5260 the comparisons are NE_EXPR. If this is the "and", we can do something
5261 if the comparisons are EQ_EXPR. I.e.,
5262 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5264 WANTED_CODE is this operation code. For single bit fields, we can
5265 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5266 comparison for one-bit fields. */
5268 enum tree_code wanted_code
;
5269 enum tree_code lcode
, rcode
;
5270 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5271 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5272 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5273 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5274 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5275 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5276 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5277 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5278 machine_mode lnmode
, rnmode
;
5279 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5280 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5281 tree l_const
, r_const
;
5282 tree lntype
, rntype
, result
;
5283 HOST_WIDE_INT first_bit
, end_bit
;
5286 /* Start by getting the comparison codes. Fail if anything is volatile.
5287 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5288 it were surrounded with a NE_EXPR. */
5290 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5293 lcode
= TREE_CODE (lhs
);
5294 rcode
= TREE_CODE (rhs
);
5296 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5298 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5299 build_int_cst (TREE_TYPE (lhs
), 0));
5303 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5305 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5306 build_int_cst (TREE_TYPE (rhs
), 0));
5310 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5311 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5314 ll_arg
= TREE_OPERAND (lhs
, 0);
5315 lr_arg
= TREE_OPERAND (lhs
, 1);
5316 rl_arg
= TREE_OPERAND (rhs
, 0);
5317 rr_arg
= TREE_OPERAND (rhs
, 1);
5319 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5320 if (simple_operand_p (ll_arg
)
5321 && simple_operand_p (lr_arg
))
5323 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5324 && operand_equal_p (lr_arg
, rr_arg
, 0))
5326 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5327 truth_type
, ll_arg
, lr_arg
);
5331 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5332 && operand_equal_p (lr_arg
, rl_arg
, 0))
5334 result
= combine_comparisons (loc
, code
, lcode
,
5335 swap_tree_comparison (rcode
),
5336 truth_type
, ll_arg
, lr_arg
);
5342 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5343 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5345 /* If the RHS can be evaluated unconditionally and its operands are
5346 simple, it wins to evaluate the RHS unconditionally on machines
5347 with expensive branches. In this case, this isn't a comparison
5348 that can be merged. */
5350 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5352 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5353 && simple_operand_p (rl_arg
)
5354 && simple_operand_p (rr_arg
))
5356 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5357 if (code
== TRUTH_OR_EXPR
5358 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5359 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5360 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5361 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5362 return build2_loc (loc
, NE_EXPR
, truth_type
,
5363 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5365 build_int_cst (TREE_TYPE (ll_arg
), 0));
5367 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5368 if (code
== TRUTH_AND_EXPR
5369 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5370 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5371 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5372 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5373 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5374 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5376 build_int_cst (TREE_TYPE (ll_arg
), 0));
5379 /* See if the comparisons can be merged. Then get all the parameters for
5382 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5383 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5387 ll_inner
= decode_field_reference (loc
, ll_arg
,
5388 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5389 &ll_unsignedp
, &volatilep
, &ll_mask
,
5391 lr_inner
= decode_field_reference (loc
, lr_arg
,
5392 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5393 &lr_unsignedp
, &volatilep
, &lr_mask
,
5395 rl_inner
= decode_field_reference (loc
, rl_arg
,
5396 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5397 &rl_unsignedp
, &volatilep
, &rl_mask
,
5399 rr_inner
= decode_field_reference (loc
, rr_arg
,
5400 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5401 &rr_unsignedp
, &volatilep
, &rr_mask
,
5404 /* It must be true that the inner operation on the lhs of each
5405 comparison must be the same if we are to be able to do anything.
5406 Then see if we have constants. If not, the same must be true for
5408 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5409 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5412 if (TREE_CODE (lr_arg
) == INTEGER_CST
5413 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5414 l_const
= lr_arg
, r_const
= rr_arg
;
5415 else if (lr_inner
== 0 || rr_inner
== 0
5416 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5419 l_const
= r_const
= 0;
5421 /* If either comparison code is not correct for our logical operation,
5422 fail. However, we can convert a one-bit comparison against zero into
5423 the opposite comparison against that bit being set in the field. */
5425 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5426 if (lcode
!= wanted_code
)
5428 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5430 /* Make the left operand unsigned, since we are only interested
5431 in the value of one bit. Otherwise we are doing the wrong
5440 /* This is analogous to the code for l_const above. */
5441 if (rcode
!= wanted_code
)
5443 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5452 /* See if we can find a mode that contains both fields being compared on
5453 the left. If we can't, fail. Otherwise, update all constants and masks
5454 to be relative to a field of that size. */
5455 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5456 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5457 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5458 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5460 if (lnmode
== VOIDmode
)
5463 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5464 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5465 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5466 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5468 if (BYTES_BIG_ENDIAN
)
5470 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5471 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5474 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5475 size_int (xll_bitpos
));
5476 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5477 size_int (xrl_bitpos
));
5481 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5482 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5483 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5484 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5485 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5488 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5490 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5495 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5496 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5497 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5498 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5499 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5502 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5504 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5508 /* If the right sides are not constant, do the same for it. Also,
5509 disallow this optimization if a size or signedness mismatch occurs
5510 between the left and right sides. */
5513 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5514 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5515 /* Make sure the two fields on the right
5516 correspond to the left without being swapped. */
5517 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5520 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5521 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5522 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5523 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5525 if (rnmode
== VOIDmode
)
5528 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5529 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5530 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5531 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5533 if (BYTES_BIG_ENDIAN
)
5535 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5536 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5539 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5541 size_int (xlr_bitpos
));
5542 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5544 size_int (xrr_bitpos
));
5546 /* Make a mask that corresponds to both fields being compared.
5547 Do this for both items being compared. If the operands are the
5548 same size and the bits being compared are in the same position
5549 then we can do this by masking both and comparing the masked
5551 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5552 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5553 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5555 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5556 ll_unsignedp
|| rl_unsignedp
);
5557 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5558 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5560 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5561 lr_unsignedp
|| rr_unsignedp
);
5562 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5563 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5565 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5568 /* There is still another way we can do something: If both pairs of
5569 fields being compared are adjacent, we may be able to make a wider
5570 field containing them both.
5572 Note that we still must mask the lhs/rhs expressions. Furthermore,
5573 the mask must be shifted to account for the shift done by
5574 make_bit_field_ref. */
5575 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5576 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5577 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5578 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5582 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5583 ll_bitsize
+ rl_bitsize
,
5584 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5585 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5586 lr_bitsize
+ rr_bitsize
,
5587 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5589 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5590 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5591 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5592 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5594 /* Convert to the smaller type before masking out unwanted bits. */
5596 if (lntype
!= rntype
)
5598 if (lnbitsize
> rnbitsize
)
5600 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5601 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5604 else if (lnbitsize
< rnbitsize
)
5606 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5607 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5612 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5613 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5615 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5616 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5618 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5624 /* Handle the case of comparisons with constants. If there is something in
5625 common between the masks, those bits of the constants must be the same.
5626 If not, the condition is always false. Test for this to avoid generating
5627 incorrect code below. */
5628 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5629 if (! integer_zerop (result
)
5630 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5631 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5633 if (wanted_code
== NE_EXPR
)
5635 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5636 return constant_boolean_node (true, truth_type
);
5640 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5641 return constant_boolean_node (false, truth_type
);
5645 /* Construct the expression we will return. First get the component
5646 reference we will make. Unless the mask is all ones the width of
5647 that field, perform the mask operation. Then compare with the
5649 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5650 ll_unsignedp
|| rl_unsignedp
);
5652 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5653 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5654 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5656 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5657 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5660 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5664 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5668 enum tree_code op_code
;
5671 int consts_equal
, consts_lt
;
5674 STRIP_SIGN_NOPS (arg0
);
5676 op_code
= TREE_CODE (arg0
);
5677 minmax_const
= TREE_OPERAND (arg0
, 1);
5678 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5679 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5680 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5681 inner
= TREE_OPERAND (arg0
, 0);
5683 /* If something does not permit us to optimize, return the original tree. */
5684 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5685 || TREE_CODE (comp_const
) != INTEGER_CST
5686 || TREE_OVERFLOW (comp_const
)
5687 || TREE_CODE (minmax_const
) != INTEGER_CST
5688 || TREE_OVERFLOW (minmax_const
))
5691 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5692 and GT_EXPR, doing the rest with recursive calls using logical
5696 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5699 = optimize_minmax_comparison (loc
,
5700 invert_tree_comparison (code
, false),
5703 return invert_truthvalue_loc (loc
, tem
);
5709 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5710 optimize_minmax_comparison
5711 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5712 optimize_minmax_comparison
5713 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5716 if (op_code
== MAX_EXPR
&& consts_equal
)
5717 /* MAX (X, 0) == 0 -> X <= 0 */
5718 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5720 else if (op_code
== MAX_EXPR
&& consts_lt
)
5721 /* MAX (X, 0) == 5 -> X == 5 */
5722 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5724 else if (op_code
== MAX_EXPR
)
5725 /* MAX (X, 0) == -1 -> false */
5726 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5728 else if (consts_equal
)
5729 /* MIN (X, 0) == 0 -> X >= 0 */
5730 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5733 /* MIN (X, 0) == 5 -> false */
5734 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5737 /* MIN (X, 0) == -1 -> X == -1 */
5738 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5741 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5742 /* MAX (X, 0) > 0 -> X > 0
5743 MAX (X, 0) > 5 -> X > 5 */
5744 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5746 else if (op_code
== MAX_EXPR
)
5747 /* MAX (X, 0) > -1 -> true */
5748 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5750 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5751 /* MIN (X, 0) > 0 -> false
5752 MIN (X, 0) > 5 -> false */
5753 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5756 /* MIN (X, 0) > -1 -> X > -1 */
5757 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5764 /* T is an integer expression that is being multiplied, divided, or taken a
5765 modulus (CODE says which and what kind of divide or modulus) by a
5766 constant C. See if we can eliminate that operation by folding it with
5767 other operations already in T. WIDE_TYPE, if non-null, is a type that
5768 should be used for the computation if wider than our type.
5770 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5771 (X * 2) + (Y * 4). We must, however, be assured that either the original
5772 expression would not overflow or that overflow is undefined for the type
5773 in the language in question.
5775 If we return a non-null expression, it is an equivalent form of the
5776 original computation, but need not be in the original type.
5778 We set *STRICT_OVERFLOW_P to true if the return values depends on
5779 signed overflow being undefined. Otherwise we do not change
5780 *STRICT_OVERFLOW_P. */
5783 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5784 bool *strict_overflow_p
)
5786 /* To avoid exponential search depth, refuse to allow recursion past
5787 three levels. Beyond that (1) it's highly unlikely that we'll find
5788 something interesting and (2) we've probably processed it before
5789 when we built the inner expression. */
5798 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5805 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5806 bool *strict_overflow_p
)
5808 tree type
= TREE_TYPE (t
);
5809 enum tree_code tcode
= TREE_CODE (t
);
5810 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5811 > GET_MODE_SIZE (TYPE_MODE (type
)))
5812 ? wide_type
: type
);
5814 int same_p
= tcode
== code
;
5815 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5816 bool sub_strict_overflow_p
;
5818 /* Don't deal with constants of zero here; they confuse the code below. */
5819 if (integer_zerop (c
))
5822 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5823 op0
= TREE_OPERAND (t
, 0);
5825 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5826 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5828 /* Note that we need not handle conditional operations here since fold
5829 already handles those cases. So just do arithmetic here. */
5833 /* For a constant, we can always simplify if we are a multiply
5834 or (for divide and modulus) if it is a multiple of our constant. */
5835 if (code
== MULT_EXPR
5836 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5837 return const_binop (code
, fold_convert (ctype
, t
),
5838 fold_convert (ctype
, c
));
5841 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5842 /* If op0 is an expression ... */
5843 if ((COMPARISON_CLASS_P (op0
)
5844 || UNARY_CLASS_P (op0
)
5845 || BINARY_CLASS_P (op0
)
5846 || VL_EXP_CLASS_P (op0
)
5847 || EXPRESSION_CLASS_P (op0
))
5848 /* ... and has wrapping overflow, and its type is smaller
5849 than ctype, then we cannot pass through as widening. */
5850 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5851 && (TYPE_PRECISION (ctype
)
5852 > TYPE_PRECISION (TREE_TYPE (op0
))))
5853 /* ... or this is a truncation (t is narrower than op0),
5854 then we cannot pass through this narrowing. */
5855 || (TYPE_PRECISION (type
)
5856 < TYPE_PRECISION (TREE_TYPE (op0
)))
5857 /* ... or signedness changes for division or modulus,
5858 then we cannot pass through this conversion. */
5859 || (code
!= MULT_EXPR
5860 && (TYPE_UNSIGNED (ctype
)
5861 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5862 /* ... or has undefined overflow while the converted to
5863 type has not, we cannot do the operation in the inner type
5864 as that would introduce undefined overflow. */
5865 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5866 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5869 /* Pass the constant down and see if we can make a simplification. If
5870 we can, replace this expression with the inner simplification for
5871 possible later conversion to our or some other type. */
5872 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5873 && TREE_CODE (t2
) == INTEGER_CST
5874 && !TREE_OVERFLOW (t2
)
5875 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5877 ? ctype
: NULL_TREE
,
5878 strict_overflow_p
))))
5883 /* If widening the type changes it from signed to unsigned, then we
5884 must avoid building ABS_EXPR itself as unsigned. */
5885 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5887 tree cstype
= (*signed_type_for
) (ctype
);
5888 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5891 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5892 return fold_convert (ctype
, t1
);
5896 /* If the constant is negative, we cannot simplify this. */
5897 if (tree_int_cst_sgn (c
) == -1)
5901 /* For division and modulus, type can't be unsigned, as e.g.
5902 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5903 For signed types, even with wrapping overflow, this is fine. */
5904 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
5906 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5908 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5911 case MIN_EXPR
: case MAX_EXPR
:
5912 /* If widening the type changes the signedness, then we can't perform
5913 this optimization as that changes the result. */
5914 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5917 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5918 sub_strict_overflow_p
= false;
5919 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5920 &sub_strict_overflow_p
)) != 0
5921 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5922 &sub_strict_overflow_p
)) != 0)
5924 if (tree_int_cst_sgn (c
) < 0)
5925 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5926 if (sub_strict_overflow_p
)
5927 *strict_overflow_p
= true;
5928 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5929 fold_convert (ctype
, t2
));
5933 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5934 /* If the second operand is constant, this is a multiplication
5935 or floor division, by a power of two, so we can treat it that
5936 way unless the multiplier or divisor overflows. Signed
5937 left-shift overflow is implementation-defined rather than
5938 undefined in C90, so do not convert signed left shift into
5940 if (TREE_CODE (op1
) == INTEGER_CST
5941 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5942 /* const_binop may not detect overflow correctly,
5943 so check for it explicitly here. */
5944 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
5945 && 0 != (t1
= fold_convert (ctype
,
5946 const_binop (LSHIFT_EXPR
,
5949 && !TREE_OVERFLOW (t1
))
5950 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5951 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5953 fold_convert (ctype
, op0
),
5955 c
, code
, wide_type
, strict_overflow_p
);
5958 case PLUS_EXPR
: case MINUS_EXPR
:
5959 /* See if we can eliminate the operation on both sides. If we can, we
5960 can return a new PLUS or MINUS. If we can't, the only remaining
5961 cases where we can do anything are if the second operand is a
5963 sub_strict_overflow_p
= false;
5964 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5965 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5966 if (t1
!= 0 && t2
!= 0
5967 && (code
== MULT_EXPR
5968 /* If not multiplication, we can only do this if both operands
5969 are divisible by c. */
5970 || (multiple_of_p (ctype
, op0
, c
)
5971 && multiple_of_p (ctype
, op1
, c
))))
5973 if (sub_strict_overflow_p
)
5974 *strict_overflow_p
= true;
5975 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5976 fold_convert (ctype
, t2
));
5979 /* If this was a subtraction, negate OP1 and set it to be an addition.
5980 This simplifies the logic below. */
5981 if (tcode
== MINUS_EXPR
)
5983 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5984 /* If OP1 was not easily negatable, the constant may be OP0. */
5985 if (TREE_CODE (op0
) == INTEGER_CST
)
5996 if (TREE_CODE (op1
) != INTEGER_CST
)
5999 /* If either OP1 or C are negative, this optimization is not safe for
6000 some of the division and remainder types while for others we need
6001 to change the code. */
6002 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6004 if (code
== CEIL_DIV_EXPR
)
6005 code
= FLOOR_DIV_EXPR
;
6006 else if (code
== FLOOR_DIV_EXPR
)
6007 code
= CEIL_DIV_EXPR
;
6008 else if (code
!= MULT_EXPR
6009 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6013 /* If it's a multiply or a division/modulus operation of a multiple
6014 of our constant, do the operation and verify it doesn't overflow. */
6015 if (code
== MULT_EXPR
6016 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6018 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6019 fold_convert (ctype
, c
));
6020 /* We allow the constant to overflow with wrapping semantics. */
6022 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6028 /* If we have an unsigned type, we cannot widen the operation since it
6029 will change the result if the original computation overflowed. */
6030 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6033 /* If we were able to eliminate our operation from the first side,
6034 apply our operation to the second side and reform the PLUS. */
6035 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
6036 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
6038 /* The last case is if we are a multiply. In that case, we can
6039 apply the distributive law to commute the multiply and addition
6040 if the multiplication of the constants doesn't overflow
6041 and overflow is defined. With undefined overflow
6042 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6043 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6044 return fold_build2 (tcode
, ctype
,
6045 fold_build2 (code
, ctype
,
6046 fold_convert (ctype
, op0
),
6047 fold_convert (ctype
, c
)),
6053 /* We have a special case here if we are doing something like
6054 (C * 8) % 4 since we know that's zero. */
6055 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6056 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6057 /* If the multiplication can overflow we cannot optimize this. */
6058 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6059 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6060 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6062 *strict_overflow_p
= true;
6063 return omit_one_operand (type
, integer_zero_node
, op0
);
6066 /* ... fall through ... */
6068 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6069 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6070 /* If we can extract our operation from the LHS, do so and return a
6071 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6072 do something only if the second operand is a constant. */
6074 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6075 strict_overflow_p
)) != 0)
6076 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6077 fold_convert (ctype
, op1
));
6078 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6079 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6080 strict_overflow_p
)) != 0)
6081 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6082 fold_convert (ctype
, t1
));
6083 else if (TREE_CODE (op1
) != INTEGER_CST
)
6086 /* If these are the same operation types, we can associate them
6087 assuming no overflow. */
6090 bool overflow_p
= false;
6091 bool overflow_mul_p
;
6092 signop sign
= TYPE_SIGN (ctype
);
6093 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
6094 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6096 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6099 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6100 wide_int_to_tree (ctype
, mul
));
6103 /* If these operations "cancel" each other, we have the main
6104 optimizations of this pass, which occur when either constant is a
6105 multiple of the other, in which case we replace this with either an
6106 operation or CODE or TCODE.
6108 If we have an unsigned type, we cannot do this since it will change
6109 the result if the original computation overflowed. */
6110 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6111 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6112 || (tcode
== MULT_EXPR
6113 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6114 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6115 && code
!= MULT_EXPR
)))
6117 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6119 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6120 *strict_overflow_p
= true;
6121 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6122 fold_convert (ctype
,
6123 const_binop (TRUNC_DIV_EXPR
,
6126 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6128 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6129 *strict_overflow_p
= true;
6130 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6131 fold_convert (ctype
,
6132 const_binop (TRUNC_DIV_EXPR
,
6145 /* Return a node which has the indicated constant VALUE (either 0 or
6146 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6147 and is of the indicated TYPE. */
6150 constant_boolean_node (bool value
, tree type
)
6152 if (type
== integer_type_node
)
6153 return value
? integer_one_node
: integer_zero_node
;
6154 else if (type
== boolean_type_node
)
6155 return value
? boolean_true_node
: boolean_false_node
;
6156 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6157 return build_vector_from_val (type
,
6158 build_int_cst (TREE_TYPE (type
),
6161 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6165 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6166 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6167 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6168 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6169 COND is the first argument to CODE; otherwise (as in the example
6170 given here), it is the second argument. TYPE is the type of the
6171 original expression. Return NULL_TREE if no simplification is
6175 fold_binary_op_with_conditional_arg (location_t loc
,
6176 enum tree_code code
,
6177 tree type
, tree op0
, tree op1
,
6178 tree cond
, tree arg
, int cond_first_p
)
6180 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6181 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6182 tree test
, true_value
, false_value
;
6183 tree lhs
= NULL_TREE
;
6184 tree rhs
= NULL_TREE
;
6185 enum tree_code cond_code
= COND_EXPR
;
6187 if (TREE_CODE (cond
) == COND_EXPR
6188 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6190 test
= TREE_OPERAND (cond
, 0);
6191 true_value
= TREE_OPERAND (cond
, 1);
6192 false_value
= TREE_OPERAND (cond
, 2);
6193 /* If this operand throws an expression, then it does not make
6194 sense to try to perform a logical or arithmetic operation
6196 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6198 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6203 tree testtype
= TREE_TYPE (cond
);
6205 true_value
= constant_boolean_node (true, testtype
);
6206 false_value
= constant_boolean_node (false, testtype
);
6209 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6210 cond_code
= VEC_COND_EXPR
;
6212 /* This transformation is only worthwhile if we don't have to wrap ARG
6213 in a SAVE_EXPR and the operation can be simplified without recursing
6214 on at least one of the branches once its pushed inside the COND_EXPR. */
6215 if (!TREE_CONSTANT (arg
)
6216 && (TREE_SIDE_EFFECTS (arg
)
6217 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6218 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6221 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6224 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6226 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6228 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6232 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6234 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6236 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6239 /* Check that we have simplified at least one of the branches. */
6240 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6243 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6247 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6249 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6250 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6251 ADDEND is the same as X.
6253 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6254 and finite. The problematic cases are when X is zero, and its mode
6255 has signed zeros. In the case of rounding towards -infinity,
6256 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6257 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6260 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6262 if (!real_zerop (addend
))
6265 /* Don't allow the fold with -fsignaling-nans. */
6266 if (HONOR_SNANS (element_mode (type
)))
6269 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6270 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6273 /* In a vector or complex, we would need to check the sign of all zeros. */
6274 if (TREE_CODE (addend
) != REAL_CST
)
6277 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6278 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6281 /* The mode has signed zeros, and we have to honor their sign.
6282 In this situation, there is only one case we can return true for.
6283 X - 0 is the same as X unless rounding towards -infinity is
6285 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6288 /* Subroutine of fold() that checks comparisons of built-in math
6289 functions against real constants.
6291 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6292 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6293 is the type of the result and ARG0 and ARG1 are the operands of the
6294 comparison. ARG1 must be a TREE_REAL_CST.
6296 The function returns the constant folded tree if a simplification
6297 can be made, and NULL_TREE otherwise. */
6300 fold_mathfn_compare (location_t loc
,
6301 enum built_in_function fcode
, enum tree_code code
,
6302 tree type
, tree arg0
, tree arg1
)
6306 if (BUILTIN_SQRT_P (fcode
))
6308 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6309 machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6311 c
= TREE_REAL_CST (arg1
);
6312 if (REAL_VALUE_NEGATIVE (c
))
6314 /* sqrt(x) < y is always false, if y is negative. */
6315 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6316 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6318 /* sqrt(x) > y is always true, if y is negative and we
6319 don't care about NaNs, i.e. negative values of x. */
6320 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6321 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6323 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6324 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6325 build_real (TREE_TYPE (arg
), dconst0
));
6327 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6331 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6332 real_convert (&c2
, mode
, &c2
);
6334 if (REAL_VALUE_ISINF (c2
))
6336 /* sqrt(x) > y is x == +Inf, when y is very large. */
6337 if (HONOR_INFINITIES (mode
))
6338 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6339 build_real (TREE_TYPE (arg
), c2
));
6341 /* sqrt(x) > y is always false, when y is very large
6342 and we don't care about infinities. */
6343 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6346 /* sqrt(x) > c is the same as x > c*c. */
6347 return fold_build2_loc (loc
, code
, type
, arg
,
6348 build_real (TREE_TYPE (arg
), c2
));
6350 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6354 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6355 real_convert (&c2
, mode
, &c2
);
6357 if (REAL_VALUE_ISINF (c2
))
6359 /* sqrt(x) < y is always true, when y is a very large
6360 value and we don't care about NaNs or Infinities. */
6361 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6362 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6364 /* sqrt(x) < y is x != +Inf when y is very large and we
6365 don't care about NaNs. */
6366 if (! HONOR_NANS (mode
))
6367 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6368 build_real (TREE_TYPE (arg
), c2
));
6370 /* sqrt(x) < y is x >= 0 when y is very large and we
6371 don't care about Infinities. */
6372 if (! HONOR_INFINITIES (mode
))
6373 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6374 build_real (TREE_TYPE (arg
), dconst0
));
6376 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6377 arg
= save_expr (arg
);
6378 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6379 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6380 build_real (TREE_TYPE (arg
),
6382 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6383 build_real (TREE_TYPE (arg
),
6387 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6388 if (! HONOR_NANS (mode
))
6389 return fold_build2_loc (loc
, code
, type
, arg
,
6390 build_real (TREE_TYPE (arg
), c2
));
6392 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6393 arg
= save_expr (arg
);
6394 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6395 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6396 build_real (TREE_TYPE (arg
),
6398 fold_build2_loc (loc
, code
, type
, arg
,
6399 build_real (TREE_TYPE (arg
),
6407 /* Subroutine of fold() that optimizes comparisons against Infinities,
6408 either +Inf or -Inf.
6410 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6411 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6412 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6414 The function returns the constant folded tree if a simplification
6415 can be made, and NULL_TREE otherwise. */
6418 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6419 tree arg0
, tree arg1
)
6422 REAL_VALUE_TYPE max
;
6426 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6428 /* For negative infinity swap the sense of the comparison. */
6429 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6431 code
= swap_tree_comparison (code
);
6436 /* x > +Inf is always false, if with ignore sNANs. */
6437 if (HONOR_SNANS (mode
))
6439 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6442 /* x <= +Inf is always true, if we don't case about NaNs. */
6443 if (! HONOR_NANS (mode
))
6444 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6446 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6447 arg0
= save_expr (arg0
);
6448 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6452 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6453 real_maxval (&max
, neg
, mode
);
6454 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6455 arg0
, build_real (TREE_TYPE (arg0
), max
));
6458 /* x < +Inf is always equal to x <= DBL_MAX. */
6459 real_maxval (&max
, neg
, mode
);
6460 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6461 arg0
, build_real (TREE_TYPE (arg0
), max
));
6464 /* x != +Inf is always equal to !(x > DBL_MAX). */
6465 real_maxval (&max
, neg
, mode
);
6466 if (! HONOR_NANS (mode
))
6467 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6468 arg0
, build_real (TREE_TYPE (arg0
), max
));
6470 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6471 arg0
, build_real (TREE_TYPE (arg0
), max
));
6472 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6481 /* Subroutine of fold() that optimizes comparisons of a division by
6482 a nonzero integer constant against an integer constant, i.e.
6485 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6486 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6487 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6489 The function returns the constant folded tree if a simplification
6490 can be made, and NULL_TREE otherwise. */
6493 fold_div_compare (location_t loc
,
6494 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6496 tree prod
, tmp
, hi
, lo
;
6497 tree arg00
= TREE_OPERAND (arg0
, 0);
6498 tree arg01
= TREE_OPERAND (arg0
, 1);
6499 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6500 bool neg_overflow
= false;
6503 /* We have to do this the hard way to detect unsigned overflow.
6504 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6505 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6506 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6507 neg_overflow
= false;
6509 if (sign
== UNSIGNED
)
6511 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6512 build_int_cst (TREE_TYPE (arg01
), 1));
6515 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6516 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6517 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6518 -1, overflow
| TREE_OVERFLOW (prod
));
6520 else if (tree_int_cst_sgn (arg01
) >= 0)
6522 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6523 build_int_cst (TREE_TYPE (arg01
), 1));
6524 switch (tree_int_cst_sgn (arg1
))
6527 neg_overflow
= true;
6528 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6533 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6538 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6548 /* A negative divisor reverses the relational operators. */
6549 code
= swap_tree_comparison (code
);
6551 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6552 build_int_cst (TREE_TYPE (arg01
), 1));
6553 switch (tree_int_cst_sgn (arg1
))
6556 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6561 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6566 neg_overflow
= true;
6567 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6579 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6580 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6581 if (TREE_OVERFLOW (hi
))
6582 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6583 if (TREE_OVERFLOW (lo
))
6584 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6585 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6588 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6589 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6590 if (TREE_OVERFLOW (hi
))
6591 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6592 if (TREE_OVERFLOW (lo
))
6593 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6594 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6597 if (TREE_OVERFLOW (lo
))
6599 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6600 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6602 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6605 if (TREE_OVERFLOW (hi
))
6607 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6608 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6610 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6613 if (TREE_OVERFLOW (hi
))
6615 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6616 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6618 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6621 if (TREE_OVERFLOW (lo
))
6623 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6624 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6626 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6636 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6637 equality/inequality test, then return a simplified form of the test
6638 using a sign testing. Otherwise return NULL. TYPE is the desired
6642 fold_single_bit_test_into_sign_test (location_t loc
,
6643 enum tree_code code
, tree arg0
, tree arg1
,
6646 /* If this is testing a single bit, we can optimize the test. */
6647 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6648 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6649 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6651 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6652 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6653 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6655 if (arg00
!= NULL_TREE
6656 /* This is only a win if casting to a signed type is cheap,
6657 i.e. when arg00's type is not a partial mode. */
6658 && TYPE_PRECISION (TREE_TYPE (arg00
))
6659 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6661 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6662 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6664 fold_convert_loc (loc
, stype
, arg00
),
6665 build_int_cst (stype
, 0));
6672 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6673 equality/inequality test, then return a simplified form of
6674 the test using shifts and logical operations. Otherwise return
6675 NULL. TYPE is the desired result type. */
6678 fold_single_bit_test (location_t loc
, enum tree_code code
,
6679 tree arg0
, tree arg1
, tree result_type
)
6681 /* If this is testing a single bit, we can optimize the test. */
6682 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6683 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6684 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6686 tree inner
= TREE_OPERAND (arg0
, 0);
6687 tree type
= TREE_TYPE (arg0
);
6688 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6689 machine_mode operand_mode
= TYPE_MODE (type
);
6691 tree signed_type
, unsigned_type
, intermediate_type
;
6694 /* First, see if we can fold the single bit test into a sign-bit
6696 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6701 /* Otherwise we have (A & C) != 0 where C is a single bit,
6702 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6703 Similarly for (A & C) == 0. */
6705 /* If INNER is a right shift of a constant and it plus BITNUM does
6706 not overflow, adjust BITNUM and INNER. */
6707 if (TREE_CODE (inner
) == RSHIFT_EXPR
6708 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6709 && bitnum
< TYPE_PRECISION (type
)
6710 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6711 TYPE_PRECISION (type
) - bitnum
))
6713 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6714 inner
= TREE_OPERAND (inner
, 0);
6717 /* If we are going to be able to omit the AND below, we must do our
6718 operations as unsigned. If we must use the AND, we have a choice.
6719 Normally unsigned is faster, but for some machines signed is. */
6720 #ifdef LOAD_EXTEND_OP
6721 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6722 && !flag_syntax_only
) ? 0 : 1;
6727 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6728 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6729 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6730 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6733 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6734 inner
, size_int (bitnum
));
6736 one
= build_int_cst (intermediate_type
, 1);
6738 if (code
== EQ_EXPR
)
6739 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6741 /* Put the AND last so it can combine with more things. */
6742 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6744 /* Make sure to return the proper type. */
6745 inner
= fold_convert_loc (loc
, result_type
, inner
);
6752 /* Check whether we are allowed to reorder operands arg0 and arg1,
6753 such that the evaluation of arg1 occurs before arg0. */
6756 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6758 if (! flag_evaluation_order
)
6760 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6762 return ! TREE_SIDE_EFFECTS (arg0
)
6763 && ! TREE_SIDE_EFFECTS (arg1
);
6766 /* Test whether it is preferable two swap two operands, ARG0 and
6767 ARG1, for example because ARG0 is an integer constant and ARG1
6768 isn't. If REORDER is true, only recommend swapping if we can
6769 evaluate the operands in reverse order. */
6772 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6774 if (CONSTANT_CLASS_P (arg1
))
6776 if (CONSTANT_CLASS_P (arg0
))
6782 if (TREE_CONSTANT (arg1
))
6784 if (TREE_CONSTANT (arg0
))
6787 if (reorder
&& flag_evaluation_order
6788 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6791 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6792 for commutative and comparison operators. Ensuring a canonical
6793 form allows the optimizers to find additional redundancies without
6794 having to explicitly check for both orderings. */
6795 if (TREE_CODE (arg0
) == SSA_NAME
6796 && TREE_CODE (arg1
) == SSA_NAME
6797 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6800 /* Put SSA_NAMEs last. */
6801 if (TREE_CODE (arg1
) == SSA_NAME
)
6803 if (TREE_CODE (arg0
) == SSA_NAME
)
6806 /* Put variables last. */
6815 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6816 ARG0 is extended to a wider type. */
6819 fold_widened_comparison (location_t loc
, enum tree_code code
,
6820 tree type
, tree arg0
, tree arg1
)
6822 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6824 tree shorter_type
, outer_type
;
6828 if (arg0_unw
== arg0
)
6830 shorter_type
= TREE_TYPE (arg0_unw
);
6832 #ifdef HAVE_canonicalize_funcptr_for_compare
6833 /* Disable this optimization if we're casting a function pointer
6834 type on targets that require function pointer canonicalization. */
6835 if (HAVE_canonicalize_funcptr_for_compare
6836 && TREE_CODE (shorter_type
) == POINTER_TYPE
6837 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6841 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6844 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6846 /* If possible, express the comparison in the shorter mode. */
6847 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6848 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6849 && (TREE_TYPE (arg1_unw
) == shorter_type
6850 || ((TYPE_PRECISION (shorter_type
)
6851 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6852 && (TYPE_UNSIGNED (shorter_type
)
6853 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6854 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6855 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6856 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6857 && int_fits_type_p (arg1_unw
, shorter_type
))))
6858 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6859 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6861 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6862 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6863 || !int_fits_type_p (arg1_unw
, shorter_type
))
6866 /* If we are comparing with the integer that does not fit into the range
6867 of the shorter type, the result is known. */
6868 outer_type
= TREE_TYPE (arg1_unw
);
6869 min
= lower_bound_in_type (outer_type
, shorter_type
);
6870 max
= upper_bound_in_type (outer_type
, shorter_type
);
6872 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6874 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6881 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6886 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6892 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6894 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6899 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6901 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6910 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6911 ARG0 just the signedness is changed. */
6914 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6915 tree arg0
, tree arg1
)
6918 tree inner_type
, outer_type
;
6920 if (!CONVERT_EXPR_P (arg0
))
6923 outer_type
= TREE_TYPE (arg0
);
6924 arg0_inner
= TREE_OPERAND (arg0
, 0);
6925 inner_type
= TREE_TYPE (arg0_inner
);
6927 #ifdef HAVE_canonicalize_funcptr_for_compare
6928 /* Disable this optimization if we're casting a function pointer
6929 type on targets that require function pointer canonicalization. */
6930 if (HAVE_canonicalize_funcptr_for_compare
6931 && TREE_CODE (inner_type
) == POINTER_TYPE
6932 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6936 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6939 if (TREE_CODE (arg1
) != INTEGER_CST
6940 && !(CONVERT_EXPR_P (arg1
)
6941 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6944 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6949 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6952 if (TREE_CODE (arg1
) == INTEGER_CST
)
6953 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
6954 TREE_OVERFLOW (arg1
));
6956 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6958 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6962 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6963 means A >= Y && A != MAX, but in this case we know that
6964 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6967 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6969 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6971 if (TREE_CODE (bound
) == LT_EXPR
)
6972 a
= TREE_OPERAND (bound
, 0);
6973 else if (TREE_CODE (bound
) == GT_EXPR
)
6974 a
= TREE_OPERAND (bound
, 1);
6978 typea
= TREE_TYPE (a
);
6979 if (!INTEGRAL_TYPE_P (typea
)
6980 && !POINTER_TYPE_P (typea
))
6983 if (TREE_CODE (ineq
) == LT_EXPR
)
6985 a1
= TREE_OPERAND (ineq
, 1);
6986 y
= TREE_OPERAND (ineq
, 0);
6988 else if (TREE_CODE (ineq
) == GT_EXPR
)
6990 a1
= TREE_OPERAND (ineq
, 0);
6991 y
= TREE_OPERAND (ineq
, 1);
6996 if (TREE_TYPE (a1
) != typea
)
6999 if (POINTER_TYPE_P (typea
))
7001 /* Convert the pointer types into integer before taking the difference. */
7002 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7003 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7004 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7007 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7009 if (!diff
|| !integer_onep (diff
))
7012 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7015 /* Fold a sum or difference of at least one multiplication.
7016 Returns the folded tree or NULL if no simplification could be made. */
7019 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7020 tree arg0
, tree arg1
)
7022 tree arg00
, arg01
, arg10
, arg11
;
7023 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7025 /* (A * C) +- (B * C) -> (A+-B) * C.
7026 (A * C) +- A -> A * (C+-1).
7027 We are most concerned about the case where C is a constant,
7028 but other combinations show up during loop reduction. Since
7029 it is not difficult, try all four possibilities. */
7031 if (TREE_CODE (arg0
) == MULT_EXPR
)
7033 arg00
= TREE_OPERAND (arg0
, 0);
7034 arg01
= TREE_OPERAND (arg0
, 1);
7036 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7038 arg00
= build_one_cst (type
);
7043 /* We cannot generate constant 1 for fract. */
7044 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7047 arg01
= build_one_cst (type
);
7049 if (TREE_CODE (arg1
) == MULT_EXPR
)
7051 arg10
= TREE_OPERAND (arg1
, 0);
7052 arg11
= TREE_OPERAND (arg1
, 1);
7054 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7056 arg10
= build_one_cst (type
);
7057 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7058 the purpose of this canonicalization. */
7059 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
7060 && negate_expr_p (arg1
)
7061 && code
== PLUS_EXPR
)
7063 arg11
= negate_expr (arg1
);
7071 /* We cannot generate constant 1 for fract. */
7072 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7075 arg11
= build_one_cst (type
);
7079 if (operand_equal_p (arg01
, arg11
, 0))
7080 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7081 else if (operand_equal_p (arg00
, arg10
, 0))
7082 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7083 else if (operand_equal_p (arg00
, arg11
, 0))
7084 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7085 else if (operand_equal_p (arg01
, arg10
, 0))
7086 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7088 /* No identical multiplicands; see if we can find a common
7089 power-of-two factor in non-power-of-two multiplies. This
7090 can help in multi-dimensional array access. */
7091 else if (tree_fits_shwi_p (arg01
)
7092 && tree_fits_shwi_p (arg11
))
7094 HOST_WIDE_INT int01
, int11
, tmp
;
7097 int01
= tree_to_shwi (arg01
);
7098 int11
= tree_to_shwi (arg11
);
7100 /* Move min of absolute values to int11. */
7101 if (absu_hwi (int01
) < absu_hwi (int11
))
7103 tmp
= int01
, int01
= int11
, int11
= tmp
;
7104 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7111 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7112 /* The remainder should not be a constant, otherwise we
7113 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7114 increased the number of multiplications necessary. */
7115 && TREE_CODE (arg10
) != INTEGER_CST
)
7117 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7118 build_int_cst (TREE_TYPE (arg00
),
7123 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7128 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7129 fold_build2_loc (loc
, code
, type
,
7130 fold_convert_loc (loc
, type
, alt0
),
7131 fold_convert_loc (loc
, type
, alt1
)),
7132 fold_convert_loc (loc
, type
, same
));
7137 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7138 specified by EXPR into the buffer PTR of length LEN bytes.
7139 Return the number of bytes placed in the buffer, or zero
7143 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7145 tree type
= TREE_TYPE (expr
);
7146 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7147 int byte
, offset
, word
, words
;
7148 unsigned char value
;
7150 if ((off
== -1 && total_bytes
> len
)
7151 || off
>= total_bytes
)
7155 words
= total_bytes
/ UNITS_PER_WORD
;
7157 for (byte
= 0; byte
< total_bytes
; byte
++)
7159 int bitpos
= byte
* BITS_PER_UNIT
;
7160 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7162 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7164 if (total_bytes
> UNITS_PER_WORD
)
7166 word
= byte
/ UNITS_PER_WORD
;
7167 if (WORDS_BIG_ENDIAN
)
7168 word
= (words
- 1) - word
;
7169 offset
= word
* UNITS_PER_WORD
;
7170 if (BYTES_BIG_ENDIAN
)
7171 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7173 offset
+= byte
% UNITS_PER_WORD
;
7176 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7178 && offset
- off
< len
)
7179 ptr
[offset
- off
] = value
;
7181 return MIN (len
, total_bytes
- off
);
7185 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7186 specified by EXPR into the buffer PTR of length LEN bytes.
7187 Return the number of bytes placed in the buffer, or zero
7191 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7193 tree type
= TREE_TYPE (expr
);
7194 machine_mode mode
= TYPE_MODE (type
);
7195 int total_bytes
= GET_MODE_SIZE (mode
);
7196 FIXED_VALUE_TYPE value
;
7197 tree i_value
, i_type
;
7199 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7202 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7204 if (NULL_TREE
== i_type
7205 || TYPE_PRECISION (i_type
) != total_bytes
)
7208 value
= TREE_FIXED_CST (expr
);
7209 i_value
= double_int_to_tree (i_type
, value
.data
);
7211 return native_encode_int (i_value
, ptr
, len
, off
);
7215 /* Subroutine of native_encode_expr. Encode the REAL_CST
7216 specified by EXPR into the buffer PTR of length LEN bytes.
7217 Return the number of bytes placed in the buffer, or zero
7221 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7223 tree type
= TREE_TYPE (expr
);
7224 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7225 int byte
, offset
, word
, words
, bitpos
;
7226 unsigned char value
;
7228 /* There are always 32 bits in each long, no matter the size of
7229 the hosts long. We handle floating point representations with
7233 if ((off
== -1 && total_bytes
> len
)
7234 || off
>= total_bytes
)
7238 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7240 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7242 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7243 bitpos
+= BITS_PER_UNIT
)
7245 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7246 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7248 if (UNITS_PER_WORD
< 4)
7250 word
= byte
/ UNITS_PER_WORD
;
7251 if (WORDS_BIG_ENDIAN
)
7252 word
= (words
- 1) - word
;
7253 offset
= word
* UNITS_PER_WORD
;
7254 if (BYTES_BIG_ENDIAN
)
7255 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7257 offset
+= byte
% UNITS_PER_WORD
;
7260 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7261 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7263 && offset
- off
< len
)
7264 ptr
[offset
- off
] = value
;
7266 return MIN (len
, total_bytes
- off
);
7269 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7270 specified by EXPR into the buffer PTR of length LEN bytes.
7271 Return the number of bytes placed in the buffer, or zero
7275 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7280 part
= TREE_REALPART (expr
);
7281 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7285 part
= TREE_IMAGPART (expr
);
7287 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7288 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7292 return rsize
+ isize
;
7296 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7297 specified by EXPR into the buffer PTR of length LEN bytes.
7298 Return the number of bytes placed in the buffer, or zero
7302 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7309 count
= VECTOR_CST_NELTS (expr
);
7310 itype
= TREE_TYPE (TREE_TYPE (expr
));
7311 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7312 for (i
= 0; i
< count
; i
++)
7319 elem
= VECTOR_CST_ELT (expr
, i
);
7320 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7321 if ((off
== -1 && res
!= size
)
7334 /* Subroutine of native_encode_expr. Encode the STRING_CST
7335 specified by EXPR into the buffer PTR of length LEN bytes.
7336 Return the number of bytes placed in the buffer, or zero
7340 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7342 tree type
= TREE_TYPE (expr
);
7343 HOST_WIDE_INT total_bytes
;
7345 if (TREE_CODE (type
) != ARRAY_TYPE
7346 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7347 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7348 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7350 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7351 if ((off
== -1 && total_bytes
> len
)
7352 || off
>= total_bytes
)
7356 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7359 if (off
< TREE_STRING_LENGTH (expr
))
7361 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7362 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7364 memset (ptr
+ written
, 0,
7365 MIN (total_bytes
- written
, len
- written
));
7368 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7369 return MIN (total_bytes
- off
, len
);
7373 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7374 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7375 buffer PTR of length LEN bytes. If OFF is not -1 then start
7376 the encoding at byte offset OFF and encode at most LEN bytes.
7377 Return the number of bytes placed in the buffer, or zero upon failure. */
7380 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7382 switch (TREE_CODE (expr
))
7385 return native_encode_int (expr
, ptr
, len
, off
);
7388 return native_encode_real (expr
, ptr
, len
, off
);
7391 return native_encode_fixed (expr
, ptr
, len
, off
);
7394 return native_encode_complex (expr
, ptr
, len
, off
);
7397 return native_encode_vector (expr
, ptr
, len
, off
);
7400 return native_encode_string (expr
, ptr
, len
, off
);
7408 /* Subroutine of native_interpret_expr. Interpret the contents of
7409 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7410 If the buffer cannot be interpreted, return NULL_TREE. */
7413 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7415 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7417 if (total_bytes
> len
7418 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7421 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7423 return wide_int_to_tree (type
, result
);
7427 /* Subroutine of native_interpret_expr. Interpret the contents of
7428 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7429 If the buffer cannot be interpreted, return NULL_TREE. */
7432 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7434 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7436 FIXED_VALUE_TYPE fixed_value
;
7438 if (total_bytes
> len
7439 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7442 result
= double_int::from_buffer (ptr
, total_bytes
);
7443 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7445 return build_fixed (type
, fixed_value
);
7449 /* Subroutine of native_interpret_expr. Interpret the contents of
7450 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7451 If the buffer cannot be interpreted, return NULL_TREE. */
7454 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7456 machine_mode mode
= TYPE_MODE (type
);
7457 int total_bytes
= GET_MODE_SIZE (mode
);
7458 int byte
, offset
, word
, words
, bitpos
;
7459 unsigned char value
;
7460 /* There are always 32 bits in each long, no matter the size of
7461 the hosts long. We handle floating point representations with
7466 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7467 if (total_bytes
> len
|| total_bytes
> 24)
7469 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7471 memset (tmp
, 0, sizeof (tmp
));
7472 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7473 bitpos
+= BITS_PER_UNIT
)
7475 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7476 if (UNITS_PER_WORD
< 4)
7478 word
= byte
/ UNITS_PER_WORD
;
7479 if (WORDS_BIG_ENDIAN
)
7480 word
= (words
- 1) - word
;
7481 offset
= word
* UNITS_PER_WORD
;
7482 if (BYTES_BIG_ENDIAN
)
7483 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7485 offset
+= byte
% UNITS_PER_WORD
;
7488 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7489 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7491 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7494 real_from_target (&r
, tmp
, mode
);
7495 return build_real (type
, r
);
7499 /* Subroutine of native_interpret_expr. Interpret the contents of
7500 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7501 If the buffer cannot be interpreted, return NULL_TREE. */
7504 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7506 tree etype
, rpart
, ipart
;
7509 etype
= TREE_TYPE (type
);
7510 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7513 rpart
= native_interpret_expr (etype
, ptr
, size
);
7516 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7519 return build_complex (type
, rpart
, ipart
);
7523 /* Subroutine of native_interpret_expr. Interpret the contents of
7524 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7525 If the buffer cannot be interpreted, return NULL_TREE. */
7528 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7534 etype
= TREE_TYPE (type
);
7535 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7536 count
= TYPE_VECTOR_SUBPARTS (type
);
7537 if (size
* count
> len
)
7540 elements
= XALLOCAVEC (tree
, count
);
7541 for (i
= count
- 1; i
>= 0; i
--)
7543 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7548 return build_vector (type
, elements
);
7552 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7553 the buffer PTR of length LEN as a constant of type TYPE. For
7554 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7555 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7556 return NULL_TREE. */
7559 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7561 switch (TREE_CODE (type
))
7567 case REFERENCE_TYPE
:
7568 return native_interpret_int (type
, ptr
, len
);
7571 return native_interpret_real (type
, ptr
, len
);
7573 case FIXED_POINT_TYPE
:
7574 return native_interpret_fixed (type
, ptr
, len
);
7577 return native_interpret_complex (type
, ptr
, len
);
7580 return native_interpret_vector (type
, ptr
, len
);
7587 /* Returns true if we can interpret the contents of a native encoding
7591 can_native_interpret_type_p (tree type
)
7593 switch (TREE_CODE (type
))
7599 case REFERENCE_TYPE
:
7600 case FIXED_POINT_TYPE
:
7610 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7611 TYPE at compile-time. If we're unable to perform the conversion
7612 return NULL_TREE. */
7615 fold_view_convert_expr (tree type
, tree expr
)
7617 /* We support up to 512-bit values (for V8DFmode). */
7618 unsigned char buffer
[64];
7621 /* Check that the host and target are sane. */
7622 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7625 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7629 return native_interpret_expr (type
, buffer
, len
);
7632 /* Build an expression for the address of T. Folds away INDIRECT_REF
7633 to avoid confusing the gimplify process. */
7636 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7638 /* The size of the object is not relevant when talking about its address. */
7639 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7640 t
= TREE_OPERAND (t
, 0);
7642 if (TREE_CODE (t
) == INDIRECT_REF
)
7644 t
= TREE_OPERAND (t
, 0);
7646 if (TREE_TYPE (t
) != ptrtype
)
7647 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7649 else if (TREE_CODE (t
) == MEM_REF
7650 && integer_zerop (TREE_OPERAND (t
, 1)))
7651 return TREE_OPERAND (t
, 0);
7652 else if (TREE_CODE (t
) == MEM_REF
7653 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7654 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7655 TREE_OPERAND (t
, 0),
7656 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7657 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7659 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7661 if (TREE_TYPE (t
) != ptrtype
)
7662 t
= fold_convert_loc (loc
, ptrtype
, t
);
7665 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7670 /* Build an expression for the address of T. */
7673 build_fold_addr_expr_loc (location_t loc
, tree t
)
7675 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7677 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7680 /* Fold a unary expression of code CODE and type TYPE with operand
7681 OP0. Return the folded expression if folding is successful.
7682 Otherwise, return NULL_TREE. */
7685 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7689 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7691 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7692 && TREE_CODE_LENGTH (code
) == 1);
7697 if (CONVERT_EXPR_CODE_P (code
)
7698 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7700 /* Don't use STRIP_NOPS, because signedness of argument type
7702 STRIP_SIGN_NOPS (arg0
);
7706 /* Strip any conversions that don't change the mode. This
7707 is safe for every expression, except for a comparison
7708 expression because its signedness is derived from its
7711 Note that this is done as an internal manipulation within
7712 the constant folder, in order to find the simplest
7713 representation of the arguments so that their form can be
7714 studied. In any cases, the appropriate type conversions
7715 should be put back in the tree that will get out of the
7720 if (CONSTANT_CLASS_P (arg0
))
7722 tree tem
= const_unop (code
, type
, arg0
);
7725 if (TREE_TYPE (tem
) != type
)
7726 tem
= fold_convert_loc (loc
, type
, tem
);
7732 tem
= generic_simplify (loc
, code
, type
, op0
);
7736 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7738 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7739 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7740 fold_build1_loc (loc
, code
, type
,
7741 fold_convert_loc (loc
, TREE_TYPE (op0
),
7742 TREE_OPERAND (arg0
, 1))));
7743 else if (TREE_CODE (arg0
) == COND_EXPR
)
7745 tree arg01
= TREE_OPERAND (arg0
, 1);
7746 tree arg02
= TREE_OPERAND (arg0
, 2);
7747 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7748 arg01
= fold_build1_loc (loc
, code
, type
,
7749 fold_convert_loc (loc
,
7750 TREE_TYPE (op0
), arg01
));
7751 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7752 arg02
= fold_build1_loc (loc
, code
, type
,
7753 fold_convert_loc (loc
,
7754 TREE_TYPE (op0
), arg02
));
7755 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7758 /* If this was a conversion, and all we did was to move into
7759 inside the COND_EXPR, bring it back out. But leave it if
7760 it is a conversion from integer to integer and the
7761 result precision is no wider than a word since such a
7762 conversion is cheap and may be optimized away by combine,
7763 while it couldn't if it were outside the COND_EXPR. Then return
7764 so we don't get into an infinite recursion loop taking the
7765 conversion out and then back in. */
7767 if ((CONVERT_EXPR_CODE_P (code
)
7768 || code
== NON_LVALUE_EXPR
)
7769 && TREE_CODE (tem
) == COND_EXPR
7770 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7771 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7772 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7773 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7774 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7775 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7776 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7778 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7779 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7780 || flag_syntax_only
))
7781 tem
= build1_loc (loc
, code
, type
,
7783 TREE_TYPE (TREE_OPERAND
7784 (TREE_OPERAND (tem
, 1), 0)),
7785 TREE_OPERAND (tem
, 0),
7786 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7787 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7795 case NON_LVALUE_EXPR
:
7796 if (!maybe_lvalue_p (op0
))
7797 return fold_convert_loc (loc
, type
, op0
);
7802 case FIX_TRUNC_EXPR
:
7803 if (COMPARISON_CLASS_P (op0
))
7805 /* If we have (type) (a CMP b) and type is an integral type, return
7806 new expression involving the new type. Canonicalize
7807 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7809 Do not fold the result as that would not simplify further, also
7810 folding again results in recursions. */
7811 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7812 return build2_loc (loc
, TREE_CODE (op0
), type
,
7813 TREE_OPERAND (op0
, 0),
7814 TREE_OPERAND (op0
, 1));
7815 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7816 && TREE_CODE (type
) != VECTOR_TYPE
)
7817 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7818 constant_boolean_node (true, type
),
7819 constant_boolean_node (false, type
));
7822 /* Handle (T *)&A.B.C for A being of type T and B and C
7823 living at offset zero. This occurs frequently in
7824 C++ upcasting and then accessing the base. */
7825 if (TREE_CODE (op0
) == ADDR_EXPR
7826 && POINTER_TYPE_P (type
)
7827 && handled_component_p (TREE_OPERAND (op0
, 0)))
7829 HOST_WIDE_INT bitsize
, bitpos
;
7832 int unsignedp
, volatilep
;
7833 tree base
= TREE_OPERAND (op0
, 0);
7834 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7835 &mode
, &unsignedp
, &volatilep
, false);
7836 /* If the reference was to a (constant) zero offset, we can use
7837 the address of the base if it has the same base type
7838 as the result type and the pointer type is unqualified. */
7839 if (! offset
&& bitpos
== 0
7840 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7841 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7842 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7843 return fold_convert_loc (loc
, type
,
7844 build_fold_addr_expr_loc (loc
, base
));
7847 if (TREE_CODE (op0
) == MODIFY_EXPR
7848 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7849 /* Detect assigning a bitfield. */
7850 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7852 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7854 /* Don't leave an assignment inside a conversion
7855 unless assigning a bitfield. */
7856 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7857 /* First do the assignment, then return converted constant. */
7858 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7859 TREE_NO_WARNING (tem
) = 1;
7860 TREE_USED (tem
) = 1;
7864 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7865 constants (if x has signed type, the sign bit cannot be set
7866 in c). This folds extension into the BIT_AND_EXPR.
7867 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7868 very likely don't have maximal range for their precision and this
7869 transformation effectively doesn't preserve non-maximal ranges. */
7870 if (TREE_CODE (type
) == INTEGER_TYPE
7871 && TREE_CODE (op0
) == BIT_AND_EXPR
7872 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7874 tree and_expr
= op0
;
7875 tree and0
= TREE_OPERAND (and_expr
, 0);
7876 tree and1
= TREE_OPERAND (and_expr
, 1);
7879 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7880 || (TYPE_PRECISION (type
)
7881 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7883 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7884 <= HOST_BITS_PER_WIDE_INT
7885 && tree_fits_uhwi_p (and1
))
7887 unsigned HOST_WIDE_INT cst
;
7889 cst
= tree_to_uhwi (and1
);
7890 cst
&= HOST_WIDE_INT_M1U
7891 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7892 change
= (cst
== 0);
7893 #ifdef LOAD_EXTEND_OP
7895 && !flag_syntax_only
7896 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7899 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7900 and0
= fold_convert_loc (loc
, uns
, and0
);
7901 and1
= fold_convert_loc (loc
, uns
, and1
);
7907 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7908 TREE_OVERFLOW (and1
));
7909 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7910 fold_convert_loc (loc
, type
, and0
), tem
);
7914 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7915 when one of the new casts will fold away. Conservatively we assume
7916 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7917 if (POINTER_TYPE_P (type
)
7918 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7919 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7920 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7921 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7922 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7924 tree arg00
= TREE_OPERAND (arg0
, 0);
7925 tree arg01
= TREE_OPERAND (arg0
, 1);
7927 return fold_build_pointer_plus_loc
7928 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7931 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7932 of the same precision, and X is an integer type not narrower than
7933 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7934 if (INTEGRAL_TYPE_P (type
)
7935 && TREE_CODE (op0
) == BIT_NOT_EXPR
7936 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7937 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7938 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7940 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7941 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7942 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7943 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7944 fold_convert_loc (loc
, type
, tem
));
7947 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7948 type of X and Y (integer types only). */
7949 if (INTEGRAL_TYPE_P (type
)
7950 && TREE_CODE (op0
) == MULT_EXPR
7951 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7952 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7954 /* Be careful not to introduce new overflows. */
7956 if (TYPE_OVERFLOW_WRAPS (type
))
7959 mult_type
= unsigned_type_for (type
);
7961 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7963 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7964 fold_convert_loc (loc
, mult_type
,
7965 TREE_OPERAND (op0
, 0)),
7966 fold_convert_loc (loc
, mult_type
,
7967 TREE_OPERAND (op0
, 1)));
7968 return fold_convert_loc (loc
, type
, tem
);
7974 case VIEW_CONVERT_EXPR
:
7975 if (TREE_CODE (op0
) == MEM_REF
)
7976 return fold_build2_loc (loc
, MEM_REF
, type
,
7977 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7982 tem
= fold_negate_expr (loc
, arg0
);
7984 return fold_convert_loc (loc
, type
, tem
);
7988 /* Convert fabs((double)float) into (double)fabsf(float). */
7989 if (TREE_CODE (arg0
) == NOP_EXPR
7990 && TREE_CODE (type
) == REAL_TYPE
)
7992 tree targ0
= strip_float_extensions (arg0
);
7994 return fold_convert_loc (loc
, type
,
7995 fold_build1_loc (loc
, ABS_EXPR
,
7999 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8000 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8003 /* Strip sign ops from argument. */
8004 if (TREE_CODE (type
) == REAL_TYPE
)
8006 tem
= fold_strip_sign_ops (arg0
);
8008 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8009 fold_convert_loc (loc
, type
, tem
));
8014 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8015 return fold_convert_loc (loc
, type
, arg0
);
8016 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8018 tree itype
= TREE_TYPE (type
);
8019 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8020 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8021 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8022 negate_expr (ipart
));
8024 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8025 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8029 /* Convert ~ (-A) to A - 1. */
8030 if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8031 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8032 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8033 build_int_cst (type
, 1));
8034 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8035 else if (INTEGRAL_TYPE_P (type
)
8036 && ((TREE_CODE (arg0
) == MINUS_EXPR
8037 && integer_onep (TREE_OPERAND (arg0
, 1)))
8038 || (TREE_CODE (arg0
) == PLUS_EXPR
8039 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8040 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
8041 fold_convert_loc (loc
, type
,
8042 TREE_OPERAND (arg0
, 0)));
8043 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8044 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8045 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8046 fold_convert_loc (loc
, type
,
8047 TREE_OPERAND (arg0
, 0)))))
8048 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8049 fold_convert_loc (loc
, type
,
8050 TREE_OPERAND (arg0
, 1)));
8051 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8052 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8053 fold_convert_loc (loc
, type
,
8054 TREE_OPERAND (arg0
, 1)))))
8055 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8056 fold_convert_loc (loc
, type
,
8057 TREE_OPERAND (arg0
, 0)), tem
);
8061 case TRUTH_NOT_EXPR
:
8062 /* Note that the operand of this must be an int
8063 and its values must be 0 or 1.
8064 ("true" is a fixed value perhaps depending on the language,
8065 but we don't handle values other than 1 correctly yet.) */
8066 tem
= fold_truth_not_expr (loc
, arg0
);
8069 return fold_convert_loc (loc
, type
, tem
);
8072 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8073 return fold_convert_loc (loc
, type
, arg0
);
8074 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8076 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8077 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8078 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8079 TREE_OPERAND (arg0
, 0)),
8080 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8081 TREE_OPERAND (arg0
, 1)));
8082 return fold_convert_loc (loc
, type
, tem
);
8084 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8086 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8087 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8088 TREE_OPERAND (arg0
, 0));
8089 return fold_convert_loc (loc
, type
, tem
);
8091 if (TREE_CODE (arg0
) == CALL_EXPR
)
8093 tree fn
= get_callee_fndecl (arg0
);
8094 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8095 switch (DECL_FUNCTION_CODE (fn
))
8097 CASE_FLT_FN (BUILT_IN_CEXPI
):
8098 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8100 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8110 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8111 return build_zero_cst (type
);
8112 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8114 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8115 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8116 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8117 TREE_OPERAND (arg0
, 0)),
8118 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8119 TREE_OPERAND (arg0
, 1)));
8120 return fold_convert_loc (loc
, type
, tem
);
8122 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8124 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8125 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8126 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8128 if (TREE_CODE (arg0
) == CALL_EXPR
)
8130 tree fn
= get_callee_fndecl (arg0
);
8131 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8132 switch (DECL_FUNCTION_CODE (fn
))
8134 CASE_FLT_FN (BUILT_IN_CEXPI
):
8135 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8137 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8147 /* Fold *&X to X if X is an lvalue. */
8148 if (TREE_CODE (op0
) == ADDR_EXPR
)
8150 tree op00
= TREE_OPERAND (op0
, 0);
8151 if ((TREE_CODE (op00
) == VAR_DECL
8152 || TREE_CODE (op00
) == PARM_DECL
8153 || TREE_CODE (op00
) == RESULT_DECL
)
8154 && !TREE_READONLY (op00
))
8161 } /* switch (code) */
8165 /* If the operation was a conversion do _not_ mark a resulting constant
8166 with TREE_OVERFLOW if the original constant was not. These conversions
8167 have implementation defined behavior and retaining the TREE_OVERFLOW
8168 flag here would confuse later passes such as VRP. */
8170 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8171 tree type
, tree op0
)
8173 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8175 && TREE_CODE (res
) == INTEGER_CST
8176 && TREE_CODE (op0
) == INTEGER_CST
8177 && CONVERT_EXPR_CODE_P (code
))
8178 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8183 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8184 operands OP0 and OP1. LOC is the location of the resulting expression.
8185 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8186 Return the folded expression if folding is successful. Otherwise,
8187 return NULL_TREE. */
8189 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8190 tree arg0
, tree arg1
, tree op0
, tree op1
)
8194 /* We only do these simplifications if we are optimizing. */
8198 /* Check for things like (A || B) && (A || C). We can convert this
8199 to A || (B && C). Note that either operator can be any of the four
8200 truth and/or operations and the transformation will still be
8201 valid. Also note that we only care about order for the
8202 ANDIF and ORIF operators. If B contains side effects, this
8203 might change the truth-value of A. */
8204 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8205 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8206 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8207 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8208 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8209 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8211 tree a00
= TREE_OPERAND (arg0
, 0);
8212 tree a01
= TREE_OPERAND (arg0
, 1);
8213 tree a10
= TREE_OPERAND (arg1
, 0);
8214 tree a11
= TREE_OPERAND (arg1
, 1);
8215 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8216 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8217 && (code
== TRUTH_AND_EXPR
8218 || code
== TRUTH_OR_EXPR
));
8220 if (operand_equal_p (a00
, a10
, 0))
8221 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8222 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8223 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8224 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8225 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8226 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8227 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8228 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8230 /* This case if tricky because we must either have commutative
8231 operators or else A10 must not have side-effects. */
8233 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8234 && operand_equal_p (a01
, a11
, 0))
8235 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8236 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8240 /* See if we can build a range comparison. */
8241 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8244 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8245 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8247 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8249 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8252 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8253 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8255 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8257 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8260 /* Check for the possibility of merging component references. If our
8261 lhs is another similar operation, try to merge its rhs with our
8262 rhs. Then try to merge our lhs and rhs. */
8263 if (TREE_CODE (arg0
) == code
8264 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8265 TREE_OPERAND (arg0
, 1), arg1
)))
8266 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8268 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8271 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8272 && (code
== TRUTH_AND_EXPR
8273 || code
== TRUTH_ANDIF_EXPR
8274 || code
== TRUTH_OR_EXPR
8275 || code
== TRUTH_ORIF_EXPR
))
8277 enum tree_code ncode
, icode
;
8279 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8280 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8281 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8283 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8284 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8285 We don't want to pack more than two leafs to a non-IF AND/OR
8287 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8288 equal to IF-CODE, then we don't want to add right-hand operand.
8289 If the inner right-hand side of left-hand operand has
8290 side-effects, or isn't simple, then we can't add to it,
8291 as otherwise we might destroy if-sequence. */
8292 if (TREE_CODE (arg0
) == icode
8293 && simple_operand_p_2 (arg1
)
8294 /* Needed for sequence points to handle trappings, and
8296 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8298 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8300 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8303 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8304 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8305 else if (TREE_CODE (arg1
) == icode
8306 && simple_operand_p_2 (arg0
)
8307 /* Needed for sequence points to handle trappings, and
8309 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8311 tem
= fold_build2_loc (loc
, ncode
, type
,
8312 arg0
, TREE_OPERAND (arg1
, 0));
8313 return fold_build2_loc (loc
, icode
, type
, tem
,
8314 TREE_OPERAND (arg1
, 1));
8316 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8318 For sequence point consistancy, we need to check for trapping,
8319 and side-effects. */
8320 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8321 && simple_operand_p_2 (arg1
))
8322 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8328 /* Fold a binary expression of code CODE and type TYPE with operands
8329 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8330 Return the folded expression if folding is successful. Otherwise,
8331 return NULL_TREE. */
8334 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8336 enum tree_code compl_code
;
8338 if (code
== MIN_EXPR
)
8339 compl_code
= MAX_EXPR
;
8340 else if (code
== MAX_EXPR
)
8341 compl_code
= MIN_EXPR
;
8345 /* MIN (MAX (a, b), b) == b. */
8346 if (TREE_CODE (op0
) == compl_code
8347 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8348 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8350 /* MIN (MAX (b, a), b) == b. */
8351 if (TREE_CODE (op0
) == compl_code
8352 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8353 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8354 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8356 /* MIN (a, MAX (a, b)) == a. */
8357 if (TREE_CODE (op1
) == compl_code
8358 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8359 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8360 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8362 /* MIN (a, MAX (b, a)) == a. */
8363 if (TREE_CODE (op1
) == compl_code
8364 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8365 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8366 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8371 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8372 by changing CODE to reduce the magnitude of constants involved in
8373 ARG0 of the comparison.
8374 Returns a canonicalized comparison tree if a simplification was
8375 possible, otherwise returns NULL_TREE.
8376 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8377 valid if signed overflow is undefined. */
8380 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8381 tree arg0
, tree arg1
,
8382 bool *strict_overflow_p
)
8384 enum tree_code code0
= TREE_CODE (arg0
);
8385 tree t
, cst0
= NULL_TREE
;
8389 /* Match A +- CST code arg1 and CST code arg1. We can change the
8390 first form only if overflow is undefined. */
8391 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8392 /* In principle pointers also have undefined overflow behavior,
8393 but that causes problems elsewhere. */
8394 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8395 && (code0
== MINUS_EXPR
8396 || code0
== PLUS_EXPR
)
8397 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8398 || code0
== INTEGER_CST
))
8401 /* Identify the constant in arg0 and its sign. */
8402 if (code0
== INTEGER_CST
)
8405 cst0
= TREE_OPERAND (arg0
, 1);
8406 sgn0
= tree_int_cst_sgn (cst0
);
8408 /* Overflowed constants and zero will cause problems. */
8409 if (integer_zerop (cst0
)
8410 || TREE_OVERFLOW (cst0
))
8413 /* See if we can reduce the magnitude of the constant in
8414 arg0 by changing the comparison code. */
8415 if (code0
== INTEGER_CST
)
8417 /* CST <= arg1 -> CST-1 < arg1. */
8418 if (code
== LE_EXPR
&& sgn0
== 1)
8420 /* -CST < arg1 -> -CST-1 <= arg1. */
8421 else if (code
== LT_EXPR
&& sgn0
== -1)
8423 /* CST > arg1 -> CST-1 >= arg1. */
8424 else if (code
== GT_EXPR
&& sgn0
== 1)
8426 /* -CST >= arg1 -> -CST-1 > arg1. */
8427 else if (code
== GE_EXPR
&& sgn0
== -1)
8431 /* arg1 code' CST' might be more canonical. */
8436 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8438 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8440 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8441 else if (code
== GT_EXPR
8442 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8444 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8445 else if (code
== LE_EXPR
8446 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8448 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8449 else if (code
== GE_EXPR
8450 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8454 *strict_overflow_p
= true;
8457 /* Now build the constant reduced in magnitude. But not if that
8458 would produce one outside of its types range. */
8459 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8461 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8462 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8464 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8465 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8466 /* We cannot swap the comparison here as that would cause us to
8467 endlessly recurse. */
8470 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8471 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8472 if (code0
!= INTEGER_CST
)
8473 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8474 t
= fold_convert (TREE_TYPE (arg1
), t
);
8476 /* If swapping might yield to a more canonical form, do so. */
8478 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8480 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8483 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8484 overflow further. Try to decrease the magnitude of constants involved
8485 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8486 and put sole constants at the second argument position.
8487 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8490 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8491 tree arg0
, tree arg1
)
8494 bool strict_overflow_p
;
8495 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8496 "when reducing constant in comparison");
8498 /* Try canonicalization by simplifying arg0. */
8499 strict_overflow_p
= false;
8500 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8501 &strict_overflow_p
);
8504 if (strict_overflow_p
)
8505 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8509 /* Try canonicalization by simplifying arg1 using the swapped
8511 code
= swap_tree_comparison (code
);
8512 strict_overflow_p
= false;
8513 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8514 &strict_overflow_p
);
8515 if (t
&& strict_overflow_p
)
8516 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8520 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8521 space. This is used to avoid issuing overflow warnings for
8522 expressions like &p->x which can not wrap. */
8525 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8527 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8534 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8535 if (offset
== NULL_TREE
)
8536 wi_offset
= wi::zero (precision
);
8537 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8543 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8544 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8548 if (!wi::fits_uhwi_p (total
))
8551 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8555 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8557 if (TREE_CODE (base
) == ADDR_EXPR
)
8559 HOST_WIDE_INT base_size
;
8561 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8562 if (base_size
> 0 && size
< base_size
)
8566 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8569 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8570 kind INTEGER_CST. This makes sure to properly sign-extend the
8573 static HOST_WIDE_INT
8574 size_low_cst (const_tree t
)
8576 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8577 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8578 if (prec
< HOST_BITS_PER_WIDE_INT
)
8579 return sext_hwi (w
, prec
);
8583 /* Subroutine of fold_binary. This routine performs all of the
8584 transformations that are common to the equality/inequality
8585 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8586 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8587 fold_binary should call fold_binary. Fold a comparison with
8588 tree code CODE and type TYPE with operands OP0 and OP1. Return
8589 the folded comparison or NULL_TREE. */
8592 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8595 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8596 tree arg0
, arg1
, tem
;
8601 STRIP_SIGN_NOPS (arg0
);
8602 STRIP_SIGN_NOPS (arg1
);
8604 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8605 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8606 && (equality_code
|| TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8607 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8608 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8609 && TREE_CODE (arg1
) == INTEGER_CST
8610 && !TREE_OVERFLOW (arg1
))
8612 const enum tree_code
8613 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8614 tree const1
= TREE_OPERAND (arg0
, 1);
8615 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8616 tree variable
= TREE_OPERAND (arg0
, 0);
8617 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8619 /* If the constant operation overflowed this can be
8620 simplified as a comparison against INT_MAX/INT_MIN. */
8621 if (TREE_OVERFLOW (new_const
)
8622 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8624 int const1_sgn
= tree_int_cst_sgn (const1
);
8625 enum tree_code code2
= code
;
8627 /* Get the sign of the constant on the lhs if the
8628 operation were VARIABLE + CONST1. */
8629 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8630 const1_sgn
= -const1_sgn
;
8632 /* The sign of the constant determines if we overflowed
8633 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8634 Canonicalize to the INT_MIN overflow by swapping the comparison
8636 if (const1_sgn
== -1)
8637 code2
= swap_tree_comparison (code
);
8639 /* We now can look at the canonicalized case
8640 VARIABLE + 1 CODE2 INT_MIN
8641 and decide on the result. */
8648 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8654 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8663 fold_overflow_warning ("assuming signed overflow does not occur "
8664 "when changing X +- C1 cmp C2 to "
8666 WARN_STRICT_OVERFLOW_COMPARISON
);
8667 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8671 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8672 if (TREE_CODE (arg0
) == MINUS_EXPR
8674 && integer_zerop (arg1
))
8676 /* ??? The transformation is valid for the other operators if overflow
8677 is undefined for the type, but performing it here badly interacts
8678 with the transformation in fold_cond_expr_with_comparison which
8679 attempts to synthetize ABS_EXPR. */
8681 fold_overflow_warning ("assuming signed overflow does not occur "
8682 "when changing X - Y cmp 0 to X cmp Y",
8683 WARN_STRICT_OVERFLOW_COMPARISON
);
8684 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
8685 TREE_OPERAND (arg0
, 1));
8688 /* For comparisons of pointers we can decompose it to a compile time
8689 comparison of the base objects and the offsets into the object.
8690 This requires at least one operand being an ADDR_EXPR or a
8691 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8692 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8693 && (TREE_CODE (arg0
) == ADDR_EXPR
8694 || TREE_CODE (arg1
) == ADDR_EXPR
8695 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8696 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8698 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8699 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8701 int volatilep
, unsignedp
;
8702 bool indirect_base0
= false, indirect_base1
= false;
8704 /* Get base and offset for the access. Strip ADDR_EXPR for
8705 get_inner_reference, but put it back by stripping INDIRECT_REF
8706 off the base object if possible. indirect_baseN will be true
8707 if baseN is not an address but refers to the object itself. */
8709 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8711 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8712 &bitsize
, &bitpos0
, &offset0
, &mode
,
8713 &unsignedp
, &volatilep
, false);
8714 if (TREE_CODE (base0
) == INDIRECT_REF
)
8715 base0
= TREE_OPERAND (base0
, 0);
8717 indirect_base0
= true;
8719 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8721 base0
= TREE_OPERAND (arg0
, 0);
8722 STRIP_SIGN_NOPS (base0
);
8723 if (TREE_CODE (base0
) == ADDR_EXPR
)
8725 base0
= TREE_OPERAND (base0
, 0);
8726 indirect_base0
= true;
8728 offset0
= TREE_OPERAND (arg0
, 1);
8729 if (tree_fits_shwi_p (offset0
))
8731 HOST_WIDE_INT off
= size_low_cst (offset0
);
8732 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8734 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8736 bitpos0
= off
* BITS_PER_UNIT
;
8737 offset0
= NULL_TREE
;
8743 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8745 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8746 &bitsize
, &bitpos1
, &offset1
, &mode
,
8747 &unsignedp
, &volatilep
, false);
8748 if (TREE_CODE (base1
) == INDIRECT_REF
)
8749 base1
= TREE_OPERAND (base1
, 0);
8751 indirect_base1
= true;
8753 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8755 base1
= TREE_OPERAND (arg1
, 0);
8756 STRIP_SIGN_NOPS (base1
);
8757 if (TREE_CODE (base1
) == ADDR_EXPR
)
8759 base1
= TREE_OPERAND (base1
, 0);
8760 indirect_base1
= true;
8762 offset1
= TREE_OPERAND (arg1
, 1);
8763 if (tree_fits_shwi_p (offset1
))
8765 HOST_WIDE_INT off
= size_low_cst (offset1
);
8766 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8768 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8770 bitpos1
= off
* BITS_PER_UNIT
;
8771 offset1
= NULL_TREE
;
8776 /* A local variable can never be pointed to by
8777 the default SSA name of an incoming parameter. */
8778 if ((TREE_CODE (arg0
) == ADDR_EXPR
8780 && TREE_CODE (base0
) == VAR_DECL
8781 && auto_var_in_fn_p (base0
, current_function_decl
)
8783 && TREE_CODE (base1
) == SSA_NAME
8784 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8785 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8786 || (TREE_CODE (arg1
) == ADDR_EXPR
8788 && TREE_CODE (base1
) == VAR_DECL
8789 && auto_var_in_fn_p (base1
, current_function_decl
)
8791 && TREE_CODE (base0
) == SSA_NAME
8792 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8793 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8795 if (code
== NE_EXPR
)
8796 return constant_boolean_node (1, type
);
8797 else if (code
== EQ_EXPR
)
8798 return constant_boolean_node (0, type
);
8800 /* If we have equivalent bases we might be able to simplify. */
8801 else if (indirect_base0
== indirect_base1
8802 && operand_equal_p (base0
, base1
, 0))
8804 /* We can fold this expression to a constant if the non-constant
8805 offset parts are equal. */
8806 if ((offset0
== offset1
8807 || (offset0
&& offset1
8808 && operand_equal_p (offset0
, offset1
, 0)))
8811 || (indirect_base0
&& DECL_P (base0
))
8812 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8816 && bitpos0
!= bitpos1
8817 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8818 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8819 fold_overflow_warning (("assuming pointer wraparound does not "
8820 "occur when comparing P +- C1 with "
8822 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8827 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8829 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8831 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8833 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8835 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8837 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8841 /* We can simplify the comparison to a comparison of the variable
8842 offset parts if the constant offset parts are equal.
8843 Be careful to use signed sizetype here because otherwise we
8844 mess with array offsets in the wrong way. This is possible
8845 because pointer arithmetic is restricted to retain within an
8846 object and overflow on pointer differences is undefined as of
8847 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8848 else if (bitpos0
== bitpos1
8850 || (indirect_base0
&& DECL_P (base0
))
8851 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8853 /* By converting to signed sizetype we cover middle-end pointer
8854 arithmetic which operates on unsigned pointer types of size
8855 type size and ARRAY_REF offsets which are properly sign or
8856 zero extended from their type in case it is narrower than
8858 if (offset0
== NULL_TREE
)
8859 offset0
= build_int_cst (ssizetype
, 0);
8861 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8862 if (offset1
== NULL_TREE
)
8863 offset1
= build_int_cst (ssizetype
, 0);
8865 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8868 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8869 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8870 fold_overflow_warning (("assuming pointer wraparound does not "
8871 "occur when comparing P +- C1 with "
8873 WARN_STRICT_OVERFLOW_COMPARISON
);
8875 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8878 /* For non-equal bases we can simplify if they are addresses
8879 of local binding decls or constants. */
8880 else if (indirect_base0
&& indirect_base1
8881 /* We know that !operand_equal_p (base0, base1, 0)
8882 because the if condition was false. But make
8883 sure two decls are not the same. */
8885 && TREE_CODE (arg0
) == ADDR_EXPR
8886 && TREE_CODE (arg1
) == ADDR_EXPR
8887 && (((TREE_CODE (base0
) == VAR_DECL
8888 || TREE_CODE (base0
) == PARM_DECL
)
8889 && (targetm
.binds_local_p (base0
)
8890 || CONSTANT_CLASS_P (base1
)))
8891 || CONSTANT_CLASS_P (base0
))
8892 && (((TREE_CODE (base1
) == VAR_DECL
8893 || TREE_CODE (base1
) == PARM_DECL
)
8894 && (targetm
.binds_local_p (base1
)
8895 || CONSTANT_CLASS_P (base0
)))
8896 || CONSTANT_CLASS_P (base1
)))
8898 if (code
== EQ_EXPR
)
8899 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
8901 else if (code
== NE_EXPR
)
8902 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
8905 /* For equal offsets we can simplify to a comparison of the
8907 else if (bitpos0
== bitpos1
8909 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8911 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8912 && ((offset0
== offset1
)
8913 || (offset0
&& offset1
8914 && operand_equal_p (offset0
, offset1
, 0))))
8917 base0
= build_fold_addr_expr_loc (loc
, base0
);
8919 base1
= build_fold_addr_expr_loc (loc
, base1
);
8920 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8924 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8925 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8926 the resulting offset is smaller in absolute value than the
8927 original one and has the same sign. */
8928 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8929 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8930 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8931 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8932 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8933 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8934 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8936 tree const1
= TREE_OPERAND (arg0
, 1);
8937 tree const2
= TREE_OPERAND (arg1
, 1);
8938 tree variable1
= TREE_OPERAND (arg0
, 0);
8939 tree variable2
= TREE_OPERAND (arg1
, 0);
8941 const char * const warnmsg
= G_("assuming signed overflow does not "
8942 "occur when combining constants around "
8945 /* Put the constant on the side where it doesn't overflow and is
8946 of lower absolute value and of same sign than before. */
8947 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8948 ? MINUS_EXPR
: PLUS_EXPR
,
8950 if (!TREE_OVERFLOW (cst
)
8951 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8952 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8954 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8955 return fold_build2_loc (loc
, code
, type
,
8957 fold_build2_loc (loc
, TREE_CODE (arg1
),
8962 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8963 ? MINUS_EXPR
: PLUS_EXPR
,
8965 if (!TREE_OVERFLOW (cst
)
8966 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8967 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8969 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8970 return fold_build2_loc (loc
, code
, type
,
8971 fold_build2_loc (loc
, TREE_CODE (arg0
),
8978 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8979 signed arithmetic case. That form is created by the compiler
8980 often enough for folding it to be of value. One example is in
8981 computing loop trip counts after Operator Strength Reduction. */
8982 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8983 && TREE_CODE (arg0
) == MULT_EXPR
8984 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8985 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8986 && integer_zerop (arg1
))
8988 tree const1
= TREE_OPERAND (arg0
, 1);
8989 tree const2
= arg1
; /* zero */
8990 tree variable1
= TREE_OPERAND (arg0
, 0);
8991 enum tree_code cmp_code
= code
;
8993 /* Handle unfolded multiplication by zero. */
8994 if (integer_zerop (const1
))
8995 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
8997 fold_overflow_warning (("assuming signed overflow does not occur when "
8998 "eliminating multiplication in comparison "
9000 WARN_STRICT_OVERFLOW_COMPARISON
);
9002 /* If const1 is negative we swap the sense of the comparison. */
9003 if (tree_int_cst_sgn (const1
) < 0)
9004 cmp_code
= swap_tree_comparison (cmp_code
);
9006 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9009 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9013 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9015 tree targ0
= strip_float_extensions (arg0
);
9016 tree targ1
= strip_float_extensions (arg1
);
9017 tree newtype
= TREE_TYPE (targ0
);
9019 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9020 newtype
= TREE_TYPE (targ1
);
9022 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9023 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9024 return fold_build2_loc (loc
, code
, type
,
9025 fold_convert_loc (loc
, newtype
, targ0
),
9026 fold_convert_loc (loc
, newtype
, targ1
));
9028 /* (-a) CMP (-b) -> b CMP a */
9029 if (TREE_CODE (arg0
) == NEGATE_EXPR
9030 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9031 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9032 TREE_OPERAND (arg0
, 0));
9034 if (TREE_CODE (arg1
) == REAL_CST
)
9036 REAL_VALUE_TYPE cst
;
9037 cst
= TREE_REAL_CST (arg1
);
9039 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9040 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9041 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9042 TREE_OPERAND (arg0
, 0),
9043 build_real (TREE_TYPE (arg1
),
9044 real_value_negate (&cst
)));
9046 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9047 /* a CMP (-0) -> a CMP 0 */
9048 if (REAL_VALUE_MINUS_ZERO (cst
))
9049 return fold_build2_loc (loc
, code
, type
, arg0
,
9050 build_real (TREE_TYPE (arg1
), dconst0
));
9052 /* x != NaN is always true, other ops are always false. */
9053 if (REAL_VALUE_ISNAN (cst
)
9054 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9056 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9057 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9060 /* Fold comparisons against infinity. */
9061 if (REAL_VALUE_ISINF (cst
)
9062 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9064 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9065 if (tem
!= NULL_TREE
)
9070 /* If this is a comparison of a real constant with a PLUS_EXPR
9071 or a MINUS_EXPR of a real constant, we can convert it into a
9072 comparison with a revised real constant as long as no overflow
9073 occurs when unsafe_math_optimizations are enabled. */
9074 if (flag_unsafe_math_optimizations
9075 && TREE_CODE (arg1
) == REAL_CST
9076 && (TREE_CODE (arg0
) == PLUS_EXPR
9077 || TREE_CODE (arg0
) == MINUS_EXPR
)
9078 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9079 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9080 ? MINUS_EXPR
: PLUS_EXPR
,
9081 arg1
, TREE_OPERAND (arg0
, 1)))
9082 && !TREE_OVERFLOW (tem
))
9083 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9085 /* Likewise, we can simplify a comparison of a real constant with
9086 a MINUS_EXPR whose first operand is also a real constant, i.e.
9087 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9088 floating-point types only if -fassociative-math is set. */
9089 if (flag_associative_math
9090 && TREE_CODE (arg1
) == REAL_CST
9091 && TREE_CODE (arg0
) == MINUS_EXPR
9092 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9093 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9095 && !TREE_OVERFLOW (tem
))
9096 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9097 TREE_OPERAND (arg0
, 1), tem
);
9099 /* Fold comparisons against built-in math functions. */
9100 if (TREE_CODE (arg1
) == REAL_CST
9101 && flag_unsafe_math_optimizations
9102 && ! flag_errno_math
)
9104 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9106 if (fcode
!= END_BUILTINS
)
9108 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9109 if (tem
!= NULL_TREE
)
9115 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9116 && CONVERT_EXPR_P (arg0
))
9118 /* If we are widening one operand of an integer comparison,
9119 see if the other operand is similarly being widened. Perhaps we
9120 can do the comparison in the narrower type. */
9121 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9125 /* Or if we are changing signedness. */
9126 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9131 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9132 constant, we can simplify it. */
9133 if (TREE_CODE (arg1
) == INTEGER_CST
9134 && (TREE_CODE (arg0
) == MIN_EXPR
9135 || TREE_CODE (arg0
) == MAX_EXPR
)
9136 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9138 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9143 /* Simplify comparison of something with itself. (For IEEE
9144 floating-point, we can only do some of these simplifications.) */
9145 if (operand_equal_p (arg0
, arg1
, 0))
9150 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9151 || ! HONOR_NANS (element_mode (arg0
)))
9152 return constant_boolean_node (1, type
);
9157 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9158 || ! HONOR_NANS (element_mode (arg0
)))
9159 return constant_boolean_node (1, type
);
9160 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9163 /* For NE, we can only do this simplification if integer
9164 or we don't honor IEEE floating point NaNs. */
9165 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9166 && HONOR_NANS (element_mode (arg0
)))
9168 /* ... fall through ... */
9171 return constant_boolean_node (0, type
);
9177 /* If we are comparing an expression that just has comparisons
9178 of two integer values, arithmetic expressions of those comparisons,
9179 and constants, we can simplify it. There are only three cases
9180 to check: the two values can either be equal, the first can be
9181 greater, or the second can be greater. Fold the expression for
9182 those three values. Since each value must be 0 or 1, we have
9183 eight possibilities, each of which corresponds to the constant 0
9184 or 1 or one of the six possible comparisons.
9186 This handles common cases like (a > b) == 0 but also handles
9187 expressions like ((x > y) - (y > x)) > 0, which supposedly
9188 occur in macroized code. */
9190 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9192 tree cval1
= 0, cval2
= 0;
9195 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9196 /* Don't handle degenerate cases here; they should already
9197 have been handled anyway. */
9198 && cval1
!= 0 && cval2
!= 0
9199 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9200 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9201 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9202 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9203 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9204 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9205 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9207 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9208 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9210 /* We can't just pass T to eval_subst in case cval1 or cval2
9211 was the same as ARG1. */
9214 = fold_build2_loc (loc
, code
, type
,
9215 eval_subst (loc
, arg0
, cval1
, maxval
,
9219 = fold_build2_loc (loc
, code
, type
,
9220 eval_subst (loc
, arg0
, cval1
, maxval
,
9224 = fold_build2_loc (loc
, code
, type
,
9225 eval_subst (loc
, arg0
, cval1
, minval
,
9229 /* All three of these results should be 0 or 1. Confirm they are.
9230 Then use those values to select the proper code to use. */
9232 if (TREE_CODE (high_result
) == INTEGER_CST
9233 && TREE_CODE (equal_result
) == INTEGER_CST
9234 && TREE_CODE (low_result
) == INTEGER_CST
)
9236 /* Make a 3-bit mask with the high-order bit being the
9237 value for `>', the next for '=', and the low for '<'. */
9238 switch ((integer_onep (high_result
) * 4)
9239 + (integer_onep (equal_result
) * 2)
9240 + integer_onep (low_result
))
9244 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9265 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9270 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9271 SET_EXPR_LOCATION (tem
, loc
);
9274 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9279 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9280 into a single range test. */
9281 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9282 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9283 && TREE_CODE (arg1
) == INTEGER_CST
9284 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9285 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9286 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9287 && !TREE_OVERFLOW (arg1
))
9289 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9290 if (tem
!= NULL_TREE
)
9294 /* Fold ~X op ~Y as Y op X. */
9295 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9296 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9298 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9299 return fold_build2_loc (loc
, code
, type
,
9300 fold_convert_loc (loc
, cmp_type
,
9301 TREE_OPERAND (arg1
, 0)),
9302 TREE_OPERAND (arg0
, 0));
9305 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9306 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9307 && (TREE_CODE (arg1
) == INTEGER_CST
|| TREE_CODE (arg1
) == VECTOR_CST
))
9309 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9310 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9311 TREE_OPERAND (arg0
, 0),
9312 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9313 fold_convert_loc (loc
, cmp_type
, arg1
)));
9320 /* Subroutine of fold_binary. Optimize complex multiplications of the
9321 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9322 argument EXPR represents the expression "z" of type TYPE. */
9325 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9327 tree itype
= TREE_TYPE (type
);
9328 tree rpart
, ipart
, tem
;
9330 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9332 rpart
= TREE_OPERAND (expr
, 0);
9333 ipart
= TREE_OPERAND (expr
, 1);
9335 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9337 rpart
= TREE_REALPART (expr
);
9338 ipart
= TREE_IMAGPART (expr
);
9342 expr
= save_expr (expr
);
9343 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9344 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9347 rpart
= save_expr (rpart
);
9348 ipart
= save_expr (ipart
);
9349 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9350 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9351 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9352 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9353 build_zero_cst (itype
));
9357 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9358 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9359 guarantees that P and N have the same least significant log2(M) bits.
9360 N is not otherwise constrained. In particular, N is not normalized to
9361 0 <= N < M as is common. In general, the precise value of P is unknown.
9362 M is chosen as large as possible such that constant N can be determined.
9364 Returns M and sets *RESIDUE to N.
9366 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9367 account. This is not always possible due to PR 35705.
9370 static unsigned HOST_WIDE_INT
9371 get_pointer_modulus_and_residue (tree expr
, unsigned HOST_WIDE_INT
*residue
,
9372 bool allow_func_align
)
/* NOTE(review): the text of this function is a garbled extraction --
   several physical source lines (the prologue that zeroes *RESIDUE, any
   STRIP_NOPS calls, the power-of-two reduction of ALIGN, the closing
   returns) appear to have been dropped.  Verify against upstream before
   editing.  */
9374 enum tree_code code
;
9378 code
= TREE_CODE (expr
);
/* &object: derive modulus/residue from the object's known alignment,
   converting from bits to bytes.  */
9379 if (code
== ADDR_EXPR
)
9381 unsigned int bitalign
;
9382 get_object_alignment_1 (TREE_OPERAND (expr
, 0), &bitalign
, residue
);
9383 *residue
/= BITS_PER_UNIT
;
9384 return bitalign
/ BITS_PER_UNIT
;
/* ptr p+ offset: recurse on the pointer operand, then fold the offset
   into the result.  */
9386 else if (code
== POINTER_PLUS_EXPR
)
9389 unsigned HOST_WIDE_INT modulus
;
9390 enum tree_code inner_code
;
9392 op0
= TREE_OPERAND (expr
, 0);
9394 modulus
= get_pointer_modulus_and_residue (op0
, residue
,
/* NOTE(review): the continuation of this recursive call (presumably
   passing ALLOW_FUNC_ALIGN) is missing from the extracted text.  */
9397 op1
= TREE_OPERAND (expr
, 1);
9399 inner_code
= TREE_CODE (op1
);
/* A constant offset only shifts the residue; the modulus is kept.  */
9400 if (inner_code
== INTEGER_CST
)
9402 *residue
+= TREE_INT_CST_LOW (op1
);
/* A scaled offset (x * CST) can only tighten the modulus, down to the
   greatest power-of-two divisor of CST; the residue is untouched here.  */
9405 else if (inner_code
== MULT_EXPR
)
9407 op1
= TREE_OPERAND (op1
, 1);
9408 if (TREE_CODE (op1
) == INTEGER_CST
)
9410 unsigned HOST_WIDE_INT align
;
9412 /* Compute the greatest power-of-2 divisor of op1. */
9413 align
= TREE_INT_CST_LOW (op1
);
/* NOTE(review): the step that reduces ALIGN to its greatest power-of-two
   divisor (align &= -align) and the guard on ALIGN being non-zero,
   described by the comment below, seem to be missing from this text.  */
9416 /* If align is non-zero and less than *modulus, replace
9417 *modulus with align., If align is 0, then either op1 is 0
9418 or the greatest power-of-2 divisor of op1 doesn't fit in an
9419 unsigned HOST_WIDE_INT. In either case, no additional
9420 constraint is imposed. */
9422 modulus
= MIN (modulus
, align
);
9429 /* If we get here, we were unable to determine anything useful about the
9434 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9435 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9438 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
9440 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
9442 if (TREE_CODE (arg
) == VECTOR_CST
)
9444 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
9445 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9447 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9449 constructor_elt
*elt
;
9451 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
9452 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
9455 elts
[i
] = elt
->value
;
9459 for (; i
< nelts
; i
++)
9461 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9465 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9466 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9467 NULL_TREE otherwise. */
9470 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
9472 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
9474 bool need_ctor
= false;
9476 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
9477 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
9478 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9479 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9482 elts
= XALLOCAVEC (tree
, nelts
* 3);
9483 if (!vec_cst_ctor_to_array (arg0
, elts
)
9484 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
9487 for (i
= 0; i
< nelts
; i
++)
9489 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
9491 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
9496 vec
<constructor_elt
, va_gc
> *v
;
9497 vec_alloc (v
, nelts
);
9498 for (i
= 0; i
< nelts
; i
++)
9499 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
9500 return build_constructor (type
, v
);
9503 return build_vector (type
, &elts
[2 * nelts
]);
9506 /* Try to fold a pointer difference of type TYPE two address expressions of
9507 array references AREF0 and AREF1 using location LOC. Return a
9508 simplified expression for the difference or NULL_TREE. */
9511 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
9512 tree aref0
, tree aref1
)
9514 tree base0
= TREE_OPERAND (aref0
, 0);
9515 tree base1
= TREE_OPERAND (aref1
, 0);
9516 tree base_offset
= build_int_cst (type
, 0);
9518 /* If the bases are array references as well, recurse. If the bases
9519 are pointer indirections compute the difference of the pointers.
9520 If the bases are equal, we are set. */
9521 if ((TREE_CODE (base0
) == ARRAY_REF
9522 && TREE_CODE (base1
) == ARRAY_REF
9524 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
9525 || (INDIRECT_REF_P (base0
)
9526 && INDIRECT_REF_P (base1
)
9527 && (base_offset
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9528 TREE_OPERAND (base0
, 0),
9529 TREE_OPERAND (base1
, 0))))
9530 || operand_equal_p (base0
, base1
, 0))
9532 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
9533 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
9534 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
9535 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9536 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9538 fold_build2_loc (loc
, MULT_EXPR
, type
,
9544 /* If the real or vector real constant CST of type TYPE has an exact
9545 inverse, return it, else return NULL. */
9548 exact_inverse (tree type
, tree cst
)
9551 tree unit_type
, *elts
;
9553 unsigned vec_nelts
, i
;
9555 switch (TREE_CODE (cst
))
9558 r
= TREE_REAL_CST (cst
);
9560 if (exact_real_inverse (TYPE_MODE (type
), &r
))
9561 return build_real (type
, r
);
9566 vec_nelts
= VECTOR_CST_NELTS (cst
);
9567 elts
= XALLOCAVEC (tree
, vec_nelts
);
9568 unit_type
= TREE_TYPE (type
);
9569 mode
= TYPE_MODE (unit_type
);
9571 for (i
= 0; i
< vec_nelts
; i
++)
9573 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
9574 if (!exact_real_inverse (mode
, &r
))
9576 elts
[i
] = build_real (unit_type
, r
);
9579 return build_vector (type
, elts
);
9586 /* Mask out the tz least significant bits of X of type TYPE where
9587 tz is the number of trailing zeroes in Y. */
9589 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
9591 int tz
= wi::ctz (y
);
9593 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
9597 /* Return true when T is an address and is known to be nonzero.
9598 For floating point we further ensure that T is not denormal.
9599 Similar logic is present in nonzero_address in rtlanal.h.
9601 If the return value is based on the assumption that signed overflow
9602 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9603 change *STRICT_OVERFLOW_P. */
9606 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
/* NOTE(review): garbled extraction -- the return-type line, several case
   labels (only tcc_comparison/tcc_declaration and a subset of the
   expression codes survive here) and the closing returns are missing
   from this text.  Verify against upstream before editing.  */
9608 tree type
= TREE_TYPE (t
);
9609 enum tree_code code
;
9611 /* Doing something useful for floating point would need more work. */
9612 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
9615 code
= TREE_CODE (t
);
/* First dispatch on the tree code class: arity-specific helpers for
   unary/binary/comparison codes, the single-operand helper for simple
   references/declarations.  */
9616 switch (TREE_CODE_CLASS (code
))
9619 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9622 case tcc_comparison
:
9623 return tree_binary_nonzero_warnv_p (code
, type
,
9624 TREE_OPERAND (t
, 0),
9625 TREE_OPERAND (t
, 1),
9628 case tcc_declaration
:
9630 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
/* Then dispatch on specific codes not covered by their class.  */
9638 case TRUTH_NOT_EXPR
:
9639 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9642 case TRUTH_AND_EXPR
:
9644 case TRUTH_XOR_EXPR
:
9645 return tree_binary_nonzero_warnv_p (code
, type
,
9646 TREE_OPERAND (t
, 0),
9647 TREE_OPERAND (t
, 1),
9655 case WITH_SIZE_EXPR
:
9657 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
/* Wrappers whose value is operand 1...  */
9662 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
/* ...and wrappers whose value is operand 0.  */
9666 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
/* Calls: a non-throwing operator new cannot return null when null-pointer
   checks may be deleted and -fcheck-new is off; callees carrying the
   returns_nonnull attribute cannot return null; alloca never returns
   null.  */
9671 tree fndecl
= get_callee_fndecl (t
);
9672 if (!fndecl
) return false;
9673 if (flag_delete_null_pointer_checks
&& !flag_check_new
9674 && DECL_IS_OPERATOR_NEW (fndecl
)
9675 && !TREE_NOTHROW (fndecl
))
9677 if (flag_delete_null_pointer_checks
9678 && lookup_attribute ("returns_nonnull",
9679 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9681 return alloca_call_p (t
);
9690 /* Return true when T is an address and is known to be nonzero.
9691 Handle warnings about undefined signed overflow. */
9694 tree_expr_nonzero_p (tree t
)
9696 bool ret
, strict_overflow_p
;
9698 strict_overflow_p
= false;
9699 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9700 if (strict_overflow_p
)
9701 fold_overflow_warning (("assuming signed overflow does not occur when "
9702 "determining that expression is always "
9704 WARN_STRICT_OVERFLOW_MISC
);
9708 /* Fold a binary expression of code CODE and type TYPE with operands
9709 OP0 and OP1. LOC is the location of the resulting expression.
9710 Return the folded expression if folding is successful. Otherwise,
9711 return NULL_TREE. */
9714 fold_binary_loc (location_t loc
,
9715 enum tree_code code
, tree type
, tree op0
, tree op1
)
9717 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9718 tree arg0
, arg1
, tem
;
9719 tree t1
= NULL_TREE
;
9720 bool strict_overflow_p
;
9723 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9724 && TREE_CODE_LENGTH (code
) == 2
9726 && op1
!= NULL_TREE
);
9731 /* Strip any conversions that don't change the mode. This is
9732 safe for every expression, except for a comparison expression
9733 because its signedness is derived from its operands. So, in
9734 the latter case, only strip conversions that don't change the
9735 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9738 Note that this is done as an internal manipulation within the
9739 constant folder, in order to find the simplest representation
9740 of the arguments so that their form can be studied. In any
9741 cases, the appropriate type conversions should be put back in
9742 the tree that will get out of the constant folder. */
9744 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9746 STRIP_SIGN_NOPS (arg0
);
9747 STRIP_SIGN_NOPS (arg1
);
9755 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9756 constant but we can't do arithmetic on them. */
9757 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9759 if (kind
== tcc_binary
)
9761 /* Make sure type and arg0 have the same saturating flag. */
9762 gcc_checking_assert (TYPE_SATURATING (type
)
9763 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9764 tem
= const_binop (code
, arg0
, arg1
);
9766 else if (kind
== tcc_comparison
)
9767 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9771 if (tem
!= NULL_TREE
)
9773 if (TREE_TYPE (tem
) != type
)
9774 tem
= fold_convert_loc (loc
, type
, tem
);
9779 /* If this is a commutative operation, and ARG0 is a constant, move it
9780 to ARG1 to reduce the number of tests below. */
9781 if (commutative_tree_code (code
)
9782 && tree_swap_operands_p (arg0
, arg1
, true))
9783 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9785 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9786 to ARG1 to reduce the number of tests below. */
9787 if (kind
== tcc_comparison
9788 && tree_swap_operands_p (arg0
, arg1
, true))
9789 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9791 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9795 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9797 First check for cases where an arithmetic operation is applied to a
9798 compound, conditional, or comparison operation. Push the arithmetic
9799 operation inside the compound or conditional to see if any folding
9800 can then be done. Convert comparison to conditional for this purpose.
9801 The also optimizes non-constant cases that used to be done in
9804 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9805 one of the operands is a comparison and the other is a comparison, a
9806 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9807 code below would make the expression more complex. Change it to a
9808 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9809 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9811 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9812 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9813 && TREE_CODE (type
) != VECTOR_TYPE
9814 && ((truth_value_p (TREE_CODE (arg0
))
9815 && (truth_value_p (TREE_CODE (arg1
))
9816 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9817 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9818 || (truth_value_p (TREE_CODE (arg1
))
9819 && (truth_value_p (TREE_CODE (arg0
))
9820 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9821 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9823 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9824 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9827 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9828 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9830 if (code
== EQ_EXPR
)
9831 tem
= invert_truthvalue_loc (loc
, tem
);
9833 return fold_convert_loc (loc
, type
, tem
);
9836 if (TREE_CODE_CLASS (code
) == tcc_binary
9837 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9839 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9841 tem
= fold_build2_loc (loc
, code
, type
,
9842 fold_convert_loc (loc
, TREE_TYPE (op0
),
9843 TREE_OPERAND (arg0
, 1)), op1
);
9844 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9847 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9848 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9850 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9851 fold_convert_loc (loc
, TREE_TYPE (op1
),
9852 TREE_OPERAND (arg1
, 1)));
9853 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9857 if (TREE_CODE (arg0
) == COND_EXPR
9858 || TREE_CODE (arg0
) == VEC_COND_EXPR
9859 || COMPARISON_CLASS_P (arg0
))
9861 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9863 /*cond_first_p=*/1);
9864 if (tem
!= NULL_TREE
)
9868 if (TREE_CODE (arg1
) == COND_EXPR
9869 || TREE_CODE (arg1
) == VEC_COND_EXPR
9870 || COMPARISON_CLASS_P (arg1
))
9872 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9874 /*cond_first_p=*/0);
9875 if (tem
!= NULL_TREE
)
9883 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9884 if (TREE_CODE (arg0
) == ADDR_EXPR
9885 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9887 tree iref
= TREE_OPERAND (arg0
, 0);
9888 return fold_build2 (MEM_REF
, type
,
9889 TREE_OPERAND (iref
, 0),
9890 int_const_binop (PLUS_EXPR
, arg1
,
9891 TREE_OPERAND (iref
, 1)));
9894 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9895 if (TREE_CODE (arg0
) == ADDR_EXPR
9896 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9899 HOST_WIDE_INT coffset
;
9900 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9904 return fold_build2 (MEM_REF
, type
,
9905 build_fold_addr_expr (base
),
9906 int_const_binop (PLUS_EXPR
, arg1
,
9907 size_int (coffset
)));
9912 case POINTER_PLUS_EXPR
:
9913 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9914 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9915 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9916 return fold_convert_loc (loc
, type
,
9917 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9918 fold_convert_loc (loc
, sizetype
,
9920 fold_convert_loc (loc
, sizetype
,
9923 /* PTR_CST +p CST -> CST1 */
9924 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9925 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
9926 fold_convert_loc (loc
, type
, arg1
));
9931 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9933 /* X + (X / CST) * -CST is X % CST. */
9934 if (TREE_CODE (arg1
) == MULT_EXPR
9935 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9936 && operand_equal_p (arg0
,
9937 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9939 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9940 tree cst1
= TREE_OPERAND (arg1
, 1);
9941 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9943 if (sum
&& integer_zerop (sum
))
9944 return fold_convert_loc (loc
, type
,
9945 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9946 TREE_TYPE (arg0
), arg0
,
9951 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9952 one. Make sure the type is not saturating and has the signedness of
9953 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9954 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9955 if ((TREE_CODE (arg0
) == MULT_EXPR
9956 || TREE_CODE (arg1
) == MULT_EXPR
)
9957 && !TYPE_SATURATING (type
)
9958 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9959 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9960 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9962 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9967 if (! FLOAT_TYPE_P (type
))
9969 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9970 with a constant, and the two constants have no bits in common,
9971 we should treat this as a BIT_IOR_EXPR since this may produce more
9973 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9974 && TREE_CODE (arg1
) == BIT_AND_EXPR
9975 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9976 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9977 && wi::bit_and (TREE_OPERAND (arg0
, 1),
9978 TREE_OPERAND (arg1
, 1)) == 0)
9980 code
= BIT_IOR_EXPR
;
9984 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9985 (plus (plus (mult) (mult)) (foo)) so that we can
9986 take advantage of the factoring cases below. */
9987 if (TYPE_OVERFLOW_WRAPS (type
)
9988 && (((TREE_CODE (arg0
) == PLUS_EXPR
9989 || TREE_CODE (arg0
) == MINUS_EXPR
)
9990 && TREE_CODE (arg1
) == MULT_EXPR
)
9991 || ((TREE_CODE (arg1
) == PLUS_EXPR
9992 || TREE_CODE (arg1
) == MINUS_EXPR
)
9993 && TREE_CODE (arg0
) == MULT_EXPR
)))
9995 tree parg0
, parg1
, parg
, marg
;
9996 enum tree_code pcode
;
9998 if (TREE_CODE (arg1
) == MULT_EXPR
)
9999 parg
= arg0
, marg
= arg1
;
10001 parg
= arg1
, marg
= arg0
;
10002 pcode
= TREE_CODE (parg
);
10003 parg0
= TREE_OPERAND (parg
, 0);
10004 parg1
= TREE_OPERAND (parg
, 1);
10005 STRIP_NOPS (parg0
);
10006 STRIP_NOPS (parg1
);
10008 if (TREE_CODE (parg0
) == MULT_EXPR
10009 && TREE_CODE (parg1
) != MULT_EXPR
)
10010 return fold_build2_loc (loc
, pcode
, type
,
10011 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10012 fold_convert_loc (loc
, type
,
10014 fold_convert_loc (loc
, type
,
10016 fold_convert_loc (loc
, type
, parg1
));
10017 if (TREE_CODE (parg0
) != MULT_EXPR
10018 && TREE_CODE (parg1
) == MULT_EXPR
)
10020 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10021 fold_convert_loc (loc
, type
, parg0
),
10022 fold_build2_loc (loc
, pcode
, type
,
10023 fold_convert_loc (loc
, type
, marg
),
10024 fold_convert_loc (loc
, type
,
10030 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10031 to __complex__ ( x, y ). This is not the same for SNaNs or
10032 if signed zeros are involved. */
10033 if (!HONOR_SNANS (element_mode (arg0
))
10034 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10035 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10037 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10038 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10039 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10040 bool arg0rz
= false, arg0iz
= false;
10041 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10042 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10044 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10045 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10046 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10048 tree rp
= arg1r
? arg1r
10049 : build1 (REALPART_EXPR
, rtype
, arg1
);
10050 tree ip
= arg0i
? arg0i
10051 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10052 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10054 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10056 tree rp
= arg0r
? arg0r
10057 : build1 (REALPART_EXPR
, rtype
, arg0
);
10058 tree ip
= arg1i
? arg1i
10059 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10060 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10065 if (flag_unsafe_math_optimizations
10066 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10067 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10068 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10071 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10072 We associate floats only if the user has specified
10073 -fassociative-math. */
10074 if (flag_associative_math
10075 && TREE_CODE (arg1
) == PLUS_EXPR
10076 && TREE_CODE (arg0
) != MULT_EXPR
)
10078 tree tree10
= TREE_OPERAND (arg1
, 0);
10079 tree tree11
= TREE_OPERAND (arg1
, 1);
10080 if (TREE_CODE (tree11
) == MULT_EXPR
10081 && TREE_CODE (tree10
) == MULT_EXPR
)
10084 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10085 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10088 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10089 We associate floats only if the user has specified
10090 -fassociative-math. */
10091 if (flag_associative_math
10092 && TREE_CODE (arg0
) == PLUS_EXPR
10093 && TREE_CODE (arg1
) != MULT_EXPR
)
10095 tree tree00
= TREE_OPERAND (arg0
, 0);
10096 tree tree01
= TREE_OPERAND (arg0
, 1);
10097 if (TREE_CODE (tree01
) == MULT_EXPR
10098 && TREE_CODE (tree00
) == MULT_EXPR
)
10101 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10102 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10108 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10109 is a rotate of A by C1 bits. */
10110 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10111 is a rotate of A by B bits. */
10113 enum tree_code code0
, code1
;
10115 code0
= TREE_CODE (arg0
);
10116 code1
= TREE_CODE (arg1
);
10117 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10118 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10119 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10120 TREE_OPERAND (arg1
, 0), 0)
10121 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10122 TYPE_UNSIGNED (rtype
))
10123 /* Only create rotates in complete modes. Other cases are not
10124 expanded properly. */
10125 && (element_precision (rtype
)
10126 == element_precision (TYPE_MODE (rtype
))))
10128 tree tree01
, tree11
;
10129 enum tree_code code01
, code11
;
10131 tree01
= TREE_OPERAND (arg0
, 1);
10132 tree11
= TREE_OPERAND (arg1
, 1);
10133 STRIP_NOPS (tree01
);
10134 STRIP_NOPS (tree11
);
10135 code01
= TREE_CODE (tree01
);
10136 code11
= TREE_CODE (tree11
);
10137 if (code01
== INTEGER_CST
10138 && code11
== INTEGER_CST
10139 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
10140 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
10142 tem
= build2_loc (loc
, LROTATE_EXPR
,
10143 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10144 TREE_OPERAND (arg0
, 0),
10145 code0
== LSHIFT_EXPR
? tree01
: tree11
);
10146 return fold_convert_loc (loc
, type
, tem
);
10148 else if (code11
== MINUS_EXPR
)
10150 tree tree110
, tree111
;
10151 tree110
= TREE_OPERAND (tree11
, 0);
10152 tree111
= TREE_OPERAND (tree11
, 1);
10153 STRIP_NOPS (tree110
);
10154 STRIP_NOPS (tree111
);
10155 if (TREE_CODE (tree110
) == INTEGER_CST
10156 && 0 == compare_tree_int (tree110
,
10158 (TREE_TYPE (TREE_OPERAND
10160 && operand_equal_p (tree01
, tree111
, 0))
10162 fold_convert_loc (loc
, type
,
10163 build2 ((code0
== LSHIFT_EXPR
10166 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10167 TREE_OPERAND (arg0
, 0), tree01
));
10169 else if (code01
== MINUS_EXPR
)
10171 tree tree010
, tree011
;
10172 tree010
= TREE_OPERAND (tree01
, 0);
10173 tree011
= TREE_OPERAND (tree01
, 1);
10174 STRIP_NOPS (tree010
);
10175 STRIP_NOPS (tree011
);
10176 if (TREE_CODE (tree010
) == INTEGER_CST
10177 && 0 == compare_tree_int (tree010
,
10179 (TREE_TYPE (TREE_OPERAND
10181 && operand_equal_p (tree11
, tree011
, 0))
10182 return fold_convert_loc
10184 build2 ((code0
!= LSHIFT_EXPR
10187 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10188 TREE_OPERAND (arg0
, 0), tree11
));
10194 /* In most languages, can't associate operations on floats through
10195 parentheses. Rather than remember where the parentheses were, we
10196 don't associate floats at all, unless the user has specified
10197 -fassociative-math.
10198 And, we need to make sure type is not saturating. */
10200 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10201 && !TYPE_SATURATING (type
))
10203 tree var0
, con0
, lit0
, minus_lit0
;
10204 tree var1
, con1
, lit1
, minus_lit1
;
10208 /* Split both trees into variables, constants, and literals. Then
10209 associate each group together, the constants with literals,
10210 then the result with variables. This increases the chances of
10211 literals being recombined later and of generating relocatable
10212 expressions for the sum of a constant and literal. */
10213 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10214 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10215 code
== MINUS_EXPR
);
10217 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10218 if (code
== MINUS_EXPR
)
10221 /* With undefined overflow prefer doing association in a type
10222 which wraps on overflow, if that is one of the operand types. */
10223 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10224 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10226 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10227 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10228 atype
= TREE_TYPE (arg0
);
10229 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10230 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10231 atype
= TREE_TYPE (arg1
);
10232 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10235 /* With undefined overflow we can only associate constants with one
10236 variable, and constants whose association doesn't overflow. */
10237 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10238 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
10245 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10246 tmp0
= TREE_OPERAND (tmp0
, 0);
10247 if (CONVERT_EXPR_P (tmp0
)
10248 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10249 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10250 <= TYPE_PRECISION (atype
)))
10251 tmp0
= TREE_OPERAND (tmp0
, 0);
10252 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10253 tmp1
= TREE_OPERAND (tmp1
, 0);
10254 if (CONVERT_EXPR_P (tmp1
)
10255 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10256 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10257 <= TYPE_PRECISION (atype
)))
10258 tmp1
= TREE_OPERAND (tmp1
, 0);
10259 /* The only case we can still associate with two variables
10260 is if they are the same, modulo negation and bit-pattern
10261 preserving conversions. */
10262 if (!operand_equal_p (tmp0
, tmp1
, 0))
10267 /* Only do something if we found more than two objects. Otherwise,
10268 nothing has changed and we risk infinite recursion. */
10270 && (2 < ((var0
!= 0) + (var1
!= 0)
10271 + (con0
!= 0) + (con1
!= 0)
10272 + (lit0
!= 0) + (lit1
!= 0)
10273 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10275 bool any_overflows
= false;
10276 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
10277 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
10278 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
10279 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
10280 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10281 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10282 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10283 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10286 /* Preserve the MINUS_EXPR if the negative part of the literal is
10287 greater than the positive part. Otherwise, the multiplicative
10288 folding code (i.e extract_muldiv) may be fooled in case
10289 unsigned constants are subtracted, like in the following
10290 example: ((X*2 + 4) - 8U)/2. */
10291 if (minus_lit0
&& lit0
)
10293 if (TREE_CODE (lit0
) == INTEGER_CST
10294 && TREE_CODE (minus_lit0
) == INTEGER_CST
10295 && tree_int_cst_lt (lit0
, minus_lit0
))
10297 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10298 MINUS_EXPR
, atype
);
10303 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10304 MINUS_EXPR
, atype
);
10309 /* Don't introduce overflows through reassociation. */
10311 && ((lit0
&& TREE_OVERFLOW (lit0
))
10312 || (minus_lit0
&& TREE_OVERFLOW (minus_lit0
))))
10319 fold_convert_loc (loc
, type
,
10320 associate_trees (loc
, var0
, minus_lit0
,
10321 MINUS_EXPR
, atype
));
10324 con0
= associate_trees (loc
, con0
, minus_lit0
,
10325 MINUS_EXPR
, atype
);
10327 fold_convert_loc (loc
, type
,
10328 associate_trees (loc
, var0
, con0
,
10329 PLUS_EXPR
, atype
));
10333 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10335 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10343 /* Pointer simplifications for subtraction, simple reassociations. */
10344 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10346 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10347 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10348 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10350 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10351 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10352 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10353 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10354 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10355 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10357 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10360 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10361 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10363 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10364 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10365 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10366 fold_convert_loc (loc
, type
, arg1
));
10368 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10370 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10372 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10374 tree arg10
= fold_convert_loc (loc
, type
,
10375 TREE_OPERAND (arg1
, 0));
10376 tree arg11
= fold_convert_loc (loc
, type
,
10377 TREE_OPERAND (arg1
, 1));
10378 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
10379 fold_convert_loc (loc
, type
, arg0
),
10382 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
10385 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10386 if (TREE_CODE (arg0
) == NEGATE_EXPR
10387 && negate_expr_p (arg1
)
10388 && reorder_operands_p (arg0
, arg1
))
10389 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10390 fold_convert_loc (loc
, type
,
10391 negate_expr (arg1
)),
10392 fold_convert_loc (loc
, type
,
10393 TREE_OPERAND (arg0
, 0)));
10395 /* X - (X / Y) * Y is X % Y. */
10396 if ((INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10397 && TREE_CODE (arg1
) == MULT_EXPR
10398 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10399 && operand_equal_p (arg0
,
10400 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10401 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10402 TREE_OPERAND (arg1
, 1), 0))
10404 fold_convert_loc (loc
, type
,
10405 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10406 arg0
, TREE_OPERAND (arg1
, 1)));
10408 if (! FLOAT_TYPE_P (type
))
10410 /* Fold A - (A & B) into ~B & A. */
10411 if (!TREE_SIDE_EFFECTS (arg0
)
10412 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10414 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10416 tree arg10
= fold_convert_loc (loc
, type
,
10417 TREE_OPERAND (arg1
, 0));
10418 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10419 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10421 fold_convert_loc (loc
, type
, arg0
));
10423 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10425 tree arg11
= fold_convert_loc (loc
,
10426 type
, TREE_OPERAND (arg1
, 1));
10427 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10428 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10430 fold_convert_loc (loc
, type
, arg0
));
10434 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10435 any power of 2 minus 1. */
10436 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10437 && TREE_CODE (arg1
) == BIT_AND_EXPR
10438 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10439 TREE_OPERAND (arg1
, 0), 0))
10441 tree mask0
= TREE_OPERAND (arg0
, 1);
10442 tree mask1
= TREE_OPERAND (arg1
, 1);
10443 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10445 if (operand_equal_p (tem
, mask1
, 0))
10447 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10448 TREE_OPERAND (arg0
, 0), mask1
);
10449 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10454 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10455 __complex__ ( x, -y ). This is not the same for SNaNs or if
10456 signed zeros are involved. */
10457 if (!HONOR_SNANS (element_mode (arg0
))
10458 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10459 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10461 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10462 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10463 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10464 bool arg0rz
= false, arg0iz
= false;
10465 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10466 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10468 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10469 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10470 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10472 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10474 : build1 (REALPART_EXPR
, rtype
, arg1
));
10475 tree ip
= arg0i
? arg0i
10476 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10477 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10479 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10481 tree rp
= arg0r
? arg0r
10482 : build1 (REALPART_EXPR
, rtype
, arg0
);
10483 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10485 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10486 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10491 /* A - B -> A + (-B) if B is easily negatable. */
10492 if (negate_expr_p (arg1
)
10493 && !TYPE_OVERFLOW_SANITIZED (type
)
10494 && ((FLOAT_TYPE_P (type
)
10495 /* Avoid this transformation if B is a positive REAL_CST. */
10496 && (TREE_CODE (arg1
) != REAL_CST
10497 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10498 || INTEGRAL_TYPE_P (type
)))
10499 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10500 fold_convert_loc (loc
, type
, arg0
),
10501 fold_convert_loc (loc
, type
,
10502 negate_expr (arg1
)));
10504 /* Try folding difference of addresses. */
10506 HOST_WIDE_INT diff
;
10508 if ((TREE_CODE (arg0
) == ADDR_EXPR
10509 || TREE_CODE (arg1
) == ADDR_EXPR
)
10510 && ptr_difference_const (arg0
, arg1
, &diff
))
10511 return build_int_cst_type (type
, diff
);
10514 /* Fold &a[i] - &a[j] to i-j. */
10515 if (TREE_CODE (arg0
) == ADDR_EXPR
10516 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10517 && TREE_CODE (arg1
) == ADDR_EXPR
10518 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10520 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10521 TREE_OPERAND (arg0
, 0),
10522 TREE_OPERAND (arg1
, 0));
10527 if (FLOAT_TYPE_P (type
)
10528 && flag_unsafe_math_optimizations
10529 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10530 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10531 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10534 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10535 one. Make sure the type is not saturating and has the signedness of
10536 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10537 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10538 if ((TREE_CODE (arg0
) == MULT_EXPR
10539 || TREE_CODE (arg1
) == MULT_EXPR
)
10540 && !TYPE_SATURATING (type
)
10541 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10542 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10543 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10545 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10553 /* (-A) * (-B) -> A * B */
10554 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10555 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10556 fold_convert_loc (loc
, type
,
10557 TREE_OPERAND (arg0
, 0)),
10558 fold_convert_loc (loc
, type
,
10559 negate_expr (arg1
)));
10560 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10561 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10562 fold_convert_loc (loc
, type
,
10563 negate_expr (arg0
)),
10564 fold_convert_loc (loc
, type
,
10565 TREE_OPERAND (arg1
, 0)));
10567 if (! FLOAT_TYPE_P (type
))
10569 /* Transform x * -C into -x * C if x is easily negatable. */
10570 if (TREE_CODE (arg1
) == INTEGER_CST
10571 && tree_int_cst_sgn (arg1
) == -1
10572 && negate_expr_p (arg0
)
10573 && (tem
= negate_expr (arg1
)) != arg1
10574 && !TREE_OVERFLOW (tem
))
10575 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10576 fold_convert_loc (loc
, type
,
10577 negate_expr (arg0
)),
10580 /* (a * (1 << b)) is (a << b) */
10581 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10582 && integer_onep (TREE_OPERAND (arg1
, 0)))
10583 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10584 TREE_OPERAND (arg1
, 1));
10585 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10586 && integer_onep (TREE_OPERAND (arg0
, 0)))
10587 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10588 TREE_OPERAND (arg0
, 1));
10590 /* (A + A) * C -> A * 2 * C */
10591 if (TREE_CODE (arg0
) == PLUS_EXPR
10592 && TREE_CODE (arg1
) == INTEGER_CST
10593 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10594 TREE_OPERAND (arg0
, 1), 0))
10595 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10596 omit_one_operand_loc (loc
, type
,
10597 TREE_OPERAND (arg0
, 0),
10598 TREE_OPERAND (arg0
, 1)),
10599 fold_build2_loc (loc
, MULT_EXPR
, type
,
10600 build_int_cst (type
, 2) , arg1
));
10602 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10603 sign-changing only. */
10604 if (TREE_CODE (arg1
) == INTEGER_CST
10605 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
10606 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
10607 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10609 strict_overflow_p
= false;
10610 if (TREE_CODE (arg1
) == INTEGER_CST
10611 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10612 &strict_overflow_p
)))
10614 if (strict_overflow_p
)
10615 fold_overflow_warning (("assuming signed overflow does not "
10616 "occur when simplifying "
10618 WARN_STRICT_OVERFLOW_MISC
);
10619 return fold_convert_loc (loc
, type
, tem
);
10622 /* Optimize z * conj(z) for integer complex numbers. */
10623 if (TREE_CODE (arg0
) == CONJ_EXPR
10624 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10625 return fold_mult_zconjz (loc
, type
, arg1
);
10626 if (TREE_CODE (arg1
) == CONJ_EXPR
10627 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10628 return fold_mult_zconjz (loc
, type
, arg0
);
10632 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10633 the result for floating point types due to rounding so it is applied
10634 only if -fassociative-math was specify. */
10635 if (flag_associative_math
10636 && TREE_CODE (arg0
) == RDIV_EXPR
10637 && TREE_CODE (arg1
) == REAL_CST
10638 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10640 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10643 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10644 TREE_OPERAND (arg0
, 1));
10647 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10648 if (operand_equal_p (arg0
, arg1
, 0))
10650 tree tem
= fold_strip_sign_ops (arg0
);
10651 if (tem
!= NULL_TREE
)
10653 tem
= fold_convert_loc (loc
, type
, tem
);
10654 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10658 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10659 This is not the same for NaNs or if signed zeros are
10661 if (!HONOR_NANS (element_mode (arg0
))
10662 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10663 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10664 && TREE_CODE (arg1
) == COMPLEX_CST
10665 && real_zerop (TREE_REALPART (arg1
)))
10667 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10668 if (real_onep (TREE_IMAGPART (arg1
)))
10670 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10671 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10673 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10674 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10676 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10677 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10678 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10682 /* Optimize z * conj(z) for floating point complex numbers.
10683 Guarded by flag_unsafe_math_optimizations as non-finite
10684 imaginary components don't produce scalar results. */
10685 if (flag_unsafe_math_optimizations
10686 && TREE_CODE (arg0
) == CONJ_EXPR
10687 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10688 return fold_mult_zconjz (loc
, type
, arg1
);
10689 if (flag_unsafe_math_optimizations
10690 && TREE_CODE (arg1
) == CONJ_EXPR
10691 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10692 return fold_mult_zconjz (loc
, type
, arg0
);
10694 if (flag_unsafe_math_optimizations
)
10696 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10697 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10699 /* Optimizations of root(...)*root(...). */
10700 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10703 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10704 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10706 /* Optimize sqrt(x)*sqrt(x) as x. */
10707 if (BUILTIN_SQRT_P (fcode0
)
10708 && operand_equal_p (arg00
, arg10
, 0)
10709 && ! HONOR_SNANS (element_mode (type
)))
10712 /* Optimize root(x)*root(y) as root(x*y). */
10713 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10714 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10715 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10718 /* Optimize expN(x)*expN(y) as expN(x+y). */
10719 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10721 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10722 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10723 CALL_EXPR_ARG (arg0
, 0),
10724 CALL_EXPR_ARG (arg1
, 0));
10725 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10728 /* Optimizations of pow(...)*pow(...). */
10729 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10730 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10731 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10733 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10734 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10735 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10736 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10738 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10739 if (operand_equal_p (arg01
, arg11
, 0))
10741 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10742 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10744 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10747 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10748 if (operand_equal_p (arg00
, arg10
, 0))
10750 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10751 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10753 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10757 /* Optimize tan(x)*cos(x) as sin(x). */
10758 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10759 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10760 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10761 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10762 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10763 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10764 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10765 CALL_EXPR_ARG (arg1
, 0), 0))
10767 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10769 if (sinfn
!= NULL_TREE
)
10770 return build_call_expr_loc (loc
, sinfn
, 1,
10771 CALL_EXPR_ARG (arg0
, 0));
10774 /* Optimize x*pow(x,c) as pow(x,c+1). */
10775 if (fcode1
== BUILT_IN_POW
10776 || fcode1
== BUILT_IN_POWF
10777 || fcode1
== BUILT_IN_POWL
)
10779 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10780 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10781 if (TREE_CODE (arg11
) == REAL_CST
10782 && !TREE_OVERFLOW (arg11
)
10783 && operand_equal_p (arg0
, arg10
, 0))
10785 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10789 c
= TREE_REAL_CST (arg11
);
10790 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10791 arg
= build_real (type
, c
);
10792 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10796 /* Optimize pow(x,c)*x as pow(x,c+1). */
10797 if (fcode0
== BUILT_IN_POW
10798 || fcode0
== BUILT_IN_POWF
10799 || fcode0
== BUILT_IN_POWL
)
10801 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10802 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10803 if (TREE_CODE (arg01
) == REAL_CST
10804 && !TREE_OVERFLOW (arg01
)
10805 && operand_equal_p (arg1
, arg00
, 0))
10807 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10811 c
= TREE_REAL_CST (arg01
);
10812 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10813 arg
= build_real (type
, c
);
10814 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10818 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10819 if (!in_gimple_form
10821 && operand_equal_p (arg0
, arg1
, 0))
10823 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10827 tree arg
= build_real (type
, dconst2
);
10828 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10837 /* ~X | X is -1. */
10838 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10839 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10841 t1
= build_zero_cst (type
);
10842 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10843 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10846 /* X | ~X is -1. */
10847 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10848 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10850 t1
= build_zero_cst (type
);
10851 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10852 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10855 /* Canonicalize (X & C1) | C2. */
10856 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10857 && TREE_CODE (arg1
) == INTEGER_CST
10858 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10860 int width
= TYPE_PRECISION (type
), w
;
10861 wide_int c1
= TREE_OPERAND (arg0
, 1);
10862 wide_int c2
= arg1
;
10864 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10865 if ((c1
& c2
) == c1
)
10866 return omit_one_operand_loc (loc
, type
, arg1
,
10867 TREE_OPERAND (arg0
, 0));
10869 wide_int msk
= wi::mask (width
, false,
10870 TYPE_PRECISION (TREE_TYPE (arg1
)));
10872 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10873 if (msk
.and_not (c1
| c2
) == 0)
10874 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10875 TREE_OPERAND (arg0
, 0), arg1
);
10877 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10878 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10879 mode which allows further optimizations. */
10882 wide_int c3
= c1
.and_not (c2
);
10883 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
10885 wide_int mask
= wi::mask (w
, false,
10886 TYPE_PRECISION (type
));
10887 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
10895 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10896 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10897 TREE_OPERAND (arg0
, 0),
10898 wide_int_to_tree (type
,
10903 /* (X & ~Y) | (~X & Y) is X ^ Y */
10904 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10905 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10907 tree a0
, a1
, l0
, l1
, n0
, n1
;
10909 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10910 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10912 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10913 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10915 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
10916 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
10918 if ((operand_equal_p (n0
, a0
, 0)
10919 && operand_equal_p (n1
, a1
, 0))
10920 || (operand_equal_p (n0
, a1
, 0)
10921 && operand_equal_p (n1
, a0
, 0)))
10922 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
10925 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
10926 if (t1
!= NULL_TREE
)
10929 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10931 This results in more efficient code for machines without a NAND
10932 instruction. Combine will canonicalize to the first form
10933 which will allow use of NAND instructions provided by the
10934 backend if they exist. */
10935 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10936 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
10939 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10940 build2 (BIT_AND_EXPR
, type
,
10941 fold_convert_loc (loc
, type
,
10942 TREE_OPERAND (arg0
, 0)),
10943 fold_convert_loc (loc
, type
,
10944 TREE_OPERAND (arg1
, 0))));
10947 /* See if this can be simplified into a rotate first. If that
10948 is unsuccessful continue in the association code. */
10952 /* ~X ^ X is -1. */
10953 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10954 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10956 t1
= build_zero_cst (type
);
10957 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10958 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10961 /* X ^ ~X is -1. */
10962 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10963 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10965 t1
= build_zero_cst (type
);
10966 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10967 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10970 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10971 with a constant, and the two constants have no bits in common,
10972 we should treat this as a BIT_IOR_EXPR since this may produce more
10973 simplifications. */
10974 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10975 && TREE_CODE (arg1
) == BIT_AND_EXPR
10976 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10977 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10978 && wi::bit_and (TREE_OPERAND (arg0
, 1),
10979 TREE_OPERAND (arg1
, 1)) == 0)
10981 code
= BIT_IOR_EXPR
;
10985 /* (X | Y) ^ X -> Y & ~ X*/
10986 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
10987 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10989 tree t2
= TREE_OPERAND (arg0
, 1);
10990 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
10992 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10993 fold_convert_loc (loc
, type
, t2
),
10994 fold_convert_loc (loc
, type
, t1
));
10998 /* (Y | X) ^ X -> Y & ~ X*/
10999 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11000 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11002 tree t2
= TREE_OPERAND (arg0
, 0);
11003 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11005 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11006 fold_convert_loc (loc
, type
, t2
),
11007 fold_convert_loc (loc
, type
, t1
));
11011 /* X ^ (X | Y) -> Y & ~ X*/
11012 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11013 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11015 tree t2
= TREE_OPERAND (arg1
, 1);
11016 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11018 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11019 fold_convert_loc (loc
, type
, t2
),
11020 fold_convert_loc (loc
, type
, t1
));
11024 /* X ^ (Y | X) -> Y & ~ X*/
11025 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11026 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11028 tree t2
= TREE_OPERAND (arg1
, 0);
11029 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11031 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11032 fold_convert_loc (loc
, type
, t2
),
11033 fold_convert_loc (loc
, type
, t1
));
11037 /* Convert ~X ^ ~Y to X ^ Y. */
11038 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11039 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11040 return fold_build2_loc (loc
, code
, type
,
11041 fold_convert_loc (loc
, type
,
11042 TREE_OPERAND (arg0
, 0)),
11043 fold_convert_loc (loc
, type
,
11044 TREE_OPERAND (arg1
, 0)));
11046 /* Convert ~X ^ C to X ^ ~C. */
11047 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11048 && TREE_CODE (arg1
) == INTEGER_CST
)
11049 return fold_build2_loc (loc
, code
, type
,
11050 fold_convert_loc (loc
, type
,
11051 TREE_OPERAND (arg0
, 0)),
11052 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11054 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11055 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11056 && INTEGRAL_TYPE_P (type
)
11057 && integer_onep (TREE_OPERAND (arg0
, 1))
11058 && integer_onep (arg1
))
11059 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11060 build_zero_cst (TREE_TYPE (arg0
)));
11062 /* Fold (X & Y) ^ Y as ~X & Y. */
11063 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11064 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11066 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11067 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11068 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11069 fold_convert_loc (loc
, type
, arg1
));
11071 /* Fold (X & Y) ^ X as ~Y & X. */
11072 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11073 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11074 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11076 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11077 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11078 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11079 fold_convert_loc (loc
, type
, arg1
));
11081 /* Fold X ^ (X & Y) as X & ~Y. */
11082 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11083 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11085 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11086 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11087 fold_convert_loc (loc
, type
, arg0
),
11088 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11090 /* Fold X ^ (Y & X) as ~Y & X. */
11091 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11092 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11093 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11095 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11096 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11097 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11098 fold_convert_loc (loc
, type
, arg0
));
11101 /* See if this can be simplified into a rotate first. If that
11102 is unsuccessful continue in the association code. */
11106 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11107 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11108 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11109 || (TREE_CODE (arg0
) == EQ_EXPR
11110 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11111 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11112 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11114 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11115 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11116 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11117 || (TREE_CODE (arg1
) == EQ_EXPR
11118 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11119 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11120 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11122 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11123 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11124 && INTEGRAL_TYPE_P (type
)
11125 && integer_onep (TREE_OPERAND (arg0
, 1))
11126 && integer_onep (arg1
))
11129 tem
= TREE_OPERAND (arg0
, 0);
11130 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11131 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11133 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11134 build_zero_cst (TREE_TYPE (tem
)));
11136 /* Fold ~X & 1 as (X & 1) == 0. */
11137 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11138 && INTEGRAL_TYPE_P (type
)
11139 && integer_onep (arg1
))
11142 tem
= TREE_OPERAND (arg0
, 0);
11143 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11144 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11146 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11147 build_zero_cst (TREE_TYPE (tem
)));
11149 /* Fold !X & 1 as X == 0. */
11150 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11151 && integer_onep (arg1
))
11153 tem
= TREE_OPERAND (arg0
, 0);
11154 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11155 build_zero_cst (TREE_TYPE (tem
)));
11158 /* Fold (X ^ Y) & Y as ~X & Y. */
11159 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11160 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11162 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11163 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11164 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11165 fold_convert_loc (loc
, type
, arg1
));
11167 /* Fold (X ^ Y) & X as ~Y & X. */
11168 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11169 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11170 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11172 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11173 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11174 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11175 fold_convert_loc (loc
, type
, arg1
));
11177 /* Fold X & (X ^ Y) as X & ~Y. */
11178 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11179 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11181 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11182 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11183 fold_convert_loc (loc
, type
, arg0
),
11184 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11186 /* Fold X & (Y ^ X) as ~Y & X. */
11187 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11188 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11189 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11191 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11192 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11193 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11194 fold_convert_loc (loc
, type
, arg0
));
11197 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11198 multiple of 1 << CST. */
11199 if (TREE_CODE (arg1
) == INTEGER_CST
)
11201 wide_int cst1
= arg1
;
11202 wide_int ncst1
= -cst1
;
11203 if ((cst1
& ncst1
) == ncst1
11204 && multiple_of_p (type
, arg0
,
11205 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11206 return fold_convert_loc (loc
, type
, arg0
);
11209 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11211 if (TREE_CODE (arg1
) == INTEGER_CST
11212 && TREE_CODE (arg0
) == MULT_EXPR
11213 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11215 wide_int warg1
= arg1
;
11216 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
11219 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11221 else if (masked
!= warg1
)
11223 /* Avoid the transform if arg1 is a mask of some
11224 mode which allows further optimizations. */
11225 int pop
= wi::popcount (warg1
);
11226 if (!(pop
>= BITS_PER_UNIT
11227 && exact_log2 (pop
) != -1
11228 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11229 return fold_build2_loc (loc
, code
, type
, op0
,
11230 wide_int_to_tree (type
, masked
));
11234 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11235 ((A & N) + B) & M -> (A + B) & M
11236 Similarly if (N & M) == 0,
11237 ((A | N) + B) & M -> (A + B) & M
11238 and for - instead of + (or unary - instead of +)
11239 and/or ^ instead of |.
11240 If B is constant and (B & M) == 0, fold into A & M. */
11241 if (TREE_CODE (arg1
) == INTEGER_CST
)
11243 wide_int cst1
= arg1
;
11244 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
11245 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11246 && (TREE_CODE (arg0
) == PLUS_EXPR
11247 || TREE_CODE (arg0
) == MINUS_EXPR
11248 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11249 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11250 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11256 /* Now we know that arg0 is (C + D) or (C - D) or
11257 -C and arg1 (M) is == (1LL << cst) - 1.
11258 Store C into PMOP[0] and D into PMOP[1]. */
11259 pmop
[0] = TREE_OPERAND (arg0
, 0);
11261 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11263 pmop
[1] = TREE_OPERAND (arg0
, 1);
11267 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
11270 for (; which
>= 0; which
--)
11271 switch (TREE_CODE (pmop
[which
]))
11276 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11279 cst0
= TREE_OPERAND (pmop
[which
], 1);
11281 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11286 else if (cst0
!= 0)
11288 /* If C or D is of the form (A & N) where
11289 (N & M) == M, or of the form (A | N) or
11290 (A ^ N) where (N & M) == 0, replace it with A. */
11291 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11294 /* If C or D is a N where (N & M) == 0, it can be
11295 omitted (assumed 0). */
11296 if ((TREE_CODE (arg0
) == PLUS_EXPR
11297 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11298 && (cst1
& pmop
[which
]) == 0)
11299 pmop
[which
] = NULL
;
11305 /* Only build anything new if we optimized one or both arguments
11307 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11308 || (TREE_CODE (arg0
) != NEGATE_EXPR
11309 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11311 tree utype
= TREE_TYPE (arg0
);
11312 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11314 /* Perform the operations in a type that has defined
11315 overflow behavior. */
11316 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11317 if (pmop
[0] != NULL
)
11318 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11319 if (pmop
[1] != NULL
)
11320 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11323 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11324 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11325 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11327 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11328 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11330 else if (pmop
[0] != NULL
)
11332 else if (pmop
[1] != NULL
)
11335 return build_int_cst (type
, 0);
11337 else if (pmop
[0] == NULL
)
11338 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11340 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11342 /* TEM is now the new binary +, - or unary - replacement. */
11343 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11344 fold_convert_loc (loc
, utype
, arg1
));
11345 return fold_convert_loc (loc
, type
, tem
);
11350 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11351 if (t1
!= NULL_TREE
)
11353 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11354 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11355 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11357 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11359 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
11362 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11365 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11367 This results in more efficient code for machines without a NOR
11368 instruction. Combine will canonicalize to the first form
11369 which will allow use of NOR instructions provided by the
11370 backend if they exist. */
11371 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11372 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11374 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11375 build2 (BIT_IOR_EXPR
, type
,
11376 fold_convert_loc (loc
, type
,
11377 TREE_OPERAND (arg0
, 0)),
11378 fold_convert_loc (loc
, type
,
11379 TREE_OPERAND (arg1
, 0))));
11382 /* If arg0 is derived from the address of an object or function, we may
11383 be able to fold this expression using the object or function's
11385 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && tree_fits_uhwi_p (arg1
))
11387 unsigned HOST_WIDE_INT modulus
, residue
;
11388 unsigned HOST_WIDE_INT low
= tree_to_uhwi (arg1
);
11390 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11391 integer_onep (arg1
));
11393 /* This works because modulus is a power of 2. If this weren't the
11394 case, we'd have to replace it by its greatest power-of-2
11395 divisor: modulus & -modulus. */
11397 return build_int_cst (type
, residue
& low
);
11400 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11401 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11402 if the new mask might be further optimized. */
11403 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11404 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11405 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11406 && TREE_CODE (arg1
) == INTEGER_CST
11407 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11408 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) > 0
11409 && (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11410 < TYPE_PRECISION (TREE_TYPE (arg0
))))
11412 unsigned int shiftc
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11413 unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (arg1
);
11414 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11415 tree shift_type
= TREE_TYPE (arg0
);
11417 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11418 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11419 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11420 && TYPE_PRECISION (TREE_TYPE (arg0
))
11421 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0
))))
11423 prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11424 tree arg00
= TREE_OPERAND (arg0
, 0);
11425 /* See if more bits can be proven as zero because of
11427 if (TREE_CODE (arg00
) == NOP_EXPR
11428 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11430 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11431 if (TYPE_PRECISION (inner_type
)
11432 == GET_MODE_PRECISION (TYPE_MODE (inner_type
))
11433 && TYPE_PRECISION (inner_type
) < prec
)
11435 prec
= TYPE_PRECISION (inner_type
);
11436 /* See if we can shorten the right shift. */
11438 shift_type
= inner_type
;
11439 /* Otherwise X >> C1 is all zeros, so we'll optimize
11440 it into (X, 0) later on by making sure zerobits
11444 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11447 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11448 zerobits
<<= prec
- shiftc
;
11450 /* For arithmetic shift if sign bit could be set, zerobits
11451 can contain actually sign bits, so no transformation is
11452 possible, unless MASK masks them all away. In that
11453 case the shift needs to be converted into logical shift. */
11454 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11455 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11457 if ((mask
& zerobits
) == 0)
11458 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11464 /* ((X << 16) & 0xff00) is (X, 0). */
11465 if ((mask
& zerobits
) == mask
)
11466 return omit_one_operand_loc (loc
, type
,
11467 build_int_cst (type
, 0), arg0
);
11469 newmask
= mask
| zerobits
;
11470 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11472 /* Only do the transformation if NEWMASK is some integer
11474 for (prec
= BITS_PER_UNIT
;
11475 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11476 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11478 if (prec
< HOST_BITS_PER_WIDE_INT
11479 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11483 if (shift_type
!= TREE_TYPE (arg0
))
11485 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11486 fold_convert_loc (loc
, shift_type
,
11487 TREE_OPERAND (arg0
, 0)),
11488 TREE_OPERAND (arg0
, 1));
11489 tem
= fold_convert_loc (loc
, type
, tem
);
11493 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11494 if (!tree_int_cst_equal (newmaskt
, arg1
))
11495 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11503 /* Don't touch a floating-point divide by zero unless the mode
11504 of the constant can represent infinity. */
11505 if (TREE_CODE (arg1
) == REAL_CST
11506 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11507 && real_zerop (arg1
))
11510 /* (-A) / (-B) -> A / B */
11511 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11512 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11513 TREE_OPERAND (arg0
, 0),
11514 negate_expr (arg1
));
11515 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11516 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11517 negate_expr (arg0
),
11518 TREE_OPERAND (arg1
, 0));
11520 /* Convert A/B/C to A/(B*C). */
11521 if (flag_reciprocal_math
11522 && TREE_CODE (arg0
) == RDIV_EXPR
)
11523 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11524 fold_build2_loc (loc
, MULT_EXPR
, type
,
11525 TREE_OPERAND (arg0
, 1), arg1
));
11527 /* Convert A/(B/C) to (A/B)*C. */
11528 if (flag_reciprocal_math
11529 && TREE_CODE (arg1
) == RDIV_EXPR
)
11530 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11531 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11532 TREE_OPERAND (arg1
, 0)),
11533 TREE_OPERAND (arg1
, 1));
11535 /* Convert C1/(X*C2) into (C1/C2)/X. */
11536 if (flag_reciprocal_math
11537 && TREE_CODE (arg1
) == MULT_EXPR
11538 && TREE_CODE (arg0
) == REAL_CST
11539 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11541 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11542 TREE_OPERAND (arg1
, 1));
11544 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11545 TREE_OPERAND (arg1
, 0));
11548 if (flag_unsafe_math_optimizations
)
11550 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11551 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11553 /* Optimize sin(x)/cos(x) as tan(x). */
11554 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11555 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11556 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11557 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11558 CALL_EXPR_ARG (arg1
, 0), 0))
11560 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11562 if (tanfn
!= NULL_TREE
)
11563 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11566 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11567 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11568 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11569 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11570 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11571 CALL_EXPR_ARG (arg1
, 0), 0))
11573 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11575 if (tanfn
!= NULL_TREE
)
11577 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11578 CALL_EXPR_ARG (arg0
, 0));
11579 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11580 build_real (type
, dconst1
), tmp
);
11584 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11585 NaNs or Infinities. */
11586 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11587 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11588 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11590 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11591 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11593 if (! HONOR_NANS (element_mode (arg00
))
11594 && ! HONOR_INFINITIES (element_mode (arg00
))
11595 && operand_equal_p (arg00
, arg01
, 0))
11597 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11599 if (cosfn
!= NULL_TREE
)
11600 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11604 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11605 NaNs or Infinities. */
11606 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11607 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11608 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11610 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11611 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11613 if (! HONOR_NANS (element_mode (arg00
))
11614 && ! HONOR_INFINITIES (element_mode (arg00
))
11615 && operand_equal_p (arg00
, arg01
, 0))
11617 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11619 if (cosfn
!= NULL_TREE
)
11621 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11622 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11623 build_real (type
, dconst1
),
11629 /* Optimize pow(x,c)/x as pow(x,c-1). */
11630 if (fcode0
== BUILT_IN_POW
11631 || fcode0
== BUILT_IN_POWF
11632 || fcode0
== BUILT_IN_POWL
)
11634 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11635 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11636 if (TREE_CODE (arg01
) == REAL_CST
11637 && !TREE_OVERFLOW (arg01
)
11638 && operand_equal_p (arg1
, arg00
, 0))
11640 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11644 c
= TREE_REAL_CST (arg01
);
11645 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11646 arg
= build_real (type
, c
);
11647 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11651 /* Optimize a/root(b/c) into a*root(c/b). */
11652 if (BUILTIN_ROOT_P (fcode1
))
11654 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11656 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11658 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11659 tree b
= TREE_OPERAND (rootarg
, 0);
11660 tree c
= TREE_OPERAND (rootarg
, 1);
11662 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11664 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11665 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
11669 /* Optimize x/expN(y) into x*expN(-y). */
11670 if (BUILTIN_EXPONENT_P (fcode1
))
11672 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11673 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
11674 arg1
= build_call_expr_loc (loc
,
11676 fold_convert_loc (loc
, type
, arg
));
11677 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11680 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11681 if (fcode1
== BUILT_IN_POW
11682 || fcode1
== BUILT_IN_POWF
11683 || fcode1
== BUILT_IN_POWL
)
11685 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11686 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11687 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11688 tree neg11
= fold_convert_loc (loc
, type
,
11689 negate_expr (arg11
));
11690 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
11691 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11696 case TRUNC_DIV_EXPR
:
11697 /* Optimize (X & (-A)) / A where A is a power of 2,
11699 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11700 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
11701 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
11703 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
11704 arg1
, TREE_OPERAND (arg0
, 1));
11705 if (sum
&& integer_zerop (sum
)) {
11706 tree pow2
= build_int_cst (integer_type_node
,
11707 wi::exact_log2 (arg1
));
11708 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11709 TREE_OPERAND (arg0
, 0), pow2
);
11715 case FLOOR_DIV_EXPR
:
11716 /* Simplify A / (B << N) where A and B are positive and B is
11717 a power of 2, to A >> (N + log2(B)). */
11718 strict_overflow_p
= false;
11719 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11720 && (TYPE_UNSIGNED (type
)
11721 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11723 tree sval
= TREE_OPERAND (arg1
, 0);
11724 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11726 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11727 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11728 wi::exact_log2 (sval
));
11730 if (strict_overflow_p
)
11731 fold_overflow_warning (("assuming signed overflow does not "
11732 "occur when simplifying A / (B << N)"),
11733 WARN_STRICT_OVERFLOW_MISC
);
11735 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11737 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11738 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11744 case ROUND_DIV_EXPR
:
11745 case CEIL_DIV_EXPR
:
11746 case EXACT_DIV_EXPR
:
11747 if (integer_zerop (arg1
))
11750 /* Convert -A / -B to A / B when the type is signed and overflow is
11752 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11753 && TREE_CODE (arg0
) == NEGATE_EXPR
11754 && negate_expr_p (arg1
))
11756 if (INTEGRAL_TYPE_P (type
))
11757 fold_overflow_warning (("assuming signed overflow does not occur "
11758 "when distributing negation across "
11760 WARN_STRICT_OVERFLOW_MISC
);
11761 return fold_build2_loc (loc
, code
, type
,
11762 fold_convert_loc (loc
, type
,
11763 TREE_OPERAND (arg0
, 0)),
11764 fold_convert_loc (loc
, type
,
11765 negate_expr (arg1
)));
11767 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11768 && TREE_CODE (arg1
) == NEGATE_EXPR
11769 && negate_expr_p (arg0
))
11771 if (INTEGRAL_TYPE_P (type
))
11772 fold_overflow_warning (("assuming signed overflow does not occur "
11773 "when distributing negation across "
11775 WARN_STRICT_OVERFLOW_MISC
);
11776 return fold_build2_loc (loc
, code
, type
,
11777 fold_convert_loc (loc
, type
,
11778 negate_expr (arg0
)),
11779 fold_convert_loc (loc
, type
,
11780 TREE_OPERAND (arg1
, 0)));
11783 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11784 operation, EXACT_DIV_EXPR.
11786 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11787 At one time others generated faster code, it's not clear if they do
11788 after the last round to changes to the DIV code in expmed.c. */
11789 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11790 && multiple_of_p (type
, arg0
, arg1
))
11791 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11793 strict_overflow_p
= false;
11794 if (TREE_CODE (arg1
) == INTEGER_CST
11795 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11796 &strict_overflow_p
)))
11798 if (strict_overflow_p
)
11799 fold_overflow_warning (("assuming signed overflow does not occur "
11800 "when simplifying division"),
11801 WARN_STRICT_OVERFLOW_MISC
);
11802 return fold_convert_loc (loc
, type
, tem
);
11807 case CEIL_MOD_EXPR
:
11808 case FLOOR_MOD_EXPR
:
11809 case ROUND_MOD_EXPR
:
11810 case TRUNC_MOD_EXPR
:
11811 /* X % -Y is the same as X % Y. */
11812 if (code
== TRUNC_MOD_EXPR
11813 && !TYPE_UNSIGNED (type
)
11814 && TREE_CODE (arg1
) == NEGATE_EXPR
11815 && !TYPE_OVERFLOW_TRAPS (type
))
11816 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, arg0
),
11817 fold_convert_loc (loc
, type
,
11818 TREE_OPERAND (arg1
, 0)));
11820 strict_overflow_p
= false;
11821 if (TREE_CODE (arg1
) == INTEGER_CST
11822 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11823 &strict_overflow_p
)))
11825 if (strict_overflow_p
)
11826 fold_overflow_warning (("assuming signed overflow does not occur "
11827 "when simplifying modulus"),
11828 WARN_STRICT_OVERFLOW_MISC
);
11829 return fold_convert_loc (loc
, type
, tem
);
11832 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11833 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11834 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
11835 && (TYPE_UNSIGNED (type
)
11836 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11839 /* Also optimize A % (C << N) where C is a power of 2,
11840 to A & ((C << N) - 1). */
11841 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
11842 c
= TREE_OPERAND (arg1
, 0);
11844 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
11847 = fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
11848 build_int_cst (TREE_TYPE (arg1
), 1));
11849 if (strict_overflow_p
)
11850 fold_overflow_warning (("assuming signed overflow does not "
11851 "occur when simplifying "
11852 "X % (power of two)"),
11853 WARN_STRICT_OVERFLOW_MISC
);
11854 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11855 fold_convert_loc (loc
, type
, arg0
),
11856 fold_convert_loc (loc
, type
, mask
));
11866 /* Since negative shift count is not well-defined,
11867 don't try to compute it in the compiler. */
11868 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11871 prec
= element_precision (type
);
11873 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11874 if (TREE_CODE (op0
) == code
&& tree_fits_uhwi_p (arg1
)
11875 && tree_to_uhwi (arg1
) < prec
11876 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11877 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11879 unsigned int low
= (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11880 + tree_to_uhwi (arg1
));
11882 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11883 being well defined. */
11886 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
11888 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
11889 return omit_one_operand_loc (loc
, type
, build_zero_cst (type
),
11890 TREE_OPERAND (arg0
, 0));
11895 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11896 build_int_cst (TREE_TYPE (arg1
), low
));
11899 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11900 into x & ((unsigned)-1 >> c) for unsigned types. */
11901 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
11902 || (TYPE_UNSIGNED (type
)
11903 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
11904 && tree_fits_uhwi_p (arg1
)
11905 && tree_to_uhwi (arg1
) < prec
11906 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11907 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11909 HOST_WIDE_INT low0
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11910 HOST_WIDE_INT low1
= tree_to_uhwi (arg1
);
11916 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11918 lshift
= build_minus_one_cst (type
);
11919 lshift
= const_binop (code
, lshift
, arg1
);
11921 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
11925 /* If we have a rotate of a bit operation with the rotate count and
11926 the second operand of the bit operation both constant,
11927 permute the two operations. */
11928 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11929 && (TREE_CODE (arg0
) == BIT_AND_EXPR
11930 || TREE_CODE (arg0
) == BIT_IOR_EXPR
11931 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11932 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11933 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
11934 fold_build2_loc (loc
, code
, type
,
11935 TREE_OPERAND (arg0
, 0), arg1
),
11936 fold_build2_loc (loc
, code
, type
,
11937 TREE_OPERAND (arg0
, 1), arg1
));
11939 /* Two consecutive rotates adding up to the some integer
11940 multiple of the precision of the type can be ignored. */
11941 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11942 && TREE_CODE (arg0
) == RROTATE_EXPR
11943 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11944 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
11946 return TREE_OPERAND (arg0
, 0);
11948 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11949 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11950 if the latter can be further optimized. */
11951 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
11952 && TREE_CODE (arg0
) == BIT_AND_EXPR
11953 && TREE_CODE (arg1
) == INTEGER_CST
11954 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11956 tree mask
= fold_build2_loc (loc
, code
, type
,
11957 fold_convert_loc (loc
, type
,
11958 TREE_OPERAND (arg0
, 1)),
11960 tree shift
= fold_build2_loc (loc
, code
, type
,
11961 fold_convert_loc (loc
, type
,
11962 TREE_OPERAND (arg0
, 0)),
11964 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
11972 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
11978 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
11983 case TRUTH_ANDIF_EXPR
:
11984 /* Note that the operands of this must be ints
11985 and their values must be 0 or 1.
11986 ("true" is a fixed value perhaps depending on the language.) */
11987 /* If first arg is constant zero, return it. */
11988 if (integer_zerop (arg0
))
11989 return fold_convert_loc (loc
, type
, arg0
);
11990 case TRUTH_AND_EXPR
:
11991 /* If either arg is constant true, drop it. */
11992 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11993 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11994 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
11995 /* Preserve sequence points. */
11996 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11997 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11998 /* If second arg is constant zero, result is zero, but first arg
11999 must be evaluated. */
12000 if (integer_zerop (arg1
))
12001 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12002 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12003 case will be handled here. */
12004 if (integer_zerop (arg0
))
12005 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12007 /* !X && X is always false. */
12008 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12009 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12010 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12011 /* X && !X is always false. */
12012 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12013 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12014 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12016 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12017 means A >= Y && A != MAX, but in this case we know that
12020 if (!TREE_SIDE_EFFECTS (arg0
)
12021 && !TREE_SIDE_EFFECTS (arg1
))
12023 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12024 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12025 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12027 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12028 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12029 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12032 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12038 case TRUTH_ORIF_EXPR
:
12039 /* Note that the operands of this must be ints
12040 and their values must be 0 or true.
12041 ("true" is a fixed value perhaps depending on the language.) */
12042 /* If first arg is constant true, return it. */
12043 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12044 return fold_convert_loc (loc
, type
, arg0
);
12045 case TRUTH_OR_EXPR
:
12046 /* If either arg is constant zero, drop it. */
12047 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12048 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12049 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12050 /* Preserve sequence points. */
12051 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12052 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12053 /* If second arg is constant true, result is true, but we must
12054 evaluate first arg. */
12055 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12056 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12057 /* Likewise for first arg, but note this only occurs here for
12059 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12060 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12062 /* !X || X is always true. */
12063 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12064 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12065 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12066 /* X || !X is always true. */
12067 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12068 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12069 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12071 /* (X && !Y) || (!X && Y) is X ^ Y */
12072 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12073 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12075 tree a0
, a1
, l0
, l1
, n0
, n1
;
12077 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12078 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12080 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12081 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12083 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12084 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12086 if ((operand_equal_p (n0
, a0
, 0)
12087 && operand_equal_p (n1
, a1
, 0))
12088 || (operand_equal_p (n0
, a1
, 0)
12089 && operand_equal_p (n1
, a0
, 0)))
12090 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12093 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12099 case TRUTH_XOR_EXPR
:
12100 /* If the second arg is constant zero, drop it. */
12101 if (integer_zerop (arg1
))
12102 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12103 /* If the second arg is constant true, this is a logical inversion. */
12104 if (integer_onep (arg1
))
12106 tem
= invert_truthvalue_loc (loc
, arg0
);
12107 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12109 /* Identical arguments cancel to zero. */
12110 if (operand_equal_p (arg0
, arg1
, 0))
12111 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12113 /* !X ^ X is always true. */
12114 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12115 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12116 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12118 /* X ^ !X is always true. */
12119 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12120 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12121 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12130 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12131 if (tem
!= NULL_TREE
)
12134 /* bool_var != 0 becomes bool_var. */
12135 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12136 && code
== NE_EXPR
)
12137 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12139 /* bool_var == 1 becomes bool_var. */
12140 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12141 && code
== EQ_EXPR
)
12142 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12144 /* bool_var != 1 becomes !bool_var. */
12145 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12146 && code
== NE_EXPR
)
12147 return fold_convert_loc (loc
, type
,
12148 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12149 TREE_TYPE (arg0
), arg0
));
12151 /* bool_var == 0 becomes !bool_var. */
12152 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12153 && code
== EQ_EXPR
)
12154 return fold_convert_loc (loc
, type
,
12155 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12156 TREE_TYPE (arg0
), arg0
));
12158 /* !exp != 0 becomes !exp */
12159 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12160 && code
== NE_EXPR
)
12161 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12163 /* If this is an equality comparison of the address of two non-weak,
12164 unaliased symbols neither of which are extern (since we do not
12165 have access to attributes for externs), then we know the result. */
12166 if (TREE_CODE (arg0
) == ADDR_EXPR
12167 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
12168 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
12169 && ! lookup_attribute ("alias",
12170 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
12171 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
12172 && TREE_CODE (arg1
) == ADDR_EXPR
12173 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
12174 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
12175 && ! lookup_attribute ("alias",
12176 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
12177 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
12179 /* We know that we're looking at the address of two
12180 non-weak, unaliased, static _DECL nodes.
12182 It is both wasteful and incorrect to call operand_equal_p
12183 to compare the two ADDR_EXPR nodes. It is wasteful in that
12184 all we need to do is test pointer equality for the arguments
12185 to the two ADDR_EXPR nodes. It is incorrect to use
12186 operand_equal_p as that function is NOT equivalent to a
12187 C equality test. It can in fact return false for two
12188 objects which would test as equal using the C equality
12190 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12191 return constant_boolean_node (equal
12192 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12196 /* Similarly for a NEGATE_EXPR. */
12197 if (TREE_CODE (arg0
) == NEGATE_EXPR
12198 && TREE_CODE (arg1
) == INTEGER_CST
12199 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12201 && TREE_CODE (tem
) == INTEGER_CST
12202 && !TREE_OVERFLOW (tem
))
12203 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12205 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12206 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12207 && TREE_CODE (arg1
) == INTEGER_CST
12208 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12209 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12210 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12211 fold_convert_loc (loc
,
12214 TREE_OPERAND (arg0
, 1)));
12216 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12217 if ((TREE_CODE (arg0
) == PLUS_EXPR
12218 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12219 || TREE_CODE (arg0
) == MINUS_EXPR
)
12220 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12223 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12224 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12226 tree val
= TREE_OPERAND (arg0
, 1);
12227 return omit_two_operands_loc (loc
, type
,
12228 fold_build2_loc (loc
, code
, type
,
12230 build_int_cst (TREE_TYPE (val
),
12232 TREE_OPERAND (arg0
, 0), arg1
);
12235 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12236 if (TREE_CODE (arg0
) == MINUS_EXPR
12237 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12238 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12241 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
12243 return omit_two_operands_loc (loc
, type
,
12245 ? boolean_true_node
: boolean_false_node
,
12246 TREE_OPERAND (arg0
, 1), arg1
);
12249 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12250 if (TREE_CODE (arg0
) == ABS_EXPR
12251 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12252 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12254 /* If this is an EQ or NE comparison with zero and ARG0 is
12255 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12256 two operations, but the latter can be done in one less insn
12257 on machines that have only two-operand insns or on which a
12258 constant cannot be the first operand. */
12259 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12260 && integer_zerop (arg1
))
12262 tree arg00
= TREE_OPERAND (arg0
, 0);
12263 tree arg01
= TREE_OPERAND (arg0
, 1);
12264 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12265 && integer_onep (TREE_OPERAND (arg00
, 0)))
12267 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12268 arg01
, TREE_OPERAND (arg00
, 1));
12269 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12270 build_int_cst (TREE_TYPE (arg0
), 1));
12271 return fold_build2_loc (loc
, code
, type
,
12272 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12275 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12276 && integer_onep (TREE_OPERAND (arg01
, 0)))
12278 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12279 arg00
, TREE_OPERAND (arg01
, 1));
12280 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12281 build_int_cst (TREE_TYPE (arg0
), 1));
12282 return fold_build2_loc (loc
, code
, type
,
12283 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12288 /* If this is an NE or EQ comparison of zero against the result of a
12289 signed MOD operation whose second operand is a power of 2, make
12290 the MOD operation unsigned since it is simpler and equivalent. */
12291 if (integer_zerop (arg1
)
12292 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12293 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12294 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12295 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12296 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12297 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12299 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12300 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12301 fold_convert_loc (loc
, newtype
,
12302 TREE_OPERAND (arg0
, 0)),
12303 fold_convert_loc (loc
, newtype
,
12304 TREE_OPERAND (arg0
, 1)));
12306 return fold_build2_loc (loc
, code
, type
, newmod
,
12307 fold_convert_loc (loc
, newtype
, arg1
));
12310 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12311 C1 is a valid shift constant, and C2 is a power of two, i.e.
12313 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12314 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12315 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12317 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12318 && integer_zerop (arg1
))
12320 tree itype
= TREE_TYPE (arg0
);
12321 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12322 prec
= TYPE_PRECISION (itype
);
12324 /* Check for a valid shift count. */
12325 if (wi::ltu_p (arg001
, prec
))
12327 tree arg01
= TREE_OPERAND (arg0
, 1);
12328 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12329 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12330 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12331 can be rewritten as (X & (C2 << C1)) != 0. */
12332 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12334 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12335 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12336 return fold_build2_loc (loc
, code
, type
, tem
,
12337 fold_convert_loc (loc
, itype
, arg1
));
12339 /* Otherwise, for signed (arithmetic) shifts,
12340 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12341 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12342 else if (!TYPE_UNSIGNED (itype
))
12343 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12344 arg000
, build_int_cst (itype
, 0));
12345 /* Otherwise, of unsigned (logical) shifts,
12346 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12347 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12349 return omit_one_operand_loc (loc
, type
,
12350 code
== EQ_EXPR
? integer_one_node
12351 : integer_zero_node
,
12356 /* If we have (A & C) == C where C is a power of 2, convert this into
12357 (A & C) != 0. Similarly for NE_EXPR. */
12358 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12359 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12360 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12361 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12362 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12363 integer_zero_node
));
12365 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12366 bit, then fold the expression into A < 0 or A >= 0. */
12367 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12371 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12372 Similarly for NE_EXPR. */
12373 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12374 && TREE_CODE (arg1
) == INTEGER_CST
12375 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12377 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12378 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12379 TREE_OPERAND (arg0
, 1));
12381 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12382 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12384 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12385 if (integer_nonzerop (dandnotc
))
12386 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12389 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12390 Similarly for NE_EXPR. */
12391 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12392 && TREE_CODE (arg1
) == INTEGER_CST
12393 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12395 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12397 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12398 TREE_OPERAND (arg0
, 1),
12399 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12400 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12401 if (integer_nonzerop (candnotd
))
12402 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12405 /* If this is a comparison of a field, we may be able to simplify it. */
12406 if ((TREE_CODE (arg0
) == COMPONENT_REF
12407 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12408 /* Handle the constant case even without -O
12409 to make sure the warnings are given. */
12410 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12412 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12417 /* Optimize comparisons of strlen vs zero to a compare of the
12418 first character of the string vs zero. To wit,
12419 strlen(ptr) == 0 => *ptr == 0
12420 strlen(ptr) != 0 => *ptr != 0
12421 Other cases should reduce to one of these two (or a constant)
12422 due to the return value of strlen being unsigned. */
12423 if (TREE_CODE (arg0
) == CALL_EXPR
12424 && integer_zerop (arg1
))
12426 tree fndecl
= get_callee_fndecl (arg0
);
12429 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12430 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12431 && call_expr_nargs (arg0
) == 1
12432 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12434 tree iref
= build_fold_indirect_ref_loc (loc
,
12435 CALL_EXPR_ARG (arg0
, 0));
12436 return fold_build2_loc (loc
, code
, type
, iref
,
12437 build_int_cst (TREE_TYPE (iref
), 0));
12441 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12442 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12443 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12444 && integer_zerop (arg1
)
12445 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12447 tree arg00
= TREE_OPERAND (arg0
, 0);
12448 tree arg01
= TREE_OPERAND (arg0
, 1);
12449 tree itype
= TREE_TYPE (arg00
);
12450 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
12452 if (TYPE_UNSIGNED (itype
))
12454 itype
= signed_type_for (itype
);
12455 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12457 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12458 type
, arg00
, build_zero_cst (itype
));
12462 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12463 if (integer_zerop (arg1
)
12464 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12465 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12466 TREE_OPERAND (arg0
, 1));
12468 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12469 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12470 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12471 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12472 build_zero_cst (TREE_TYPE (arg0
)));
12473 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12474 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12475 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12476 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12477 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12478 build_zero_cst (TREE_TYPE (arg0
)));
12480 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12481 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12482 && TREE_CODE (arg1
) == INTEGER_CST
12483 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12484 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12485 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12486 TREE_OPERAND (arg0
, 1), arg1
));
12488 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12489 (X & C) == 0 when C is a single bit. */
12490 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12491 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12492 && integer_zerop (arg1
)
12493 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12495 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12496 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12497 TREE_OPERAND (arg0
, 1));
12498 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12500 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12504 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12505 constant C is a power of two, i.e. a single bit. */
12506 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12507 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12508 && integer_zerop (arg1
)
12509 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12510 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12511 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12513 tree arg00
= TREE_OPERAND (arg0
, 0);
12514 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12515 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12518 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12519 when is C is a power of two, i.e. a single bit. */
12520 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12521 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12522 && integer_zerop (arg1
)
12523 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12524 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12525 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12527 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12528 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12529 arg000
, TREE_OPERAND (arg0
, 1));
12530 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12531 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12534 if (integer_zerop (arg1
)
12535 && tree_expr_nonzero_p (arg0
))
12537 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12538 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12541 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12542 if (TREE_CODE (arg0
) == NEGATE_EXPR
12543 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12544 return fold_build2_loc (loc
, code
, type
,
12545 TREE_OPERAND (arg0
, 0),
12546 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12547 TREE_OPERAND (arg1
, 0)));
12549 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12550 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12551 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
12553 tree arg00
= TREE_OPERAND (arg0
, 0);
12554 tree arg01
= TREE_OPERAND (arg0
, 1);
12555 tree arg10
= TREE_OPERAND (arg1
, 0);
12556 tree arg11
= TREE_OPERAND (arg1
, 1);
12557 tree itype
= TREE_TYPE (arg0
);
12559 if (operand_equal_p (arg01
, arg11
, 0))
12560 return fold_build2_loc (loc
, code
, type
,
12561 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12562 fold_build2_loc (loc
,
12563 BIT_XOR_EXPR
, itype
,
12566 build_zero_cst (itype
));
12568 if (operand_equal_p (arg01
, arg10
, 0))
12569 return fold_build2_loc (loc
, code
, type
,
12570 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12571 fold_build2_loc (loc
,
12572 BIT_XOR_EXPR
, itype
,
12575 build_zero_cst (itype
));
12577 if (operand_equal_p (arg00
, arg11
, 0))
12578 return fold_build2_loc (loc
, code
, type
,
12579 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12580 fold_build2_loc (loc
,
12581 BIT_XOR_EXPR
, itype
,
12584 build_zero_cst (itype
));
12586 if (operand_equal_p (arg00
, arg10
, 0))
12587 return fold_build2_loc (loc
, code
, type
,
12588 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12589 fold_build2_loc (loc
,
12590 BIT_XOR_EXPR
, itype
,
12593 build_zero_cst (itype
));
12596 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12597 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12599 tree arg00
= TREE_OPERAND (arg0
, 0);
12600 tree arg01
= TREE_OPERAND (arg0
, 1);
12601 tree arg10
= TREE_OPERAND (arg1
, 0);
12602 tree arg11
= TREE_OPERAND (arg1
, 1);
12603 tree itype
= TREE_TYPE (arg0
);
12605 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12606 operand_equal_p guarantees no side-effects so we don't need
12607 to use omit_one_operand on Z. */
12608 if (operand_equal_p (arg01
, arg11
, 0))
12609 return fold_build2_loc (loc
, code
, type
, arg00
,
12610 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12612 if (operand_equal_p (arg01
, arg10
, 0))
12613 return fold_build2_loc (loc
, code
, type
, arg00
,
12614 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12616 if (operand_equal_p (arg00
, arg11
, 0))
12617 return fold_build2_loc (loc
, code
, type
, arg01
,
12618 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12620 if (operand_equal_p (arg00
, arg10
, 0))
12621 return fold_build2_loc (loc
, code
, type
, arg01
,
12622 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12625 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12626 if (TREE_CODE (arg01
) == INTEGER_CST
12627 && TREE_CODE (arg11
) == INTEGER_CST
)
12629 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12630 fold_convert_loc (loc
, itype
, arg11
));
12631 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12632 return fold_build2_loc (loc
, code
, type
, tem
,
12633 fold_convert_loc (loc
, itype
, arg10
));
12637 /* Attempt to simplify equality/inequality comparisons of complex
12638 values. Only lower the comparison if the result is known or
12639 can be simplified to a single scalar comparison. */
12640 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12641 || TREE_CODE (arg0
) == COMPLEX_CST
)
12642 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12643 || TREE_CODE (arg1
) == COMPLEX_CST
))
12645 tree real0
, imag0
, real1
, imag1
;
12648 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12650 real0
= TREE_OPERAND (arg0
, 0);
12651 imag0
= TREE_OPERAND (arg0
, 1);
12655 real0
= TREE_REALPART (arg0
);
12656 imag0
= TREE_IMAGPART (arg0
);
12659 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12661 real1
= TREE_OPERAND (arg1
, 0);
12662 imag1
= TREE_OPERAND (arg1
, 1);
12666 real1
= TREE_REALPART (arg1
);
12667 imag1
= TREE_IMAGPART (arg1
);
12670 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12671 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12673 if (integer_zerop (rcond
))
12675 if (code
== EQ_EXPR
)
12676 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12678 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12682 if (code
== NE_EXPR
)
12683 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12685 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12689 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12690 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12692 if (integer_zerop (icond
))
12694 if (code
== EQ_EXPR
)
12695 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12697 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12701 if (code
== NE_EXPR
)
12702 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12704 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12715 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12716 if (tem
!= NULL_TREE
)
12719 /* Transform comparisons of the form X +- C CMP X. */
12720 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12721 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12722 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12723 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
12724 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12725 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
12727 tree arg01
= TREE_OPERAND (arg0
, 1);
12728 enum tree_code code0
= TREE_CODE (arg0
);
12731 if (TREE_CODE (arg01
) == REAL_CST
)
12732 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12734 is_positive
= tree_int_cst_sgn (arg01
);
12736 /* (X - c) > X becomes false. */
12737 if (code
== GT_EXPR
12738 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12739 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12741 if (TREE_CODE (arg01
) == INTEGER_CST
12742 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12743 fold_overflow_warning (("assuming signed overflow does not "
12744 "occur when assuming that (X - c) > X "
12745 "is always false"),
12746 WARN_STRICT_OVERFLOW_ALL
);
12747 return constant_boolean_node (0, type
);
12750 /* Likewise (X + c) < X becomes false. */
12751 if (code
== LT_EXPR
12752 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12753 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12755 if (TREE_CODE (arg01
) == INTEGER_CST
12756 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12757 fold_overflow_warning (("assuming signed overflow does not "
12758 "occur when assuming that "
12759 "(X + c) < X is always false"),
12760 WARN_STRICT_OVERFLOW_ALL
);
12761 return constant_boolean_node (0, type
);
12764 /* Convert (X - c) <= X to true. */
12765 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
12767 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12768 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12770 if (TREE_CODE (arg01
) == INTEGER_CST
12771 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12772 fold_overflow_warning (("assuming signed overflow does not "
12773 "occur when assuming that "
12774 "(X - c) <= X is always true"),
12775 WARN_STRICT_OVERFLOW_ALL
);
12776 return constant_boolean_node (1, type
);
12779 /* Convert (X + c) >= X to true. */
12780 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
12782 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12783 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12785 if (TREE_CODE (arg01
) == INTEGER_CST
12786 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12787 fold_overflow_warning (("assuming signed overflow does not "
12788 "occur when assuming that "
12789 "(X + c) >= X is always true"),
12790 WARN_STRICT_OVERFLOW_ALL
);
12791 return constant_boolean_node (1, type
);
12794 if (TREE_CODE (arg01
) == INTEGER_CST
)
12796 /* Convert X + c > X and X - c < X to true for integers. */
12797 if (code
== GT_EXPR
12798 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12799 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12801 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12802 fold_overflow_warning (("assuming signed overflow does "
12803 "not occur when assuming that "
12804 "(X + c) > X is always true"),
12805 WARN_STRICT_OVERFLOW_ALL
);
12806 return constant_boolean_node (1, type
);
12809 if (code
== LT_EXPR
12810 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12811 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12813 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12814 fold_overflow_warning (("assuming signed overflow does "
12815 "not occur when assuming that "
12816 "(X - c) < X is always true"),
12817 WARN_STRICT_OVERFLOW_ALL
);
12818 return constant_boolean_node (1, type
);
12821 /* Convert X + c <= X and X - c >= X to false for integers. */
12822 if (code
== LE_EXPR
12823 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12824 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12826 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12827 fold_overflow_warning (("assuming signed overflow does "
12828 "not occur when assuming that "
12829 "(X + c) <= X is always false"),
12830 WARN_STRICT_OVERFLOW_ALL
);
12831 return constant_boolean_node (0, type
);
12834 if (code
== GE_EXPR
12835 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12836 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12838 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12839 fold_overflow_warning (("assuming signed overflow does "
12840 "not occur when assuming that "
12841 "(X - c) >= X is always false"),
12842 WARN_STRICT_OVERFLOW_ALL
);
12843 return constant_boolean_node (0, type
);
12848 /* Comparisons with the highest or lowest possible integer of
12849 the specified precision will have known values. */
12851 tree arg1_type
= TREE_TYPE (arg1
);
12852 unsigned int prec
= TYPE_PRECISION (arg1_type
);
12854 if (TREE_CODE (arg1
) == INTEGER_CST
12855 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
12857 wide_int max
= wi::max_value (arg1_type
);
12858 wide_int signed_max
= wi::max_value (prec
, SIGNED
);
12859 wide_int min
= wi::min_value (arg1_type
);
12861 if (wi::eq_p (arg1
, max
))
12865 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12868 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12871 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12874 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12876 /* The GE_EXPR and LT_EXPR cases above are not normally
12877 reached because of previous transformations. */
12882 else if (wi::eq_p (arg1
, max
- 1))
12886 arg1
= const_binop (PLUS_EXPR
, arg1
,
12887 build_int_cst (TREE_TYPE (arg1
), 1));
12888 return fold_build2_loc (loc
, EQ_EXPR
, type
,
12889 fold_convert_loc (loc
,
12890 TREE_TYPE (arg1
), arg0
),
12893 arg1
= const_binop (PLUS_EXPR
, arg1
,
12894 build_int_cst (TREE_TYPE (arg1
), 1));
12895 return fold_build2_loc (loc
, NE_EXPR
, type
,
12896 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12902 else if (wi::eq_p (arg1
, min
))
12906 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12909 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12912 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12915 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12920 else if (wi::eq_p (arg1
, min
+ 1))
12924 arg1
= const_binop (MINUS_EXPR
, arg1
,
12925 build_int_cst (TREE_TYPE (arg1
), 1));
12926 return fold_build2_loc (loc
, NE_EXPR
, type
,
12927 fold_convert_loc (loc
,
12928 TREE_TYPE (arg1
), arg0
),
12931 arg1
= const_binop (MINUS_EXPR
, arg1
,
12932 build_int_cst (TREE_TYPE (arg1
), 1));
12933 return fold_build2_loc (loc
, EQ_EXPR
, type
,
12934 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12941 else if (wi::eq_p (arg1
, signed_max
)
12942 && TYPE_UNSIGNED (arg1_type
)
12943 /* We will flip the signedness of the comparison operator
12944 associated with the mode of arg1, so the sign bit is
12945 specified by this mode. Check that arg1 is the signed
12946 max associated with this sign bit. */
12947 && prec
== GET_MODE_PRECISION (TYPE_MODE (arg1_type
))
12948 /* signed_type does not work on pointer types. */
12949 && INTEGRAL_TYPE_P (arg1_type
))
12951 /* The following case also applies to X < signed_max+1
12952 and X >= signed_max+1 because previous transformations. */
12953 if (code
== LE_EXPR
|| code
== GT_EXPR
)
12955 tree st
= signed_type_for (arg1_type
);
12956 return fold_build2_loc (loc
,
12957 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
12958 type
, fold_convert_loc (loc
, st
, arg0
),
12959 build_int_cst (st
, 0));
12965 /* If we are comparing an ABS_EXPR with a constant, we can
12966 convert all the cases into explicit comparisons, but they may
12967 well not be faster than doing the ABS and one comparison.
12968 But ABS (X) <= C is a range comparison, which becomes a subtraction
12969 and a comparison, and is probably faster. */
12970 if (code
== LE_EXPR
12971 && TREE_CODE (arg1
) == INTEGER_CST
12972 && TREE_CODE (arg0
) == ABS_EXPR
12973 && ! TREE_SIDE_EFFECTS (arg0
)
12974 && (0 != (tem
= negate_expr (arg1
)))
12975 && TREE_CODE (tem
) == INTEGER_CST
12976 && !TREE_OVERFLOW (tem
))
12977 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
12978 build2 (GE_EXPR
, type
,
12979 TREE_OPERAND (arg0
, 0), tem
),
12980 build2 (LE_EXPR
, type
,
12981 TREE_OPERAND (arg0
, 0), arg1
));
12983 /* Convert ABS_EXPR<x> >= 0 to true. */
12984 strict_overflow_p
= false;
12985 if (code
== GE_EXPR
12986 && (integer_zerop (arg1
)
12987 || (! HONOR_NANS (element_mode (arg0
))
12988 && real_zerop (arg1
)))
12989 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12991 if (strict_overflow_p
)
12992 fold_overflow_warning (("assuming signed overflow does not occur "
12993 "when simplifying comparison of "
12994 "absolute value and zero"),
12995 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12996 return omit_one_operand_loc (loc
, type
,
12997 constant_boolean_node (true, type
),
13001 /* Convert ABS_EXPR<x> < 0 to false. */
13002 strict_overflow_p
= false;
13003 if (code
== LT_EXPR
13004 && (integer_zerop (arg1
) || real_zerop (arg1
))
13005 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13007 if (strict_overflow_p
)
13008 fold_overflow_warning (("assuming signed overflow does not occur "
13009 "when simplifying comparison of "
13010 "absolute value and zero"),
13011 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13012 return omit_one_operand_loc (loc
, type
,
13013 constant_boolean_node (false, type
),
13017 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13018 and similarly for >= into !=. */
13019 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13020 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13021 && TREE_CODE (arg1
) == LSHIFT_EXPR
13022 && integer_onep (TREE_OPERAND (arg1
, 0)))
13023 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13024 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13025 TREE_OPERAND (arg1
, 1)),
13026 build_zero_cst (TREE_TYPE (arg0
)));
13028 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13029 otherwise Y might be >= # of bits in X's type and thus e.g.
13030 (unsigned char) (1 << Y) for Y 15 might be 0.
13031 If the cast is widening, then 1 << Y should have unsigned type,
13032 otherwise if Y is number of bits in the signed shift type minus 1,
13033 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13034 31 might be 0xffffffff80000000. */
13035 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13036 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13037 && CONVERT_EXPR_P (arg1
)
13038 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13039 && (element_precision (TREE_TYPE (arg1
))
13040 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
13041 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
13042 || (element_precision (TREE_TYPE (arg1
))
13043 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
13044 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13046 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13047 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13048 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13049 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13050 build_zero_cst (TREE_TYPE (arg0
)));
13055 case UNORDERED_EXPR
:
13063 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13065 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13066 if (t1
!= NULL_TREE
)
13070 /* If the first operand is NaN, the result is constant. */
13071 if (TREE_CODE (arg0
) == REAL_CST
13072 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13073 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13075 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13076 ? integer_zero_node
13077 : integer_one_node
;
13078 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13081 /* If the second operand is NaN, the result is constant. */
13082 if (TREE_CODE (arg1
) == REAL_CST
13083 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13084 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13086 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13087 ? integer_zero_node
13088 : integer_one_node
;
13089 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13092 /* Simplify unordered comparison of something with itself. */
13093 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13094 && operand_equal_p (arg0
, arg1
, 0))
13095 return constant_boolean_node (1, type
);
13097 if (code
== LTGT_EXPR
13098 && !flag_trapping_math
13099 && operand_equal_p (arg0
, arg1
, 0))
13100 return constant_boolean_node (0, type
);
13102 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13104 tree targ0
= strip_float_extensions (arg0
);
13105 tree targ1
= strip_float_extensions (arg1
);
13106 tree newtype
= TREE_TYPE (targ0
);
13108 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13109 newtype
= TREE_TYPE (targ1
);
13111 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13112 return fold_build2_loc (loc
, code
, type
,
13113 fold_convert_loc (loc
, newtype
, targ0
),
13114 fold_convert_loc (loc
, newtype
, targ1
));
13119 case COMPOUND_EXPR
:
13120 /* When pedantic, a compound expression can be neither an lvalue
13121 nor an integer constant expression. */
13122 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13124 /* Don't let (0, 0) be null pointer constant. */
13125 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13126 : fold_convert_loc (loc
, type
, arg1
);
13127 return pedantic_non_lvalue_loc (loc
, tem
);
13130 if ((TREE_CODE (arg0
) == REAL_CST
13131 && TREE_CODE (arg1
) == REAL_CST
)
13132 || (TREE_CODE (arg0
) == INTEGER_CST
13133 && TREE_CODE (arg1
) == INTEGER_CST
))
13134 return build_complex (type
, arg0
, arg1
);
13138 /* An ASSERT_EXPR should never be passed to fold_binary. */
13139 gcc_unreachable ();
13141 case VEC_PACK_TRUNC_EXPR
:
13142 case VEC_PACK_FIX_TRUNC_EXPR
:
13144 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13147 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
/ 2
13148 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2);
13149 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13152 elts
= XALLOCAVEC (tree
, nelts
);
13153 if (!vec_cst_ctor_to_array (arg0
, elts
)
13154 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
/ 2))
13157 for (i
= 0; i
< nelts
; i
++)
13159 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
13160 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
13161 TREE_TYPE (type
), elts
[i
]);
13162 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
13166 return build_vector (type
, elts
);
13169 case VEC_WIDEN_MULT_LO_EXPR
:
13170 case VEC_WIDEN_MULT_HI_EXPR
:
13171 case VEC_WIDEN_MULT_EVEN_EXPR
:
13172 case VEC_WIDEN_MULT_ODD_EXPR
:
13174 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
13175 unsigned int out
, ofs
, scale
;
13178 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2
13179 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2);
13180 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13183 elts
= XALLOCAVEC (tree
, nelts
* 4);
13184 if (!vec_cst_ctor_to_array (arg0
, elts
)
13185 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
* 2))
13188 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
13189 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
13190 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
13191 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
13192 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
13193 scale
= 1, ofs
= 0;
13194 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13195 scale
= 1, ofs
= 1;
13197 for (out
= 0; out
< nelts
; out
++)
13199 unsigned int in1
= (out
<< scale
) + ofs
;
13200 unsigned int in2
= in1
+ nelts
* 2;
13203 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
13204 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
13206 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13208 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
13209 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
13213 return build_vector (type
, elts
);
13218 } /* switch (code) */
13221 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13222 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
/* walk_tree callback used by contains_label_p: inspects *TP and, per the
   comment above, returns *TP when it is a LABEL_EXPR, NULL_TREE otherwise.
   NOTE(review): the case labels and return statements of this switch are
   elided from this view — only the visible behavior is documented here.  */
13226 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13228 switch (TREE_CODE (*tp
))
/* Tell walk_tree not to descend into this node's operands.  */
13234 *walk_subtrees
= 0;
13236 /* ... fall through ... */
13243 /* Return whether the sub-tree ST contains a label which is accessible from
13244 outside the sub-tree. */
/* Predicate wrapper: walk ST (skipping duplicate subtrees) with the
   contains_label_1 callback; a non-NULL_TREE walk result means a
   LABEL_EXPR was found somewhere in ST.  */
13247 contains_label_p (tree st
)
13250 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13253 /* Fold a ternary expression of code CODE and type TYPE with operands
13254 OP0, OP1, and OP2. Return the folded expression if folding is
13255 successful. Otherwise, return NULL_TREE. */
13258 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13259 tree op0
, tree op1
, tree op2
)
13262 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13263 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13265 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13266 && TREE_CODE_LENGTH (code
) == 3);
13268 /* If this is a commutative operation, and OP0 is a constant, move it
13269 to OP1 to reduce the number of tests below. */
13270 if (commutative_ternary_tree_code (code
)
13271 && tree_swap_operands_p (op0
, op1
, true))
13272 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
13274 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
13278 /* Strip any conversions that don't change the mode. This is safe
13279 for every expression, except for a comparison expression because
13280 its signedness is derived from its operands. So, in the latter
13281 case, only strip conversions that don't change the signedness.
13283 Note that this is done as an internal manipulation within the
13284 constant folder, in order to find the simplest representation of
13285 the arguments so that their form can be studied. In any cases,
13286 the appropriate type conversions should be put back in the tree
13287 that will get out of the constant folder. */
13308 case COMPONENT_REF
:
13309 if (TREE_CODE (arg0
) == CONSTRUCTOR
13310 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13312 unsigned HOST_WIDE_INT idx
;
13314 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13321 case VEC_COND_EXPR
:
13322 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13323 so all simple results must be passed through pedantic_non_lvalue. */
13324 if (TREE_CODE (arg0
) == INTEGER_CST
)
13326 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13327 tem
= integer_zerop (arg0
) ? op2
: op1
;
13328 /* Only optimize constant conditions when the selected branch
13329 has the same type as the COND_EXPR. This avoids optimizing
13330 away "c ? x : throw", where the throw has a void type.
13331 Avoid throwing away that operand which contains label. */
13332 if ((!TREE_SIDE_EFFECTS (unused_op
)
13333 || !contains_label_p (unused_op
))
13334 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13335 || VOID_TYPE_P (type
)))
13336 return pedantic_non_lvalue_loc (loc
, tem
);
13339 else if (TREE_CODE (arg0
) == VECTOR_CST
)
13341 if ((TREE_CODE (arg1
) == VECTOR_CST
13342 || TREE_CODE (arg1
) == CONSTRUCTOR
)
13343 && (TREE_CODE (arg2
) == VECTOR_CST
13344 || TREE_CODE (arg2
) == CONSTRUCTOR
))
13346 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13347 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
13348 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
13349 for (i
= 0; i
< nelts
; i
++)
13351 tree val
= VECTOR_CST_ELT (arg0
, i
);
13352 if (integer_all_onesp (val
))
13354 else if (integer_zerop (val
))
13355 sel
[i
] = nelts
+ i
;
13356 else /* Currently unreachable. */
13359 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
13360 if (t
!= NULL_TREE
)
13365 /* If we have A op B ? A : C, we may be able to convert this to a
13366 simpler expression, depending on the operation and the values
13367 of B and C. Signed zeros prevent all of these transformations,
13368 for reasons given above each one.
13370 Also try swapping the arguments and inverting the conditional. */
13371 if (COMPARISON_CLASS_P (arg0
)
13372 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13373 arg1
, TREE_OPERAND (arg0
, 1))
13374 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
13376 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13381 if (COMPARISON_CLASS_P (arg0
)
13382 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13384 TREE_OPERAND (arg0
, 1))
13385 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
13387 location_t loc0
= expr_location_or (arg0
, loc
);
13388 tem
= fold_invert_truthvalue (loc0
, arg0
);
13389 if (tem
&& COMPARISON_CLASS_P (tem
))
13391 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13397 /* If the second operand is simpler than the third, swap them
13398 since that produces better jump optimization results. */
13399 if (truth_value_p (TREE_CODE (arg0
))
13400 && tree_swap_operands_p (op1
, op2
, false))
13402 location_t loc0
= expr_location_or (arg0
, loc
);
13403 /* See if this can be inverted. If it can't, possibly because
13404 it was a floating-point inequality comparison, don't do
13406 tem
= fold_invert_truthvalue (loc0
, arg0
);
13408 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13411 /* Convert A ? 1 : 0 to simply A. */
13412 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
13413 : (integer_onep (op1
)
13414 && !VECTOR_TYPE_P (type
)))
13415 && integer_zerop (op2
)
13416 /* If we try to convert OP0 to our type, the
13417 call to fold will try to move the conversion inside
13418 a COND, which will recurse. In that case, the COND_EXPR
13419 is probably the best choice, so leave it alone. */
13420 && type
== TREE_TYPE (arg0
))
13421 return pedantic_non_lvalue_loc (loc
, arg0
);
13423 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13424 over COND_EXPR in cases such as floating point comparisons. */
13425 if (integer_zerop (op1
)
13426 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
13427 : (integer_onep (op2
)
13428 && !VECTOR_TYPE_P (type
)))
13429 && truth_value_p (TREE_CODE (arg0
)))
13430 return pedantic_non_lvalue_loc (loc
,
13431 fold_convert_loc (loc
, type
,
13432 invert_truthvalue_loc (loc
,
13435 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13436 if (TREE_CODE (arg0
) == LT_EXPR
13437 && integer_zerop (TREE_OPERAND (arg0
, 1))
13438 && integer_zerop (op2
)
13439 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13441 /* sign_bit_p looks through both zero and sign extensions,
13442 but for this optimization only sign extensions are
13444 tree tem2
= TREE_OPERAND (arg0
, 0);
13445 while (tem
!= tem2
)
13447 if (TREE_CODE (tem2
) != NOP_EXPR
13448 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
13453 tem2
= TREE_OPERAND (tem2
, 0);
13455 /* sign_bit_p only checks ARG1 bits within A's precision.
13456 If <sign bit of A> has wider type than A, bits outside
13457 of A's precision in <sign bit of A> need to be checked.
13458 If they are all 0, this optimization needs to be done
13459 in unsigned A's type, if they are all 1 in signed A's type,
13460 otherwise this can't be done. */
13462 && TYPE_PRECISION (TREE_TYPE (tem
))
13463 < TYPE_PRECISION (TREE_TYPE (arg1
))
13464 && TYPE_PRECISION (TREE_TYPE (tem
))
13465 < TYPE_PRECISION (type
))
13467 int inner_width
, outer_width
;
13470 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13471 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13472 if (outer_width
> TYPE_PRECISION (type
))
13473 outer_width
= TYPE_PRECISION (type
);
13475 wide_int mask
= wi::shifted_mask
13476 (inner_width
, outer_width
- inner_width
, false,
13477 TYPE_PRECISION (TREE_TYPE (arg1
)));
13479 wide_int common
= mask
& arg1
;
13480 if (common
== mask
)
13482 tem_type
= signed_type_for (TREE_TYPE (tem
));
13483 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13485 else if (common
== 0)
13487 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13488 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13496 fold_convert_loc (loc
, type
,
13497 fold_build2_loc (loc
, BIT_AND_EXPR
,
13498 TREE_TYPE (tem
), tem
,
13499 fold_convert_loc (loc
,
13504 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13505 already handled above. */
13506 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13507 && integer_onep (TREE_OPERAND (arg0
, 1))
13508 && integer_zerop (op2
)
13509 && integer_pow2p (arg1
))
13511 tree tem
= TREE_OPERAND (arg0
, 0);
13513 if (TREE_CODE (tem
) == RSHIFT_EXPR
13514 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
13515 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13516 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
13517 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13518 TREE_OPERAND (tem
, 0), arg1
);
13521 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13522 is probably obsolete because the first operand should be a
13523 truth value (that's why we have the two cases above), but let's
13524 leave it in until we can confirm this for all front-ends. */
13525 if (integer_zerop (op2
)
13526 && TREE_CODE (arg0
) == NE_EXPR
13527 && integer_zerop (TREE_OPERAND (arg0
, 1))
13528 && integer_pow2p (arg1
)
13529 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13530 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13531 arg1
, OEP_ONLY_CONST
))
13532 return pedantic_non_lvalue_loc (loc
,
13533 fold_convert_loc (loc
, type
,
13534 TREE_OPERAND (arg0
, 0)));
13536 /* Disable the transformations below for vectors, since
13537 fold_binary_op_with_conditional_arg may undo them immediately,
13538 yielding an infinite loop. */
13539 if (code
== VEC_COND_EXPR
)
13542 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13543 if (integer_zerop (op2
)
13544 && truth_value_p (TREE_CODE (arg0
))
13545 && truth_value_p (TREE_CODE (arg1
))
13546 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13547 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
13548 : TRUTH_ANDIF_EXPR
,
13549 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
13551 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13552 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
13553 && truth_value_p (TREE_CODE (arg0
))
13554 && truth_value_p (TREE_CODE (arg1
))
13555 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13557 location_t loc0
= expr_location_or (arg0
, loc
);
13558 /* Only perform transformation if ARG0 is easily inverted. */
13559 tem
= fold_invert_truthvalue (loc0
, arg0
);
13561 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13564 type
, fold_convert_loc (loc
, type
, tem
),
13568 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13569 if (integer_zerop (arg1
)
13570 && truth_value_p (TREE_CODE (arg0
))
13571 && truth_value_p (TREE_CODE (op2
))
13572 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13574 location_t loc0
= expr_location_or (arg0
, loc
);
13575 /* Only perform transformation if ARG0 is easily inverted. */
13576 tem
= fold_invert_truthvalue (loc0
, arg0
);
13578 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13579 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
13580 type
, fold_convert_loc (loc
, type
, tem
),
13584 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13585 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
13586 && truth_value_p (TREE_CODE (arg0
))
13587 && truth_value_p (TREE_CODE (op2
))
13588 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13589 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13590 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
13591 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
13596 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13597 of fold_ternary on them. */
13598 gcc_unreachable ();
13600 case BIT_FIELD_REF
:
13601 if ((TREE_CODE (arg0
) == VECTOR_CST
13602 || (TREE_CODE (arg0
) == CONSTRUCTOR
13603 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
13604 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13605 || (TREE_CODE (type
) == VECTOR_TYPE
13606 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
13608 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
13609 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
13610 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
13611 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
13614 && (idx
% width
) == 0
13615 && (n
% width
) == 0
13616 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
13621 if (TREE_CODE (arg0
) == VECTOR_CST
)
13624 return VECTOR_CST_ELT (arg0
, idx
);
13626 tree
*vals
= XALLOCAVEC (tree
, n
);
13627 for (unsigned i
= 0; i
< n
; ++i
)
13628 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
13629 return build_vector (type
, vals
);
13632 /* Constructor elements can be subvectors. */
13633 unsigned HOST_WIDE_INT k
= 1;
13634 if (CONSTRUCTOR_NELTS (arg0
) != 0)
13636 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
13637 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
13638 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
13641 /* We keep an exact subset of the constructor elements. */
13642 if ((idx
% k
) == 0 && (n
% k
) == 0)
13644 if (CONSTRUCTOR_NELTS (arg0
) == 0)
13645 return build_constructor (type
, NULL
);
13650 if (idx
< CONSTRUCTOR_NELTS (arg0
))
13651 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
13652 return build_zero_cst (type
);
13655 vec
<constructor_elt
, va_gc
> *vals
;
13656 vec_alloc (vals
, n
);
13657 for (unsigned i
= 0;
13658 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
13660 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
13662 (arg0
, idx
+ i
)->value
);
13663 return build_constructor (type
, vals
);
13665 /* The bitfield references a single constructor element. */
13666 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
13668 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
13669 return build_zero_cst (type
);
13671 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
13673 return fold_build3_loc (loc
, code
, type
,
13674 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
13675 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
13680 /* A bit-field-ref that referenced the full argument can be stripped. */
13681 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
13682 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
13683 && integer_zerop (op2
))
13684 return fold_convert_loc (loc
, type
, arg0
);
13686 /* On constants we can use native encode/interpret to constant
13687 fold (nearly) all BIT_FIELD_REFs. */
13688 if (CONSTANT_CLASS_P (arg0
)
13689 && can_native_interpret_type_p (type
)
13690 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
13691 /* This limitation should not be necessary, we just need to
13692 round this up to mode size. */
13693 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
13694 /* Need bit-shifting of the buffer to relax the following. */
13695 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
13697 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13698 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
13699 unsigned HOST_WIDE_INT clen
;
13700 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
13701 /* ??? We cannot tell native_encode_expr to start at
13702 some random byte only. So limit us to a reasonable amount
13706 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
13707 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
13709 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
13711 tree v
= native_interpret_expr (type
,
13712 b
+ bitpos
/ BITS_PER_UNIT
,
13713 bitsize
/ BITS_PER_UNIT
);
13723 /* For integers we can decompose the FMA if possible. */
13724 if (TREE_CODE (arg0
) == INTEGER_CST
13725 && TREE_CODE (arg1
) == INTEGER_CST
)
13726 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
13727 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
13728 if (integer_zerop (arg2
))
13729 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
13731 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
13733 case VEC_PERM_EXPR
:
13734 if (TREE_CODE (arg2
) == VECTOR_CST
)
13736 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
13737 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
13738 unsigned char *sel2
= sel
+ nelts
;
13739 bool need_mask_canon
= false;
13740 bool need_mask_canon2
= false;
13741 bool all_in_vec0
= true;
13742 bool all_in_vec1
= true;
13743 bool maybe_identity
= true;
13744 bool single_arg
= (op0
== op1
);
13745 bool changed
= false;
13747 mask2
= 2 * nelts
- 1;
13748 mask
= single_arg
? (nelts
- 1) : mask2
;
13749 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
13750 for (i
= 0; i
< nelts
; i
++)
13752 tree val
= VECTOR_CST_ELT (arg2
, i
);
13753 if (TREE_CODE (val
) != INTEGER_CST
)
13756 /* Make sure that the perm value is in an acceptable
13759 need_mask_canon
|= wi::gtu_p (t
, mask
);
13760 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
13761 sel
[i
] = t
.to_uhwi () & mask
;
13762 sel2
[i
] = t
.to_uhwi () & mask2
;
13764 if (sel
[i
] < nelts
)
13765 all_in_vec1
= false;
13767 all_in_vec0
= false;
13769 if ((sel
[i
] & (nelts
-1)) != i
)
13770 maybe_identity
= false;
13773 if (maybe_identity
)
13783 else if (all_in_vec1
)
13786 for (i
= 0; i
< nelts
; i
++)
13788 need_mask_canon
= true;
13791 if ((TREE_CODE (op0
) == VECTOR_CST
13792 || TREE_CODE (op0
) == CONSTRUCTOR
)
13793 && (TREE_CODE (op1
) == VECTOR_CST
13794 || TREE_CODE (op1
) == CONSTRUCTOR
))
13796 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
13797 if (t
!= NULL_TREE
)
13801 if (op0
== op1
&& !single_arg
)
13804 /* Some targets are deficient and fail to expand a single
13805 argument permutation while still allowing an equivalent
13806 2-argument version. */
13807 if (need_mask_canon
&& arg2
== op2
13808 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
13809 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
13811 need_mask_canon
= need_mask_canon2
;
13815 if (need_mask_canon
&& arg2
== op2
)
13817 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
13818 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
13819 for (i
= 0; i
< nelts
; i
++)
13820 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
13821 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
13826 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
13832 } /* switch (code) */
13835 /* Perform constant folding and related simplification of EXPR.
13836 The related simplifications include x*1 => x, x*0 => 0, etc.,
13837 and application of the associative law.
13838 NOP_EXPR conversions may be removed freely (as long as we
13839 are careful not to change the type of the overall expression).
13840 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13841 but we can constant-fold them if they have constant operands. */
13843 #ifdef ENABLE_FOLD_CHECKING
13844 # define fold(x) fold_1 (x)
13845 static tree
fold_1 (tree
);
13851 const tree t
= expr
;
13852 enum tree_code code
= TREE_CODE (t
);
13853 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13855 location_t loc
= EXPR_LOCATION (expr
);
13857 /* Return right away if a constant. */
13858 if (kind
== tcc_constant
)
13861 /* CALL_EXPR-like objects with variable numbers of operands are
13862 treated specially. */
13863 if (kind
== tcc_vl_exp
)
13865 if (code
== CALL_EXPR
)
13867 tem
= fold_call_expr (loc
, expr
, false);
13868 return tem
? tem
: expr
;
13873 if (IS_EXPR_CODE_CLASS (kind
))
13875 tree type
= TREE_TYPE (t
);
13876 tree op0
, op1
, op2
;
13878 switch (TREE_CODE_LENGTH (code
))
13881 op0
= TREE_OPERAND (t
, 0);
13882 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13883 return tem
? tem
: expr
;
13885 op0
= TREE_OPERAND (t
, 0);
13886 op1
= TREE_OPERAND (t
, 1);
13887 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13888 return tem
? tem
: expr
;
13890 op0
= TREE_OPERAND (t
, 0);
13891 op1
= TREE_OPERAND (t
, 1);
13892 op2
= TREE_OPERAND (t
, 2);
13893 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13894 return tem
? tem
: expr
;
13904 tree op0
= TREE_OPERAND (t
, 0);
13905 tree op1
= TREE_OPERAND (t
, 1);
13907 if (TREE_CODE (op1
) == INTEGER_CST
13908 && TREE_CODE (op0
) == CONSTRUCTOR
13909 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13911 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
13912 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
13913 unsigned HOST_WIDE_INT begin
= 0;
13915 /* Find a matching index by means of a binary search. */
13916 while (begin
!= end
)
13918 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
13919 tree index
= (*elts
)[middle
].index
;
13921 if (TREE_CODE (index
) == INTEGER_CST
13922 && tree_int_cst_lt (index
, op1
))
13923 begin
= middle
+ 1;
13924 else if (TREE_CODE (index
) == INTEGER_CST
13925 && tree_int_cst_lt (op1
, index
))
13927 else if (TREE_CODE (index
) == RANGE_EXPR
13928 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
13929 begin
= middle
+ 1;
13930 else if (TREE_CODE (index
) == RANGE_EXPR
13931 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13934 return (*elts
)[middle
].value
;
13941 /* Return a VECTOR_CST if possible. */
13944 tree type
= TREE_TYPE (t
);
13945 if (TREE_CODE (type
) != VECTOR_TYPE
)
13948 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
13949 unsigned HOST_WIDE_INT idx
, pos
= 0;
13952 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
13954 if (!CONSTANT_CLASS_P (value
))
13956 if (TREE_CODE (value
) == VECTOR_CST
)
13958 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
13959 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
13962 vec
[pos
++] = value
;
13964 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
13965 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
13967 return build_vector (type
, vec
);
13971 return fold (DECL_INITIAL (t
));
13975 } /* switch (code) */
13978 #ifdef ENABLE_FOLD_CHECKING
13981 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
13982 hash_table
<pointer_hash
<const tree_node
> > *);
13983 static void fold_check_failed (const_tree
, const_tree
);
13984 void print_fold_checksum (const_tree
);
13986 /* When --enable-checking=fold, compute a digest of expr before
13987 and after actual fold call to see if fold did not accidentally
13988 change original expr. */
13994 struct md5_ctx ctx
;
13995 unsigned char checksum_before
[16], checksum_after
[16];
13996 hash_table
<pointer_hash
<const tree_node
> > ht (32);
13998 md5_init_ctx (&ctx
);
13999 fold_checksum_tree (expr
, &ctx
, &ht
);
14000 md5_finish_ctx (&ctx
, checksum_before
);
14003 ret
= fold_1 (expr
);
14005 md5_init_ctx (&ctx
);
14006 fold_checksum_tree (expr
, &ctx
, &ht
);
14007 md5_finish_ctx (&ctx
, checksum_after
);
14009 if (memcmp (checksum_before
, checksum_after
, 16))
14010 fold_check_failed (expr
, ret
);
14016 print_fold_checksum (const_tree expr
)
14018 struct md5_ctx ctx
;
14019 unsigned char checksum
[16], cnt
;
14020 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14022 md5_init_ctx (&ctx
);
14023 fold_checksum_tree (expr
, &ctx
, &ht
);
14024 md5_finish_ctx (&ctx
, checksum
);
14025 for (cnt
= 0; cnt
< 16; ++cnt
)
14026 fprintf (stderr
, "%02x", checksum
[cnt
]);
14027 putc ('\n', stderr
);
14031 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14033 internal_error ("fold check: original tree changed by fold");
14037 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
14038 hash_table
<pointer_hash
<const tree_node
> > *ht
)
14040 const tree_node
**slot
;
14041 enum tree_code code
;
14042 union tree_node buf
;
14048 slot
= ht
->find_slot (expr
, INSERT
);
14052 code
= TREE_CODE (expr
);
14053 if (TREE_CODE_CLASS (code
) == tcc_declaration
14054 && DECL_ASSEMBLER_NAME_SET_P (expr
))
14056 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14057 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14058 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14059 expr
= (tree
) &buf
;
14061 else if (TREE_CODE_CLASS (code
) == tcc_type
14062 && (TYPE_POINTER_TO (expr
)
14063 || TYPE_REFERENCE_TO (expr
)
14064 || TYPE_CACHED_VALUES_P (expr
)
14065 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14066 || TYPE_NEXT_VARIANT (expr
)))
14068 /* Allow these fields to be modified. */
14070 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14071 expr
= tmp
= (tree
) &buf
;
14072 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14073 TYPE_POINTER_TO (tmp
) = NULL
;
14074 TYPE_REFERENCE_TO (tmp
) = NULL
;
14075 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14076 if (TYPE_CACHED_VALUES_P (tmp
))
14078 TYPE_CACHED_VALUES_P (tmp
) = 0;
14079 TYPE_CACHED_VALUES (tmp
) = NULL
;
14082 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14083 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14084 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14085 if (TREE_CODE_CLASS (code
) != tcc_type
14086 && TREE_CODE_CLASS (code
) != tcc_declaration
14087 && code
!= TREE_LIST
14088 && code
!= SSA_NAME
14089 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14090 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
14091 switch (TREE_CODE_CLASS (code
))
14097 md5_process_bytes (TREE_STRING_POINTER (expr
),
14098 TREE_STRING_LENGTH (expr
), ctx
);
14101 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14102 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14105 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14106 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14112 case tcc_exceptional
:
14116 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14117 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14118 expr
= TREE_CHAIN (expr
);
14119 goto recursive_label
;
14122 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14123 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14129 case tcc_expression
:
14130 case tcc_reference
:
14131 case tcc_comparison
:
14134 case tcc_statement
:
14136 len
= TREE_OPERAND_LENGTH (expr
);
14137 for (i
= 0; i
< len
; ++i
)
14138 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14140 case tcc_declaration
:
14141 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14142 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14143 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14145 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14146 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14147 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14148 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14149 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14152 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14154 if (TREE_CODE (expr
) == FUNCTION_DECL
)
14156 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14157 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
14159 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14163 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14164 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14165 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14166 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14167 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14168 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14169 if (INTEGRAL_TYPE_P (expr
)
14170 || SCALAR_FLOAT_TYPE_P (expr
))
14172 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14173 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14175 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14176 if (TREE_CODE (expr
) == RECORD_TYPE
14177 || TREE_CODE (expr
) == UNION_TYPE
14178 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14179 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14180 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14187 /* Helper function for outputting the checksum of a tree T. When
14188 debugging with gdb, you can "define mynext" to be "next" followed
14189 by "call debug_fold_checksum (op0)", then just trace down till the
14192 DEBUG_FUNCTION
void
14193 debug_fold_checksum (const_tree t
)
14196 unsigned char checksum
[16];
14197 struct md5_ctx ctx
;
14198 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14200 md5_init_ctx (&ctx
);
14201 fold_checksum_tree (t
, &ctx
, &ht
);
14202 md5_finish_ctx (&ctx
, checksum
);
14205 for (i
= 0; i
< 16; i
++)
14206 fprintf (stderr
, "%d ", checksum
[i
]);
14208 fprintf (stderr
, "\n");
14213 /* Fold a unary tree expression with code CODE of type TYPE with an
14214 operand OP0. LOC is the location of the resulting expression.
14215 Return a folded expression if successful. Otherwise, return a tree
14216 expression with code CODE of type TYPE with an operand OP0. */
14219 fold_build1_stat_loc (location_t loc
,
14220 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14223 #ifdef ENABLE_FOLD_CHECKING
14224 unsigned char checksum_before
[16], checksum_after
[16];
14225 struct md5_ctx ctx
;
14226 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14228 md5_init_ctx (&ctx
);
14229 fold_checksum_tree (op0
, &ctx
, &ht
);
14230 md5_finish_ctx (&ctx
, checksum_before
);
14234 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14236 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14238 #ifdef ENABLE_FOLD_CHECKING
14239 md5_init_ctx (&ctx
);
14240 fold_checksum_tree (op0
, &ctx
, &ht
);
14241 md5_finish_ctx (&ctx
, checksum_after
);
14243 if (memcmp (checksum_before
, checksum_after
, 16))
14244 fold_check_failed (op0
, tem
);
14249 /* Fold a binary tree expression with code CODE of type TYPE with
14250 operands OP0 and OP1. LOC is the location of the resulting
14251 expression. Return a folded expression if successful. Otherwise,
14252 return a tree expression with code CODE of type TYPE with operands
14256 fold_build2_stat_loc (location_t loc
,
14257 enum tree_code code
, tree type
, tree op0
, tree op1
14261 #ifdef ENABLE_FOLD_CHECKING
14262 unsigned char checksum_before_op0
[16],
14263 checksum_before_op1
[16],
14264 checksum_after_op0
[16],
14265 checksum_after_op1
[16];
14266 struct md5_ctx ctx
;
14267 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14269 md5_init_ctx (&ctx
);
14270 fold_checksum_tree (op0
, &ctx
, &ht
);
14271 md5_finish_ctx (&ctx
, checksum_before_op0
);
14274 md5_init_ctx (&ctx
);
14275 fold_checksum_tree (op1
, &ctx
, &ht
);
14276 md5_finish_ctx (&ctx
, checksum_before_op1
);
14280 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14282 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14284 #ifdef ENABLE_FOLD_CHECKING
14285 md5_init_ctx (&ctx
);
14286 fold_checksum_tree (op0
, &ctx
, &ht
);
14287 md5_finish_ctx (&ctx
, checksum_after_op0
);
14290 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14291 fold_check_failed (op0
, tem
);
14293 md5_init_ctx (&ctx
);
14294 fold_checksum_tree (op1
, &ctx
, &ht
);
14295 md5_finish_ctx (&ctx
, checksum_after_op1
);
14297 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14298 fold_check_failed (op1
, tem
);
14303 /* Fold a ternary tree expression with code CODE of type TYPE with
14304 operands OP0, OP1, and OP2. Return a folded expression if
14305 successful. Otherwise, return a tree expression with code CODE of
14306 type TYPE with operands OP0, OP1, and OP2. */
14309 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14310 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14313 #ifdef ENABLE_FOLD_CHECKING
14314 unsigned char checksum_before_op0
[16],
14315 checksum_before_op1
[16],
14316 checksum_before_op2
[16],
14317 checksum_after_op0
[16],
14318 checksum_after_op1
[16],
14319 checksum_after_op2
[16];
14320 struct md5_ctx ctx
;
14321 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14323 md5_init_ctx (&ctx
);
14324 fold_checksum_tree (op0
, &ctx
, &ht
);
14325 md5_finish_ctx (&ctx
, checksum_before_op0
);
14328 md5_init_ctx (&ctx
);
14329 fold_checksum_tree (op1
, &ctx
, &ht
);
14330 md5_finish_ctx (&ctx
, checksum_before_op1
);
14333 md5_init_ctx (&ctx
);
14334 fold_checksum_tree (op2
, &ctx
, &ht
);
14335 md5_finish_ctx (&ctx
, checksum_before_op2
);
14339 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14340 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14342 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14344 #ifdef ENABLE_FOLD_CHECKING
14345 md5_init_ctx (&ctx
);
14346 fold_checksum_tree (op0
, &ctx
, &ht
);
14347 md5_finish_ctx (&ctx
, checksum_after_op0
);
14350 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14351 fold_check_failed (op0
, tem
);
14353 md5_init_ctx (&ctx
);
14354 fold_checksum_tree (op1
, &ctx
, &ht
);
14355 md5_finish_ctx (&ctx
, checksum_after_op1
);
14358 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14359 fold_check_failed (op1
, tem
);
14361 md5_init_ctx (&ctx
);
14362 fold_checksum_tree (op2
, &ctx
, &ht
);
14363 md5_finish_ctx (&ctx
, checksum_after_op2
);
14365 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14366 fold_check_failed (op2
, tem
);
14371 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14372 arguments in ARGARRAY, and a null static chain.
14373 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14374 of type TYPE from the given operands as constructed by build_call_array. */
14377 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14378 int nargs
, tree
*argarray
)
14381 #ifdef ENABLE_FOLD_CHECKING
14382 unsigned char checksum_before_fn
[16],
14383 checksum_before_arglist
[16],
14384 checksum_after_fn
[16],
14385 checksum_after_arglist
[16];
14386 struct md5_ctx ctx
;
14387 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14390 md5_init_ctx (&ctx
);
14391 fold_checksum_tree (fn
, &ctx
, &ht
);
14392 md5_finish_ctx (&ctx
, checksum_before_fn
);
14395 md5_init_ctx (&ctx
);
14396 for (i
= 0; i
< nargs
; i
++)
14397 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14398 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14402 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14404 #ifdef ENABLE_FOLD_CHECKING
14405 md5_init_ctx (&ctx
);
14406 fold_checksum_tree (fn
, &ctx
, &ht
);
14407 md5_finish_ctx (&ctx
, checksum_after_fn
);
14410 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14411 fold_check_failed (fn
, tem
);
14413 md5_init_ctx (&ctx
);
14414 for (i
= 0; i
< nargs
; i
++)
14415 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14416 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14418 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14419 fold_check_failed (NULL_TREE
, tem
);
14424 /* Perform constant folding and related simplification of initializer
14425 expression EXPR. These behave identically to "fold_buildN" but ignore
14426 potential run-time traps and exceptions that fold must preserve. */
14428 #define START_FOLD_INIT \
14429 int saved_signaling_nans = flag_signaling_nans;\
14430 int saved_trapping_math = flag_trapping_math;\
14431 int saved_rounding_math = flag_rounding_math;\
14432 int saved_trapv = flag_trapv;\
14433 int saved_folding_initializer = folding_initializer;\
14434 flag_signaling_nans = 0;\
14435 flag_trapping_math = 0;\
14436 flag_rounding_math = 0;\
14438 folding_initializer = 1;
14440 #define END_FOLD_INIT \
14441 flag_signaling_nans = saved_signaling_nans;\
14442 flag_trapping_math = saved_trapping_math;\
14443 flag_rounding_math = saved_rounding_math;\
14444 flag_trapv = saved_trapv;\
14445 folding_initializer = saved_folding_initializer;
14448 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14449 tree type
, tree op
)
14454 result
= fold_build1_loc (loc
, code
, type
, op
);
14461 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14462 tree type
, tree op0
, tree op1
)
14467 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14474 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14475 int nargs
, tree
*argarray
)
14480 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14486 #undef START_FOLD_INIT
14487 #undef END_FOLD_INIT
14489 /* Determine if first argument is a multiple of second argument. Return 0 if
14490 it is not, or we cannot easily determined it to be.
14492 An example of the sort of thing we care about (at this point; this routine
14493 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14494 fold cases do now) is discovering that
14496 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14502 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14504 This code also handles discovering that
14506 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14508 is a multiple of 8 so we don't have to worry about dealing with a
14509 possible remainder.
14511 Note that we *look* inside a SAVE_EXPR only to determine how it was
14512 calculated; it is not safe for fold to do much of anything else with the
14513 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14514 at run time. For example, the latter example above *cannot* be implemented
14515 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14516 evaluation time of the original SAVE_EXPR is not necessarily the same at
14517 the time the new expression is evaluated. The only optimization of this
14518 sort that would be valid is changing
14520 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14524 SAVE_EXPR (I) * SAVE_EXPR (J)
14526 (where the same SAVE_EXPR (J) is used in the original and the
14527 transformed version). */
14530 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
14532 if (operand_equal_p (top
, bottom
, 0))
14535 if (TREE_CODE (type
) != INTEGER_TYPE
)
14538 switch (TREE_CODE (top
))
14541 /* Bitwise and provides a power of two multiple. If the mask is
14542 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14543 if (!integer_pow2p (bottom
))
14548 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14549 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14553 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14554 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14557 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14561 op1
= TREE_OPERAND (top
, 1);
14562 /* const_binop may not detect overflow correctly,
14563 so check for it explicitly here. */
14564 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
14565 && 0 != (t1
= fold_convert (type
,
14566 const_binop (LSHIFT_EXPR
,
14569 && !TREE_OVERFLOW (t1
))
14570 return multiple_of_p (type
, t1
, bottom
);
14575 /* Can't handle conversions from non-integral or wider integral type. */
14576 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14577 || (TYPE_PRECISION (type
)
14578 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14581 /* .. fall through ... */
14584 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
14587 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14588 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
14591 if (TREE_CODE (bottom
) != INTEGER_CST
14592 || integer_zerop (bottom
)
14593 || (TYPE_UNSIGNED (type
)
14594 && (tree_int_cst_sgn (top
) < 0
14595 || tree_int_cst_sgn (bottom
) < 0)))
14597 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
14605 /* Return true if CODE or TYPE is known to be non-negative. */
14608 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14610 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14611 && truth_value_p (code
))
14612 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14613 have a signed:1 type (where the value is -1 and 0). */
14618 /* Return true if (CODE OP0) is known to be non-negative. If the return
14619 value is based on the assumption that signed overflow is undefined,
14620 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14621 *STRICT_OVERFLOW_P. */
14624 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14625 bool *strict_overflow_p
)
14627 if (TYPE_UNSIGNED (type
))
14633 /* We can't return 1 if flag_wrapv is set because
14634 ABS_EXPR<INT_MIN> = INT_MIN. */
14635 if (!INTEGRAL_TYPE_P (type
))
14637 if (TYPE_OVERFLOW_UNDEFINED (type
))
14639 *strict_overflow_p
= true;
14644 case NON_LVALUE_EXPR
:
14646 case FIX_TRUNC_EXPR
:
14647 return tree_expr_nonnegative_warnv_p (op0
,
14648 strict_overflow_p
);
14652 tree inner_type
= TREE_TYPE (op0
);
14653 tree outer_type
= type
;
14655 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14657 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14658 return tree_expr_nonnegative_warnv_p (op0
,
14659 strict_overflow_p
);
14660 if (INTEGRAL_TYPE_P (inner_type
))
14662 if (TYPE_UNSIGNED (inner_type
))
14664 return tree_expr_nonnegative_warnv_p (op0
,
14665 strict_overflow_p
);
14668 else if (INTEGRAL_TYPE_P (outer_type
))
14670 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14671 return tree_expr_nonnegative_warnv_p (op0
,
14672 strict_overflow_p
);
14673 if (INTEGRAL_TYPE_P (inner_type
))
14674 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14675 && TYPE_UNSIGNED (inner_type
);
14681 return tree_simple_nonnegative_warnv_p (code
, type
);
14684 /* We don't know sign of `t', so be conservative and return false. */
14688 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14689 value is based on the assumption that signed overflow is undefined,
14690 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14691 *STRICT_OVERFLOW_P. */
14694 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14695 tree op1
, bool *strict_overflow_p
)
14697 if (TYPE_UNSIGNED (type
))
14702 case POINTER_PLUS_EXPR
:
14704 if (FLOAT_TYPE_P (type
))
14705 return (tree_expr_nonnegative_warnv_p (op0
,
14707 && tree_expr_nonnegative_warnv_p (op1
,
14708 strict_overflow_p
));
14710 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14711 both unsigned and at least 2 bits shorter than the result. */
14712 if (TREE_CODE (type
) == INTEGER_TYPE
14713 && TREE_CODE (op0
) == NOP_EXPR
14714 && TREE_CODE (op1
) == NOP_EXPR
)
14716 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14717 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14718 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14719 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
14721 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14722 TYPE_PRECISION (inner2
)) + 1;
14723 return prec
< TYPE_PRECISION (type
);
14729 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
14731 /* x * x is always non-negative for floating point x
14732 or without overflow. */
14733 if (operand_equal_p (op0
, op1
, 0)
14734 || (tree_expr_nonnegative_warnv_p (op0
, strict_overflow_p
)
14735 && tree_expr_nonnegative_warnv_p (op1
, strict_overflow_p
)))
14737 if (TYPE_OVERFLOW_UNDEFINED (type
))
14738 *strict_overflow_p
= true;
14743 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14744 both unsigned and their total bits is shorter than the result. */
14745 if (TREE_CODE (type
) == INTEGER_TYPE
14746 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14747 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14749 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14750 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14752 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14753 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14756 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14757 bool unsigned1
= TYPE_UNSIGNED (inner1
);
14759 if (TREE_CODE (op0
) == INTEGER_CST
)
14760 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14762 if (TREE_CODE (op1
) == INTEGER_CST
)
14763 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14765 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14766 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
14768 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14769 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
14770 : TYPE_PRECISION (inner0
);
14772 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14773 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
14774 : TYPE_PRECISION (inner1
);
14776 return precision0
+ precision1
< TYPE_PRECISION (type
);
14783 return (tree_expr_nonnegative_warnv_p (op0
,
14785 || tree_expr_nonnegative_warnv_p (op1
,
14786 strict_overflow_p
));
14792 case TRUNC_DIV_EXPR
:
14793 case CEIL_DIV_EXPR
:
14794 case FLOOR_DIV_EXPR
:
14795 case ROUND_DIV_EXPR
:
14796 return (tree_expr_nonnegative_warnv_p (op0
,
14798 && tree_expr_nonnegative_warnv_p (op1
,
14799 strict_overflow_p
));
14801 case TRUNC_MOD_EXPR
:
14802 case CEIL_MOD_EXPR
:
14803 case FLOOR_MOD_EXPR
:
14804 case ROUND_MOD_EXPR
:
14805 return tree_expr_nonnegative_warnv_p (op0
,
14806 strict_overflow_p
);
14808 return tree_simple_nonnegative_warnv_p (code
, type
);
14811 /* We don't know sign of `t', so be conservative and return false. */
14815 /* Return true if T is known to be non-negative. If the return
14816 value is based on the assumption that signed overflow is undefined,
14817 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14818 *STRICT_OVERFLOW_P. */
14821 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14823 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14826 switch (TREE_CODE (t
))
14829 return tree_int_cst_sgn (t
) >= 0;
14832 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14835 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
14838 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14840 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
14841 strict_overflow_p
));
14843 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14846 /* We don't know sign of `t', so be conservative and return false. */
14850 /* Return true if T is known to be non-negative. If the return
14851 value is based on the assumption that signed overflow is undefined,
14852 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14853 *STRICT_OVERFLOW_P. */
14856 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
14857 tree arg0
, tree arg1
, bool *strict_overflow_p
)
14859 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
14860 switch (DECL_FUNCTION_CODE (fndecl
))
14862 CASE_FLT_FN (BUILT_IN_ACOS
):
14863 CASE_FLT_FN (BUILT_IN_ACOSH
):
14864 CASE_FLT_FN (BUILT_IN_CABS
):
14865 CASE_FLT_FN (BUILT_IN_COSH
):
14866 CASE_FLT_FN (BUILT_IN_ERFC
):
14867 CASE_FLT_FN (BUILT_IN_EXP
):
14868 CASE_FLT_FN (BUILT_IN_EXP10
):
14869 CASE_FLT_FN (BUILT_IN_EXP2
):
14870 CASE_FLT_FN (BUILT_IN_FABS
):
14871 CASE_FLT_FN (BUILT_IN_FDIM
):
14872 CASE_FLT_FN (BUILT_IN_HYPOT
):
14873 CASE_FLT_FN (BUILT_IN_POW10
):
14874 CASE_INT_FN (BUILT_IN_FFS
):
14875 CASE_INT_FN (BUILT_IN_PARITY
):
14876 CASE_INT_FN (BUILT_IN_POPCOUNT
):
14877 CASE_INT_FN (BUILT_IN_CLZ
):
14878 CASE_INT_FN (BUILT_IN_CLRSB
):
14879 case BUILT_IN_BSWAP32
:
14880 case BUILT_IN_BSWAP64
:
14884 CASE_FLT_FN (BUILT_IN_SQRT
):
14885 /* sqrt(-0.0) is -0.0. */
14886 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
14888 return tree_expr_nonnegative_warnv_p (arg0
,
14889 strict_overflow_p
);
14891 CASE_FLT_FN (BUILT_IN_ASINH
):
14892 CASE_FLT_FN (BUILT_IN_ATAN
):
14893 CASE_FLT_FN (BUILT_IN_ATANH
):
14894 CASE_FLT_FN (BUILT_IN_CBRT
):
14895 CASE_FLT_FN (BUILT_IN_CEIL
):
14896 CASE_FLT_FN (BUILT_IN_ERF
):
14897 CASE_FLT_FN (BUILT_IN_EXPM1
):
14898 CASE_FLT_FN (BUILT_IN_FLOOR
):
14899 CASE_FLT_FN (BUILT_IN_FMOD
):
14900 CASE_FLT_FN (BUILT_IN_FREXP
):
14901 CASE_FLT_FN (BUILT_IN_ICEIL
):
14902 CASE_FLT_FN (BUILT_IN_IFLOOR
):
14903 CASE_FLT_FN (BUILT_IN_IRINT
):
14904 CASE_FLT_FN (BUILT_IN_IROUND
):
14905 CASE_FLT_FN (BUILT_IN_LCEIL
):
14906 CASE_FLT_FN (BUILT_IN_LDEXP
):
14907 CASE_FLT_FN (BUILT_IN_LFLOOR
):
14908 CASE_FLT_FN (BUILT_IN_LLCEIL
):
14909 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
14910 CASE_FLT_FN (BUILT_IN_LLRINT
):
14911 CASE_FLT_FN (BUILT_IN_LLROUND
):
14912 CASE_FLT_FN (BUILT_IN_LRINT
):
14913 CASE_FLT_FN (BUILT_IN_LROUND
):
14914 CASE_FLT_FN (BUILT_IN_MODF
):
14915 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
14916 CASE_FLT_FN (BUILT_IN_RINT
):
14917 CASE_FLT_FN (BUILT_IN_ROUND
):
14918 CASE_FLT_FN (BUILT_IN_SCALB
):
14919 CASE_FLT_FN (BUILT_IN_SCALBLN
):
14920 CASE_FLT_FN (BUILT_IN_SCALBN
):
14921 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
14922 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
14923 CASE_FLT_FN (BUILT_IN_SINH
):
14924 CASE_FLT_FN (BUILT_IN_TANH
):
14925 CASE_FLT_FN (BUILT_IN_TRUNC
):
14926 /* True if the 1st argument is nonnegative. */
14927 return tree_expr_nonnegative_warnv_p (arg0
,
14928 strict_overflow_p
);
14930 CASE_FLT_FN (BUILT_IN_FMAX
):
14931 /* True if the 1st OR 2nd arguments are nonnegative. */
14932 return (tree_expr_nonnegative_warnv_p (arg0
,
14934 || (tree_expr_nonnegative_warnv_p (arg1
,
14935 strict_overflow_p
)));
14937 CASE_FLT_FN (BUILT_IN_FMIN
):
14938 /* True if the 1st AND 2nd arguments are nonnegative. */
14939 return (tree_expr_nonnegative_warnv_p (arg0
,
14941 && (tree_expr_nonnegative_warnv_p (arg1
,
14942 strict_overflow_p
)));
14944 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
14945 /* True if the 2nd argument is nonnegative. */
14946 return tree_expr_nonnegative_warnv_p (arg1
,
14947 strict_overflow_p
);
14949 CASE_FLT_FN (BUILT_IN_POWI
):
14950 /* True if the 1st argument is nonnegative or the second
14951 argument is an even integer. */
14952 if (TREE_CODE (arg1
) == INTEGER_CST
14953 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14955 return tree_expr_nonnegative_warnv_p (arg0
,
14956 strict_overflow_p
);
14958 CASE_FLT_FN (BUILT_IN_POW
):
14959 /* True if the 1st argument is nonnegative or the second
14960 argument is an even integer valued real. */
14961 if (TREE_CODE (arg1
) == REAL_CST
)
14966 c
= TREE_REAL_CST (arg1
);
14967 n
= real_to_integer (&c
);
14970 REAL_VALUE_TYPE cint
;
14971 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14972 if (real_identical (&c
, &cint
))
14976 return tree_expr_nonnegative_warnv_p (arg0
,
14977 strict_overflow_p
);
14982 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
14986 /* Return true if T is known to be non-negative. If the return
14987 value is based on the assumption that signed overflow is undefined,
14988 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14989 *STRICT_OVERFLOW_P. */
14992 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14994 enum tree_code code
= TREE_CODE (t
);
14995 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15002 tree temp
= TARGET_EXPR_SLOT (t
);
15003 t
= TARGET_EXPR_INITIAL (t
);
15005 /* If the initializer is non-void, then it's a normal expression
15006 that will be assigned to the slot. */
15007 if (!VOID_TYPE_P (t
))
15008 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
15010 /* Otherwise, the initializer sets the slot in some way. One common
15011 way is an assignment statement at the end of the initializer. */
15014 if (TREE_CODE (t
) == BIND_EXPR
)
15015 t
= expr_last (BIND_EXPR_BODY (t
));
15016 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15017 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15018 t
= expr_last (TREE_OPERAND (t
, 0));
15019 else if (TREE_CODE (t
) == STATEMENT_LIST
)
15024 if (TREE_CODE (t
) == MODIFY_EXPR
15025 && TREE_OPERAND (t
, 0) == temp
)
15026 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15027 strict_overflow_p
);
15034 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15035 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15037 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15038 get_callee_fndecl (t
),
15041 strict_overflow_p
);
15043 case COMPOUND_EXPR
:
15045 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15046 strict_overflow_p
);
15048 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
15049 strict_overflow_p
);
15051 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
15052 strict_overflow_p
);
15055 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15059 /* We don't know sign of `t', so be conservative and return false. */
15063 /* Return true if T is known to be non-negative. If the return
15064 value is based on the assumption that signed overflow is undefined,
15065 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15066 *STRICT_OVERFLOW_P. */
15069 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15071 enum tree_code code
;
15072 if (t
== error_mark_node
)
15075 code
= TREE_CODE (t
);
15076 switch (TREE_CODE_CLASS (code
))
15079 case tcc_comparison
:
15080 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15082 TREE_OPERAND (t
, 0),
15083 TREE_OPERAND (t
, 1),
15084 strict_overflow_p
);
15087 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15089 TREE_OPERAND (t
, 0),
15090 strict_overflow_p
);
15093 case tcc_declaration
:
15094 case tcc_reference
:
15095 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15103 case TRUTH_AND_EXPR
:
15104 case TRUTH_OR_EXPR
:
15105 case TRUTH_XOR_EXPR
:
15106 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15108 TREE_OPERAND (t
, 0),
15109 TREE_OPERAND (t
, 1),
15110 strict_overflow_p
);
15111 case TRUTH_NOT_EXPR
:
15112 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15114 TREE_OPERAND (t
, 0),
15115 strict_overflow_p
);
15122 case WITH_SIZE_EXPR
:
15124 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15127 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
15131 /* Return true if `t' is known to be non-negative. Handle warnings
15132 about undefined signed overflow. */
15135 tree_expr_nonnegative_p (tree t
)
15137 bool ret
, strict_overflow_p
;
15139 strict_overflow_p
= false;
15140 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15141 if (strict_overflow_p
)
15142 fold_overflow_warning (("assuming signed overflow does not occur when "
15143 "determining that expression is always "
15145 WARN_STRICT_OVERFLOW_MISC
);
15150 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15151 For floating point we further ensure that T is not denormal.
15152 Similar logic is present in nonzero_address in rtlanal.h.
15154 If the return value is based on the assumption that signed overflow
15155 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15156 change *STRICT_OVERFLOW_P. */
15159 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15160 bool *strict_overflow_p
)
15165 return tree_expr_nonzero_warnv_p (op0
,
15166 strict_overflow_p
);
15170 tree inner_type
= TREE_TYPE (op0
);
15171 tree outer_type
= type
;
15173 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15174 && tree_expr_nonzero_warnv_p (op0
,
15175 strict_overflow_p
));
15179 case NON_LVALUE_EXPR
:
15180 return tree_expr_nonzero_warnv_p (op0
,
15181 strict_overflow_p
);
15190 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15191 For floating point we further ensure that T is not denormal.
15192 Similar logic is present in nonzero_address in rtlanal.h.
15194 If the return value is based on the assumption that signed overflow
15195 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15196 change *STRICT_OVERFLOW_P. */
15199 tree_binary_nonzero_warnv_p (enum tree_code code
,
15202 tree op1
, bool *strict_overflow_p
)
15204 bool sub_strict_overflow_p
;
15207 case POINTER_PLUS_EXPR
:
15209 if (TYPE_OVERFLOW_UNDEFINED (type
))
15211 /* With the presence of negative values it is hard
15212 to say something. */
15213 sub_strict_overflow_p
= false;
15214 if (!tree_expr_nonnegative_warnv_p (op0
,
15215 &sub_strict_overflow_p
)
15216 || !tree_expr_nonnegative_warnv_p (op1
,
15217 &sub_strict_overflow_p
))
15219 /* One of operands must be positive and the other non-negative. */
15220 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15221 overflows, on a twos-complement machine the sum of two
15222 nonnegative numbers can never be zero. */
15223 return (tree_expr_nonzero_warnv_p (op0
,
15225 || tree_expr_nonzero_warnv_p (op1
,
15226 strict_overflow_p
));
15231 if (TYPE_OVERFLOW_UNDEFINED (type
))
15233 if (tree_expr_nonzero_warnv_p (op0
,
15235 && tree_expr_nonzero_warnv_p (op1
,
15236 strict_overflow_p
))
15238 *strict_overflow_p
= true;
15245 sub_strict_overflow_p
= false;
15246 if (tree_expr_nonzero_warnv_p (op0
,
15247 &sub_strict_overflow_p
)
15248 && tree_expr_nonzero_warnv_p (op1
,
15249 &sub_strict_overflow_p
))
15251 if (sub_strict_overflow_p
)
15252 *strict_overflow_p
= true;
15257 sub_strict_overflow_p
= false;
15258 if (tree_expr_nonzero_warnv_p (op0
,
15259 &sub_strict_overflow_p
))
15261 if (sub_strict_overflow_p
)
15262 *strict_overflow_p
= true;
15264 /* When both operands are nonzero, then MAX must be too. */
15265 if (tree_expr_nonzero_warnv_p (op1
,
15266 strict_overflow_p
))
15269 /* MAX where operand 0 is positive is positive. */
15270 return tree_expr_nonnegative_warnv_p (op0
,
15271 strict_overflow_p
);
15273 /* MAX where operand 1 is positive is positive. */
15274 else if (tree_expr_nonzero_warnv_p (op1
,
15275 &sub_strict_overflow_p
)
15276 && tree_expr_nonnegative_warnv_p (op1
,
15277 &sub_strict_overflow_p
))
15279 if (sub_strict_overflow_p
)
15280 *strict_overflow_p
= true;
15286 return (tree_expr_nonzero_warnv_p (op1
,
15288 || tree_expr_nonzero_warnv_p (op0
,
15289 strict_overflow_p
));
15298 /* Return true when T is an address and is known to be nonzero.
15299 For floating point we further ensure that T is not denormal.
15300 Similar logic is present in nonzero_address in rtlanal.h.
15302 If the return value is based on the assumption that signed overflow
15303 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15304 change *STRICT_OVERFLOW_P. */
15307 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15309 bool sub_strict_overflow_p
;
15310 switch (TREE_CODE (t
))
15313 return !integer_zerop (t
);
15317 tree base
= TREE_OPERAND (t
, 0);
15319 if (!DECL_P (base
))
15320 base
= get_base_address (base
);
15325 /* For objects in symbol table check if we know they are non-zero.
15326 Don't do anything for variables and functions before symtab is built;
15327 it is quite possible that they will be declared weak later. */
15328 if (DECL_P (base
) && decl_in_symtab_p (base
))
15330 struct symtab_node
*symbol
;
15332 symbol
= symtab_node::get_create (base
);
15334 return symbol
->nonzero_address ();
15339 /* Function local objects are never NULL. */
15341 && (DECL_CONTEXT (base
)
15342 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15343 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
15346 /* Constants are never weak. */
15347 if (CONSTANT_CLASS_P (base
))
15354 sub_strict_overflow_p
= false;
15355 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15356 &sub_strict_overflow_p
)
15357 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15358 &sub_strict_overflow_p
))
15360 if (sub_strict_overflow_p
)
15361 *strict_overflow_p
= true;
15372 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15373 attempt to fold the expression to a constant without modifying TYPE,
15376 If the expression could be simplified to a constant, then return
15377 the constant. If the expression would not be simplified to a
15378 constant, then return NULL_TREE. */
15381 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15383 tree tem
= fold_binary (code
, type
, op0
, op1
);
15384 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15387 /* Given the components of a unary expression CODE, TYPE and OP0,
15388 attempt to fold the expression to a constant without modifying
15391 If the expression could be simplified to a constant, then return
15392 the constant. If the expression would not be simplified to a
15393 constant, then return NULL_TREE. */
15396 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15398 tree tem
= fold_unary (code
, type
, op0
);
15399 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15402 /* If EXP represents referencing an element in a constant string
15403 (either via pointer arithmetic or array indexing), return the
15404 tree representing the value accessed, otherwise return NULL. */
15407 fold_read_from_constant_string (tree exp
)
15409 if ((TREE_CODE (exp
) == INDIRECT_REF
15410 || TREE_CODE (exp
) == ARRAY_REF
)
15411 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15413 tree exp1
= TREE_OPERAND (exp
, 0);
15416 location_t loc
= EXPR_LOCATION (exp
);
15418 if (TREE_CODE (exp
) == INDIRECT_REF
)
15419 string
= string_constant (exp1
, &index
);
15422 tree low_bound
= array_ref_low_bound (exp
);
15423 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15425 /* Optimize the special-case of a zero lower bound.
15427 We convert the low_bound to sizetype to avoid some problems
15428 with constant folding. (E.g. suppose the lower bound is 1,
15429 and its mode is QI. Without the conversion,l (ARRAY
15430 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15431 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15432 if (! integer_zerop (low_bound
))
15433 index
= size_diffop_loc (loc
, index
,
15434 fold_convert_loc (loc
, sizetype
, low_bound
));
15440 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15441 && TREE_CODE (string
) == STRING_CST
15442 && TREE_CODE (index
) == INTEGER_CST
15443 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15444 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15446 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15447 return build_int_cst_type (TREE_TYPE (exp
),
15448 (TREE_STRING_POINTER (string
)
15449 [TREE_INT_CST_LOW (index
)]));
15454 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15455 an integer constant, real, or fixed-point constant.
15457 TYPE is the type of the result. */
15460 fold_negate_const (tree arg0
, tree type
)
15462 tree t
= NULL_TREE
;
15464 switch (TREE_CODE (arg0
))
15469 wide_int val
= wi::neg (arg0
, &overflow
);
15470 t
= force_fit_type (type
, val
, 1,
15471 (overflow
| TREE_OVERFLOW (arg0
))
15472 && !TYPE_UNSIGNED (type
));
15477 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15482 FIXED_VALUE_TYPE f
;
15483 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15484 &(TREE_FIXED_CST (arg0
)), NULL
,
15485 TYPE_SATURATING (type
));
15486 t
= build_fixed (type
, f
);
15487 /* Propagate overflow flags. */
15488 if (overflow_p
| TREE_OVERFLOW (arg0
))
15489 TREE_OVERFLOW (t
) = 1;
15494 gcc_unreachable ();
15500 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15501 an integer constant or real constant.
15503 TYPE is the type of the result. */
15506 fold_abs_const (tree arg0
, tree type
)
15508 tree t
= NULL_TREE
;
15510 switch (TREE_CODE (arg0
))
15514 /* If the value is unsigned or non-negative, then the absolute value
15515 is the same as the ordinary value. */
15516 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
15519 /* If the value is negative, then the absolute value is
15524 wide_int val
= wi::neg (arg0
, &overflow
);
15525 t
= force_fit_type (type
, val
, -1,
15526 overflow
| TREE_OVERFLOW (arg0
));
15532 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15533 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15539 gcc_unreachable ();
15545 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15546 constant. TYPE is the type of the result. */
15549 fold_not_const (const_tree arg0
, tree type
)
15551 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15553 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
15556 /* Given CODE, a relational operator, the target type, TYPE and two
15557 constant operands OP0 and OP1, return the result of the
15558 relational operation. If the result is not a compile time
15559 constant, then return NULL_TREE. */
15562 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15564 int result
, invert
;
15566 /* From here on, the only cases we handle are when the result is
15567 known to be a constant. */
15569 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15571 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15572 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15574 /* Handle the cases where either operand is a NaN. */
15575 if (real_isnan (c0
) || real_isnan (c1
))
15585 case UNORDERED_EXPR
:
15599 if (flag_trapping_math
)
15605 gcc_unreachable ();
15608 return constant_boolean_node (result
, type
);
15611 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15614 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15616 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15617 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15618 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15621 /* Handle equality/inequality of complex constants. */
15622 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15624 tree rcond
= fold_relational_const (code
, type
,
15625 TREE_REALPART (op0
),
15626 TREE_REALPART (op1
));
15627 tree icond
= fold_relational_const (code
, type
,
15628 TREE_IMAGPART (op0
),
15629 TREE_IMAGPART (op1
));
15630 if (code
== EQ_EXPR
)
15631 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15632 else if (code
== NE_EXPR
)
15633 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15638 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
15640 unsigned count
= VECTOR_CST_NELTS (op0
);
15641 tree
*elts
= XALLOCAVEC (tree
, count
);
15642 gcc_assert (VECTOR_CST_NELTS (op1
) == count
15643 && TYPE_VECTOR_SUBPARTS (type
) == count
);
15645 for (unsigned i
= 0; i
< count
; i
++)
15647 tree elem_type
= TREE_TYPE (type
);
15648 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15649 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15651 tree tem
= fold_relational_const (code
, elem_type
,
15654 if (tem
== NULL_TREE
)
15657 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
15660 return build_vector (type
, elts
);
15663 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15665 To compute GT, swap the arguments and do LT.
15666 To compute GE, do LT and invert the result.
15667 To compute LE, swap the arguments, do LT and invert the result.
15668 To compute NE, do EQ and invert the result.
15670 Therefore, the code below must handle only EQ and LT. */
15672 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15677 code
= swap_tree_comparison (code
);
15680 /* Note that it is safe to invert for real values here because we
15681 have already handled the one case that it matters. */
15684 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15687 code
= invert_tree_comparison (code
, false);
15690 /* Compute a result for LT or EQ if args permit;
15691 Otherwise return T. */
15692 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15694 if (code
== EQ_EXPR
)
15695 result
= tree_int_cst_equal (op0
, op1
);
15697 result
= tree_int_cst_lt (op0
, op1
);
15704 return constant_boolean_node (result
, type
);
15707 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15708 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15712 fold_build_cleanup_point_expr (tree type
, tree expr
)
15714 /* If the expression does not have side effects then we don't have to wrap
15715 it with a cleanup point expression. */
15716 if (!TREE_SIDE_EFFECTS (expr
))
15719 /* If the expression is a return, check to see if the expression inside the
15720 return has no side effects or the right hand side of the modify expression
15721 inside the return. If either don't have side effects set we don't need to
15722 wrap the expression in a cleanup point expression. Note we don't check the
15723 left hand side of the modify because it should always be a return decl. */
15724 if (TREE_CODE (expr
) == RETURN_EXPR
)
15726 tree op
= TREE_OPERAND (expr
, 0);
15727 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15729 op
= TREE_OPERAND (op
, 1);
15730 if (!TREE_SIDE_EFFECTS (op
))
15734 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
15737 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15738 of an indirection through OP0, or NULL_TREE if no simplification is
15742 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15748 subtype
= TREE_TYPE (sub
);
15749 if (!POINTER_TYPE_P (subtype
))
15752 if (TREE_CODE (sub
) == ADDR_EXPR
)
15754 tree op
= TREE_OPERAND (sub
, 0);
15755 tree optype
= TREE_TYPE (op
);
15756 /* *&CONST_DECL -> to the value of the const decl. */
15757 if (TREE_CODE (op
) == CONST_DECL
)
15758 return DECL_INITIAL (op
);
15759 /* *&p => p; make sure to handle *&"str"[cst] here. */
15760 if (type
== optype
)
15762 tree fop
= fold_read_from_constant_string (op
);
15768 /* *(foo *)&fooarray => fooarray[0] */
15769 else if (TREE_CODE (optype
) == ARRAY_TYPE
15770 && type
== TREE_TYPE (optype
)
15771 && (!in_gimple_form
15772 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15774 tree type_domain
= TYPE_DOMAIN (optype
);
15775 tree min_val
= size_zero_node
;
15776 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15777 min_val
= TYPE_MIN_VALUE (type_domain
);
15779 && TREE_CODE (min_val
) != INTEGER_CST
)
15781 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15782 NULL_TREE
, NULL_TREE
);
15784 /* *(foo *)&complexfoo => __real__ complexfoo */
15785 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15786 && type
== TREE_TYPE (optype
))
15787 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15788 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15789 else if (TREE_CODE (optype
) == VECTOR_TYPE
15790 && type
== TREE_TYPE (optype
))
15792 tree part_width
= TYPE_SIZE (type
);
15793 tree index
= bitsize_int (0);
15794 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
15798 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15799 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
15801 tree op00
= TREE_OPERAND (sub
, 0);
15802 tree op01
= TREE_OPERAND (sub
, 1);
15805 if (TREE_CODE (op00
) == ADDR_EXPR
)
15808 op00
= TREE_OPERAND (op00
, 0);
15809 op00type
= TREE_TYPE (op00
);
15811 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15812 if (TREE_CODE (op00type
) == VECTOR_TYPE
15813 && type
== TREE_TYPE (op00type
))
15815 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
15816 tree part_width
= TYPE_SIZE (type
);
15817 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
15818 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
15819 tree index
= bitsize_int (indexi
);
15821 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
15822 return fold_build3_loc (loc
,
15823 BIT_FIELD_REF
, type
, op00
,
15824 part_width
, index
);
15827 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15828 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15829 && type
== TREE_TYPE (op00type
))
15831 tree size
= TYPE_SIZE_UNIT (type
);
15832 if (tree_int_cst_equal (size
, op01
))
15833 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15835 /* ((foo *)&fooarray)[1] => fooarray[1] */
15836 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15837 && type
== TREE_TYPE (op00type
))
15839 tree type_domain
= TYPE_DOMAIN (op00type
);
15840 tree min_val
= size_zero_node
;
15841 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15842 min_val
= TYPE_MIN_VALUE (type_domain
);
15843 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
15844 TYPE_SIZE_UNIT (type
));
15845 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
15846 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15847 NULL_TREE
, NULL_TREE
);
15852 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15853 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15854 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15855 && (!in_gimple_form
15856 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15859 tree min_val
= size_zero_node
;
15860 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15861 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15862 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15863 min_val
= TYPE_MIN_VALUE (type_domain
);
15865 && TREE_CODE (min_val
) != INTEGER_CST
)
15867 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
15874 /* Builds an expression for an indirection through T, simplifying some
15878 build_fold_indirect_ref_loc (location_t loc
, tree t
)
15880 tree type
= TREE_TYPE (TREE_TYPE (t
));
15881 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
15886 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
15889 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15892 fold_indirect_ref_loc (location_t loc
, tree t
)
15894 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
15902 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15903 whose result is ignored. The type of the returned tree need not be
15904 the same as the original expression. */
15907 fold_ignored_result (tree t
)
15909 if (!TREE_SIDE_EFFECTS (t
))
15910 return integer_zero_node
;
15913 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15916 t
= TREE_OPERAND (t
, 0);
15920 case tcc_comparison
:
15921 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15922 t
= TREE_OPERAND (t
, 0);
15923 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15924 t
= TREE_OPERAND (t
, 1);
15929 case tcc_expression
:
15930 switch (TREE_CODE (t
))
15932 case COMPOUND_EXPR
:
15933 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15935 t
= TREE_OPERAND (t
, 0);
15939 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15940 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15942 t
= TREE_OPERAND (t
, 0);
15955 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15958 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
15960 tree div
= NULL_TREE
;
15965 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15966 have to do anything. Only do this when we are not given a const,
15967 because in that case, this check is more expensive than just
15969 if (TREE_CODE (value
) != INTEGER_CST
)
15971 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15973 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15977 /* If divisor is a power of two, simplify this to bit manipulation. */
15978 if (divisor
== (divisor
& -divisor
))
15980 if (TREE_CODE (value
) == INTEGER_CST
)
15982 wide_int val
= value
;
15985 if ((val
& (divisor
- 1)) == 0)
15988 overflow_p
= TREE_OVERFLOW (value
);
15989 val
&= ~(divisor
- 1);
15994 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
16000 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16001 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16002 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16003 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16009 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16010 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16011 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16017 /* Likewise, but round down. */
16020 round_down_loc (location_t loc
, tree value
, int divisor
)
16022 tree div
= NULL_TREE
;
16024 gcc_assert (divisor
> 0);
16028 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16029 have to do anything. Only do this when we are not given a const,
16030 because in that case, this check is more expensive than just
16032 if (TREE_CODE (value
) != INTEGER_CST
)
16034 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16036 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16040 /* If divisor is a power of two, simplify this to bit manipulation. */
16041 if (divisor
== (divisor
& -divisor
))
16045 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16046 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16051 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16052 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16053 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16059 /* Returns the pointer to the base of the object addressed by EXP and
16060 extracts the information about the offset of the access, storing it
16061 to PBITPOS and POFFSET. */
16064 split_address_to_core_and_offset (tree exp
,
16065 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16069 int unsignedp
, volatilep
;
16070 HOST_WIDE_INT bitsize
;
16071 location_t loc
= EXPR_LOCATION (exp
);
16073 if (TREE_CODE (exp
) == ADDR_EXPR
)
16075 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16076 poffset
, &mode
, &unsignedp
, &volatilep
,
16078 core
= build_fold_addr_expr_loc (loc
, core
);
16084 *poffset
= NULL_TREE
;
16090 /* Returns true if addresses of E1 and E2 differ by a constant, false
16091 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16094 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16097 HOST_WIDE_INT bitpos1
, bitpos2
;
16098 tree toffset1
, toffset2
, tdiff
, type
;
16100 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16101 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16103 if (bitpos1
% BITS_PER_UNIT
!= 0
16104 || bitpos2
% BITS_PER_UNIT
!= 0
16105 || !operand_equal_p (core1
, core2
, 0))
16108 if (toffset1
&& toffset2
)
16110 type
= TREE_TYPE (toffset1
);
16111 if (type
!= TREE_TYPE (toffset2
))
16112 toffset2
= fold_convert (type
, toffset2
);
16114 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16115 if (!cst_and_fits_in_hwi (tdiff
))
16118 *diff
= int_cst_value (tdiff
);
16120 else if (toffset1
|| toffset2
)
16122 /* If only one of the offsets is non-constant, the difference cannot
16129 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16133 /* Simplify the floating point expression EXP when the sign of the
16134 result is not significant. Return NULL_TREE if no simplification
16138 fold_strip_sign_ops (tree exp
)
16141 location_t loc
= EXPR_LOCATION (exp
);
16143 switch (TREE_CODE (exp
))
16147 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16148 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16152 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp
)))
16154 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16155 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16156 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16157 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16158 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16159 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16162 case COMPOUND_EXPR
:
16163 arg0
= TREE_OPERAND (exp
, 0);
16164 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16166 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16170 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16171 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16173 return fold_build3_loc (loc
,
16174 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16175 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16176 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16181 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16184 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16185 /* Strip copysign function call, return the 1st argument. */
16186 arg0
= CALL_EXPR_ARG (exp
, 0);
16187 arg1
= CALL_EXPR_ARG (exp
, 1);
16188 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16191 /* Strip sign ops from the argument of "odd" math functions. */
16192 if (negate_mathfn_p (fcode
))
16194 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16196 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);